Given data in the form (x, y) such that y = A * sin(B * x + C) + D, identify A, B, C, and D using TensorFlow.
I have written the following code to do so, but unfortunately it does not learn. Note that the goal here is not to predict the sine curve accurately, but to identify the parameters themselves. Bonus points if it is possible to change the function's form to y = A * X_2 * sin(B * X_1 + C) + D.
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt

# Synthetic data with known ground-truth parameters A, B, C, D
x = np.linspace(0, 100, 1000)
A = np.random.normal(1)
B = np.random.normal(.5)
C = np.random.normal(1)
D = np.random.normal(1)
y = A*np.sin((B*x) + C) + D

x = tf.constant([x.astype('float32')])
y = tf.constant([y.astype('float32')])
class Addition(tf.Module):
    """Learns an affine map b_1 * x + b_2."""
    def __init__(self, inputs, name=None):
        super().__init__(name=name)
        self.b_1 = tf.Variable(tf.random.normal([inputs]), name='b1')
        self.b_2 = tf.Variable(tf.random.normal([inputs]), name='b2')

    def __call__(self, x):
        return tf.math.multiply(x, self.b_1) + self.b_2
class Sinusoid(tf.Module):
    """Applies sin element-wise; has no trainable variables."""
    def __init__(self, inputs, name=None):
        super().__init__(name=name)

    def __call__(self, x):
        return tf.math.sin(x)
class Sine_Model(tf.Module):
    """Composes add_2(sin(add_1(x))), i.e. add_2.b_1 * sin(add_1.b_1 * x + add_1.b_2) + add_2.b_2,
    so the learned variables should correspond to A, B, C, and D respectively."""
    def __init__(self, name=None):
        super().__init__(name=name)
        self.add_1 = Addition(inputs=1)
        self.sin_1 = Sinusoid(inputs=1)
        self.add_2 = Addition(inputs=1)

    def __call__(self, x):
        x = self.add_1(x)
        x = self.sin_1(x)
        x = self.add_2(x)
        return x
model = Sine_Model(name='sine')
loss_object = tf.keras.losses.MeanSquaredError()
optimizer = tf.keras.optimizers.Adam(learning_rate=.1)
train_loss = tf.keras.metrics.Mean(name='train_loss')
@tf.function
def train_step(x, y):
    with tf.GradientTape() as tape:
        predictions = model(x)
        loss = loss_object(y, predictions)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))

    train_loss(loss)
EPOCHS = 200

for epoch in range(EPOCHS):
    # Reset the metrics at the start of the next epoch
    train_loss.reset_states()

    train_step(x, y)

    template = 'Epoch {}, Loss: {}'
    # print(template.format(epoch + 1, train_loss.result()))
y_predicted = model(x)
plt.scatter(x.numpy()[0], y_predicted.numpy()[0])
plt.scatter(x.numpy()[0], y.numpy()[0], c='r')
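For completeness, this is how I would read the identified parameters back out of the trained modules, given the composition in Sine_Model above (with the caveat that the fit is only identified up to sine's symmetries, e.g. (A, C) and (-A, C + pi) give the same curve):

    # Map the trained variables back to the parameters of A*sin(B*x + C) + D.
    A_hat = model.add_2.b_1.numpy()[0]
    B_hat = model.add_1.b_1.numpy()[0]
    C_hat = model.add_1.b_2.numpy()[0]
    D_hat = model.add_2.b_2.numpy()[0]
    print(f'A={A_hat:.3f}, B={B_hat:.3f}, C={C_hat:.3f}, D={D_hat:.3f}')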
I did see an answer to this question using scipy here. But I would like to see whether it is possible to do this using TensorFlow specifically, since I am interested in modularity and would like to be able to solve the bonus problem noted above (y = A * X_2 * sin(B * X_1 + C) + D).
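To show what I mean by modularity, this is roughly the kind of thing I have in mind for the bonus form (an untested sketch; Bonus_Model is just a placeholder name), where each parameter is an explicit variable and the model takes two inputs:

    class Bonus_Model(tf.Module):
        # Sketch of the bonus form y = A * X_2 * sin(B * X_1 + C) + D,
        # with each parameter held as an explicit scalar variable.
        def __init__(self, name=None):
            super().__init__(name=name)
            self.A = tf.Variable(tf.random.normal([1]), name='A')
            self.B = tf.Variable(tf.random.normal([1]), name='B')
            self.C = tf.Variable(tf.random.normal([1]), name='C')
            self.D = tf.Variable(tf.random.normal([1]), name='D')

        def __call__(self, x_1, x_2):
            return self.A * x_2 * tf.math.sin(self.B * x_1 + self.C) + self.D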
Thanks!