I have ended up writing some rather specific code because I couldn't find what I wanted on the web, so here's my problem:
I have coded a NN that takes an array of a specific length and should give me a single value as output:
# Build the network: embedding -> GRU -> single tanh-squashed output in [-1, 1].
# NOTE(review): assumes `input_len_array` is defined in the surrounding scope.
model = tf.keras.Sequential([
    layers.Embedding(input_dim=int(input_len_array),
                     output_dim=8 * int(input_len_array)),
    layers.GRU(32 * int(input_len_array), return_sequences=True),
    # Last layer...
    layers.Dense(1, activation='tanh'),
])
After that I create a Custom_loss function:
def custom_loss(x_, y_):
    """Build a Keras loss closure from a backtested Sortino ratio.

    BUG FIXES:
      * the original line was missing the ``def`` keyword (syntax error);
      * an unused ``tf.Session()`` was created and never closed.

    NOTE(review): this function is the source of the reported
    ``ValueError: An operation has `None` for gradient``.  The Sortino value
    is computed ONCE, outside the TensorFlow graph, by ``self.__backtest``
    (which internally calls ``model.predict`` / ``sess.run``), so the value
    returned by ``loss`` is a constant tensor with no dependency on
    ``y_pred``.  Keras therefore has no differentiable path from the loss to
    the model weights.  To actually train on this objective, the Sortino
    ratio must be expressed with differentiable TensorFlow ops applied to
    ``y_true`` / ``y_pred`` inside ``loss`` itself.
    """
    # Evaluated at compile time only — NOT per batch, and not differentiable.
    Sortino = self.__backtest(x_, y_)

    def loss(y_true, y_pred):
        print('Sortino: ', Sortino)
        # The Optimizer will MAXIMIZE the Sortino so we compute -Sortino
        return tf.convert_to_tensor(-Sortino)

    return loss
After that I compile my model and I give it the whole batch of values in the tensor X and Y:
# The loss closure is evaluated once here, at compile time (see custom_loss):
# the whole batch (x, y) is baked into a constant — this is why no gradient
# can flow during fit().  TODO confirm x and y are the full training tensors.
self.model.compile(optimizer='adam', loss=custom_loss(x, y))
Inside the Custom loss I call the function self.__backtest which is defined as below:
def __backtest(self, x_: tf.Tensor, y_r: tf.Tensor, timesteps=40):
    """Run the model over every row of ``x_`` and return a Sortino-like score.

    Args:
        x_: 2-D input tensor; each row is fed to ``self.model.predict``
            after L2-normalisation.
        y_r: 1-D tensor of per-row returns, multiplied with the prediction.
        timesteps: unused here — kept for interface compatibility.

    Returns:
        mean/std of the prediction-weighted returns, or -10 when fewer than
        10 samples were produced or the std is zero.

    BUG FIXES (vs. the posted version):
      * ``scaled_output`` was read but never assigned (the prediction was
        stored in ``output_of_nn``) — NameError on the first iteration;
      * ``capital_evolution`` was appended to but never initialised —
        IndexError on its first ``[-1]`` access;
      * ``sess.run(x_)`` / ``sess.run(y_r)`` were re-executed on every loop
        iteration (each call re-runs the graph) — now evaluated once;
      * the bare ``except:`` could never fire: numpy's division by a zero
        std yields nan/inf with a warning, it does not raise — replaced by
        an explicit zero-std check.
    """
    my_list = []
    # Start at 1.0 capital; presumably a normalised equity curve — TODO confirm.
    # (Computed but never returned, kept for parity with the original.)
    capital_evolution = [1.0]
    sess = tf.Session()
    # Defining the Encoder
    # enc = OneHotEncoder(handle_unknown='ignore')
    # X = [[-1, 0], [0, 1], [1, 2]]
    # enc.fit(X)
    # Evaluate the tensors ONCE; x_vals[i, :] is a numpy.ndarray.
    x_vals = sess.run(x_)
    y_vals = sess.run(y_r)
    print('in backest: int(x_.get_shape())', x_.get_shape())
    for i in range(x_vals.shape[0]):
        row = x_vals[i, :]
        # L2-normalise the row before feeding it to the network.
        scaled_output = self.model.predict(row / np.linalg.norm(row))
        # categorical_output = tf.keras.utils.to_categorical(scaled_output)
        my_list.append(scaled_output * y_vals[i])
        if i < 10:
            print('First 10 scaled output: ', scaled_output)
        if i > 0:
            # Compound the running capital by this step's return.
            capital_evolution.append(capital_evolution[-1] * (my_list[-1] + 1))
    my_array = np.array(my_list)
    if len(my_array) < 10:
        return -10
    std = my_array.std()
    if std == 0:
        # Degenerate case: constant returns — penalise instead of dividing.
        return -10
    return my_array.mean() / std
The computer isn't able to run the code and gives me this error:
ValueError: An operation has `None` for gradient. Please make sure that all of your ops have a gradient defined (i.e. are differentiable). Common ops without gradient: K.argmax, K.round, K.eval.
I would be more than grateful if someone could give the solution!! MANY THANKS!!