Basically, I want to compute top-k precision@n and recall@n for each epoch of a Keras neural network. After a long search, I found custom metric functions, which are:
from keras import backend as K

def precision(y_true, y_pred):
    # fraction of predicted positives that are actually positive
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    precision = true_positives / (predicted_positives + K.epsilon())
    return precision

def recall(y_true, y_pred):
    # fraction of actual positives that are predicted positive
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    recall = true_positives / (possible_positives + K.epsilon())
    return recall
# model compilation
model.compile(optimizer=RMSprop(lr=learning_rate), loss='binary_crossentropy',
              metrics=[precision, recall])
# training loop
for epoch in xrange(epochs):
    t1 = time()
    user_input, item_input, labels = get_train_instances(train, num_negatives)

    # training
    hist = model.fit([np.array(user_input), np.array(item_input)], np.array(labels),
                     batch_size=batch_size, nb_epoch=1, verbose=0, shuffle=True)
    t2 = time()  # timestamp after fitting, used in the print below

    loss, precision, recall = hist.history['loss'][0], hist.history['precision'][0], hist.history['recall'][0]
    print('Iteration %d [%.1f s]: loss = %.4f, precision = %.4f, recall = %.4f [%.1f s]'
          % (epoch, t2 - t1, loss, precision, recall, time() - t2))
This is my output, and it seems wrong (screenshot of the per-epoch log omitted).
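What I am ultimately after is a per-epoch evaluation along these lines, run after each model.fit call. This is only a rough sketch; test_candidates is a hypothetical dict mapping each user to (candidate_items, positive_items) that I made up for illustration:

def evaluate_at_n(model, test_candidates, n=10):
    precisions, recalls = [], []
    for user, (candidates, positives) in test_candidates.items():
        users = np.full(len(candidates), user, dtype='int32')
        scores = model.predict([users, np.array(candidates)], verbose=0).flatten()
        # rank candidate items by predicted score, best first
        ranked = [item for _, item in sorted(zip(scores, candidates), reverse=True)]
        p, r = precision_recall_at_n(ranked, positives, n)
        precisions.append(p)
        recalls.append(r)
    return np.mean(precisions), np.mean(recalls)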
I hope this makes it clear what I am trying to achieve. How can I compute precision@n and recall@n for each epoch in Keras?