
Basically, I want to compute precision@k and recall@k for each epoch of a Keras neural network. After a long search, I found these custom metric functions:

from keras import backend as K

def precision(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    precision = true_positives / (predicted_positives + K.epsilon())
    return precision

def recall(y_true, y_pred):
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    recall = true_positives / (possible_positives + K.epsilon())
    return recall

# this is for model compilation
model.compile(optimizer=RMSprop(lr=learning_rate), loss='binary_crossentropy', metrics=[precision, recall])

for epoch in xrange(epochs):
    t1 = time()
    user_input, item_input, labels = get_train_instances(train, num_negatives)

    # this is for training
    hist = model.fit([np.array(user_input), np.array(item_input)], np.array(labels),
                     batch_size=batch_size, nb_epoch=1, verbose=0, shuffle=True)
    t2 = time()
    loss, precision, recall = hist.history['loss'][0], hist.history['precision'][0], hist.history['recall'][0]
    print('Iteration %d [%.1f s]: loss = %.4f, precision = %.4f, recall = %.4f [%.1f s]'
          % (epoch, t2 - t1, loss, precision, recall, time() - t2))

This is my output and it seems wrong: [screenshot of the per-iteration loss/precision/recall printout]

I hope this makes clear what I am asking for.
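To make it concrete, this is roughly what I mean by precision@k / recall@k for a single user, as a plain NumPy sketch (the helper precision_recall_at_k and the toy arrays are just for illustration, not part of my model):

import numpy as np

def precision_recall_at_k(y_true, y_score, k=10):
    # y_true: binary vector marking which items are relevant for one user
    # y_score: the model's predicted scores for the same items
    top_k = np.argsort(y_score)[::-1][:k]      # indices of the k highest-scoring items
    hits = y_true[top_k].sum()                 # relevant items that made it into the top k
    precision_at_k = hits / float(k)
    recall_at_k = hits / max(float(y_true.sum()), 1.0)
    return precision_at_k, recall_at_k

# toy usage: 3 relevant items out of 10, random scores
y_true = np.array([0, 1, 0, 0, 1, 0, 0, 0, 1, 0])
y_score = np.random.rand(10)
p, r = precision_recall_at_k(y_true, y_score, k=5)
print('precision@5 = %.4f, recall@5 = %.4f' % (p, r))

I want something like this evaluated at the end of every epoch.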

  • I think this link will be useful for you: https://datascience.stackexchange.com/questions/13746/how-to-define-a-custom-performance-metric-in-keras – user3591356 Apr 21 '18 at 15:01

1 Answer


Your best bet is to use the classification_report capability in scikit-learn. It's a general classification evaluation function, but I use it with Keras regularly.

from sklearn.preprocessing import LabelBinarizer
from sklearn.metrics import classification_report

from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization

from keras.datasets import cifar10

# Load CIFAR-10 dataset
(trainX, trainY), (testX, testY) = cifar10.load_data()
trainX = trainX / 255.0
testX = testX / 255.0

# Transform labels from int to one-hot vectors
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)     # reuse the training encoding; no need to re-fit on test labels
n_classes = len(lb.classes_)    # needed by the final Dense layer below

# CNN architecture with Keras
model = Sequential()
model.add(Conv2D(input_shape=trainX[0,:,:,:].shape, filters=32, 
                 use_bias=True, kernel_size=(3,3)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Conv2D(filters=96, use_bias=False, kernel_size=(5,5), strides=2))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Conv2D(filters=96, use_bias=False, kernel_size=(5,5), strides=2))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(BatchNormalization())
model.add(Dense(256))
model.add(Activation('relu'))
model.add(Dropout(0.4))
model.add(Dense(n_classes, activation="softmax"))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the model
n_epochs = 25
batch_size = 256
callbacks_list = None
H = model.fit(trainX, trainY, validation_data=(testX, testY), 
              epochs=n_epochs, batch_size=batch_size, callbacks=callbacks_list)
print('Done!!!')

# Evaluate TEST model class prediction accuracy
print("[INFO] Evaluating network...")
predictions = model.predict(testX, batch_size=batch_size)
target_names = [str(x) for x in lb.classes_]
print(classification_report(testY.argmax(axis=1),
                            predictions.argmax(axis=1),
                            target_names=target_names))

# Evaluate TRAIN model class prediction accuracy
print("[INFO] Evaluating network...")
trainPreds = model.predict(trainX, batch_size=batch_size)
target_names = [str(x) for x in lb.classes_]
print(classification_report(trainY.argmax(axis=1),
                            trainPreds.argmax(axis=1),
                            target_names=target_names))

The calls to classification_report will give you precision and recall statistics for each class, which appear as follows:

[INFO] Evaluating network...
         precision    recall  f1-score   support

      0       0.78      0.83      0.80      1000
      1       0.85      0.92      0.88      1000
      2       0.77      0.58      0.66      1000
      3       0.66      0.54      0.59      1000
      4       0.72      0.72      0.72      1000
      5       0.72      0.63      0.67      1000
      6       0.69      0.91      0.78      1000
      7       0.81      0.83      0.82      1000
      8       0.85      0.87      0.86      1000
      9       0.82      0.86      0.84      1000
      avg     0.77      0.77      0.76     10000  
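If you want the same numbers programmatically (e.g., to log per-class precision and recall each epoch) rather than as a printed table, classification_report can also return a nested dict via output_dict=True (available in scikit-learn >= 0.20); a minimal sketch:

# Same report as a nested dict instead of a string (scikit-learn >= 0.20)
report = classification_report(testY.argmax(axis=1),
                               predictions.argmax(axis=1),
                               target_names=target_names,
                               output_dict=True)
print(report['0']['precision'], report['0']['recall'])  # precision/recall for class '0'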
T3am5hark
  • Thank you for your prompt response, but I need to compute precision and recall on the top-k items. Could you please give me an idea about that? – Muhammad shahrukh khan Apr 09 '18 at 18:18
  • I would recommend clarifying in your question exactly what you're looking for. It is not clear to me what you're asking for. – T3am5hark Apr 09 '18 at 18:58