I am using a TensorFlow Estimator model for multi-label text classification. Training and prediction work, but now I need to save the model and serve it for prediction, and I cannot figure out the correct way to export it or the correct path to load it from.
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
    {'descriptions': np.array(test_descriptions).astype(np.str)},
    test_encoded.astype(np.int32),
    shuffle=False)
##"Code for saving the model"
import pickle
model.save('my_model.h5')
with open('tokenizer.pickle', 'wb') as handle:
pickle.dump(tokenizer, handle, protocol=pickle.HIGHEST_PROTOCOL)
##Saving Model part ends
estimator.evaluate(input_fn=eval_input_fn)
# Indonesian movie description, roughly: "An examination of our food choices
# and the food we put into our bodies. Based on the memoir by Jonathan Safran Foer."
raw_text = ("Pemeriksaan pilihan makanan kita dan makanan yang kita masukkan "
            "ke dalam tubuh kita. Berdasarkan memoar Jonathan Safran Foer.")
predict_input_fn = tf.estimator.inputs.numpy_input_fn(
    {'descriptions': np.array([raw_text]).astype(np.str)},
    shuffle=False)
prediction = estimator.predict(predict_input_fn)
for movie_genres in prediction:
    # take the three most probable genres
    top_3 = movie_genres['probabilities'].argsort()[-3:][::-1]
    for genre in top_3:
        text_genre = encoder.classes_[genre]
        print(text_genre)
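From what I understand, serving an Estimator requires exporting it as a SavedModel with a serving_input_receiver_fn, rather than calling a Keras-style save(). Below is a minimal sketch of the export I have been attempting; the feature key 'descriptions' matches my input functions, but the 'exported_model' base directory and the rest of the structure are my own guesses:

import tensorflow as tf

def serving_input_receiver_fn():
    # at serving time, raw description strings arrive under the same
    # feature key ('descriptions') used by the input functions above
    descriptions = tf.placeholder(dtype=tf.string, shape=[None], name='descriptions')
    return tf.estimator.export.ServingInputReceiver(
        features={'descriptions': descriptions},
        receiver_tensors={'descriptions': descriptions})

# 'exported_model' is just a base directory name I chose; a timestamped
# subdirectory is created inside it (newer TF versions name this method
# export_saved_model instead of export_savedmodel)
export_dir = estimator.export_savedmodel('exported_model', serving_input_receiver_fn)
print(export_dir)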
I then tried pointing TensorFlow Serving at the exported directory, but I could not get it to work.
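To at least check whether the export itself is usable, I assume I can load the SavedModel back in-process with tf.contrib.predictor (TF 1.x). This is only a sketch of what I think should work; I have not confirmed the output keys:

import numpy as np
import tensorflow as tf

# export_dir is the timestamped path returned by export_savedmodel above
# (it may come back as bytes, so decode it if needed)
predict_fn = tf.contrib.predictor.from_saved_model(export_dir)
result = predict_fn({'descriptions': np.array([raw_text])})
print(result)  # hoping for something like the 'probabilities' from estimator.predict

Is this the right way to save and serve an Estimator, and which path am I supposed to give to TensorFlow Serving?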