I am having problems importing models trained with TensorFlow and Keras in Python. Serving them from Flask is easy, but using Deeplearning4j or org.tensorflow I run into problems; it seems the Java libraries are still immature.
In this case, the tokenization produced by the pickled tokenizer in Flask differs from the one produced by the JSON tokenizer in Deeplearning4j.
This is the Flask code, followed by its output for the string "I am disputing with my mortgage confusing misleading term":
@app.route('/')
def home():
    """Serve the complaint-classification form page."""
    return flask.render_template('complaints_classify.html')
@app.route('/predict', methods=['POST'])
def predict():
    """Classify the consumer complaint submitted from the web form.

    Reads the ``complaint`` field from the POSTed form, tokenizes it with
    the tokenizer that was fitted at training time (loaded from pickle),
    pads the sequence to the model's input length (50), runs the LSTM
    model and re-renders the template with the predicted label.
    """
    print('predict\n')
    complaint = request.form['complaint']
    print(complaint)

    # NOTE(review): loading the model and tokenizer on every request is
    # expensive — consider loading them once at module import time.
    import tensorflow as tf
    loaded_model = tf.keras.models.load_model(
        '/home/ean/anaconda3/envs/my_env/modelLstm.h5')
    loaded_model.summary()

    # The tokenizer MUST be the exact one fitted during training,
    # otherwise the word index will not match the embedding layer.
    # (pickle.load is only safe here because the file is our own.)
    with open('/home/ean/anaconda3/envs/my_env/tokenizer.pickle', 'rb') as handle:
        tokenizer = pickle.load(handle)
    print(tokenizer.get_config())
    print("Loaded model from disk")

    labels = ['Credit card', 'Debt collection', 'Credit Reporting',
              'Mortgage', 'Payday loan', 'Student loan']

    text = np.array([complaint])
    X = tokenizer.texts_to_sequences(text)
    print(X)
    # maxlen=50 must match the sequence length used at training time.
    X = pad_sequences(X, maxlen=50)
    print(X)

    pred = loaded_model.predict(X)
    print(pred)

    # Bug fix: the original wrapped the label in a set literal
    # ({labels[np.argmax(pred)]}), which rendered as "{'Mortgage'}"
    # in the template instead of "Mortgage".
    predicted_label = labels[int(np.argmax(pred))]
    print(text, predicted_label)

    # Bug fix: pass the submitted string itself, not the numpy-array
    # repr "['...']" that format(text) produced.
    return flask.render_template('complaints_classify.html',
                                 complaint=complaint,
                                 prediction_text=predicted_label)
And the output is:
[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]]
It predicts correctly:
['I am disputing with my mortgage confusing misleading term'] Mortgage
And this is the equivalent code using Deeplearning4j beta6:
public class GuestbookController {
@RequestMapping(value = "/micro-service")
public String hello() throws Exception {
String modeloStr = "";
try {
System.out.println("Estamos en:"+InetAddress.getLocalHost().getHostAddress());
String[] labels = {"Credit card","Debt collection","Credit Reporting","Mortgage","Payday loan","Student loan"};
// load the model
//String simpleMlp = new ClassPathResource("/home/oscar/curso/microservicio-ML/model/modelLstm.h5").getFile().getPath();
String simpleMlp = new ClassPathResource("modelLstm.h5").getFile().getPath();
System.out.println("Fichero leido\n");
MultiLayerNetwork model = KerasModelImport.importKerasSequentialModelAndWeights(simpleMlp,false);
for(int i= 0;i<model.getLayers().length;i++){
modeloStr=modeloStr+(model.getLayers()[i]).getConfig()+"\n";
}
System.out.println(modeloStr);
String[] texts = new String[] {"I am disputing with my mortgage confusing misleading term"};
String path = "tokenizer.json";
KerasTokenizer tokenizer = KerasTokenizer.fromJson(Resources.asFile(path).getAbsolutePath());
//KerasTokenizer tokenizer = new KerasTokenizer(50);
//tokenizer.fitOnTexts(texts));
Integer[][] sequences = tokenizer.textsToSequences(texts);
Integer [][] tmp10=sequences;
String tmp11 ="";
for(int p= 0;p<tmp10.length;p++){
for (int q=0;q<tmp10[p].length;q++){
tmp11=tmp11+(tmp10[p][q])+"\n";
}
}
System.out.println("-------Sequences:"+tmp11);
System.out.println(tokenizer.textsToMatrix(texts, TokenizerMode.FREQ));
And the output is:
-------Sequences:1
The output of tokenizer.textsToMatrix(texts, TokenizerMode.FREQ) is:
[[ 0, 1.0000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
I really appreciate any help about how to solve it.
Many Thanks!!!