I have an estimator using a TensorFlow Hub text_embedding_column, like so:
import pandas
import tensorflow as tf
import tensorflow_hub as hub

my_dataframe = pandas.DataFrame(columns=["title"])
# populate data
labels = pandas.Series(dtype="int64")
# populate labels with 0|1 (pandas_input_fn requires a pandas Series, not a list)

embedded_text_feature_column = hub.text_embedding_column(
    key="title",
    module_spec="https://tfhub.dev/google/nnlm-en-dim128-with-normalization/1",
)

estimator = tf.estimator.LinearClassifier(
    feature_columns=[embedded_text_feature_column],
    optimizer=tf.train.FtrlOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=1.0,
    ),
    model_dir=model_dir,  # a writable directory for checkpoints
)

estimator.train(
    input_fn=tf.estimator.inputs.pandas_input_fn(
        x=my_dataframe,
        y=labels,
        batch_size=128,
        num_epochs=None,
        shuffle=True,
        num_threads=5,
    ),
    steps=5000,
)

export(estimator, "/tmp/my_model")
How can I export and serve the model so that it accepts raw strings as prediction input? My current serving_input_receiver_fn is shown below, and I have tried quite a few others (one of which I sketch after the export function), but I'm confused about what it needs to look like so that I can serve the model (with saved_model_cli, say) and call it with title strings (or a simple JSON structure) as input.
def export(estimator, dir_path):
    # make_parse_example_spec expects an iterable of feature columns
    feature_spec = tf.feature_column.make_parse_example_spec([
        hub.text_embedding_column(
            key="title",
            module_spec="https://tfhub.dev/google/nnlm-en-dim128-with-normalization/1",
        ),
    ])
    # build_parsing_serving_input_receiver_fn already returns the receiver fn;
    # the resulting signature expects serialized tf.Example protos as input
    serving_input_receiver_fn = tf.estimator.export.build_parsing_serving_input_receiver_fn(
        feature_spec)
    estimator.export_savedmodel(
        export_dir_base=dir_path,
        serving_input_receiver_fn=serving_input_receiver_fn,
    )
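
For concreteness, one of the other variants I tried builds a raw receiver around a string placeholder instead of going through tf.Example parsing. This is just a sketch of my attempt, not something I know to be correct; the placeholder name is my own choice, and the "title" key is there to match the feature column's key:

def serving_input_receiver_fn():
    # accept a batch of raw title strings rather than serialized tf.Example protos
    titles = tf.placeholder(dtype=tf.string, shape=[None], name="title")
    return tf.estimator.export.ServingInputReceiver(
        features={"title": titles},          # consumed by the feature column
        receiver_tensors={"title": titles},  # exposed as the signature's input
    )

With something like that exported, I'd hope to be able to invoke it along the lines of saved_model_cli run --dir /tmp/my_model/<timestamp> --tag_set serve --signature_def serving_default --input_exprs 'title=["some title"]', but I'm not sure whether the raw receiver or the parsing receiver above is the right approach here.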