I am getting shape inference error when I try to run openvino model optimizer on a simple custom layer model. Here is unit test:
import os
import shutil
import pytest
import subprocess
import numpy as np
import tensorflow as tf
from keras import Model
from keras.layers import Input, Layer
class CustomLayer(Layer):
def call(self, inputs):
arr_0 = inputs[0][:, 0, 0]
arr_1 = inputs[1][:, 0, 1]
diff = tf.abs(tf.subtract(arr_0, arr_1))
large_diff = tf.where(diff > 1., 1., 0.)
# return diff # this return statement works with model optimizer
return large_diff # this return statement fails with model optimizer
@pytest.fixture
def model():
input_1 = Input(shape=(10, 2), dtype=tf.float32)
input_2 = Input(shape=(10, 2), dtype=tf.float32)
output = CustomLayer()([input_1, input_2])
model = Model(inputs=[input_1, input_2], outputs=output)
return model
def test_init(model: Model):
input_1 = np.array(range(20, 40)).reshape(1, 10, 2)
input_2 = np.array(range(40, 60)).reshape(1, 10, 2)
output = model([input_1, input_2])
assert np.allclose(output, 1.)
def test_save_convert_model(model: Model):
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path = os.path.join(dir_path, 'resources/infershape')
shutil.rmtree(dir_path, ignore_errors=True)
model.save(dir_path)
inp = "input_3,input_4"
ins = "[1,10,2],[1,10,2]"
mo = "/opt/intel/openvino/deployment_tools/model_optimizer/mo_tf.py"
cmd = "python {}".format(mo)
cmd = "{} "\
"--saved_model_dir {} "\
"--output_dir {} "\
"--input {} "\
"--input_shape {} ".format(cmd, dir_path, dir_path, inp, ins)
print(cmd)
subprocess.run(cmd, shell=True, check=True)
Here is the error message that is produced.
tensorflow/core/grappler/optimizers/meta_optimizer.cc:1137] Optimization results for grappler item: graph_to_optimize
function_optimizer: Graph size after: 22 nodes (18), 21 edges (18), time = 1.352ms.
function_optimizer: function_optimizer did nothing. time = 0.03ms.
[ ERROR ] Cannot infer shapes or values for node "PartitionedCall/model_1/custom_layer_1/SelectV2".
[ ERROR ] Input 0 of node PartitionedCall/model_1/custom_layer_1/SelectV2 was passed float from PartitionedCall/model_1/custom_layer_1/Greater_port_0_ie_placeholder:0 incompatible with expected bool.
[ ERROR ]
[ ERROR ] It can happen due to bug in custom shape infer function <function tf_native_tf_node_infer at 0x7f2efcac24c0>.
[ ERROR ] Or because the node inputs have incorrect values/shapes.
[ ERROR ] Or because input shapes are incorrect (embedded to the model or passed via --input_shape).
[ ERROR ] Run Model Optimizer with --log_level=DEBUG for more information.
[ ERROR ] Exception occurred during running replacer "REPLACEMENT_ID" (<class 'extensions.middle.PartialInfer.PartialInfer'>): Stopped shape/value propagation at "PartitionedCall/model_1/custom_layer_1/SelectV2" node.
Please tell me what to either change in my code or if openvino issue, please tell me so I can handle differently. Thanks.