When I run my model with training=False and batch_size = 2, the model uses the trained parameters for BatchNorm, but when I run the model with batch_size = 1, it does not use the trained parameters and in effect performs InstanceNorm. How can I make the model behave the same with batch_size = 1 as it does with batch_size = 2?
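As far as I understand, with training=False a BatchNormalization layer should apply its stored moving statistics, so the output for a given sample should not depend on what else is in the batch. A minimal standalone sketch of that expectation (the toy layer and shapes here are just for illustration):

import numpy as np
import tensorflow as tf

bn = tf.keras.layers.BatchNormalization()
bn.build((None, 4))  # creates gamma/beta and the moving statistics

x = tf.random.uniform((2, 4))
# In inference mode the layer should normalize with moving_mean /
# moving_variance, so the first row's output must match across batch sizes.
y2 = bn(x, training=False)
y1 = bn(x[:1], training=False)
print(np.allclose(y2[:1].numpy(), y1.numpy()))  # I expect True here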
import tensorflow as tf
from tensorflow.keras.applications import MobileNetV2

class ContextExtractor(tf.keras.Model):
    def __init__(self, model_name, pretrained_shape):
        super().__init__()
        self.model = self.__get_model(model_name, pretrained_shape)

    def call(self, x, training=False, **kwargs):
        features = self.model(x, training=training)
        return features

    def __get_model(self, model_name, pretrained_shape):
        if model_name == "mobilenetv2":
            # Pretrained ImageNet backbone, global average pooling, no classifier head.
            return MobileNetV2(input_shape=pretrained_shape,
                               weights='imagenet',
                               alpha=0.35,
                               include_top=False,
                               pooling='avg')
context_extractor = ContextExtractor("mobilenetv2", (224, 224, 3))
bc = tf.random.uniform((10, 224, 224, 3))

# Feature vector of the first image, computed with three different batch sizes.
a1 = context_extractor(bc, training=False)
print(hash(a1[0].numpy().tobytes()))
# -8545286774071995675

a2 = context_extractor(bc[:2], training=False)
print(hash(a2[0].numpy().tobytes()))
# -8545286774071995675 <-- the same

a3 = context_extractor(bc[:1], training=False)
print(hash(a3[0].numpy().tobytes()))
# 8191470914604248680 <-- another result!
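Since byte hashes differ even for tiny numerical discrepancies, a follow-up diagnostic I would run on the tensors above to see whether the batch_size = 1 output differs materially or only by floating-point noise (sketch only, not part of the original repro):

import numpy as np

# Same first image, different batch sizes: are the features actually close?
print(np.allclose(a1[0].numpy(), a2[0].numpy()))
print(np.allclose(a1[0].numpy(), a3[0].numpy()))
print(np.max(np.abs(a1[0].numpy() - a3[0].numpy())))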