import tensorflow as tf

class BinaryTruePositives(tf.keras.metrics.Metric):
    def __init__(self, name='Results', **kwargs):
        super(BinaryTruePositives, self).__init__(name=name, **kwargs)
        # Running total, accumulated across batches
        self.true_positives = self.add_weight(name='tp', initializer='zeros')

    def update_state(self, y_true, y_pred, sample_weight=None):
        # Force the labels into the same [batch, 4] layout as the predictions
        y_true = tf.reshape(y_true, [16, 4])
        y_true = tf.cast(y_true, dtype=tf.float32)
        y_true = tf.squeeze(y_true)
        # Debug output: raw predictions before and after thresholding
        print("before pred")
        tf.print(y_pred)
        y_pred = tf.sign(y_pred)
        print("pred after sign")
        tf.print(y_pred)
        print("true")
        tf.print(y_true)
        # A prediction counts only if all four positions match the label row
        equal_t = tf.equal(y_true, y_pred)
        reduce_t = tf.reduce_all(equal_t, axis=1)
        tf.print(reduce_t)
        z = tf.keras.backend.mean(reduce_t)
        tf.print(z)
        self.true_positives.assign_add(z)

    def result(self):
        return self.true_positives

    def reset_states(self):
        self.true_positives.assign(0.)
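For context, the metric is attached at compile time. The sketch below is simplified (the real model and data pipeline are omitted, and the placeholder Dense layers are only illustrative), but the mean_squared_error loss matches the node named in the traceback further down:

# Simplified sketch of how the metric is attached; the Sequential model
# here is only a placeholder, not the actual network.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation='relu', input_shape=(10,)),
    tf.keras.layers.Dense(4)   # 4 outputs, one per code position
])
model.compile(optimizer='adam',
              loss='mean_squared_error',        # the loss named in the traceback
              metrics=[BinaryTruePositives()])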
During model training this throws a shape error, even though I reshape y_true inside update_state. The batch size is 16. The full error is:
InvalidArgumentError: [_Derived_] Incompatible shapes: [64] vs. [16,4]
    [[{{node StatefulPartitionedCall/StatefulPartitionedCall/mean_squared_error/SquaredDifference}}]]
    [[StatefulPartitionedCall]]
    [[import/StatefulPartitionedCall_1/ReduceDataset]]
    [Op:__inference_wrapped_function_48795]
Function call stack: wrapped_function -> wrapped_function -> wrapped_function
The classes are encoded as [1, 1, 1, 1] for class 0, [-1, -1, -1, -1] for class 1, and [1, -1, 1, -1] for class 3.
y_pred has shape [16, 4]. Even after reshaping y_true, the metric runs for the first batch and then fails with the incompatible-shapes error above.
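One way to avoid hard-coding [16, 4] inside the metric is to derive the target shape from y_pred, so a smaller final batch does not break update_state. This is only a sketch of that variant, not verified against the rest of the pipeline:

    # Drop-in variant of update_state that reshapes to whatever batch
    # size y_pred actually has instead of a fixed [16, 4].
    def update_state(self, y_true, y_pred, sample_weight=None):
        y_true = tf.cast(tf.reshape(y_true, tf.shape(y_pred)), tf.float32)
        y_pred = tf.sign(y_pred)
        # A row counts only if all four positions match the label row
        matches = tf.reduce_all(tf.equal(y_true, y_pred), axis=1)
        self.true_positives.assign_add(tf.reduce_mean(tf.cast(matches, tf.float32)))

Using tf.shape(y_pred) keeps the metric agnostic to the batch size, although the traceback above names the loss node, so the label shape may also have to be fixed before it reaches mean_squared_error.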