Say I have a custom layer:
class Custom_Layer(keras.layers.Layer):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # an initializer has to be called with a shape to produce the initial value
        self.w_0 = tf.Variable(tf.random_uniform_initializer()(shape=(...)), trainable=True)
        self.b_0 = tf.Variable(tf.zeros_initializer()(shape=(...)), trainable=True)
        ....
    def call(self, inputs):
        output = self.A_Method(inputs)
        return output
    def A_Method(self, TensorA):
        ....
        return something
If I want to decorate A_Method with @tf.function (with an input_signature) to control tracing:
@tf.function(input_signature=[???, tf.TensorSpec(shape=None)])
def A_Method(self, TensorA):
    ....
    return something
what spec should I put for self? I tried putting a tf.TensorSpec there, but it raised an error.
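For concreteness, this is roughly the attempt that fails (a minimal sketch; the method body and the call are placeholders, and the specs' shape/dtype are assumptions):

import tensorflow as tf

class Custom_Layer(tf.keras.layers.Layer):
    # putting a TensorSpec in the `self` slot is what raises the error,
    # presumably because `self` is a Python object, not a tensor
    @tf.function(input_signature=[tf.TensorSpec(shape=None),
                                  tf.TensorSpec(shape=None)])
    def A_Method(self, TensorA):
        return TensorA

layer = Custom_Layer()
layer.A_Method(tf.constant(1.0))  # this call errors out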
___Update___:
I'm quite new to TensorFlow, so sorry if the code is weird or doesn't make sense. The reason I'm doing this is that I found an RNN takes a long time to get started on the first epoch, and I don't know whether this custom layer can do something similar while taking less time. Ultimately, though, I believe the slow startup is caused by TensorFlow retracing repeatedly, even on the same input spec and input shape.
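(This is how I understand retracing can be observed; a minimal sketch, not my actual model. A Python-side print runs once per trace, so seeing it fire again on same-shaped input would mean a retrace:)

import tensorflow as tf

@tf.function
def f(x):
    print("tracing")  # Python print: only executes while tf.function traces
    return x * 2

f(tf.constant([1.0]))  # prints "tracing" once
f(tf.constant([2.0]))  # same shape and dtype: no new trace
print(f.experimental_get_tracing_count())  # 1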
I use this layer repeatedly, like so:

input_layer = Input(shape=(X_.shape[1], X_.shape[2]), name='input')
# in a loop:
Hard_Code_RNN_Layer(input_layer[:, :, slicing])
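(Spelled out, the loop looks something like this; the slice bounds and the (3, 3) input_tuple are made up for illustration, and it assumes the Hard_Code_RNN_Layer class shown below:)

outputs = []
for i in range(0, 30, 3):
    # each iteration applies the layer to a different channel slice
    layer = Hard_Code_RNN_Layer(input_tuple=(3, 3))
    outputs.append(layer(input_layer[:, :, i:i + 3]))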
Then I ran .experimental_get_tracing_count() and the count is 300, which really shouldn't be above 10. That's why I want to take the method def Mimic_RNN(self, step_input, step_state) out of the class and try giving it an input_signature. Please see below:
def Initialize_Variable(input_dim, units):
    w_init = tf.random_normal_initializer()
    b_init = tf.zeros_initializer()
    w_0 = tf.Variable(initial_value=w_init(shape=(input_dim, units)))
    b_0 = tf.Variable(initial_value=b_init(shape=(units,)))
    return w_0, b_0

def Initialize_One_Variable(input_dim, units):
    w_init = tf.random_uniform_initializer()
    R_kernal = tf.Variable(initial_value=w_init(shape=(input_dim, units)))
    return R_kernal
class Hard_Code_RNN_Layer(keras.layers.Layer):
    def __init__(self, input_tuple, Sequencee=True, **kwargs):
        super(Hard_Code_RNN_Layer, self).__init__(**kwargs)
        input_shape, units = input_tuple
        self.Hidden_Size = int(input_shape * 0.85)
        self.inputshape = input_shape
        self.units = units
        self.thiseq = Sequencee
        self.Uz = Initialize_One_Variable(self.Hidden_Size, self.Hidden_Size)
        self.Ur = Initialize_One_Variable(self.Hidden_Size, self.Hidden_Size)
        self.w_hz, self.b_hz = Initialize_Variable(self.units, self.Hidden_Size)
        self.w_out, self.b_out = Initialize_Variable(self.Hidden_Size, self.units)
        self.w_0, self.b_0 = Initialize_Variable(self.inputshape, self.units)

    def get_config(self):
        cfg = super().get_config()
        return cfg
    def Layer_Method(self, inputs, w_h, b_h):
        return tf.matmul(inputs, w_h) + b_h

    def Mimic_RNN(self, step_input, step_state):  # <-- want to give this an input_signature
        x__j = self.Layer_Method(step_input, self.w_0, self.b_0)
        r = tf.sigmoid(tf.matmul(step_state, self.Ur))
        z = tf.sigmoid(tf.matmul(step_state, self.Uz))
        h__ = tf.nn.relu(tf.matmul(x__j, self.w_hz) + tf.multiply(r, step_state) + self.b_hz)
        h = (1 - z) * h__ + z * step_state
        output__ = tf.nn.relu(tf.matmul(h, self.w_out) + self.b_out)
        return output__, h
    def call(self, inputs):
        unstack = tf.unstack(inputs, axis=1)
        out1, hiddd = self.Mimic_RNN(step_input=unstack[0],
                                     step_state=tf.zeros_like(unstack[0][:, 0:self.Hidden_Size]))
        out2, hiddd = self.Mimic_RNN(step_input=unstack[1], step_state=hiddd)
        out3, hiddd = self.Mimic_RNN(step_input=unstack[2], step_state=hiddd)
        if self.thiseq:
            return tf.stack([out1, out2, out3], axis=1)
        else:
            return out3
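To show what I mean by pulling Mimic_RNN out of the class, here is a rough sketch of the direction I'm considering (the sizes, dtype, simplified step math, and the closure over the weights are all assumptions for illustration, not working code from my project):

import tensorflow as tf

HIDDEN = 4  # hypothetical size, just for the sketch

Ur = tf.Variable(tf.random.uniform((HIDDEN, HIDDEN)))
Uz = tf.Variable(tf.random.uniform((HIDDEN, HIDDEN)))

# standalone step function: `self` is gone and the weights are captured by
# closure, so the signature only has to describe the two tensor arguments
@tf.function(input_signature=[
    tf.TensorSpec(shape=[None, HIDDEN], dtype=tf.float32),  # step_input
    tf.TensorSpec(shape=[None, HIDDEN], dtype=tf.float32),  # step_state
])
def Mimic_RNN_step(step_input, step_state):
    r = tf.sigmoid(tf.matmul(step_state, Ur))
    z = tf.sigmoid(tf.matmul(step_state, Uz))
    h = (1 - z) * tf.nn.relu(step_input + tf.multiply(r, step_state)) + z * step_state
    return h

Mimic_RNN_step(tf.zeros((2, HIDDEN)), tf.zeros((2, HIDDEN)))
print(Mimic_RNN_step.experimental_get_tracing_count())  # should stay at 1

The idea is that, with the weights captured by closure, every call matches the single signature and should reuse the same trace.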