def kl_divergence(p, p_hat):
return (p * K.log(p / p_hat)) + ((1 - p) * K.log((1 - p) / (1 - p_hat)))
class SparseActivityRegularizer(Regularizer):
sparsityBeta = None
def __init__(self, l1=0., l2=0., p=0.01, sparsityBeta=0.1):
self.p = p
self.sparsityBeta = sparsityBeta
def set_layer(self, layer):
self.layer = layer
def __call__(self,loss):
#p_hat needs to be the average activation of the units in the hidden layer.
p_hat = T.sum(T.mean(self.layer.get_output(True) , axis=0))
loss += self.sparsityBeta * kl_divergence(self.p, p_hat)
return loss
def get_config(self):
return {"name": self.__class__.__name__,
"p": self.l1}
when I call this custom regularizer in the model as shown below
dr=0.5
inputs = Input(shape=(392,))
x = Dense(1000,activation='relu',activity_regularizer=SparseActivityRegularizer())(inputs)
x=Dropout(dr)(x)
out= Dense(392, activation='sigmoid')(x)
model = Model(inputs=inputs, outputs=out)
model.compile(loss=euc_dist_keras,
optimizer='adadelta', metrics=["accuracy"])
model.summary()
filepath="weightdae.best.hdf5"
checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1,
save_best_only=True, mode='min')
callbacks_list = [checkpoint,TensorBoard(log_dir='/tmp/autoencoder')]
hist = model.fit(ptilde, p,
nb_epoch=40,
shuffle=True,
validation_data=(ptilde_val,p_val),
batch_size=32,
callbacks=callbacks_list)
I get the following error
AttributeError: 'SparseActivityRegularizer' object has no attribute 'layer'
Can someone please help me in solving this error? I have checked the implementation of the regularizer, and activity regularizers in keras have been implemented in the same way. But here somehow it cannot find the attribute 'layer' and throws this error.