
I have removed the embedding layer from my BiLSTM model:

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Bidirectional, LSTM, Dense

model = Sequential()
model.add(Bidirectional(LSTM(input_length=max_len, return_sequences=True)))
model.add(LSTM(units=32, dropout=0.05, recurrent_dropout=0.35, return_sequences=True))
model.add(attention(return_sequences=True))  # receives 3D input and outputs 3D
model.add(LSTM(32, return_sequences=True))
model.add(Dense(7, activation='sigmoid'))
model.summary()

model.compile('adam', 'binary_crossentropy')
model.fit(xtrain, ytrain, epochs=3)
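
For reference, dummy arrays with the shapes I am feeding in would look like this (the sample count of 1000 is arbitrary, and I am assuming the targets have to match the model's per-timestep output of 7 classes):

import numpy as np

# dummy stand-ins with my assumed shapes: (samples, timesteps, features) and (samples, timesteps, classes)
xtrain = np.random.random((1000, 320, 6)).astype('float32')
ytrain = np.random.randint(0, 2, size=(1000, 320, 7)).astype('float32')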

The attention module is defined as follows:

from tensorflow.keras.layers import Layer
from tensorflow.keras import backend as K

class attention(Layer):

    def __init__(self, return_sequences=True):
        self.return_sequences = return_sequences
        super(attention, self).__init__()

    def build(self, input_shape):
        # one score weight per feature and one bias per timestep
        self.W = self.add_weight(name="att_weight", shape=(input_shape[-1], 1),
                                 initializer="normal")
        self.b = self.add_weight(name="att_bias", shape=(input_shape[1], 1),
                                 initializer="zeros")
        super(attention, self).build(input_shape)

    def call(self, x):
        # attention scores over the timestep axis
        e = K.tanh(K.dot(x, self.W) + self.b)
        a = K.softmax(e, axis=1)
        output = x * a

        if self.return_sequences:
            return output
        # collapse the timestep axis into a single context vector
        return K.sum(output, axis=1)
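
As I understand it, with return_sequences=True the layer takes a 3D tensor and returns a 3D tensor of the same shape (as the comment in the model above says). A small standalone check of that assumption, using an arbitrary batch size of 2:

import tensorflow as tf

# dummy batch with my assumed shape: (batch, timesteps, features)
dummy = tf.random.normal((2, 320, 6))
att = attention(return_sequences=True)
out = att(dummy)    # first call triggers build() with this input shape
print(out.shape)    # I expect (2, 320, 6)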

The input shape is (320, 6). The model is showing a build error. I have taken the code from this notebook: https://colab.research.google.com/drive/1ddbQ-YPSJ4xuHOmZbfSHSYJJ1otBsoNv?usp=sharing&authuser=2#scrollTo=2YitiVo0xXC7
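
Since the embedding layer is gone, my guess is that the first layer now has to declare the input shape itself and that the wrapped LSTM needs an explicit units value, something like the sketch below (the 64 units and the placement of input_shape are my assumptions), but I am not sure if this is the right way:

model.add(Bidirectional(LSTM(64, return_sequences=True),
                        input_shape=(320, 6)))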
