I would like to add an LSTM layer before the softmax layer so that I can keep track of the context of a sequence and use it for prediction. Below is my implementation, but every time I run it I get the following error. How can I resolve it?
ValueError: Input 0 is incompatible with layer lstm_1: expected ndim=3, found ndim=2
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, LSTM
from keras.optimizers import Adam

# Shared convolutional feature extractor
common_model = Sequential()
common_model.add(Conv2D(32, (3, 3), input_shape=self.state_size, padding='same', activation='relu'))
common_model.add(Dropout(0.2))
common_model.add(Conv2D(32, (3, 3), activation='relu', padding='same'))
common_model.add(MaxPooling2D(pool_size=(2, 2)))
common_model.add(Flatten())
common_model.add(Dense(512, activation='relu'))
common_model.add(Dropout(0.5))
common_model.add(Dense(512, activation='relu'))
common_model.add(Dropout(0.5))
common_model.add(Dense(512, activation='relu'))
common_model.add(Dropout(0.5))

# Actor: shared features -> LSTM -> softmax over actions
agent_model = Sequential()
agent_model.add(common_model)
agent_model.add(LSTM(512, return_sequences=False))  # the error is raised here
agent_model.add(Dense(self.action_size, activation='softmax'))
agent_model.compile(loss='categorical_crossentropy', optimizer=Adam(lr=self.agent_learning_rate))

# Critic: shared features -> single state value
critic_model = Sequential()
critic_model.add(common_model)
critic_model.add(Dense(1, activation='linear'))
critic_model.compile(loss="mse", optimizer=Adam(lr=self.critic_learning_rate))
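For reference, here is a minimal standalone sketch that compiles and predicts without this error (Keras 2.x assumed; the 10 timesteps, 512 features, and 4 actions are made-up numbers, not from my real model), because its input is already 3-D (batch, timesteps, features). I suspect my problem is that the Flatten/Dense output of common_model is only 2-D (batch, features), but I don't know the right way to adapt my model so the LSTM still sees the sequence context.

import numpy as np
from keras.models import Sequential
from keras.layers import LSTM, Dense

# Toy model: a standalone LSTM accepts 3-D input of shape (batch, timesteps, features)
toy = Sequential()
toy.add(LSTM(512, input_shape=(10, 512), return_sequences=False))  # 10 and 512 are arbitrary
toy.add(Dense(4, activation='softmax'))  # 4 stands in for self.action_size
toy.compile(loss='categorical_crossentropy', optimizer='adam')

toy.predict(np.random.rand(2, 10, 512))  # runs fine: batch of 2, 10 timesteps, 512 features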