I am currently having some issue loading my model, as it includes a lambda layer..
This is my neural network layer which uses lambda layer.
#
# Python scritpt - Keras RCNN model.
#
import keras
from keras.models import Model
from keras.layers import Input, Dense, Dropout, Flatten, Activation
from keras.layers import merge, Conv2D, MaxPooling2D, Input
from keras.layers.normalization import BatchNormalization
from keras.layers.core import Lambda
import numpy as np
from keras.layers import add
from keras import backend as K
# RCL:
# BatchNorm(Relu(conv(L-1) + conv(L)))
#
def make_RCNN(input,number_of_rcl,num_of_filter, filtersize,alpha,pool):
feed_forward = Conv2D(filters=num_of_filter, kernel_size=1, name='init')(input)
for x in xrange(number_of_rcl):
output = RCL(feed_forward,num_of_filter,filtersize,alpha,pool)
feed_forward = output
return feed_forward
def RCL(feed_forward_input,num_of_filter, filtersize, alpha,pool):
conv = Conv2D(filters=num_of_filter, kernel_size=filtersize, padding='same')
recurrent_input = conv(feed_forward_input)
merged = add([feed_forward_input,recurrent_input])
conv_relu = Lambda(lambda x : K.relu(x,alpha=alpha))(merged)
conv_relu_batchnorm = BatchNormalization()(conv_relu)
if pool:
conv_relu_batchnorm_pool = MaxPooling2D()(conv_relu_batchnorm)
return conv_relu_batchnorm_pool
else:
return conv_relu_batchnorm
input = Input(shape=(30,30,3))
output = make_RCNN(input,number_of_rcl=3,num_of_filter=3,filtersize=3,alpha=0.2, pool=True)
model = Model(input = input, output = output)
model.compile(optimizer='rmsprop', loss='binary_crossentropy')
model.summary()
how do i remove the layer, without altering the functionality?...