
I would like to write a network with custom connections and custom activation functions for some channels; see the figure below.

[Figure: the intended network architecture, with per-channel sin/cos activations feeding a fully-connected block]

Let's say 10 hidden layers and 128 channels. Using this answer, I have tried the following:

from keras.models import Model
from keras.layers import Dense, Input, Concatenate, Lambda

inputTensor = Input((3,))


# Split the 3 input channels: each channel feeds one sin branch and one cos branch
group1 = Lambda(lambda x: x[:, 0:1], output_shape=(1,))(inputTensor)
group2 = Lambda(lambda x: x[:, 0:1], output_shape=(1,))(inputTensor)
group3 = Lambda(lambda x: x[:, 1:2], output_shape=(1,))(inputTensor)
group4 = Lambda(lambda x: x[:, 1:2], output_shape=(1,))(inputTensor)
group5 = Lambda(lambda x: x[:, 2:3], output_shape=(1,))(inputTensor)
group6 = Lambda(lambda x: x[:, 2:3], output_shape=(1,))(inputTensor)
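
A quick way to confirm that each slice has the expected shape (a sketch; K.int_shape is available in Keras 2.x):

from keras import backend as K
print(K.int_shape(group1))  # expected shape: (None, 1)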

For the second layer:

xsin = Dense(1,activation=sin_activation,use_bias=True)
group1 = xsin(group1)
xcos = Dense(1,activation=cos_activation,use_bias=True)
group2 = xcos(group2)
ysin = Dense(1,activation=sin_activation,use_bias=True)
group3 = ysin(group3)
ycos = Dense(1,activation=cos_activation,use_bias=True)
group4 = ycos(group4)
zsin = Dense(1,activation=sin_activation,use_bias=True)
group5 = zsin(group5)
zcos = Dense(1,activation=cos_activation,use_bias=True)
group6 = zcos(group6)
outputTensor = Concatenate()([group1,group2,group3,group4,group5,group6])
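
Since the three input channels are treated the same way, the split-plus-sin/cos front end can also be built in a loop. A hedged, equivalent sketch (the name branches is mine, and it assumes sin_activation and cos_activation are defined as shown further down):

branches = []
for i in range(3):
    # slice channel i and feed it to one sin branch and one cos branch
    channel = Lambda(lambda x, i=i: x[:, i:i+1], output_shape=(1,))(inputTensor)
    branches.append(Dense(1, activation=sin_activation, use_bias=True)(channel))
    branches.append(Dense(1, activation=cos_activation, use_bias=True)(channel))
outputTensor = Concatenate()(branches)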

Connect the custom layers to the fully-connected block:

block1 = Dense(128,activation='relu',use_bias=True)(outputTensor)

Create the fully-connected block:

block2 = Dense(128,activation='relu',use_bias=True)(block1)
block3 = Dense(128,activation='relu',use_bias=True)(block2)
block4 = Dense(128,activation='relu',use_bias=True)(block3)
block5 = Dense(128,activation='relu',use_bias=True)(block4)
block6 = Dense(128,activation='relu',use_bias=True)(block5)
block7 = Dense(128,activation='relu',use_bias=True)(block6)
block8 = Dense(128,activation='relu',use_bias=True)(block7)
block9 = Dense(128,activation='relu',use_bias=True)(block8)
block10 = Dense(128,activation='relu',use_bias=True)(block9)
outputamplitude = Dense(1,activation='elu',use_bias=True)(block10)
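
Since the ten hidden layers are identical, the block can also be written more compactly as a loop (same architecture, just shorter; x is a placeholder name of mine):

# Compact version of the 10-layer fully-connected block (a sketch)
x = outputTensor
for _ in range(10):
    x = Dense(128, activation='relu', use_bias=True)(x)
outputamplitude = Dense(1, activation='elu', use_bias=True)(x)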

Create the model:

model = Model(inputTensor,outputamplitude)
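
To check that the graph wires up end to end, something like this should work (the optimizer and loss here are placeholders I chose, not part of the question):

model.compile(optimizer='adam', loss='mse')
model.summary()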

Custom activation functions:

# Custom activation function
from keras.layers import Activation
from keras import backend as K
from keras.utils.generic_utils import get_custom_objects

def sin_activation(x):
    return K.sin(x)

def cos_activation(x):
    return K.cos(x)

get_custom_objects().update({'sin_activation': Activation(sin_activation)})
get_custom_objects().update({'cos_activation': Activation(cos_activation)})
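
Registering the names with get_custom_objects matters mostly when a saved model is reloaded; a hedged sketch (the filename is arbitrary):

from keras.models import load_model

model.save('sincos_model.h5')
reloaded = load_model('sincos_model.h5')  # 'sin_activation'/'cos_activation' resolved via the registry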

1 Answer


SOLVED: define and register the custom activation functions before building the model:

# Custom activation function
from keras.layers import Activation
from keras import backend as K
from keras.utils.generic_utils import get_custom_objects

def sin_activation(x):
    return K.sin(x)

def cos_activation(x):
    return K.cos(x)

get_custom_objects().update({'sin_activation': Activation(sin_activation)})
get_custom_objects().update({'cos_activation': Activation(cos_activation)})
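
For completeness, a quick end-to-end check of the full model with random data (shapes and hyperparameters are made up for illustration):

import numpy as np

X = np.random.rand(32, 3)   # 3 input channels, as in the question
y = np.random.rand(32, 1)   # single amplitude output
model.compile(optimizer='adam', loss='mse')
model.fit(X, y, epochs=1, batch_size=8)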