
I am transitioning from PyTorch to TensorFlow 1.12 and would like to know whether it is possible to define tf.keras.Sequential models inside a tf.keras.Model subclass and run those in eager mode.

I constructed this minimal non-working example and would be grateful if someone could advise where I am going wrong. I have also used tf.contrib.eager.Network classes (with more success); however, since they are scheduled for deprecation, I tried to avoid them.

import numpy as np
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from keras.models import Sequential
from keras.layers import Dense, Activation
from tensorflow.train import AdamOptimizer

tf.enable_eager_execution()

class MLP(tf.keras.Model):
    def __init__(self, in_dim, out_dim, hidden_dim, num_layers, activation):
        super(MLP, self).__init__()
        model = Sequential()
        in_features = in_dim
        for layer in range(num_layers):
            model.add(Dense(hidden_dim,))
            model.add(Activation(activation))
            in_features = hidden_dim
        model.add(Dense(out_dim, input_shape=(hidden_dim,)))
        self.model = model

    def call(self, inputs):
        return self.model(inputs)

model = MLP(10, 1, 20, 4, 'relu')

optim = AdamOptimizer(learning_rate=1e-4)

for v in model.variables:
    print(v)

z = tf.convert_to_tensor(np.random.randn(100, 10), dtype=tf.float32)
with tfe.GradientTape() as tape:
    tape.watch(z)
    u = model(z)
    loss = tf.reduce_mean(tf.abs(u))
grad = tape.gradient(loss, model.trainable_variables)
optim.apply_gradients(zip(grad, model.trainable_variables))
print(loss.numpy())

1 Answer


Use

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation

instead of:

from keras.models import Sequential
from keras.layers import Dense, Activation

The standalone keras package is a separate library from tf.keras, and its layers do not support eager execution in TF 1.12, so mixing the two import paths is what breaks the example.
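
For reference, here is a minimal sketch of the full script with the imports swapped to tensorflow.keras; this is a reconstruction assuming TF 1.12 with eager execution, not tested output from the original poster. Besides the imports, it drops the unused in_features bookkeeping and the unnecessary tape.watch(z), since the gradient is taken with respect to the model variables rather than z.

import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
from tensorflow.train import AdamOptimizer

tf.enable_eager_execution()

class MLP(tf.keras.Model):
    def __init__(self, in_dim, out_dim, hidden_dim, num_layers, activation):
        super(MLP, self).__init__()
        # Build the hidden stack as a tf.keras Sequential owned by the Model
        model = Sequential()
        for _ in range(num_layers):
            model.add(Dense(hidden_dim))
            model.add(Activation(activation))
        model.add(Dense(out_dim))
        self.model = model

    def call(self, inputs):
        return self.model(inputs)

model = MLP(10, 1, 20, 4, 'relu')
optim = AdamOptimizer(learning_rate=1e-4)

z = tf.convert_to_tensor(np.random.randn(100, 10), dtype=tf.float32)
with tf.GradientTape() as tape:  # tf.GradientTape works directly in eager mode
    u = model(z)
    loss = tf.reduce_mean(tf.abs(u))
grad = tape.gradient(loss, model.trainable_variables)
optim.apply_gradients(zip(grad, model.trainable_variables))
print(loss.numpy())

One subtlety: model.variables is empty until the model has been called once, because the Dense layers are only built when they first see an input shape. Printing the variables after the forward pass will show the expected kernels and biases.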
