I want to combine two networks into one network while keeping the weights of the original network.

I saved the weights in their numpy form using:

weights = {}
for i in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
    weights[i.name] = i.eval()

I can't find a way to load these weights into the new network's variables. Is there a way to load the weights into all the variables?

I tried the following but get an error:

for i in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
    i.initializer = weights[i.name]

The error:

AttributeError: can't set attribute

1 Answer

You can write two helper functions, one that dumps the weights into a dictionary and one that loads them back:

def save_to_dict(sess, collection=tf.GraphKeys.GLOBAL_VARIABLES):
    # evaluate every variable in the given collection and store it by name
    return {v.name: sess.run(v) for v in tf.get_collection(collection)}


def load_from_dict(sess, data):
    # assign the stored value to every variable whose name is in the dict
    for v in tf.global_variables():
        if v.name in data:
            sess.run(v.assign(data[v.name]))

The trick is to simply iterate over all variables and check whether they exist in the dictionary, as in this complete example:

import tensorflow as tf
import numpy as np


def save_to_dict(sess, collection=tf.GraphKeys.GLOBAL_VARIABLES):
    # evaluate every variable in the given collection and store it by name
    return {v.name: sess.run(v) for v in tf.get_collection(collection)}


def load_from_dict(sess, data):
    # assign the stored value to every variable whose name is in the dict
    for v in tf.global_variables():
        if v.name in data:
            sess.run(v.assign(data[v.name]))


def network(x):
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc0')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc1')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc2')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc3')
    x = tf.layers.dense(x, 512, activation=tf.nn.relu, name='fc4')
    return x


element = np.random.randn(8, 10)
weights = None

# first session
with tf.Session() as sess:

    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    y = network(x)
    sess.run(tf.global_variables_initializer())

    # first evaluation
    expected = sess.run(y, {x: element})

    # dump as dict
    weights = save_to_dict(sess)

# destroy session and graph
tf.reset_default_graph()

# second session
with tf.Session() as sess:

    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    y = network(x)
    sess.run(tf.global_variables_initializer())

    # use randomly initialized parameters
    actual = sess.run(y, {x: element})
    assert np.sum(np.abs(actual - expected)) > 0  # should NOT match

    # load previous parameters
    load_from_dict(sess, weights)

    actual = sess.run(y, {x: element})
    assert np.sum(np.abs(actual - expected)) == 0  # should match

This way, you can simply drop some parameters from the dictionary, modify the weights before loading them, or even rename parameters by rewriting the dictionary keys.
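
As an illustration, here is a minimal sketch of those manipulations, assuming `weights`, `network` and `load_from_dict` from the example above; the layer names (`fc1`, `fc2`, ...) are only used because they happen to exist in that example:

import tensorflow as tf

# sketch only; assumes `weights`, `network` and `load_from_dict` are available
edited = dict(weights)

# rename: reuse the old 'fc1' parameters for the new 'fc2' layer
# (both layers are 512 x 512 here, so the shapes still match)
edited['fc2/kernel:0'] = edited.pop('fc1/kernel:0')
edited['fc2/bias:0'] = edited.pop('fc1/bias:0')

# drop: leave 'fc4' at its random initialization
edited = {k: v for k, v in edited.items() if not k.startswith('fc4/')}

# modify: rescale one weight matrix before loading
edited['fc3/kernel:0'] = 0.5 * edited['fc3/kernel:0']

tf.reset_default_graph()

with tf.Session() as sess:
    x = tf.placeholder(dtype=tf.float32, shape=[None, 10])
    y = network(x)
    sess.run(tf.global_variables_initializer())

    # only variables whose names appear in the dictionary are overwritten
    load_from_dict(sess, edited)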
