Given that I trained several different models on the same data, and all the neural networks I trained have the same architecture, I would like to know whether it is possible to restore those models, average their weights, and initialise my weights using that average.
This is an example of how the graph might look. Basically, what I need is an average of the weights I am going to load.
import tensorflow as tf
import numpy as np
# Placeholder parameter variables for the two trained models.
# NOTE(review): in TF1, tf.Variable() with no initial value raises at
# graph-build time as written — the real shapes must come from the
# checkpoints; confirm before running.
# model 1 weights
weights = {name: tf.Variable() for name in ('w1', 'w2')}
# model 1 biases
biases = {name: tf.Variable() for name in ('b1', 'b2')}
# model 2 weights
weights2 = {name: tf.Variable() for name in ('w1', 'w2')}
# model 2 biases
biases2 = {name: tf.Variable() for name in ('b1', 'b2')}
# Averaged parameters: element-wise mean of the two models' variables.
# FIX: the original also built 'w3'/'b3' entries from weights["w3"] and
# biases["b3"], which do not exist in the dicts above and would raise
# KeyError at graph construction — the average now covers exactly the
# keys the models actually have.
# NOTE(review): these Variables take their initial value from other
# Variables, so their initializers must run *after* the checkpoints are
# restored, otherwise the average is taken over unrestored values.
w = {
    name: tf.Variable(tf.add(weights[name], weights2[name]) / 2)
    for name in weights
}
# averaged biases
b = {
    name: tf.Variable(tf.add(biases[name], biases2[name]) / 2)
    for name in biases
}
# One Saver per model: the checkpoint tensor names ('w1', 'w2', 'b1',
# 'b2') are exactly the dict keys, so merging each model's weight and
# bias dicts yields the same name -> variable mapping as the original
# explicit dict literals.
weights_saver = tf.train.Saver({**weights, **biases})
weights_saver2 = tf.train.Saver({**weights2, **biases2})
And this is what I want to get when I run the TF session: c contains the averaged weights I want to use in order to start the training.
# Create a session for running operations in the Graph, restore both
# checkpoints, and evaluate the averaged parameters.
init_op = tf.global_variables_initializer()
init_op2 = tf.local_variables_initializer()
with tf.Session() as sess:
    # Initialize the variables (like the epoch counter).
    # (FIX: dropped the unused tf.train.Coordinator — it was never
    # started or joined.)
    sess.run(init_op)
    sess.run(init_op2)
    # Restore each model's parameters from its own checkpoint.
    weights_saver.restore(
        sess,
        'my_model1/model_weights.ckpt'
    )
    weights_saver2.restore(
        sess,
        'my_model2/model_weights.ckpt'
    )
    # FIX: re-run the averaging variables' initializers *after* the
    # restores — init_op above ran before restore, so w/b were seeded
    # from the unrestored values, not the checkpoint values.
    sess.run([var.initializer for var in list(w.values()) + list(b.values())])
    a = sess.run(weights)                # model-1 parameters (restored)
    # FIX: renamed from `b` — the original assignment clobbered the
    # averaged-bias variable dict `b` defined above.
    model2_params = sess.run(weights2)   # model-2 parameters (restored)
    c = sess.run(w)                      # averaged weights to start training from