Can someone explain the odd results I get from this trivial code? Am I doing something wrong? Why do the input parameters a_C and a_G change between evaluations? And how does the diff result relate to the values that were actually passed in?
#-----------------------------
import tensorflow as tf

def dummy_function(a_C, a_G):
    # element-wise difference and its square
    diff = tf.subtract(a_C, a_G)
    sqr = tf.square(diff)
    return a_C, a_G, diff, sqr
#-----------------------------
tf.reset_default_graph()
with tf.Session() as test:
    tf.set_random_seed(1)
    a_C = tf.random_normal([1], mean=1, stddev=4)
    a_G = tf.random_normal([1], mean=1, stddev=4)
    a_C_returned, a_G_returned, diff, sqr = dummy_function(a_C, a_G)
    print("a_C = " + str(a_C.eval()))
    print("a_G = " + str(a_G.eval()))
    print("a_C_returned = " + str(a_C_returned.eval()))
    print("a_G_returned = " + str(a_G_returned.eval()))
    print("diff = " + str(diff.eval()))
    print("sqr = " + str(sqr.eval()))
#-----------------------------
# results
a_C = [-1.68344498]
a_G = [-0.39043474]
a_C_returned = [ 4.70364952]
a_G_returned = [ 0.84769011]
diff = [-9.30598831]
sqr = [ 25.68828583]
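Out of curiosity: would fetching all the tensors in a single sess.run call, instead of separate .eval() calls, behave differently? Here is a sketch of what I mean, reusing dummy_function from above (I am not sure this is the right approach):
#-----------------------------
import tensorflow as tf

tf.reset_default_graph()
tf.set_random_seed(1)
a_C = tf.random_normal([1], mean=1, stddev=4)
a_G = tf.random_normal([1], mean=1, stddev=4)
a_C_returned, a_G_returned, diff, sqr = dummy_function(a_C, a_G)
with tf.Session() as test:
    # fetch all six tensors in a single run call
    out = test.run([a_C, a_G, a_C_returned, a_G_returned, diff, sqr])
    print(out)
#-----------------------------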
Thanks in advance for any help. Best regards, Kasia