1. 程式人生 > Tensorflow學習率的learning rate decay

Tensorflow學習率的learning rate decay

# Demo: tf.Variable.assign_add returns an op whose evaluation mutates the
# variable in place, so reading x before and after running y gives
# different values (1 then 2).
x = tf.Variable(1.0)
# y is the assign-add op; running it increments x by 1 and yields the new value.
y = x.assign_add(1)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(x))  # 1.0 — initial value, assign op not yet run
    print(sess.run(y))  # 2.0 — running y mutates x
    print(sess.run(x))  # 2.0 — x has changed as a side effect

輸出 1, 2, 2。注意其中 x 的值會被改變。

import tensorflow as tf

# Demo: exponentially decayed learning rate driven by a non-trainable
# global_step counter. Each time add_global runs, global_step advances by 1
# and learning_rate is recomputed as
#   initial_learning_rate * decay_rate ** (global_step / decay_steps).
global_step = tf.Variable(0, trainable=False)

initial_learning_rate = 0.1  # initial learning rate
learning_rate = tf.train.exponential_decay(
    initial_learning_rate,
    global_step=global_step,
    decay_steps=10,
    decay_rate=0.9,
)
opt = tf.train.GradientDescentOptimizer(learning_rate)
# Op that advances the step counter; evaluating it triggers the decay.
add_global = global_step.assign_add(1)

with tf.Session() as sess:
    tf.global_variables_initializer().run()
    print(sess.run(learning_rate))  # rate at step 0 (== initial_learning_rate)
    for i in range(1):
        # Run the counter increment and read the decayed rate in one step.
        _, rate = sess.run([add_global, learning_rate])
        print(rate)