学习率衰减 (learning-rate decay) — TensorFlow exponential-decay example
#!/usr/bin/env python
# encoding: utf-8
'''
@author: lele Ye
@contact: 1750112338@qq.com
@software: pycharm 2018.2
@file: 03学习率衰减.py
@time: 2018/9/10 14:48
@desc:
'''
import tensorflow as tf

# Non-trainable counter recording how many training steps have executed.
global_step = tf.Variable(0, trainable=False)
initial_learning_rate = 0.1

# Exponential decay schedule:
#   decayed_lr = initial_lr * decay_rate ** (global_step / decay_steps)
# staircase defaults to False, so the rate shrinks smoothly at every step;
# after 100 steps it reaches 0.1 * 0.9 ** 10 ≈ 0.034867844.
learning_rate = tf.train.exponential_decay(
    initial_learning_rate,
    global_step=global_step,
    decay_steps=10,
    decay_rate=0.9,
)

# The optimizer consumes the tensor-valued, decaying learning rate.
opt = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)

# Each run of this op bumps the step counter by one.
add_global = global_step.assign_add(1)

with tf.Session() as sess:
    tf.global_variables_initializer().run()
    # Learning rate at step 0, before any increment.
    print(sess.run(learning_rate))
    for _ in range(100):
        step, lr = sess.run([add_global, learning_rate])
        print(step, lr)
更多推荐
已为社区贡献18条内容
所有评论(0)