
Building an RNN with TensorFlow for Time Series Prediction (Part 2)

import tensorflow as tf

batch_size = 32
epoch = 30                      # number of training epochs
batch = len(X) // batch_size    # batches per epoch; X is the training set built in part 1

saver = tf.train.Saver(tf.global_variables())
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epo in range(epoch):
        # exponential learning-rate decay: 0.002 * 0.97^epoch
        sess.run(tf.assign(learning_rate, 0.002 * (0.97 ** epo)))
        all_loss = 0.0
        for bat in range(batch):
            # trainds, input_data, input_label, loss and train_op are defined in part 1
            x_, y_ = trainds.next_batch(batch_size=batch_size)
            train_loss, _ = sess.run([loss, train_op],
                                     feed_dict={input_data: x_, input_label: y_})
            all_loss += train_loss
        # print the current epoch index (epo), not the total epoch count
        print(epo, ' Loss: ', all_loss / batch)
    saver.save(sess, './rnn/lstm_time_series.model')
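
The loop above decays the learning rate by hand with tf.assign at the start of each epoch. As a minimal alternative sketch (not from the original post, and assuming an optimizer such as Adam was used in part 1), the same 0.97-per-epoch schedule can be expressed with tf.train.exponential_decay driven by a global step:

global_step = tf.Variable(0, trainable=False)
decayed_lr = tf.train.exponential_decay(
    learning_rate=0.002,     # same initial rate as the manual schedule
    global_step=global_step,
    decay_steps=batch,       # one epoch = `batch` steps, so decay once per epoch
    decay_rate=0.97,
    staircase=True)
# the training op would then be built from this tensor, e.g. (assumed optimizer):
# train_op = tf.train.AdamOptimizer(decayed_lr).minimize(loss, global_step=global_step)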

Running the training loop prints the average loss for each epoch:

0  Loss:  6.7700897301397012
1  Loss:  1.1685861868243064
2  Loss:  0.71336806205011183
3  Loss:  0.54247458711747198
4  Loss:  0.21835865681209871
5  Loss:  0.16824594909144985
6  Loss:  0.10639311529455646
7  Loss:  0.098137174042001854
8  Loss:  0.10529603232299128
9  Loss:  0.082613819549160625
10  Loss:  0.080355119320654103
11  Loss:  0.095183456016163667
12  Loss:  0.10465885686778253
13  Loss:  0.068995154312541412
14  Loss:  0.079072338859400446
15  Loss:  0.068569363064823613
16  Loss:  0.063023589551448822
17  Loss:  0.071573086082935333
18  Loss:  0.092125113572805159
19  Loss:  0.083372838554843773
20  Loss:  0.06890119540114556
21  Loss:  0.059814115265204061
22  Loss:  0.057354894496740833
23  Loss:  0.067549686999090253
24  Loss:  0.061683842371548378
25  Loss:  0.059716279828740705
26  Loss:  0.06250881067206783
27  Loss:  0.060046499054278099
28  Loss:  0.062477104245654995
29  Loss:  0.053828210359619509
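
After training, the checkpoint saved to ./rnn/lstm_time_series.model can be restored for prediction. The following is only a minimal sketch, assuming the graph from part 1 has been rebuilt first; the prediction tensor name pred and the test batch x_test are assumed names for illustration, not identifiers from the original code:

import tensorflow as tf

# rebuild the graph from part 1 first (input_data placeholder, prediction tensor `pred`, ...)
saver = tf.train.Saver(tf.global_variables())
with tf.Session() as sess:
    # load the trained weights instead of running the initializer
    saver.restore(sess, './rnn/lstm_time_series.model')
    # x_test: one batch of input sequences shaped like the training batches (assumed)
    predictions = sess.run(pred, feed_dict={input_data: x_test})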