
TensorBoard Usage (Part 2)
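
The script below fits a simple linear model (the data roughly follow y = 1.2x) with mini-batch gradient descent, records the training loss with tf.summary.scalar, and writes both the summaries and the computation graph to a logs directory so they can be inspected in TensorBoard. The code uses the TensorFlow 1.x API.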

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 22 13:04:34 2018

@author: lg
"""


import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'


# Prepare training data; assume it roughly follows y = 1.2x + 0.0
n_train_samples = 200
X_train = np.linspace(-5, 5, n_train_samples)
Y_train = 1.2*X_train + np.random.uniform(-1.0, 1.0, n_train_samples)  # add some random noise


# Prepare test data for evaluating how well the model fits
n_test_samples = 50
X_test = np.linspace(-5, 5, n_test_samples)
Y_test = 1.2*X_test


# Hyperparameters for the learning algorithm
learning_rate = 0.01
batch_size = 20
summary_dir = 'logs'

print('~~~~~~~~~~ Start building the computation graph ~~~~~~~~')

# Use placeholders to feed training/test data into the network
# shape=None means the shape is determined by whatever tensor is fed in
with tf.name_scope('Input'):
    X = tf.placeholder(dtype=tf.float32, shape=None, name='X')
    Y = tf.placeholder(dtype=tf.float32, shape=None, name='Y')


# Inference function (parameter initialization)
with tf.name_scope('Inference'):
    W = tf.Variable(initial_value=tf.truncated_normal(shape=[1]), name='weight')
    b = tf.Variable(initial_value=tf.truncated_normal(shape=[1]), name='bias')
    Y_pred = tf.multiply(X, W) + b
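    # (Addition, not in the original post: histogram summaries are a common
    #  companion to the scalar loss summary; they make TensorBoard's HISTOGRAMS
    #  and DISTRIBUTIONS tabs show how the parameters evolve during training.)
    tf.summary.histogram('weight', W)
    tf.summary.histogram('bias', b)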


# Loss function (mean squared error)
with tf.name_scope('Loss'):
    loss = tf.reduce_mean(tf.square(Y_pred - Y), name='loss')
    tf.summary.scalar('loss', loss)


# Learning algorithm (mini-batch SGD)
with tf.name_scope('Optimization'):
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)


# Initialize all variables
init = tf.global_variables_initializer()


# Merge all summary ops defined above into a single node, so one fetch collects every summary
merge = tf.summary.merge_all()


# Open a session and run the training loop
with tf.Session() as sess:
    sess.run(init)
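    # Passing graph=sess.graph below writes the graph definition to the log
    # directory, which is what populates TensorBoard's GRAPHS tab.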
    summary_writer = tf.summary.FileWriter(logdir=summary_dir, graph=sess.graph)

    for i in range(201):
        j = np.random.randint(0, 10)  # 200 training samples split into 10 batches, indices [0, 9]
        X_batch = X_train[batch_size*j: batch_size*(j+1)]
        Y_batch = Y_train[batch_size*j: batch_size*(j+1)]

        _, summary, train_loss, W_pred, b_pred = sess.run([optimizer, merge, loss, W, b], feed_dict={X: X_batch, Y: Y_batch})
        test_loss = sess.run(loss, feed_dict={X: X_test, Y: Y_test})

        # Write all summaries for this step to the log file
        summary_writer.add_summary(summary, global_step=i)
        print('step:{}, losses:{}, test_loss:{}, w_pred:{}, b_pred:{}'.format(i, train_loss, test_loss, W_pred[0], b_pred[0]))

        if i == 200:
            # plot the results
            plt.plot(X_train, Y_train, 'bo', label='Train data')
            plt.plot(X_test, Y_test, 'gx', label='Test data')
            plt.plot(X_train, X_train * W_pred + b_pred, 'r', label='Predicted data')
            plt.legend()
            plt.show()

    summary_writer.close()
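
After the script finishes, the summaries written to the logs directory can be viewed by launching TensorBoard (it is installed alongside TensorFlow) from the directory that contains logs:

    tensorboard --logdir=logs

Then open http://localhost:6006 in a browser: the training loss curve appears under the SCALARS tab, and the name-scoped computation graph (Input, Inference, Loss, Optimization) under the GRAPHS tab.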