
A Simple Neural Network Implemented in TensorFlow


Notes from learning TensorFlow

import tensorflow as tf

# Define variables
# tf.Variable defines a tensor with the given shape
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))
with tf.Session() as sess:
    print(sess.run(w1.initializer))
    print(sess.run(w2.initializer))
#None
#None



# Print the variables to inspect their shape, dtype, and other metadata
print(w1)
print(w2)
#<tf.Variable 'Variable:0' shape=(2, 3) dtype=float32_ref>
#<tf.Variable 'Variable_1:0' shape=(3, 1) dtype=float32_ref>
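Running an initializer with sess.run(w1.initializer) returns None, because an initializer is an operation with no output value. To inspect the initialized values, fetch the variables themselves; a minimal sketch:

with tf.Session() as sess:
    sess.run(w1.initializer)
    sess.run(w2.initializer)
    print(sess.run(w1))  # the actual 2x3 matrix of random values
    print(sess.run(w2))  # the actual 3x1 matrix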

# tf.constant is an operation whose result is a tensor, stored here in x
x = tf.constant([[0.7, 0.9]])
print(x)
#Tensor("Const:0", shape=(1, 2), dtype=float32)
with tf.Session() as sess:
    print(sess.run(x))
#[[ 0.69999999  0.89999998]]
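Inside a with tf.Session() block the session is installed as the default session, so a tensor can also be evaluated directly with Tensor.eval(); a small sketch:

with tf.Session() as sess:
    print(x.eval())  # equivalent to sess.run(x) under the default session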


# Define the network's forward pass
# matmul performs matrix multiplication
a = tf.matmul(x, w1)   # x shape=(1, 2)   w1 shape=(2, 3)

print(a)
#Tensor("MatMul:0", shape=(1, 3), dtype=float32)

y = tf.matmul(a, w2)  #a shape=(1, 3)   w2 shape=(3, 1)
print(y)
#Tensor("MatMul_1:0", shape=(1, 1), dtype=float32)


# Run a session to evaluate the results
with tf.Session() as sess:
    sess.run(w1.initializer)
    sess.run(w2.initializer)
    print(sess.run(a))
    #[[-2.76635647  1.12854266  0.57783246]]
    print(sess.run(y))
    #[[ 3.95757794]]

# placeholder: an input node that is fed data at run time
x = tf.placeholder(tf.float32, shape=(1, 2), name="input")
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)
sess = tf.Session()
init_op = tf.global_variables_initializer()
sess.run(init_op)

print(sess.run(y, feed_dict={x: [[0.8, 0.9]]}))
#[[ 4.2442317]]
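A placeholder carries no data of its own; every sess.run that depends on it must supply a value through feed_dict, otherwise TensorFlow raises an InvalidArgumentError. A minimal sketch:

try:
    sess.run(y)  # no feed_dict, but y depends on the placeholder x
except tf.errors.InvalidArgumentError as e:
    print(e.message)  # "You must feed a value for placeholder tensor 'input' ..."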
x = tf.placeholder(tf.float32, shape=(3, 2), name="input")
a = tf.matmul(x, w1)
y = tf.matmul(a, w2)

sess = tf.Session()
# Use tf.global_variables_initializer() to initialize all variables at once
init_op = tf.global_variables_initializer()
sess.run(init_op)

print(sess.run(y, feed_dict={x: [[0.7,0.9],[0.1,0.4],[0.5,0.8]]}))

'''
[[ 3.95757794]
 [ 1.15376544]
 [ 3.16749239]]
'''
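Fixing the first dimension to 3 means this graph only accepts batches of exactly three rows. Setting it to None instead lets the same graph accept any batch size, which is how the complete example below declares its inputs; a sketch:

x = tf.placeholder(tf.float32, shape=(None, 2), name="input")
a = tf.matmul(x, w1)   # w1 and w2 were already initialized above
y = tf.matmul(a, w2)
print(sess.run(y, feed_dict={x: [[0.7, 0.9]]}))      # batch of 1
print(sess.run(y, feed_dict={x: [[0.7, 0.9]] * 4}))  # batch of 4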

Implementing the complete neural network

import tensorflow as tf
from numpy.random import RandomState

# Define the network parameters and the input/output nodes
batch_size = 8
# Weights drawn from a normal distribution with mean 0 and stddev 1; shapes 2x3 and 3x1
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))
# The first dimension is None, so the batch size is inferred from the data that is fed in
x = tf.placeholder(tf.float32, shape=(None, 2))
y_ = tf.placeholder(tf.float32, shape=(None, 1))

# Define the forward pass, the loss function, and the backpropagation step

a = tf.matmul(x, w1)
y = tf.matmul(a, w2)
# Loss: cross entropy, with y clipped to avoid log(0)
# Optimizer: AdamOptimizer
cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
train_step = tf.train.AdamOptimizer(learning_rate=0.001).minimize(cross_entropy)

rdm = RandomState(1)
# Generate 128 random samples, shape 128x2
X = rdm.rand(128, 2)

# Y is simulated: the label is 1 if x1 + x2 < 1, and 0 otherwise
Y = [[int(x1 + x2 < 1)] for (x1, x2) in X]

# Create a session and run the computation graph
# Initialize all variables globally
STEPS = 5000
with tf.Session() as sess:
    init_op=tf.global_variables_initializer()
    sess.run(init_op)
    # Print the current (untrained) parameter values.
    print("w1:", sess.run(w1))
    print("w2:", sess.run(w2))
    print("\n")
    for i in range(STEPS):
        start = (i * batch_size) % 128
        end = (i * batch_size) % 128 + batch_size
        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})
        if i % 1000 == 0:
            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})
            print("After %d training step(s), cross entropy on all data is %g" % (i, total_cross_entropy))
    # Print the parameter values after training.
    print("\n")
    print("w1:", sess.run(w1))
    print("w2:", sess.run(w2))

'''

w1: [[-0.81131822  1.48459876  0.06532937]
 [-2.4427042   0.0992484   0.59122431]]
w2: [[-0.81131822]
 [ 1.48459876]
 [ 0.06532937]]


After 0 training step(s), cross entropy on all data is 0.0674925
After 1000 training step(s), cross entropy on all data is 0.0163385
After 2000 training step(s), cross entropy on all data is 0.00907547
After 3000 training step(s), cross entropy on all data is 0.00714436
After 4000 training step(s), cross entropy on all data is 0.00578471


w1: [[-1.96182752  2.58235407  1.68203771]
 [-3.46817183  1.06982315  2.11788988]]
w2: [[-1.82471502]
 [ 2.68546653]
 [ 1.41819501]]

Process finished with exit code 0
'''
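Two caveats on the loss used above. First, it keeps only the y_ * log(y) term of binary cross-entropy; a more complete form, sketched here with the same clipping trick (and assuming y has been squashed into (0, 1), e.g. by a sigmoid, which the code above does not do), would be:

cross_entropy = -tf.reduce_mean(
    y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))
    + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))

Second, these notes target the TensorFlow 1.x graph-and-session API; under TensorFlow 2.x the same code needs import tensorflow.compat.v1 as tf plus tf.disable_v2_behavior(), or a rewrite in eager style.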

  
