
Single-Hidden-Layer Neural Network: Implementing Logic Gates
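What cal_g, cal_e, and learn in the code below compute is the standard per-sample backpropagation rule for one sigmoid hidden layer and a single sigmoid output; this summary is added here for orientation and is read straight off the code. Write $\hat{y}$ for the network output, $y$ for the target, $b_h$ for the hidden activations (B), $\eta$ for the learning rate (pace), and fold each threshold in as one extra weight on a constant $-1$ input, which is what the np.append(..., -1) calls do:

$$g = (y - \hat{y})\,\hat{y}\,(1 - \hat{y}), \qquad e_h = b_h(1 - b_h)\, w_h\, g$$

$$\mathbf{w} \leftarrow \mathbf{w} + \eta\, g\, [\mathbf{b};\, -1], \qquad \mathbf{v}_h \leftarrow \mathbf{v}_h + \eta\, e_h\, [\mathbf{x};\, -1]$$

Here $\mathbf{w}$ is the hidden-to-output weight vector wB, $w_h$ is its $h$-th component, and $\mathbf{v}_h$ is row $h$ of the input-to-hidden weight matrix WX.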


import numpy as np
from numpy import exp


def sigma(z):
    # numerically stable logistic sigmoid
    if z >= 0:
        return 1 / (1 + exp(-z))
    else:
        return exp(z) / (1 + exp(z))
        
        
class Net:
    def __init__(self, X, Y, num):
        # num is the number of hidden-layer neurons
        self.num = num
        self.B = np.zeros(num)   # hidden-layer activations
        self.X = X
        self.Y = Y
        self.g = None            # cached output-layer gradient term
        self.e = None            # cached hidden-layer gradient terms
        # self.wB = np.ones(num+1)  # weights and threshold between hidden and output layer
        np.random.seed(2)
        self.wB = np.random.random(num+1)  # weights and threshold between hidden and output layer
        # self.WX = np.array([np.ones(X.shape[1]+1)*0.01 for i in range(num)])  # weights and thresholds between input and hidden layer
        self.WX = np.array([np.random.random(X.shape[1]+1)*0.01 for i in range(num)])  # weights and thresholds between input and hidden layer
    def cal_g(self, Xi, yi):
        # compute the output-layer gradient term g for one sample Xi
        # first compute the hidden-layer activations
        for i in range(self.num):
            self.B[i] = sigma(np.dot(np.append(Xi, [-1]), self.WX[i]))
            # using tanh as the hidden activation can converge faster than sigmoid
            # self.B[i] = np.tanh(np.dot(np.append(Xi, [-1]), self.WX[i]))
        # compute the output-layer value
        y = sigma(np.dot(np.append(self.B, [-1]), self.wB))
        g = (yi - y) * y * (1 - y)
        self.g = g
        return g
    def cal_e(self, Xi, yi):
        # compute the hidden-layer gradient terms (one per hidden unit)
        # g must be available first
        if self.g is None:
            g = self.cal_g(Xi, yi)
        else:
            g = self.g
        ret = []
        for i in range(self.num):
            b = self.B[i]
            e = b * (1 - b) * g * self.wB[i]
            ret.append(e)
        self.e = ret
        
    def learn(self, pace=0.1, max_num=300):
        # pace is the learning rate, max_num the number of passes over the data
        r = self.Y.shape[0]
        for j in range(max_num):
            for i in range(r):
                Xi = self.X[i]
                yi = self.Y[i]
                self.cal_g(Xi, yi)
                self.cal_e(Xi, yi)
                g = self.g
                es = self.e
                self.wB += pace * g * np.append(self.B, [-1])  # update hidden-to-output weights and threshold
                for k in range(self.num):
                    self.WX[k] += pace * es[k] * np.append(Xi, [-1])  # update input-to-hidden weights and thresholds
        return True
    def __call__(self, Xi):
        # forward pass: return the thresholded prediction for one input Xi
        for i in range(self.num):
            # self.B[i] = np.tanh(np.dot(np.append(Xi, [-1]), self.WX[i]))
            self.B[i] = sigma(np.dot(np.append(Xi, [-1]), self.WX[i]))
        # compute the output-layer value
        y = sigma(np.dot(np.append(self.B, [-1]), self.wB))
        return 1 if y >= 0.5 else 0
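

# --- Sketch (added note, not part of the original post) ----------------------
# The comment in cal_g suggests tanh as a faster-converging hidden activation.
# Swapping the activation alone is not enough: the hidden-layer derivative in
# cal_e must change from b*(1-b) (sigmoid) to 1-b**2 (tanh). The subclass below
# is one minimal, hypothetical way to make both changes; the output layer stays
# sigmoid, so the g term keeps its form and learn() works unchanged.
class NetTanh(Net):
    def cal_g(self, Xi, yi):
        for i in range(self.num):
            # tanh hidden activation instead of sigmoid
            self.B[i] = np.tanh(np.dot(np.append(Xi, [-1]), self.WX[i]))
        y = sigma(np.dot(np.append(self.B, [-1]), self.wB))
        self.g = (yi - y) * y * (1 - y)
        return self.g
    def cal_e(self, Xi, yi):
        if self.g is None:
            self.cal_g(Xi, yi)
        # derivative of tanh(z) is 1 - tanh(z)**2 = 1 - b**2
        self.e = [(1 - self.B[i] ** 2) * self.g * self.wB[i] for i in range(self.num)]
    def __call__(self, Xi):
        for i in range(self.num):
            self.B[i] = np.tanh(np.dot(np.append(Xi, [-1]), self.WX[i]))
        y = sigma(np.dot(np.append(self.B, [-1]), self.wB))
        return 1 if y >= 0.5 else 0
# -----------------------------------------------------------------------------
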
def test():
    def get_i(n):
        # yield the bits of n, least significant first
        while n:
            yield n % 2
            n = n >> 1
    def get_t(n):
        # the 4-bit truth table encoded by n, padded with zeros
        t = []
        for i in get_i(n):
            t.append(i)
        while len(t) < 4:
            t.append(0)
        return t
    X = np.array([(1, 1), (1, 0), (0, 0), (0, 1)])
    # enumerate all 16 Boolean functions of two inputs
    for Y in np.array([get_t(n) for n in range(16)]):
        net = Net(X, Y, 2)
        net.learn(max_num=30000)
        a = np.array([net(np.array(x)) for x in ([1, 1], [1, 0], [0, 0], [0, 1])])
        print(f"Y:{Y}\ta:{a}\t{all(a == Y)}")

test()
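
A minimal standalone usage sketch (added here, not in the original post), training on XOR alone with the class defined above; the variable name Y_xor is mine. XOR is the classic gate a single-layer perceptron cannot represent, which is what the hidden layer buys.

X = np.array([(1, 1), (1, 0), (0, 0), (0, 1)])
Y_xor = np.array([0, 1, 0, 1])    # XOR of each row of X
net = Net(X, Y_xor, 2)            # two hidden units suffice for XOR
net.learn(pace=0.1, max_num=30000)
print([net(x) for x in X])        # should print [0, 1, 0, 1] once training has converged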