BP神经网络Python实现 (BP — back-propagation — neural network implemented in Python)

# encoding:utf-8
import numpy as np
import math
import random
def rand(a, b):
    """Return a uniform random float drawn from the half-open range [a, b)."""
    return a + random.random() * (b - a)

def make_matrix(m, n, fill=0.0):
    """Build an m-by-n matrix as a list of independent row lists, every
    entry initialised to `fill` (0.0 by default)."""
    return [[fill] * n for _ in range(m)]

def sigmoid(x):
    """Logistic activation 1 / (1 + e^-x), squashing R into (0, 1)."""
    return 1.0 / (math.exp(-x) + 1.0)

def sigmoid_derivate(x):
    """Derivative of the logistic function, expressed in terms of the
    function's OUTPUT value x: if s = sigmoid(t), then s' = s * (1 - s)."""
    return (1 - x) * x

class BPNeuralNetwork:
    """A minimal fully-connected feed-forward network with one hidden
    layer, trained by plain per-sample (stochastic) back-propagation.

    Depends on the module-level helpers rand(), make_matrix(),
    sigmoid() and sigmoid_derivate().
    """

    def __init__(self):
        # Layer sizes (set by setup()); input_n includes one bias unit.
        self.input_n = 0
        self.hidden_n = 0
        self.output_n = 0
        # Activation values of each layer.
        self.input_cells = []
        self.hidden_cells = []
        self.output_cells = []
        # Weight matrices: input->hidden and hidden->output.
        self.input_weights = []
        self.output_weights = []

    def setup(self, ni, nh, no):
        """Allocate layer cells and randomly initialise the weights.

        ni/nh/no: number of input / hidden / output units.  One extra
        input cell, permanently held at 1.0, acts as the bias term.
        """
        self.input_n = ni + 1  # +1 bias unit
        self.hidden_n = nh
        self.output_n = no

        self.input_cells = [1.0] * self.input_n
        self.hidden_cells = [1.0] * self.hidden_n
        self.output_cells = [1.0] * self.output_n

        self.input_weights = make_matrix(self.input_n, self.hidden_n)
        self.output_weights = make_matrix(self.hidden_n, self.output_n)

        # Small random initial weights break the symmetry between units.
        for i in range(self.input_n):
            for h in range(self.hidden_n):
                self.input_weights[i][h] = rand(-0.2, 0.2)
        for h in range(self.hidden_n):
            for o in range(self.output_n):
                self.output_weights[h][o] = rand(-2.0, 2.0)

    def predict(self, inputs):
        """Forward pass: feed `inputs` through the network and return a
        copy of the output activations."""
        # The last input cell is the bias and stays at 1.0.
        for i in range(self.input_n - 1):
            self.input_cells[i] = inputs[i]

        for j in range(self.hidden_n):
            total = 0.0
            for i in range(self.input_n):
                total += self.input_cells[i] * self.input_weights[i][j]
            self.hidden_cells[j] = sigmoid(total)

        for k in range(self.output_n):
            total = 0.0
            for j in range(self.hidden_n):
                total += self.hidden_cells[j] * self.output_weights[j][k]
            self.output_cells[k] = sigmoid(total)

        return self.output_cells[:]

    def back_propagate(self, case, label, learn):
        """One gradient step on a single (case, label) pair.

        Returns the summed squared error over all output units.
        """
        self.predict(case)

        # Output-layer deltas: derivative times (target - actual).
        output_deltas = [0.0] * self.output_n
        for k in range(self.output_n):
            error = label[k] - self.output_cells[k]
            output_deltas[k] = sigmoid_derivate(self.output_cells[k]) * error

        # Hidden-layer deltas.  Bug fix: the original assigned
        # hidden_deltas[j] inside the inner loop, redundantly
        # recomputing it for every output unit; assign once after the
        # back-propagated error has been fully accumulated.
        hidden_deltas = [0.0] * self.hidden_n
        for j in range(self.hidden_n):
            error = 0.0
            for k in range(self.output_n):
                error += output_deltas[k] * self.output_weights[j][k]
            hidden_deltas[j] = sigmoid_derivate(self.hidden_cells[j]) * error

        # Update hidden->output weights.
        for j in range(self.hidden_n):
            for k in range(self.output_n):
                self.output_weights[j][k] += learn * output_deltas[k] * self.hidden_cells[j]
        # Update input->hidden weights.
        for i in range(self.input_n):
            for j in range(self.hidden_n):
                self.input_weights[i][j] += learn * hidden_deltas[j] * self.input_cells[i]

        # Total squared error for this sample.  Bug fix: the original
        # `return` sat INSIDE the loop and exited after the first
        # output unit's term.
        error = 0.0
        for o in range(len(label)):
            error += 0.5 * (label[o] - self.output_cells[o]) ** 2
        return error

    def train(self, cases, labels, limit=100, learn=0.05):
        """Train for `limit` epochs over the whole data set.

        Bug fix: the original ignored `limit` entirely and made a
        single pass over the cases, so the network never converged
        (hence the ~0.8 predictions in the pasted sample output).
        Returns the summed error of the final epoch (the original
        returned None; callers that ignore the result are unaffected).
        """
        error = 0.0
        for _ in range(limit):
            error = 0.0
            for case, label in zip(cases, labels):
                error += self.back_propagate(case, label, learn)
        return error

    def test(self):
        """Smoke test: learn the XOR function and print the four
        predictions (each should approach its 0/1 target)."""
        cases = [
            [0, 0],
            [0, 1],
            [1, 0],
            [1, 1],
        ]
        labels = [[0], [1], [1], [0]]
        self.setup(2, 5, 1)
        self.train(cases, labels, 10000, 0.05)
        for case in cases:
            print(self.predict(case))

if __name__ == "__main__":
    # Build a fresh network and run the XOR demonstration.
    network = BPNeuralNetwork()
    network.test()
Sample output as originally posted (note: all four XOR predictions sit near 0.8 — the network has not converged, because the posted `train` makes only a single pass and ignores its `limit` parameter):
[0.8011297025491382]
[0.8150046347993829]
[0.8064371429753873]
[0.8200696997295588]

 

你可能感兴趣的 (related tag): tensorflow