Implementing a Single-Hidden-Layer Neural Network from Scratch

# -*- coding: utf-8 -*-

"""
Created on Fri Sep 21 20:46:51 2018

@author: jianle
"""
import numpy as np
import matplotlib.pyplot as plot
from planar_utils import *
from testCases import *
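
# Helper functions: this script assumes planar_utils provides sigmoid,
# load_extra_datasets and plot_decision_boundary (the course helper file),
# and that testCases is the accompanying test helper. If sigmoid is not
# available, a minimal stand-in (a sketch, not the original helper) is:
if "sigmoid" not in globals():
    def sigmoid(z):
        # logistic function 1 / (1 + exp(-z))
        return 1.0 / (1.0 + np.exp(-z))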

#-------- Load the datasets --------

noisy_circles, noisy_moons, blobs, gaussian_quantiles, no_structure = load_extra_datasets()
datasets = {"noisy_circles": noisy_circles,
            "noisy_moons": noisy_moons,
            "blobs": blobs,
            "gaussian_quantiles": gaussian_quantiles,
            "no_structure": no_structure}

X, Y = datasets["gaussian_quantiles"]
X = X.T                        # features as columns: shape (2, m)
# Y.shape = (1, Y.shape[0])    # error: redefining the array dimensions this way failed, so reshape is used instead
Y = Y.reshape(1, Y.shape[0])   # labels as a (1, m) row vector

x0=X[0,:]
x1=X[1,:]

#----------- Plot the dataset -----------

plot.scatter(x0,x1,c=np.squeeze(Y),s=60,cmap=plot.cm.Spectral)

#------- Define the number of units in each layer -------

nx = np.shape(X)[0]   # input layer size (number of features)
nh = 4                # hidden layer size
ny = np.shape(Y)[0]   # output layer size

#---------- Randomly initialize the model parameters ----------

np.random.seed(2)
W1 = np.random.randn(nh, nx) * 0.01   # small random weights to break symmetry
b1 = np.zeros((nh, 1))
W2 = np.random.randn(ny, nh) * 0.01
b2 = np.zeros((ny, 1))

parameters = {"W1": W1,
              "b1": b1,
              "W2": W2,
              "b2": b2}

#------------ Test ------------

print("W1="+str(W1))
print("b1="+str(b1))
print("W2="+str(W2))
print("b2="+str(b2))

#-------------- Forward propagation --------------

def forward(parameters, X):
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]
    Z1 = np.dot(W1, X) + b1   # linear step of the hidden layer
    A1 = np.tanh(Z1)          # tanh activation for the hidden layer
    Z2 = np.dot(W2, A1) + b2  # linear step of the output layer
    A2 = sigmoid(Z2)          # sigmoid activation for binary classification
    cache = {"Z1": Z1,
             "A1": A1,
             "Z2": Z2,
             "A2": A2}
    return A2, cache
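
# Quick sanity check (my addition, not in the original post): the output
# activations should contain one value per example, i.e. have shape (ny, m).
A2_check, _ = forward(parameters, X)
assert A2_check.shape == (ny, X.shape[1])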

#-------------- Backward propagation --------------

def backforward(parameters, cache, X, Y):
    W2 = parameters["W2"]
    A2 = cache["A2"]
    A1 = cache["A1"]
    m = np.shape(X)[1]   # number of training examples
    # Gradients of the cross-entropy cost for a sigmoid output and tanh hidden layer
    dZ2 = A2 - Y
    dW2 = (1 / m) * np.dot(dZ2, A1.T)
    db2 = (1 / m) * np.sum(dZ2, axis=1, keepdims=True)
    dZ1 = np.multiply(np.dot(W2.T, dZ2), 1 - np.power(A1, 2))   # tanh'(Z1) = 1 - A1^2
    dW1 = (1 / m) * np.dot(dZ1, X.T)
    db1 = (1 / m) * np.sum(dZ1, axis=1, keepdims=True)
    grads = {"dW1": dW1,
             "db1": db1,
             "dW2": dW2,
             "db2": db2}
    return grads

#------------ Update the model parameters ------------

def update(parameters, grads, learning=1.2):
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]

    dW1 = grads["dW1"]
    db1 = grads["db1"]
    dW2 = grads["dW2"]
    db2 = grads["db2"]

    # one gradient-descent step with learning rate `learning`
    W1 = W1 - learning * dW1
    b1 = b1 - learning * db1
    W2 = W2 - learning * dW2
    b2 = b2 - learning * db2

    # updated parameters
    parameters = {"W1": W1,
                  "b1": b1,
                  "W2": W2,
                  "b2": b2}
    return parameters

#------------ Run the training loop ------------

Num = 8000   # number of gradient-descent iterations
for i in range(Num):
    A2, cache = forward(parameters, X)
    grads = backforward(parameters, cache, X, Y)
    parameters = update(parameters, grads, learning=1.2)
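
# Optional check (my addition, not in the original post): the cross-entropy cost
# after training; it should be small if the network fits the training data.
A2_final, _ = forward(parameters, X)
A2_final = np.clip(A2_final, 1e-10, 1 - 1e-10)   # avoid log(0)
m = Y.shape[1]
cost = -(1 / m) * np.sum(Y * np.log(A2_final) + (1 - Y) * np.log(1 - A2_final))
print("final cross-entropy cost = " + str(cost))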

def predict(parameters, X):
    A2, cache = forward(parameters, X)
    prediction = np.round(A2)   # threshold the sigmoid output at 0.5
    return prediction

#----------- Plot the decision boundary -----------

plot_decision_boundary(lambda x:predict(parameters,x.T),X,Y)
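
# Training accuracy (my addition, not in the original post): the fraction of
# examples whose rounded prediction matches its label.
predictions = predict(parameters, X)
print("train accuracy = " + str(float(np.mean(predictions == Y)) * 100) + "%")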

