Building a Multi-Layer Neural Network Step by Step, and Applying It

import numpy as np
import h5py
import matplotlib.pyplot as plt
import testCases
from dnn_utils import sigmoid,sigmoid_backward,relu,relu_backward
import lr_utils

np.random.seed(1)

def initialize_parameters(n_x, n_h, n_y):
    """Initialize parameters for a two-layer network with layer sizes n_x, n_h, n_y."""
    W1 = np.random.randn(n_h, n_x) * 0.01   # small random weights
    b1 = np.zeros((n_h, 1))
    W2 = np.random.randn(n_y, n_h) * 0.01
    b2 = np.zeros((n_y, 1))

    assert(W1.shape == (n_h, n_x))
    assert(b1.shape == (n_h, 1))
    assert(W2.shape == (n_y, n_h))
    assert(b2.shape == (n_y, 1))

    parameters = {"W1": W1, "b1": b1, "W2": W2, "b2": b2}

    return parameters
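
A quick sanity check of the two-layer initializer (the sizes 3, 2, 1 below are chosen purely for illustration):

parameters = initialize_parameters(3, 2, 1)
print("W1 shape: " + str(parameters["W1"].shape))   # (2, 3)
print("b1 shape: " + str(parameters["b1"].shape))   # (2, 1)
print("W2 shape: " + str(parameters["W2"].shape))   # (1, 2)
print("b2 shape: " + str(parameters["b2"].shape))   # (1, 1)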

def initialize_parameters_deep(layers_dims):
    """Initialize parameters for an L-layer network from a list of layer sizes."""
    np.random.seed(3)
    parameters = {}
    L = len(layers_dims)

    for l in range(1, L):
        # Scale by 1/sqrt(size of previous layer) to keep activations in a reasonable range
        parameters["W" + str(l)] = np.random.randn(layers_dims[l], layers_dims[l-1]) / np.sqrt(layers_dims[l-1])
        parameters["b" + str(l)] = np.zeros((layers_dims[l], 1))

        assert(parameters["W" + str(l)].shape == (layers_dims[l], layers_dims[l-1]))
        assert(parameters["b" + str(l)].shape == (layers_dims[l], 1))

    return parameters
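
An illustrative call (the layer sizes below are an assumption for demonstration, not from the original post); W of layer l should come out with shape (layers_dims[l], layers_dims[l-1]):

layers_dims = [5, 4, 3, 1]   # hypothetical sizes: 5 inputs, two hidden layers, 1 output
parameters = initialize_parameters_deep(layers_dims)
for l in range(1, len(layers_dims)):
    print("W" + str(l) + " shape: " + str(parameters["W" + str(l)].shape))
    print("b" + str(l) + " shape: " + str(parameters["b" + str(l)].shape))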

# Forward propagation functions
def linear_forward(A, W, b):
    """Linear part of a layer's forward propagation: Z = W·A + b."""
    Z = np.dot(W, A) + b
    assert(Z.shape == (W.shape[0], A.shape[1]))
    cache = (A, W, b)
    return Z, cache
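
linear_forward computes Z = WA + b, with b broadcast across the m columns (examples) of A. A minimal sketch with made-up shapes:

A = np.random.randn(3, 2)   # 3 features, 2 examples (illustrative)
W = np.random.randn(1, 3)   # one output unit
b = np.random.randn(1, 1)
Z, linear_cache = linear_forward(A, W, b)
print("Z shape: " + str(Z.shape))   # (1, 2)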

def linear_activation_forward(A_prev, W, b, activation):
    """Forward step for one layer: LINEAR followed by the given activation."""
    if activation == "sigmoid":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)
    elif activation == "relu":
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)

    assert(A.shape == (W.shape[0], A_prev.shape[1]))
    cache = (linear_cache, activation_cache)
    return A, cache
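
A usage sketch for the combined step (inputs are made up; sigmoid and relu come from dnn_utils as imported above):

A_prev = np.random.randn(3, 2)
W = np.random.randn(1, 3)
b = np.random.randn(1, 1)
A, cache = linear_activation_forward(A_prev, W, b, activation="sigmoid")
print("With sigmoid: A shape " + str(A.shape))
A, cache = linear_activation_forward(A_prev, W, b, activation="relu")
print("With relu: A shape " + str(A.shape))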
