Three Methods for Computing Gradients

Reposted from: https://blog.csdn.net/xuluhui123/article/details/54407669

# coding=gbk

"""
function : f(x,y,z) = (x+y)z
"""
# first method: analytic differentiation (write the partial derivatives by hand)
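# For f(x, y, z) = (x + y) * z the partials are df/dx = z, df/dy = z, df/dz = x + y,
# which is exactly what grad1 returns.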
def grad1(x,y,z):
    dx = z
    dy = z
    dz = (x+y)
    return (dx,dy,dz)
# second method: numerical differentiation (central difference)
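# Each partial derivative is approximated by a central difference, e.g.
# df/dx is approximately (f(x+epi, y, z) - f(x-epi, y, z)) / (2*epi),
# whose truncation error shrinks as O(epi^2).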
def grad2(x,y,z,epi): 
    # dx
    fx1 = (x+epi+y)*z
    fx2 = (x-epi+y)*z
    dx = (fx1-fx2)/(2*epi)
    # dy
    fy1 = (x+y+epi)*z
    fy2 = (x+y-epi)*z
    dy = (fy1-fy2)/(2*epi)
    # dz
    fz1 = (x+y)*(z+epi)
    fz2 = (x+y)*(z-epi)
    dz = (fz1-fz2)/(2*epi)
    return (dx,dy,dz)
# third method: backpropagation
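# The forward pass stores the intermediate p = x + y; the backward pass applies
# the chain rule: df/dp = z, df/dz = p, and df/dx = df/dp * dp/dx = dp * 1 (same for y).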
def grad3(x,y,z): 
    # forward
    p = x + y
    f = p * z
    # backward
    dp = z
    dz = p
    dx = 1 * dp
    dy = 1 * dp
    return (dx,dy,dz)

print (": %.2f %.2f %.2f"%(grad1(1,2,3)))       
print (": %.2f %.2f %.2f"%(grad2(1,2,3,1e-5)))
print (": %.2f %.2f %.2f"%(grad3(1,2,3)))
