mxnet随笔-梯度与反向传播

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018

@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd
# Demo: compute the gradient of f(x) = 2*x^2 with MXNet autograd.
x = nd.array([[1, 2], [3, 4]])
x.attach_grad()  # allocate storage on the ndarray for its gradient
with autograd.record():  # record the computation graph of f(x)
    y = 2 * x * x
# Back-propagation: populates x.grad with dy/dx.
y.backward()
# Analytically, f'(x) = 4*x, so x.grad should equal 4*x.
z = x.grad
# Single-argument print() works identically under Python 2 and 3
# (the original `print x` statement is a syntax error on Python 3).
print(x)
print(z)

[[1. 2.]
[3. 4.]]

[[ 4. 8.]
[12. 16.]]

######################

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018

@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd

def f(x):
    """Repeatedly double x until its L2 norm reaches 100, then pick one element.

    The result is y = a*x[i] where a is a power of two, so for the chosen
    element the gradient dy/dx equals a (= y/x for that element).

    Args:
        x: an mxnet.nd 1-D array with at least two elements.

    Returns:
        b[0] if the doubled array sums to >= 0, else b[1].
    """
    b = x
    while b.norm().asscalar() < 100:  # norm() is the Euclidean (L2) norm
        b = b * 2  # y = a*x with a = 2*2*...*2
        print(b)   # print() call: identical output on Python 2, valid on Python 3
    if b.sum().asscalar() >= 0:
        y = b[0]
    else:
        y = b[1]
    return y

x = nd.array([1, 4])
x.attach_grad()  # allocate storage on the ndarray for its gradient
with autograd.record():  # record the computation graph of f(x)
    y = f(x)
# Back-propagation: populates x.grad. Only the element selected inside f
# receives a nonzero gradient (the other element does not contribute to y).
y.backward()
# Since y = a*x for the selected element, f'(x) = a there, i.e. a = y/x.
z = x.grad
# print() calls replace Python 2-only print statements; single-argument
# form produces identical output under Python 2.
print("=======")
print([z, x, y, y / x])  # a = y/x

[2. 8.]

[ 4. 16.]

[ 8. 32.]

[16. 64.]

[ 32. 128.]

=======
[
[32. 0.]
,
[1. 4.]
,
[32.]
,
[32. 8.]
]

你可能感兴趣的:(AI)