mxnet-梯度与反向传播 (MXNet: gradients and backpropagation)


#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018

@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd

# Demo: compute the elementwise gradient of f(x) = 2*x^2 with autograd.
x = nd.array([[1, 2], [3, 4]])
x.attach_grad()  # allocate storage on the ndarray for the gradient
with autograd.record():  # record the computation graph for f(x)
    y = 2 * x * x
# Backpropagate; for y = 2*x^2, dy/dx = 4*x.
y.backward()
z = x.grad
# print() call form works under both Python 2 and Python 3
# (the original `print x` statement is a syntax error in Python 3).
print(x)
print(z)

[[1. 2.]
 [3. 4.]]
<NDArray 2x2 @cpu(0)>

[[ 4.  8.]
 [12. 16.]]
<NDArray 2x2 @cpu(0)>

######################

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018

@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd


def f(x):
    """Double x until its L2 norm reaches 100, then return one element.

    Returns b[0] if the doubled array's sum is non-negative, else b[1].
    Each step only multiplies by a constant, so the output is linear in
    the input: y = a * x, hence dy/dx = a = y / x for the chosen element.
    """
    b = x
    while b.norm().asscalar() < 100:  # norm() is the Euclidean (L2) norm
        b = b * 2  # y = a*x with a = 2*2*...*2
        print(b)
    if b.sum().asscalar() >= 0:
        y = b[0]
    else:
        y = b[1]
    return y


x = nd.array([1, 4])
x.attach_grad()  # allocate storage on the ndarray for the gradient
with autograd.record():  # record f(x) so it can be differentiated
    y = f(x)
# Backpropagation: since y = a*x (a = product of the doublings),
# f'(x) = a, which should equal y/x for the selected element.
y.backward()
z = x.grad
# print() call form works under both Python 2 and Python 3.
print("=======")
print([z, x, y, y / x])  # a = y/x

[2. 8.]
<NDArray 2 @cpu(0)>

[ 4. 16.]
<NDArray 2 @cpu(0)>

[ 8. 32.]
<NDArray 2 @cpu(0)>

[16. 64.]
<NDArray 2 @cpu(0)>

[ 32. 128.]
<NDArray 2 @cpu(0)>

=======
[
[32.  0.]
<NDArray 2 @cpu(0)>, 
[1. 4.]
<NDArray 2 @cpu(0)>, 
[32.]
<NDArray 1 @cpu(0)>, 
[32.  8.]
<NDArray 2 @cpu(0)>]

猜你喜欢

转载自blog.51cto.com/13959448/2316718