2D函数最小值优化

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import torch

def himmelblau(x):
    """Himmelblau's function f(a, b) = (a^2 + b - 11)^2 + (a + b^2 - 7)^2.

    `x` is any 2-element indexable (list, tensor, or a [X, Y] meshgrid pair);
    the arithmetic broadcasts elementwise, so NumPy arrays and torch tensors
    both work. Global minima have value 0, e.g. at (3, 2).
    """
    a, b = x[0], x[1]
    term1 = (a * a + b - 11) ** 2
    term2 = (a + b * b - 7) ** 2
    return term1 + term2

# Sample the function on a [-6, 6) x [-6, 6) grid to visualize its surface.
x = np.arange(-6, 6, 0.1)
y = np.arange(-6, 6, 0.1)
print('x,y range:', x.shape, y.shape)
X, Y = np.meshgrid(x, y)
print('X,Y maps:', X.shape, Y.shape)
# himmelblau broadcasts over the meshgrid arrays, giving Z.shape == X.shape.
Z = himmelblau([X, Y])

fig = plt.figure('himmelblau')
# fig.gca(projection='3d') was deprecated in Matplotlib 3.4 and removed in
# 3.6; add_subplot(projection='3d') is the supported way to get a 3D axes.
ax = fig.add_subplot(projection='3d')
ax.plot_surface(X, Y, Z)
ax.view_init(60, -30)
ax.set_xlabel('x')
ax.set_ylabel('y')
plt.show()

#分别用[0.,0.],[4.,0.],[-4.,0.]对x进行初始化
x = torch.tensor([0.,0.], requires_grad=True)
optimizer = torch.optim.Adam([x], lr=1e-3)
for step in range(20000):
    pred = himmelblau(x)

    optimizer.zero_grad()  #将梯度手动清零,避免梯度累加
    pred.backward()   #反向传播,计算当前的梯度值
    optimizer.step()  #根据梯度更新网络参数

    if step % 2000 == 0:
        print('strp{}: x = {}, f(x) = {}'.format(step,x.tolist(),pred.item()))
x,y range: (120,) (120,)
X,Y maps: (120, 120) (120, 120)
step0: x = [0.0009999999310821295, 0.0009999999310821295], f(x) = 170.0
step2000: x = [2.3331806659698486, 1.9540692567825317], f(x) = 13.730920791625977
step4000: x = [2.9820079803466797, 2.0270984172821045], f(x) = 0.014858869835734367
step6000: x = [2.999983549118042, 2.0000221729278564], f(x) = 1.1074007488787174e-08
step8000: x = [2.9999938011169434, 2.0000083446502686], f(x) = 1.5572823031106964e-09
step10000: x = [2.999997854232788, 2.000002861022949], f(x) = 1.8189894035458565e-10
step12000: x = [2.9999992847442627, 2.0000009536743164], f(x) = 1.6370904631912708e-11
step14000: x = [2.999999761581421, 2.000000238418579], f(x) = 1.8189894035458565e-12
step16000: x = [3.0, 2.0], f(x) = 0.0
step18000: x = [3.0, 2.0], f(x) = 0.0

可以看到初始值为(0,0)时函数的最小值的坐标为(3,2)。
2D函数最小值优化_第1张图片
而不同的初始值得到的结果也是不同的,所以初始值不能随意设置。

你可能感兴趣的:(#,PyTorch)