数值优化:牛顿法+armijo 求rosenbrock多维函数

经过231次迭代计算求得最优解

#Rosenbrock_f.py

import numpy as np 

def cal_rosenbrock(x):
    # Rosenbrock value: sum_i 100*(x[i+1] - x[i]^2)^2 + (1 - x[i])^2
    # (x is expected to be a 1-D numpy array; minimum 0 at all-ones)
    shifted = x[1:]
    base = x[:-1]
    return sum(100.0 * (shifted - base ** 2.0) ** 2.0 + (1 - base) ** 2.0)



def rosen_der(x):
    # Analytic gradient of the Rosenbrock function (1-D numpy array in/out).
    grad = np.zeros_like(x)
    mid = x[1:-1]      # interior points
    left = x[:-2]      # their left neighbours
    right = x[2:]      # their right neighbours
    # end points have only one coupling term each
    grad[0] = -400 * x[0] * (x[1] - x[0] ** 2) - 2 * (1 - x[0])
    grad[-1] = 200 * (x[-1] - x[-2] ** 2)
    # interior points couple to both neighbours
    grad[1:-1] = 200 * (mid - left ** 2) - 400 * mid * (right - mid ** 2) - 2 * (1 - mid)
    return grad

def rosen_hess(x):
    # Analytic (tridiagonal) Hessian of the Rosenbrock function.
    x = np.asarray(x)
    main = np.zeros_like(x)
    main[0] = 1200 * x[0] ** 2 - 400 * x[1] + 2
    main[-1] = 200
    main[1:-1] = 202 + 1200 * x[1:-1] ** 2 - 400 * x[2:]
    # both off-diagonals carry the same -400*x[i] coupling terms
    off = -400 * x[:-1]
    return np.diag(main) + np.diag(off, 1) + np.diag(off, -1)


def armijo(x, error, alpha=1):
    """Backtracking (Armijo) line search for the Rosenbrock objective.

    Args:
        x: current iterate, a 1-D array-like of length >= 2.
        error: search direction (here the Newton step H^-1 * grad);
            the update applied by the caller is x <- x - alpha * error.
        alpha: initial trial step size (default 1, the full Newton step).

    Returns:
        A step size alpha accepted by the sufficient-decrease test.
    """
    # Generalized: the original hard-coded an 11-dimensional problem here.
    n = len(x)

    def loss(alpha, x, loss=0):
        # Rosenbrock value at the trial point x - alpha * error.
        temp = []
        for i in range(n):
            temp.append(x[i])
        for i in range(n):
            temp[i] -= alpha * error[i]
        for i in range(n - 1):
            loss += 100 * (temp[i + 1] - temp[i] ** 2) ** 2 + (temp[i] - 1) ** 2
        return loss

    def check(alpha, x):
        # True while the sufficient-decrease condition is still violated.
        # NOTE(review): classic Armijo uses sigma*alpha*(grad . direction);
        # this code uses dot(error, error) instead — kept as-is to preserve
        # the original algorithm's behavior. TODO confirm intent.
        return loss(alpha, x) > loss_0 - sigma * alpha * np.dot(error, error)

    sigma = 0.02   # sufficient-decrease constant
    rho = 0.4      # backtracking shrink factor
    loss_0 = loss(0, x)

    if check(alpha, x) == False:
        # the trial step already gives enough decrease
        alpha = 1
    elif check(alpha, x):
        # restart from a small step and shrink until the test passes
        alpha = 0.02
        while check(alpha, x):
            alpha *= rho

    return alpha

def for_rosenbrock_func(max_iter_count=1000, dim=11):
    """Minimize the Rosenbrock function with Newton's method + Armijo search.

    Args:
        max_iter_count: maximum number of Newton iterations.
        dim: problem dimension (default 11, matching the original script).

    Returns:
        The final iterate; the true minimizer is the all-ones vector.
    """
    pre_x = np.zeros((dim,), dtype=np.float32)
    loss = 10   # sentinel so the loop runs at least once
    cnt = 0
    while loss > 0.001 and cnt < max_iter_count:
        # Newton direction: dk = H(x)^-1 * grad(x)
        # (the dead zero-vector pre-allocations of the original were removed)
        error = rosen_der(pre_x)
        Hess = np.linalg.inv(rosen_hess(pre_x))
        dk = np.dot(Hess, error)

        # Armijo backtracking picks the step length along -dk
        alpha = armijo(pre_x, dk)
        pre_x -= alpha * dk

        loss = cal_rosenbrock(pre_x)  # global minimum value is 0

        print("count: ", cnt, "the loss:", loss, "step:", alpha)
        cnt += 1
    return pre_x

if __name__ == '__main__':
    # Run the optimization and show the final iterate.
    solution = for_rosenbrock_func()
    print(solution)

你可能感兴趣的:(数值优化)