21. Gradient Method and Conjugate Gradient Method
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 20 21:56:21 2019
@author: zhangchaoyu
"""
"""
当离最优值较远时,利用梯度法,接近极小点时用收敛较快的其他方法
"""
import math
import copy
"""
1、f(x,y) = x*x + 25*y*y
2、f(x,y) = (x-1)(x-1) + (y-1)(y-1)
"""
def gradient(X):
    # Gradient of the active test function (function 2); uncomment the first
    # return to switch to function 1.
    x = X[0]
    y = X[1]
    #return [2*x, 50*y]
    return [2*x-2, 2*y-2]
def Hessian():
    # Hessian of the active test function (constant, since f is quadratic).
    #return [[2,0],[0,50]]
    return [[2,0],[0,2]]
# Optimal step along direction P (exact line search on a quadratic):
# step = -(G·P) / (P·H·P); with P = G this is -(G·G)/(G·H·G).
def step_best(G, H, P):
    num1 = multiplication_M_and_M([G], trans([P]))[0][0]
    num2 = multiplication_M_and_M(multiplication_M_and_M([P], H), trans([P]))[0][0]
    return -num1/num2
# Conjugacy coefficient alpha = (G·H·P) / (P·H·P), chosen so that the next
# direction -G + alpha*P is H-conjugate to the current direction P.
def alpha_conjugate(G, H, P):
    num1 = multiplication_M_and_M(multiplication_M_and_M([G], H), trans([P]))[0][0]
    num2 = multiplication_M_and_M(multiplication_M_and_M([P], H), trans([P]))[0][0]
    return num1/num2
# Matrix-vector product with a sign flip: returns -H·G (not used below).
def multiplication_M_and_V(H, G):
    P = []
    for i in range(len(H)):
        P.append(-sum([H[i][j]*G[j] for j in range(len(H[i]))]))
    return P
# Naive matrix product C = A·B.
def multiplication_M_and_M(A, B):
    C = []
    for i in range(len(A)):
        temp = []
        for j in range(len(B[0])):
            temp.append(sum([A[i][k]*B[k][j] for k in range(len(A[i]))]))
        C.append(copy.deepcopy(temp))
    return C
# Matrix transpose.
def trans(A):
    return [[A[j][i] for j in range(len(A))] for i in range(len(A[0]))]
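# --- Added illustration (not part of the original post): a quick hand-checked
# --- sanity test of the matrix helpers above.
# [[1,2],[3,4]] times [[5,6],[7,8]] is [[19,22],[43,50]], and trans swaps rows and columns.
assert multiplication_M_and_M([[1, 2], [3, 4]], [[5, 6], [7, 8]]) == [[19, 22], [43, 50]]
assert trans([[1, 2], [3, 4]]) == [[1, 3], [2, 4]]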
# Gradient (steepest-descent) method. With the exact line-search step used here,
# each new gradient is perpendicular to the previous one.
def Gradient(X0):
    X = X0
    G = gradient(X)
    while math.sqrt(G[0]*G[0]+G[1]*G[1]) > 1e-5:
        H = Hessian()
        step = step_best(G, H, G)
        X = [X[i]+step*G[i] for i in range(len(X))]
        G = gradient(X)
        print(X)
    return X
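# --- Added illustration (a sketch, not part of the original post): with the exact
# --- line-search step, consecutive gradients are orthogonal. Checked here on the
# --- first test function f(x,y) = x*x + 25*y*y, whose gradient and Hessian are
# --- written out inline so the check does not depend on the globals above.
_H = [[2, 0], [0, 50]]
_X = [2, 2]
_G0 = [2*_X[0], 50*_X[1]]                       # gradient at the starting point
_s = step_best(_G0, _H, _G0)                    # optimal step along the gradient
_X1 = [_X[i] + _s*_G0[i] for i in range(2)]     # one exact line-search step
_G1 = [2*_X1[0], 50*_X1[1]]                     # gradient at the new point
print(sum(_G0[i]*_G1[i] for i in range(2)))     # ~0: the two gradients are perpendicular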
# Conjugate gradient method.
# Another quadratic to try: f(x,y) = 1.5*x*x + 0.5*y*y - x*y - 2*x
def Gradient_conjugate(X0):
    X = X0
    G = gradient(X)
    P = [-G[i] for i in range(len(G))]
    H = Hessian()
    while math.sqrt(G[0]*G[0]+G[1]*G[1]) > 1e-5:
        step = step_best(G, H, P)
        X = [X[i]+step*P[i] for i in range(len(X))]
        G = gradient(X)
        alpha = alpha_conjugate(G, H, P)
        P = [-G[i]+alpha*P[i] for i in range(len(P))]
        print(X)
    return X
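# --- Added illustration (a sketch, not part of the original post): on a quadratic in
# --- two variables, conjugate gradients with exact line search terminate in at most
# --- two steps. The function f(x,y) = 1.5*x*x + 0.5*y*y - x*y - 2*x from the comment
# --- above has gradient [3*x - y - 2, y - x], Hessian [[3,-1],[-1,1]], and minimum (1, 1).
def _cg_two_step_demo():
    H = [[3, -1], [-1, 1]]
    def grad(X):
        return [3*X[0] - X[1] - 2, X[1] - X[0]]
    X = [0, 0]
    G = grad(X)
    P = [-G[i] for i in range(2)]
    for _ in range(2):                              # two iterations suffice
        step = step_best(G, H, P)
        X = [X[i] + step*P[i] for i in range(2)]
        G = grad(X)
        alpha = alpha_conjugate(G, H, P)
        P = [-G[i] + alpha*P[i] for i in range(2)]
    print(X)                                        # essentially [1.0, 1.0]
_cg_two_step_demo()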
# Starting points to experiment with; only the last assignment takes effect.
#X0 = [2,2]
#X0 = [-2,4]
#X0 = [100,50000]
X0 = [0,0]
X = Gradient(X0)
X = Gradient_conjugate(X0)
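# --- Added usage note (a sketch, not part of the original post): with the active test
# --- function f(x,y) = (x-1)*(x-1) + (y-1)*(y-1), the minimum is at (1, 1), so the
# --- result above should sit within the 1e-5 gradient tolerance of [1.0, 1.0].
print(abs(X[0] - 1) < 1e-4 and abs(X[1] - 1) < 1e-4)   # expected: True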