Differential Evolution (DE) is a relatively recent evolutionary computation technique. It was proposed by Storn et al. in 1995, originally to solve the Chebyshev polynomial fitting problem, and was later found to be an effective technique for complex optimization problems in general.
Code example:
import random
import numpy as np
from matplotlib import pyplot as plt
# Font settings so that CJK characters in labels and the minus sign display correctly
plt.rcParams['font.sans-serif'] = ['SimHei']  # use a font that contains CJK glyphs
plt.rcParams['axes.unicode_minus'] = False  # render the minus sign correctly

def random_array(shape: list):
    """
    Create a two-dimensional pseudo-random array.
    :return: a random array with values in the range [0, 1).
    """
    if len(shape) != 2:
        print('shape must be of the form [a, b], please try again.')
        return None
    array = np.zeros(shape)
    for i in range(shape[0]):
        for j in range(shape[1]):
            array[i, j] = random.random()
    return array

def differentialEvolution(low, up, function, D=10, F0=0.8, CR=0.1, G=400, NP=50, DE_vector=0, max_value=True):
    """
    Adaptive differential evolution, an improved version of the basic DE algorithm.
    The mutation factor F changes with the generation number: in the early
    iterations F is large, which preserves the diversity of the population; as the
    iterations proceed, F decreases, which retains good population information and
    avoids destroying the best solution found so far.
    Args:
        low: lower bound of the search interval.
        up: upper bound of the search interval.
        function: fitness function, i.e. the objective whose optimum is sought;
            it should return a single fitness value.
        D: chromosome length (problem dimension).
        F0: initial mutation factor. The mutation factor F ∈ [0, 2] is a real
            constant that determines how much the difference vector is amplified.
        CR: crossover rate, a real number in [0, 1]; it controls the probability
            that a component of the trial vector is taken from the randomly
            generated mutant vector rather than from the original vector.
        G: maximum number of generations, i.e. the maximum number of iterations.
        NP: population size, usually chosen between 5*D and 10*D.
        DE_vector: mutation strategy, one of 0, 1, 2, 3, 4:
            DE_vector=0 means DE/rand/1/bin
            DE_vector=1 means DE/best/1/bin
            DE_vector=2 means DE/rand-to-best/1/bin
            DE_vector=3 means DE/best/2/bin
            DE_vector=4 means DE/rand/2/bin
        max_value: whether to maximize; defaults to True. If False, the function
            is minimized instead.
    :returns: population, trace -- the final population and the best objective
        value recorded in each generation.
    """
    # Population initialization
    v = np.zeros([NP, D])
    u = np.zeros([NP, D])
    x = low + (up - low) * random_array([NP, D])
    trace = np.zeros(G)
    for gen in range(G):
        # Evaluate the fitness of the current population
        ob = np.zeros(NP)
        for i in range(NP):
            ob[i] = function(x[i])
        if max_value:
            # Maximization
            fitness_best, index = np.max(ob), np.argmax(ob)
        else:
            # Minimization
            fitness_best, index = np.min(ob), np.argmin(ob)
        # Adaptive mutation factor
        lamb = np.exp(1 - (G / (G + 1 - gen)))
        F = F0 * pow(2, lamb)
        # Mutation
        if DE_vector == 0:
            # DE/rand/1: random base vector plus one difference vector
            for m in range(NP):
                r1 = random.randint(0, NP - 1)
                while r1 == m:
                    r1 = random.randint(0, NP - 1)
                r2 = random.randint(0, NP - 1)
                while r2 == r1 or r2 == m:
                    r2 = random.randint(0, NP - 1)
                r3 = random.randint(0, NP - 1)
                while r3 == r2 or r3 == r1 or r3 == m:
                    r3 = random.randint(0, NP - 1)
                v[m, :] = x[r1, :] + F * (x[r2, :] - x[r3, :])
                # # The three index draws above can be simplified as follows:
                # r = np.random.choice(list(set(range(0, NP)) - {m}), 3, replace=False)
                # v[m, :] = x[r[0], :] + F * (x[r[1], :] - x[r[2], :])
        elif DE_vector == 1:
            # DE/best/1: perturb the current best individual with one difference vector
            for m in range(NP):
                r1 = random.randint(0, NP - 1)
                while r1 == m:
                    r1 = random.randint(0, NP - 1)
                r2 = random.randint(0, NP - 1)
                while r2 == r1 or r2 == m:
                    r2 = random.randint(0, NP - 1)
                v[m, :] = x[index, :] + F * (x[r1, :] - x[r2, :])
        elif DE_vector == 2:
            # DE/rand-to-best/1: move towards the best individual, plus one difference vector
            Lambda = 0.35
            for m in range(NP):
                r1 = random.randint(0, NP - 1)
                while r1 == m:
                    r1 = random.randint(0, NP - 1)
                r2 = random.randint(0, NP - 1)
                while r2 == r1 or r2 == m:
                    r2 = random.randint(0, NP - 1)
                v[m, :] = x[m, :] + Lambda * (x[index, :] - x[m, :]) + F * (x[r1, :] - x[r2, :])
        elif DE_vector == 3:
            # DE/best/2: perturb the current best individual with two difference vectors
            for m in range(NP):
                r1 = random.randint(0, NP - 1)
                while r1 == m:
                    r1 = random.randint(0, NP - 1)
                r2 = random.randint(0, NP - 1)
                while r2 == r1 or r2 == m:
                    r2 = random.randint(0, NP - 1)
                r3 = random.randint(0, NP - 1)
                while r3 == r2 or r3 == r1 or r3 == m:
                    r3 = random.randint(0, NP - 1)
                r4 = random.randint(0, NP - 1)
                while r4 == r3 or r4 == r2 or r4 == r1 or r4 == m:
                    r4 = random.randint(0, NP - 1)
                v[m, :] = x[index, :] + F * (x[r1, :] - x[r2, :] + x[r3, :] - x[r4, :])
        else:
            # DE/rand/2: random base vector plus two difference vectors
            for m in range(NP):
                r1 = random.randint(0, NP - 1)
                while r1 == m:
                    r1 = random.randint(0, NP - 1)
                r2 = random.randint(0, NP - 1)
                while r2 == r1 or r2 == m:
                    r2 = random.randint(0, NP - 1)
                r3 = random.randint(0, NP - 1)
                while r3 == r2 or r3 == r1 or r3 == m:
                    r3 = random.randint(0, NP - 1)
                r4 = random.randint(0, NP - 1)
                while r4 == r3 or r4 == r2 or r4 == r1 or r4 == m:
                    r4 = random.randint(0, NP - 1)
                r5 = random.randint(0, NP - 1)
                while r5 == r4 or r5 == r3 or r5 == r2 or r5 == r1 or r5 == m:
                    r5 = random.randint(0, NP - 1)
                v[m, :] = x[r5, :] + F * (x[r1, :] - x[r2, :] + x[r3, :] - x[r4, :])
        # Crossover (binomial). Note: the same per-dimension crossover decision is
        # applied to every individual here, a vectorized simplification of the
        # classical per-individual binomial crossover.
        r = random.randint(0, D - 1)  # dimension that is always taken from the mutant
        for i in range(D):
            cr = random.random()
            if cr <= CR or i == r:
                u[:, i] = v[:, i]
            else:
                u[:, i] = x[:, i]
        # Boundary handling: clip trial vectors back into [low, up]
        for m in range(NP):
            for n in range(D):
                if u[m, n] < low:
                    u[m, n] = low
                if u[m, n] > up:
                    u[m, n] = up
        # Selection: keep a trial vector only if it improves on its parent
        ob_1 = np.zeros(NP)
        for i in range(NP):
            ob_1[i] = function(u[i, :])
        if max_value:
            # Maximization
            for i in range(NP):
                if ob_1[i] > ob[i]:
                    x[i, :] = u[i, :]
        else:
            # Minimization
            for i in range(NP):
                if ob_1[i] < ob[i]:
                    x[i, :] = u[i, :]
        # Record the best objective value found at the start of this generation
        trace[gen] = fitness_best
    return x, trace
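
As a quick aside, the adaptive schedule of the mutation factor can be inspected on its own. The snippet below is only a sketch that reuses the same formula with the default F0=0.8 and G=400; it shows that F starts near 2*F0 (strong exploration) and decays towards F0 (finer exploitation) as the generation counter grows:

import numpy as np

F0, G = 0.8, 400
gens = np.arange(G)
lamb = np.exp(1 - G / (G + 1 - gens))
F = F0 * 2 ** lamb
print(round(F[0], 3), round(F[-1], 3))  # roughly 1.603 at the start, 0.8 at the end
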
def function(individual):
    # Sphere test function: sum of squares, with global minimum 0 at the origin
    y = 0
    for x in individual:
        y += pow(x, 2)
    return y
population, fitness = differentialEvolution(-20, 20, function, DE_vector=4, max_value=False)
plt.figure(0)
plt.title('Differential Evolution')
plt.xlabel('Iteration')
plt.ylabel('Objective value')
plt.plot(fitness)
plt.show()
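
To get a feel for how the different mutation strategies behave on this test function, the differentialEvolution function defined above can simply be called with different DE_vector values. The sketch below compares three of them; since the algorithm is stochastic, the curves will differ from run to run:

plt.figure(1)
for strategy, label in [(0, 'DE/rand/1/bin'), (1, 'DE/best/1/bin'), (4, 'DE/rand/2/bin')]:
    _, trace = differentialEvolution(-20, 20, function, DE_vector=strategy, max_value=False)
    plt.plot(trace, label=label)
plt.xlabel('Iteration')
plt.ylabel('Objective value')
plt.legend()
plt.show()
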
The code is adapted from the article “差分进化算法原理及matlab代码实现” (Principles of the differential evolution algorithm and its MATLAB implementation).
包子阳. 智能优化算法及其MATLAB实例. 2nd ed. 北京: 电子工业出版社, 2018.