        # output layer: 10 classes; no activation here because the loss uses from_logits=True
        self.model.add(layers.Dense(10))
        self.model.build(input_shape=(4, 28*28))
        self.model.summary()
        self.model.compile(optimizer=optimizers.Adam(learning_rate=0.01),
                           loss=losses.CategoricalCrossentropy(from_logits=True),
                           metrics=['accuracy'])
        history = self.model.fit(self.train_db, epochs=5, validation_data=self.validation_db,
                                 validation_freq=1, verbose=2)
        # return the validation accuracy after the final epoch, used as the fitness value
        return history.history['val_accuracy'][-1]
    def testLayer(self):
        network = Sequential([layers.Dense(256, activation='relu'),
                              layers.Dense(128, activation='relu'),
                              layers.Dense(64, activation='relu'),
                              layers.Dense(32, activation='relu'),
                              layers.Dense(10)])
        network.build(input_shape=(4, 28*28))
        network.summary()
        network.compile(optimizer=optimizers.Adam(learning_rate=0.01),
                        loss=losses.CategoricalCrossentropy(from_logits=True),
                        metrics=['accuracy']  # use accuracy as the evaluation metric
                        )
        history = network.fit(self.train_db, epochs=5, validation_data=self.validation_db,
                              validation_freq=1, verbose=2)
        # print the validation accuracy of the final epoch
        print(history.history['val_accuracy'][-1])
    def formatParams(self, params):
        return "'hidden_layer_sizes'={}".format(self.convertParams(params))
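formatParams above delegates to convertParams, which is not shown in this excerpt. The following is a minimal sketch of what such a conversion typically looks like, assuming the chromosome is a list of floats in which the first gene always defines a hidden layer and each subsequent gene adds another layer only while its rounded value stays positive; the exact encoding used by the class may differ.
    def convertParams(self, params):
        # sketch only: map a float-valued chromosome to a tuple of hidden layer sizes
        hidden_layer_sizes = [round(params[0])]      # the first hidden layer is always present
        for p in params[1:]:
            if round(p) <= 0:                        # a non-positive gene stops adding layers
                break
            hidden_layer_sizes.append(round(p))
        return tuple(hidden_layer_sizes)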
Optimizing the MLP architecture with a genetic algorithm
Now that we have the MLP architecture configuration and a way to measure the accuracy of each configuration, we can create a genetic-algorithm-based optimizer that searches over these configurations, namely the number of hidden layers and the number of nodes in each layer, for the one that yields the best classification accuracy.
The detailed steps are explained in the code comments.
from deap import base
from deap import creator
from deap import tools
import random
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# create an instance of the MLP layer-test class, used to evaluate various hidden-layer architecture combinations
test = MLPLayers()
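The genetic-algorithm machinery that drives this test instance is built with DEAP. The sketch below outlines what such a setup usually looks like; the gene bounds, the constant names, and the fitness-method name getAccuracy are illustrative assumptions rather than code taken from this excerpt.
# sketch of a typical DEAP setup (bounds and names are illustrative assumptions)
# each gene is a float whose rounded value gives the node count of one potential hidden layer
BOUNDS_LOW = [5, -5, -10, -20]
BOUNDS_HIGH = [15, 10, 20, 20]
NUM_OF_PARAMS = len(BOUNDS_HIGH)

toolbox = base.Toolbox()

# maximise a single objective: the validation accuracy returned by the test instance
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)

# register one bounded random-float generator per gene
for i, (low, high) in enumerate(zip(BOUNDS_LOW, BOUNDS_HIGH)):
    toolbox.register("layerSizeAttribute_" + str(i), random.uniform, low, high)
attributes = tuple(getattr(toolbox, "layerSizeAttribute_" + str(i)) for i in range(NUM_OF_PARAMS))

# an individual cycles once through the per-gene generators; a population repeats individuals
toolbox.register("individualCreator", tools.initCycle, creator.Individual, attributes, n=1)
toolbox.register("populationCreator", tools.initRepeat, list, toolbox.individualCreator)

# fitness function: train the MLP described by the chromosome and return its validation accuracy
def classificationAccuracy(individual):
    return test.getAccuracy(individual),  # getAccuracy is an assumed method name

toolbox.register("evaluate", classificationAccuracy)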