This article uses the particle swarm optimization (PSO) algorithm to tune LSTM hyperparameters.
pip3 install scikit-opt  # the optimizer calls below (run(), gbest_x, gbest_y) follow the sko.PSO API from scikit-opt rather than pyswarm
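The code below calls process_data() (and, in the commented-out lines, create_dataset()), and later uses a scaler object for inverse scaling; these helpers are not shown in the article. A minimal sketch of what they might look like follows, assuming a sliding-window univariate/multivariate setup. The file name data.csv, the target column, the window length, and the 80/20 split are illustrative assumptions, not the original author's code.

import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler

steps = 10                # sliding-window length (assumed)
scaler = MinMaxScaler()   # shared scaler so predictions can be inverse-transformed later

def create_dataset(X, y, steps):
    # Turn the scaled series into overlapping windows of length `steps`
    Xs, ys = [], []
    for i in range(len(X) - steps):
        Xs.append(X[i:i + steps])
        ys.append(y[i + steps])
    return np.array(Xs), np.array(ys)

def process_data():
    # Load the data, scale it to [0, 1], window it, and split into train/test
    df = pd.read_csv('data.csv')               # hypothetical file name
    values = scaler.fit_transform(df.values)
    X, y = values, values[:, -1]               # last column assumed to be the target
    X, y = create_dataset(X, y, steps)         # X becomes (samples, steps, nb_features)
    split = int(len(X) * 0.8)
    return X[:split], y[:split], X[split:], y[split:]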
from sko.PSO import PSO                                    # scikit-opt's particle swarm optimizer
from tensorflow.keras.models import Sequential             # or keras.models, depending on the install
from tensorflow.keras.layers import LSTM, Dense, Dropout, Activation
from tensorflow.keras.callbacks import EarlyStopping

def build_model(neurons1, neurons2, dropout):
    X_train, y_train, X_test, y_test = process_data()
    # X_train, y_train = create_dataset(X_train, y_train, steps)
    # X_test, y_test = create_dataset(X_test, y_test, steps)
    nb_features = X_train.shape[2]   # number of input features per time step
    input1 = X_train.shape[1]        # window length (time steps)
    model1 = Sequential()
    model1.add(LSTM(
        input_shape=(input1, nb_features),
        units=neurons1,
        return_sequences=True))
    model1.add(Dropout(dropout))
    model1.add(LSTM(
        units=neurons2,
        return_sequences=False))
    model1.add(Dropout(dropout))
    model1.add(Dense(units=1))
    model1.add(Activation("linear"))
    model1.compile(loss='mse', optimizer='Adam', metrics=['mae'])
    return model1, X_train, y_train, X_test, y_test
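PSO needs a fitness function to minimize; the training function passed to the optimizer below is not shown in the article. Here is a minimal sketch under the assumption that the fitness is 1 minus the R² score on the test set (which would explain why the script later prints 1 - pso_.gbest_y as the "best precision"); the epoch count, patience, and scoring choice are assumptions, not the author's settings.

from sklearn.metrics import r2_score

def training(params):
    # params is a particle position: [neurons1, neurons2, dropout, batch_size]
    neurons1 = int(params[0])
    neurons2 = int(params[1])
    dropout = params[2]
    batch_size = int(params[3])
    model, X_train, y_train, X_test, y_test = build_model(neurons1, neurons2, dropout)
    model.fit(X_train, y_train, epochs=30, batch_size=batch_size,
              validation_split=0.2, verbose=0,
              callbacks=[EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)])
    pred = model.predict(X_test, verbose=0)
    # Fitness to minimize: 1 - R^2, so a smaller value means a better model
    return 1 - r2_score(y_test, pred)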
if __name__ == '__main__':
    '''
    Hyperparameters optimized by PSO (in order):
    number of neurons in the first LSTM layer
    number of neurons in the second LSTM layer
    dropout rate
    batch_size
    '''
    UP = [150, 15, 0.5, 16]    # upper bounds for the four hyperparameters
    DOWN = [50, 5, 0.05, 8]    # lower bounds
    # Start the optimization (sko.PSO interface; pop and max_iter left at library defaults)
    pso_ = PSO(func=training, n_dim=4, lb=DOWN, ub=UP)
    pso_.run()
    print('best_params is ', pso_.gbest_x)
    print('best_precision is', 1 - pso_.gbest_y)
    # Train the final model with the best hyperparameters found by PSO
    neurons1 = int(pso_.gbest_x[0])
    neurons2 = int(pso_.gbest_x[1])
    dropout = pso_.gbest_x[2]
    batch_size = int(pso_.gbest_x[3])
    # neurons1 = 64
    # neurons2 = 64
    # dropout = 0.01
    # batch_size = 32
    model, X_train, y_train, X_test, y_test = build_model(neurons1, neurons2, dropout)
    history1 = model.fit(X_train, y_train, epochs=150, batch_size=batch_size, validation_split=0.2, verbose=1,
                         callbacks=[EarlyStopping(monitor='val_loss', patience=9, restore_best_weights=True)])
    # Predict on the test set
    y_score = model.predict(X_test)
    # Inverse-transform back to the original scale (scaler comes from process_data)
    y_score = scaler.inverse_transform(y_score.reshape(-1, 1))
    y_test = scaler.inverse_transform(y_test.reshape(-1, 1))
print("==========evaluation==============\n")
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error #平方绝对误差
import math
MAE = mean_absolute_error(y_test, y_score)
print('MAE: %.4f ' % MAE)
RMSE = math.sqrt(mean_squared_error(y_test, y_score))
print('RMSE: %.4f ' % (RMSE))
    # MAPE and SMAPE
    def mape(y_true, y_pred):
        return np.mean(np.abs((y_pred - y_true) / y_true)) * 100

    def smape(y_true, y_pred):
        return 2.0 * np.mean(np.abs(y_pred - y_true) / (np.abs(y_pred) + np.abs(y_true))) * 100

    MAPE = mape(y_test, y_score)
    print('MAPE: %.4f ' % MAPE)
    SMAPE = smape(y_test, y_score)
    print('SMAPE: %.4f ' % SMAPE)
Particle swarm optimization is a fairly old algorithm by now, but it still has practical value for hyperparameter searches like this one.
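For readers unfamiliar with what the optimizer does internally, the core of PSO is a per-particle velocity/position update. The sketch below is a generic illustration only; the coefficients w, c1, c2 are common textbook defaults, not values taken from this article.

import numpy as np

def pso_step(x, v, pbest, gbest, w=0.8, c1=0.5, c2=0.5):
    # x, v: (pop, dim) positions and velocities; pbest: per-particle best positions; gbest: global best
    r1, r2 = np.random.rand(*x.shape), np.random.rand(*x.shape)
    v = w * v + c1 * r1 * (pbest - x) + c2 * r2 * (gbest - x)  # inertia + cognitive + social terms
    return x + v, v

In this article, each particle is one candidate [neurons1, neurons2, dropout, batch_size] vector, and the fitness being minimized is whatever value training() returns.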
Note:
If you need the source code and dataset, or would like to discuss, please message me privately. Thanks.