PSO-LSTM Regression Prediction (Implemented with the TensorFlow Framework)

Load the required packages

import pandas as pd
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Dropout
from tensorflow.keras import metrics, optimizers
from sklearn.preprocessing import MinMaxScaler
from pyswarm import pso

Load the dataset, normalize it, and split it into training and test sets; the time step here is set to 1

# Load the data
df = pd.read_excel(r'C:\Users\Admin\Desktop\XAJ.xlsx', sheet_name='PREQ', index_col=0)
column_names = df.columns
scaler = MinMaxScaler()
data = scaler.fit_transform(df)
target = data[:,-1]
features = data[:,:-1]

# Split into training and test sets
size = len(features)
train_size = int(size * 0.8)
x_train = features[:train_size]
y_train = target[:train_size]
x_test = features[train_size:]
y_test = target[train_size:]

time_steps = 1
# Training set
x_train_lstm = np.zeros((x_train.shape[0] - time_steps + 1, time_steps, x_train.shape[1]))
y_train_lstm = np.zeros((y_train.shape[0] - time_steps + 1))

# Test set
x_test_lstm = np.zeros((x_test.shape[0] - time_steps + 1, time_steps, x_test.shape[1]))
y_test_lstm = np.zeros((y_test.shape[0] - time_steps + 1))

# Fill the sliding windows
for i in range(x_train_lstm.shape[0]):
    x_train_lstm[i] = x_train[i:i + time_steps]
    y_train_lstm[i] = y_train[i + time_steps - 1]
for i in range(x_test_lstm.shape[0]):
    x_test_lstm[i] = x_test[i:i + time_steps]
    y_test_lstm[i] = y_test[i + time_steps - 1]
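
With time_steps = 1 each window contains a single row, so the windowed arrays simply mirror the original splits. A quick shape check (a minimal sketch using the variables defined above) makes this explicit:

# Sanity check: (samples, time_steps, features) for the inputs, (samples,) for the targets
print(x_train_lstm.shape, y_train_lstm.shape)
print(x_test_lstm.shape, y_test_lstm.shape)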

Build the LSTM model. The parameters to be optimized are the learning rate and the number of LSTM units; to guard against overfitting, a Dropout layer with a rate of 0.05 is added

def train_lstm_model(params):
    learning_rate = params[0]
    num_neurons = int(params[1])

    # Build LSTM model
    model = Sequential()
    model.add(LSTM(num_neurons, input_shape=(time_steps, x_train.shape[1])))
    model.add(Dropout(0.05))
    model.add(Dense(1))
    optimizer = optimizers.Adam(learning_rate=learning_rate)
    model.compile(loss='mean_squared_error', optimizer=optimizer, metrics=[metrics.mean_absolute_error])

    history = model.fit(x_train_lstm, y_train_lstm, epochs=300, batch_size=8)
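    # Evaluate on the held-out test set; the MSE (score[0]) serves as the PSO fitness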
    score = model.evaluate(x_test_lstm, y_test_lstm)
    return score[0]
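
The fitness function can also be called on its own to check that it runs end to end. A minimal sketch (the candidate values 0.01 and 64 are arbitrary illustrations, not results of the search):

# Single hypothetical candidate: learning rate 0.01, 64 LSTM units
example_mse = train_lstm_model([0.01, 64])
print(example_mse)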

PSO optimizes the two parameters, with the MSE used as the fitness function. Note that each fitness evaluation retrains the network for 300 epochs, so with swarmsize=20 and maxiter=100 the search is computationally expensive

def objective(params):
    learning_rate = params[0]
    num_neurons = params[1]
    lstm_params = [learning_rate, int(num_neurons)]
    mse = train_lstm_model(lstm_params)

    return mse


# Search bounds for [learning_rate, num_neurons]
lb = [0.001, 32]
ub = [0.1, 128]
opt_params, mse = pso(objective, lb, ub, swarmsize=20, omega=1.2, phip=2, phig=2, maxiter=100)
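
One detail worth noting: pyswarm optimizes over continuous variables, so the unit count in opt_params comes back as a float and must be cast to an integer before the model is rebuilt, exactly as the objective does. A minimal sketch (best_units is an illustrative name):

# opt_params = [learning_rate, num_neurons]; num_neurons is a float after the search
best_units = int(opt_params[1])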

Display the optimized results, then invert the normalization and export the predictions to Excel

# Rebuild and train the final model with the best parameters, then print them
model = Sequential()
model.add(LSTM(int(opt_params[1]), input_shape=(time_steps, x_train.shape[1])))
model.add(Dropout(0.05))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer=optimizers.Adam(learning_rate=opt_params[0]), metrics=[metrics.mean_absolute_error])
model.fit(x_train_lstm, y_train_lstm, epochs=300, batch_size=8)
score = model.evaluate(x_test_lstm, y_test_lstm)
print("Best parameters: learning_rate = {:.6f}, num_neurons = {}".format(opt_params[0], int(opt_params[1])))
print(score)  # [test MSE, test MAE]
pred_test = model.predict(x_test_lstm)
pred_train = model.predict(x_train_lstm)
# Re-attach the predictions to the feature columns so the scaler, which was
# fitted on all columns, can invert the normalization in a single step
y = np.append(pred_train, pred_test, axis=0)
x = np.append(x_train, x_test, axis=0)
xy = np.append(x, y, axis=1)
xyf = scaler.inverse_transform(xy)
xys = pd.DataFrame(xyf, columns=column_names)
xys.to_excel(r'C:\Users\Admin\Desktop\归一化\min.xlsx', sheet_name='PREQ', index=False)
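
If only the de-normalized predictions are needed, they sit in the last column of the exported frame; a minimal sketch (pred_series, obs_series and test_rmse are illustrative names) compares the test portion against the observed series from the original df:

# De-normalized predictions are the last column of the inverse-transformed frame
pred_series = xys.iloc[:, -1].to_numpy()
obs_series = df.iloc[:, -1].to_numpy()
# With time_steps = 1 the rows stay aligned, so the test portion starts at train_size
test_rmse = np.sqrt(np.mean((pred_series[train_size:] - obs_series[train_size:]) ** 2))
print(test_rmse)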
