[Personal Code] Predicting Guangzhou License Plate Auction Prices with an LSTM

Table of Contents

    • 1. Data
    • 2. Code

Tomorrow is license plate auction day, so I had an AI assistant help me generate a piece of time-series prediction code, mainly to predict tomorrow's average plate price. Honestly, the approach is rather crude: it ignores the relationships among the multiple data columns as well as the impact of social events, and it only does single-step prediction, so the error is fairly large. The model prediction that matched my personal expectation was 17745.

1. Data

First, the data:

time,sig_min,sig_avg,com_min,com_avg
2012/8/28,10000,22822,10000,25515
2012/9/25,10000,14138,10000,15978
2012/10/25,10000,11067,10000,12674
2012/11/26,10000,10779,10000,12575
2012/12/25,10000,10955,10000,12303
2013/1/25,10000,10854,10000,12583
2013/2/25,10000,10573,10000,11762
2013/3/25,10000,10585,10000,11688
2013/4/25,10000,10998,10000,12017
2013/5/27,11600,12627,10000,12548
2013/6/25,14000,15743,10000,13117
2013/7/25,18700,20631,12500,14789
2013/8/26,23900,26599,15100,17385
2013/9/25,10000,30802,18000,20643
2013/10/25,10000,16384,10000,19510
2013/11/25,10000,11813,10000,14304
2013/12/25,10700,11959,10000,12958
2014/1/26,11500,12541,10000,12601
2014/2/25,11600,12777,10000,12494
2014/3/27,12300,13085,10300,12203
2014/4/26,13100,14074,10800,12452
2014/5/27,14500,15391,11000,12902
2014/6/26,16000,17024,10000,13134
2014/7/25,14800,18310,10000,12845
2014/8/26,10000,16654,10000,12375
2014/9/25,10000,12382,10000,11997
2014/10/25,10000,11442,10000,11836
2014/11/25,10000,11207,10000,11614
2014/12/25,10300,11421,10000,11604
2015/1/25,12000,13137,10000,11882
2015/2/25,13200,14331,10000,12391
2015/3/25,14500,15436,10000,11883
2015/4/27,16800,17798,10000,11593
2015/5/25,20200,21506,10000,11604
2015/6/25,26000,27672,10000,11822
2015/7/27,35000,37805,10000,12152
2015/8/25,10000,36231,10000,11878
2015/9/25,13500,16886,10000,11590
2015/10/26,16000,17487,10000,11254
2015/11/25,19000,20609,10000,11811
2015/12/25,24600,27077,10000,12009
2016/1/25,21000,25727,10000,12106
2016/2/25,19500,21884,10000,11560
2016/3/25,21100,23315,10000,11513
2016/4/25,23800,25701,10000,11496
2016/5/25,25000,27127,10000,11186
2016/6/27,23400,28541,10000,11420
2016/7/25,10100,24324,10000,11355
2016/8/25,15100,17330,10000,11114
2016/9/26,18100,19614,10000,11277
2016/10/25,19500,21551,10000,11269
2016/11/25,18600,22300,10000,11427
2016/12/26,15300,20591,10000,10905
2017/1/25,15100,17508,10000,11093
2017/2/28,15900,17419,10000,11114
2017/3/27,17300,18358,10000,10945
2017/4/25,18900,20127,10000,11061
2017/5/25,21500,22996,10000,11063
2017/6/26,23900,25498,10000,11136
2017/7/25,25000,26668,10100,11170
2017/8/25,26800,28561,10800,11663
2017/9/25,28800,30535,11800,15659
2017/10/25,30000,32449,13800,18038
2017/11/27,30700,34046,16800,18982
2017/12/25,18000,32312,24000,26123
2018/1/25,21000,25213,28000,30832
2018/2/26,22800,24560,32200,34718
2018/3/26,25300,26939,39100,41653
2018/4/25,32100,34455,51000,56158
2018/5/25,41300,44225,62000,68498
2018/6/25,52000,57283,62800,77611
2018/7/25,12100,56152,10000,70079
2018/8/27,25500,31654,12000,25880
2018/9/25,29700,32205,17200,22696
2018/10/25,33100,35434,24200,26597
2018/11/26,36800,39585,32300,35922
2018/12/25,37000,40985,42300,46032
2019/1/25,33900,39196,46000,51488
2019/2/25,33000,36240,47800,52206
2019/3/25,36000,37953,52100,55791
2019/4/25,38600,40395,55000,59376
2019/5/27,40000,42846,40100,59223
2019/6/25,14100,28966,10000,32803
2019/7/25,18800,22047,12100,18842
2019/8/26,21800,23779,14500,18542
2019/9/26,20200,24164,10000,17071
2019/10/25,10000,20306,10000,13567
2019/11/25,13200,15917,10000,12835
2019/12/25,13800,15461,10000,12411
2020/2/3,14800,16050,11800,13461
2020/2/25,16000,17086,13000,14612
2020/3/25,17200,18495,14800,16474
2020/4/26,15200,17820,10000,15306
2020/5/25,17800,18918,10000,12581
2020/6/28,20900,21929,10000,12446
2020/7/27,24800,26011,12000,13418
2020/8/25,25800,30005,14600,15555
2020/9/25,18500,26837,18200,19354
2020/10/26,13800,20025,23500,25366
2020/11/25,16300,18217,29400,31520
2020/12/25,19700,21711,37600,40356
2021/1/25,20000,21960,34800,39309
2021/2/25,21000,22494,39800,41983
2021/3/25,23500,24826,46200,48350
2021/4/25,25900,27142,53900,56205
2021/5/25,26500,29423,60900,63757
2021/6/25,22800,27542,45000,62128
2021/7/26,21000,24914,37800,49138
2021/8/25,20000,22687,28000,37841
2021/9/26,20000,21615,25000,29955
2021/10/25,20000,21524,24800,28819
2021/11/25,20000,21557,25900,28535
2021/12/27,18800,21543,28000,30775
2022/1/25,15000,19692,31000,32618
2022/2/25,12800,17105,33500,35970
2022/3/25,13900,15392,35700,37760
2022/4/25,15200,16221,38500,40464
2022/5/25,16500,17555,32000,40992
2022/6/27,16600,18311,10000,32871
2022/7/25,17000,18329,15000,20381
2022/8/25,17300,18593,16900,20372
2022/9/26,17000,18616,18600,20595
2022/10/25,10000,17636,18600,21608
2022/11/25,11000,12808,18500,20747
2022/12/26,11900,12826,18600,21577
2023/1/28,12500,13330,21000,22919
2023/2/27,13500,14356,23500,24904
2023/3/27,15000,15902,26600,27809
2023/4/25,17100,18074,28900,30477
2023/5/25,19800,20629,29500,32452
2023/6/25,21800,23263,10000,29652
2023/7/25,10000,19591,10000,19111
2023/8/25,10000,12599,10000,14093
2023/9/25,10500,12101,11100,13421
2023/10/25,12000,13366,11800,13657
2023/11/27,13500,14688,13000,14662
2023/12/25,15000,16120,15000,16227

This data can be looked up directly on "Guangzhou Bendibao" (广州本地宝). The columns list the date, individual minimum price, individual average price, company minimum price, and company average price, in the order "time,sig_min,sig_avg,com_min,com_avg".
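To run the script in the next section, save the table above as a CSV file. Here is a quick sanity check (this assumes the file is named price_change.csv, the path the script below uses):

import pandas as pd

# Load the auction history and parse the date column
df = pd.read_csv('price_change.csv', parse_dates=['time'])
print(df.dtypes)                 # time should be datetime64, the price columns int64
print(df['sig_avg'].describe())  # individual average price, the series modeled below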

2. Code

The LSTM is built with PyTorch. Here is the code:

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader
import numpy as np
import pandas as pd

# Custom dataset: slices the series into (window, next value) pairs
class CustomDataset(Dataset):
    def __init__(self, data, seq_length):
        self.data = data
        self.seq_length = seq_length

    def __len__(self):
        return len(self.data) - self.seq_length

    def __getitem__(self, idx):
        return (
            torch.from_numpy(np.array(self.data[idx:idx+self.seq_length])).float(),
            torch.from_numpy(np.array([self.data[idx+self.seq_length]])).float()
        )
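# Example with seq_length = 3: for idx = 0, __getitem__ returns
#   input  = data[0:3]   (three consecutive auction months)
#   target = [data[3]]   (the month that follows)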


# Read the series from the CSV file
df = pd.read_csv('price_change.csv')  # replace with the path to your CSV file
input_column = 'sig_avg'  # replace with the column you want to model
input_data = df[input_column].values

# LSTM model: a single LSTM layer followed by a linear output layer
class LSTMNet(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super(LSTMNet, self).__init__()
        self.hidden_size = hidden_size
        self.lstm = nn.LSTM(input_size, hidden_size)
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        lstm_out, _ = self.lstm(x.view(len(x), 1, -1))
        output = self.fc(lstm_out[-1])
        return output
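# Shape walk-through for one training step (batch_size = 1):
#   inputs from the DataLoader : (1, seq_length)
#   x.view(len(x), 1, -1)      : (1, 1, seq_length), i.e. (seq, batch, input_size)
#   lstm_out                   : (1, 1, hidden_size)
#   lstm_out[-1]               : (1, hidden_size)
#   self.fc(lstm_out[-1])      : (1, 1), matching the (1, 1) target
# Note: the whole window enters as ONE time step with seq_length features,
# not as seq_length separate steps.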

# Collect one prediction per window length
result = []
for seq_length in range(1, 2):  # seq_length: length of the input window

    # Build the dataset and loader for this window length
    # (inside the loop, because the windows depend on seq_length)
    dataset = CustomDataset(input_data, seq_length)
    train_loader = DataLoader(dataset, batch_size=1, shuffle=False)

    # Instantiate the model
    input_size = seq_length
    hidden_size = 10
    output_size = 1
    model = LSTMNet(input_size, hidden_size, output_size)

    # Loss function and optimizer
    criterion = nn.MSELoss()
    optimizer = optim.SGD(model.parameters(), lr=0.01)

    # Train the model, logging the loss every 10 epochs
    num_epochs = 100
    losses = []
    for epoch in range(num_epochs):
        running_loss = 0.0
        for inputs, targets in train_loader:
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        if epoch % 10 == 0:
            epoch_loss = running_loss / len(train_loader)
            losses.append(epoch_loss)
            print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {epoch_loss:.6f}')

    # Predict the next value from the last seq_length observations,
    # reshaped to (batch=1, seq_length) to match the training inputs
    model.eval()
    with torch.no_grad():
        input_sequence = torch.from_numpy(input_data[-seq_length:]).float().view(1, -1)
        predicted_value = model(input_sequence).item()
    result.append(int(predicted_value))

print(result)
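One likely reason the loss stays so large is that the raw prices (roughly 10000 to 60000) go into the network unscaled, so the squared errors are on the order of 10^8. Here is a minimal sketch of min-max scaling before training, assuming the same input_data as above; the training loop itself is unchanged:

# Scale the series into [0, 1] before building the dataset
data_min, data_max = input_data.min(), input_data.max()
scaled = (input_data - data_min) / (data_max - data_min)
dataset = CustomDataset(scaled, seq_length)  # train on `scaled` instead of `input_data`

# Map a scaled prediction back to yuan afterwards
predicted_price = predicted_value * (data_max - data_min) + data_min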

I built a single-layer LSTM here for training and prediction, but the loss stays quite large, and I did not tune the hyperparameters carefully. The result that best matched my expectation was 17745. Hopefully this gives readers with relevant experience something to build on. What we learn isn't only useful for writing papers; it's worth trying to apply it to everyday life as well.
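On the multi-column point above: nn.LSTM takes multivariate input directly, so all four price columns could be fed to the network at each time step. A rough sketch of the input side only, assuming the same df as above (the dataset slicing and the target would need matching changes):

# Use all four price columns as features at each time step
features = df[['sig_min', 'sig_avg', 'com_min', 'com_avg']].values.astype('float32')

# input_size becomes the number of features; the window length is the time axis
lstm = nn.LSTM(input_size=4, hidden_size=10)
fc = nn.Linear(10, 1)  # still predicts a single value, e.g. the next sig_avg

x = torch.from_numpy(features[-3:]).unsqueeze(1)  # (3, 1, 4): last three months
out, _ = lstm(x)
next_sig_avg = fc(out[-1])  # untrained here; shown only for the tensor shapes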
