李沐 线性回归 + 基础优化算法

这是「线性回归 + 基础优化算法」课程的简洁实现(从零开始的手写版本被我删掉了,这里保留的是基于 PyTorch 高级 API 的实现)。

import numpy as np
import torch
from torch.utils import data
from d2l import torch as d2l
from torch import nn


# Ground-truth parameters of the underlying linear model y = Xw + b + noise.
true_w = torch.tensor([2, -3.4])
true_b = 4.2

# Generate 1000 synthetic (feature, label) pairs from the true model.
features, labels = d2l.synthetic_data(true_w, true_b, 1000)

def load_array(data_arrays, batch_size, is_train=True):
    """Wrap in-memory tensors in a PyTorch data iterator.

    Args:
        data_arrays: tuple of tensors (e.g. features, labels) with equal
            first dimension; zipped sample-wise into a TensorDataset.
        batch_size: number of samples per yielded mini-batch.
        is_train: when True, shuffle the samples each epoch (training);
            when False, preserve dataset order (evaluation).

    Returns:
        A DataLoader yielding mini-batches of `batch_size` samples.
    """
    dataset = data.TensorDataset(*data_arrays)
    loader = data.DataLoader(dataset, batch_size, shuffle=is_train)
    return loader

batch_size = 10
data_iter = load_array((features, labels), batch_size)

# Pull a single mini-batch to sanity-check the iterator.
next(iter(data_iter))

# Single-layer linear model: 2 input features -> 1 output.
net = nn.Sequential(nn.Linear(2, 1))

# Initialize the weights from N(0, 0.01^2) and the bias to zero,
# mirroring the from-scratch version of this tutorial.
net[0].weight.data.normal_(0, 0.01)
net[0].bias.data.fill_(0)

# Mean-squared-error loss and minibatch SGD over all model parameters.
loss = nn.MSELoss()
trainer = torch.optim.SGD(net.parameters(), lr=0.03)
     
num_epochs = 3
for epoch in range(num_epochs):
    # One full pass over the training data in shuffled mini-batches.
    for X, y in data_iter:
        batch_loss = loss(net(X), y)  # forward pass + MSE on this batch
        trainer.zero_grad()           # clear gradients from the previous step
        batch_loss.backward()         # backpropagate
        trainer.step()                # SGD parameter update
    # After each epoch, report the loss over the entire dataset.
    l = loss(net(features), labels)
    print(f'epoch{epoch+1},loss{l:f}')

你可能感兴趣的:(学习笔记,算法,pytorch,深度学习)