python torch 学习率 StepLR的使用

python torch 学习率 StepLR的使用

（图1：StepLR 学习率衰减曲线示意图）

代码

from torch.optim.lr_scheduler import StepLR
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as mp
from torchvision import models

if __name__ == '__main__':
    # Initial learning rate for the optimizer.
    initial_lr = 0.0008
    # Number of epochs to simulate (no real training happens here).
    epochs = 300

    # The model only supplies parameters for the optimizer in this demo.
    model = models.resnet50(pretrained=False)
    optimizer = optim.Adam(model.parameters(), lr=initial_lr)
    # StepLR: multiply the lr by gamma every step_size epochs, i.e.
    # lr(epoch) = initial_lr * 0.910 ** (epoch // 5)
    scheduler_1 = StepLR(optimizer, step_size=5, gamma=0.910)

    # Record the lr actually used at each epoch so it can be plotted.
    lr_lst = []
    index_lst = []
    for epoch in range(epochs):
        optimizer.zero_grad()
        # Since PyTorch 1.1, optimizer.step() must be called before
        # scheduler.step(), otherwise the first lr value is skipped.
        optimizer.step()
        current_lr = optimizer.param_groups[0]['lr']
        print("第%d轮的学习率:%.7f" % (epoch, current_lr))
        # Append BEFORE stepping the scheduler, so the plotted curve
        # matches the printed per-epoch values (the original appended
        # after step(), shifting the plot by one epoch).
        lr_lst.append(current_lr)
        index_lst.append(epoch)
        scheduler_1.step()

    # Plot the resulting learning-rate schedule.
    mp.figure('LR', facecolor='lightgray')
    mp.subplot(111)
    mp.title('Lr scheduler', fontsize=16)
    mp.plot(index_lst, lr_lst, label='lr')
    mp.grid(linestyle=':')
    mp.legend()

    mp.tight_layout()
    mp.show()

你可能感兴趣的:(pytorch,python,python,学习)