1. When the number of training samples is not divisible by batch_size, set drop_last=True in the DataLoader

      train_loader = DataLoader(MyDataSet, BATCH_SIZE, drop_last=True)
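      As a quick sanity check, here is a minimal sketch (it only assumes PyTorch is installed): 10 samples with batch_size=4 would normally yield batches of 4, 4, and 2; with drop_last=True the trailing batch of 2 is discarded.

      import torch
      from torch.utils.data import DataLoader, TensorDataset

      dataset = TensorDataset(torch.arange(10).float())  # 10 samples, not divisible by 4
      loader = DataLoader(dataset, batch_size=4, drop_last=True)
      print([batch[0].shape[0] for batch in loader])     # [4, 4] -- the partial batch is gone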
2. Set up your own learning-rate adjustment schedule

    https://www.cnblogs.com/wanghui-garcia/p/10895397.html

    import torch.optim as optim
    import matplotlib.pyplot as plt
    from torchvision.models import AlexNet

    def adjust_learning_rate(optimizer, epoch, lr):
        """Sets the learning rate to the initial LR decayed by 10 every 2 epochs."""
        lr *= 0.1 ** (epoch // 2)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr

    model = AlexNet(num_classes=2)
    optimizer = optim.SGD(params=model.parameters(), lr=10)  # lr=10 just makes the decay easy to see

    plt.figure()
    x = list(range(10))
    y = []
    lr_init = optimizer.param_groups[0]['lr']  # remember the initial LR; each decay is computed from it
    for epoch in range(10):
        adjust_learning_rate(optimizer, epoch, lr_init)
        lr = optimizer.param_groups[0]['lr']
        print(epoch, lr)
        y.append(lr)
    plt.plot(x, y)
    plt.show()
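    PyTorch also ships a built-in scheduler that implements this exact step decay, torch.optim.lr_scheduler.StepLR. A minimal equivalent sketch (the AlexNet model and lr=10 are carried over from the snippet above just for illustration):

    import torch.optim as optim
    from torch.optim.lr_scheduler import StepLR
    from torchvision.models import AlexNet

    model = AlexNet(num_classes=2)
    optimizer = optim.SGD(model.parameters(), lr=10)
    scheduler = StepLR(optimizer, step_size=2, gamma=0.1)  # multiply LR by 0.1 every 2 epochs
    for epoch in range(10):
        # ... one epoch of training would go here ...
        scheduler.step()  # advance the schedule once per epoch
        print(epoch, optimizer.param_groups[0]['lr'])

    Compared with the hand-rolled function, the scheduler keeps the decay state internally, so there is no need to pass the initial LR around.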