When the number of training samples is not evenly divisible by batch_size, set drop_last in the DataLoader to True so the incomplete final batch is discarded
from torch.utils.data import DataLoader

# drop_last=True discards the final incomplete batch
train_loader = DataLoader(MyDataSet, batch_size=BATCH_SIZE, drop_last=True)
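A toy check (made-up data, not from the original post) of how drop_last changes the number of batches the loader yields:

import torch
from torch.utils.data import DataLoader, TensorDataset

toy = TensorDataset(torch.arange(10).float())              # 10 samples
print(len(DataLoader(toy, batch_size=3)))                  # 4 batches; the last one holds only 1 sample
print(len(DataLoader(toy, batch_size=3, drop_last=True)))  # 3 full batches; the remainder is dropped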
Setting a learning rate that you adjust yourself
https://www.cnblogs.com/wanghui-garcia/p/10895397.html
def adjust_learning_rate(optimizer, epoch, lr):
    """Sets the learning rate to the initial LR decayed by 10 every 2 epochs.

    Note: pass the *initial* lr on every call; the decay factor is recomputed
    from the epoch count rather than applied cumulatively.
    """
    lr *= (0.1 ** (epoch // 2))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
import matplotlib.pyplot as plt
import torch.optim as optim
from torchvision.models import AlexNet  # torchvision's AlexNet; any nn.Module works here

model = AlexNet(num_classes=2)
optimizer = optim.SGD(model.parameters(), lr=10)  # large initial lr so the decay steps are easy to see

plt.figure()
x = list(range(10))
y = []
lr_init = optimizer.param_groups[0]['lr']
for epoch in range(10):
    adjust_learning_rate(optimizer, epoch, lr_init)  # always pass the initial lr
    lr = optimizer.param_groups[0]['lr']
    print(epoch, lr)
    y.append(lr)
plt.plot(x, y)
plt.show()
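For comparison, PyTorch's built-in StepLR scheduler produces the same schedule; a minimal sketch, assuming a fresh optimizer so the lr starts from 10 again:

from torch.optim.lr_scheduler import StepLR

optimizer = optim.SGD(model.parameters(), lr=10)       # fresh optimizer, lr back at its initial value
scheduler = StepLR(optimizer, step_size=2, gamma=0.1)  # multiply lr by 0.1 every 2 epochs
for epoch in range(10):
    # ... a real training step would go here ...
    print(epoch, optimizer.param_groups[0]['lr'])
    scheduler.step()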