# Assuming optimizer uses lr = 0.05 for all groups
# lr = 0.05     if epoch < 30
# lr = 0.005    if 30 <= epoch < 60
# lr = 0.0005   if 60 <= epoch < 90
# ...
scheduler = StepLR(optimizer, step_size=30, gamma=0.1)
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()
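Before wiring this into real training code, you can sanity-check the schedule described in the comments by driving the scheduler with a throwaway optimizer and printing the learning rate each epoch. This is only a minimal sketch: the single dummy parameter and the SGD optimizer are placeholders, not part of any real model.

import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import StepLR

# throwaway optimizer with one dummy parameter, base lr = 0.05
optimizer = SGD([torch.nn.Parameter(torch.zeros(1))], lr=0.05)
scheduler = StepLR(optimizer, step_size=30, gamma=0.1)

for epoch in range(100):
    lr = optimizer.param_groups[0]["lr"]   # lr in effect during this epoch
    if epoch % 30 == 0:
        print(epoch, lr)                   # 0: 0.05, 30: 0.005, 60: 0.0005, 90: 5e-05
    optimizer.step()                       # would normally follow backward() on a real model
    scheduler.step()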
Next, add the learning-rate update (scheduler.step()) to the fit function:
import math

import numpy as np
import torch

def fit(epochs, model, loss_func, opt, train_dl, valid_dl):
    epoch = 0
    pre_loss = 0
    while True:
        model.train()
        for xb, yb in train_dl:
            loss_batch(model, loss_func, xb, yb.long(), opt)
        # `scheduler` is assumed to be created outside fit(), e.g. the StepLR above;
        # stepping it here updates the learning rate once per epoch
        scheduler.step()

        model.eval()
        with torch.no_grad():
            losses, nums = zip(*[loss_batch(model, loss_func, xb, yb) for xb, yb in valid_dl])
        val_loss = np.sum(np.multiply(losses, nums)) / np.sum(nums)
        print(epoch, val_loss)

        # stop once the validation loss has converged or after 100 epochs
        if math.fabs(val_loss - pre_loss) < 1e-7 or epoch > 100:
            break
        pre_loss = val_loss
        epoch = epoch + 1
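For completeness, here is one way the pieces could be wired together. This is a sketch only: `model`, `loss_func`, `loss_batch`, `train_dl` and `valid_dl` are assumed to be defined earlier in the post, and the SGD hyperparameters are illustrative.

import torch.optim as optim
from torch.optim.lr_scheduler import StepLR

opt = optim.SGD(model.parameters(), lr=0.05, momentum=0.9)   # illustrative hyperparameters
scheduler = StepLR(opt, step_size=30, gamma=0.1)             # fit() calls scheduler.step() once per epoch
fit(100, model, loss_func, opt, train_dl, valid_dl)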
# Assuming optimizer uses lr = 0.05 for all groups
# lr = 0.05     if epoch < 30
# lr = 0.005    if 30 <= epoch < 80
# lr = 0.0005   if epoch >= 80
scheduler = MultiStepLR(optimizer, milestones=[30, 80], gamma=0.1)
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()
# Assuming optimizer uses lr = 0.05 for all groups
# lr = 0.025    if epoch == 0
# lr = 0.03125  if epoch == 1
# lr = 0.0375   if epoch == 2
# lr = 0.04375  if epoch == 3
# lr = 0.05     if epoch >= 4
scheduler = LinearLR(optimizer, start_factor=0.5, total_iters=4)
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()
# Assuming optimizer uses lr = 1. for all groups
# lr = 0.1      if epoch == 0
# lr = 0.1      if epoch == 1
# lr = 0.9      if epoch == 2
# lr = 0.81     if epoch == 3
# lr = 0.729    if epoch == 4
scheduler1 = ConstantLR(optimizer, factor=0.1, total_iters=2)
scheduler2 = ExponentialLR(optimizer, gamma=0.9)
scheduler = SequentialLR(optimizer, schedulers=[scheduler1, scheduler2], milestones=[2])
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()
# Assuming optimizer uses lr = 1. for all groups
# lr = 0.09     if epoch == 0
# lr = 0.081    if epoch == 1
# lr = 0.729    if epoch == 2
# lr = 0.6561   if epoch == 3
# lr = 0.59049  if epoch >= 4
scheduler1 = ConstantLR(optimizer, factor=0.1, total_iters=2)
scheduler2 = ExponentialLR(optimizer, gamma=0.9)
scheduler = ChainedScheduler([scheduler1, scheduler2])
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()
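The difference between the two is that SequentialLR switches from one scheduler to the next at the given milestones, while ChainedScheduler applies every scheduler's step on every call. One way to see this without any model is to attach each to its own throwaway optimizer and print the learning rates; a sketch only, with the dummy parameter as a placeholder and the exact printed values depending on your PyTorch version.

import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import ChainedScheduler, ConstantLR, ExponentialLR, SequentialLR

def make_optimizer():
    # single dummy parameter, base lr = 1.0 as in the comments above
    return SGD([torch.nn.Parameter(torch.zeros(1))], lr=1.0)

opt_seq = make_optimizer()
sequential = SequentialLR(
    opt_seq,
    schedulers=[ConstantLR(opt_seq, factor=0.1, total_iters=2),
                ExponentialLR(opt_seq, gamma=0.9)],
    milestones=[2],
)

opt_chain = make_optimizer()
chained = ChainedScheduler([ConstantLR(opt_chain, factor=0.1, total_iters=2),
                            ExponentialLR(opt_chain, gamma=0.9)])

for epoch in range(6):
    print(epoch, opt_seq.param_groups[0]["lr"], opt_chain.param_groups[0]["lr"])
    opt_seq.step(); opt_chain.step()
    sequential.step(); chained.step()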
# Assuming optimizer uses lr = 0.05 for all groups
# lr = 0.025    if epoch == 0
# lr = 0.025    if epoch == 1
# lr = 0.025    if epoch == 2
# lr = 0.025    if epoch == 3
# lr = 0.05     if epoch >= 4
scheduler = ConstantLR(optimizer, factor=0.5, total_iters=4)
for epoch in range(100):
    train(...)
    validate(...)
    scheduler.step()