#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@Author : Peike Li
@Contact : peike.li@yahoo.com
@File : warmup_scheduler.py
@Time : 3/28/19 2:24 PM
@Desc :
@License : This source code is licensed under the license found in the
           LICENSE file in the root directory of this source tree.
"""
import math
from torch.optim.lr_scheduler import _LRScheduler


class GradualWarmupScheduler(_LRScheduler):
    """Gradually warm up the learning rate, then decay it with cosine annealing.

    Warmup is proposed in 'Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour'.
    """
    def __init__(self, optimizer, total_epoch, eta_min=0, warmup_epoch=10, last_epoch=-1):
        self.total_epoch = total_epoch      # total number of training epochs
        self.eta_min = eta_min              # minimum learning rate
        self.warmup_epoch = warmup_epoch    # number of linear-warmup epochs
        super(GradualWarmupScheduler, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        if self.last_epoch <= self.warmup_epoch:
            # Linear warmup from eta_min up to each group's base lr.
            return [self.eta_min + self.last_epoch * (base_lr - self.eta_min) / self.warmup_epoch
                    for base_lr in self.base_lrs]
        else:
            # Cosine annealing from base lr back down to eta_min over the
            # remaining (total_epoch - warmup_epoch) epochs.
            return [self.eta_min + (base_lr - self.eta_min)
                    * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epoch)
                                    / (self.total_epoch - self.warmup_epoch))) / 2
                    for base_lr in self.base_lrs]
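
# Illustrative usage sketch (not part of the original module): driving
# GradualWarmupScheduler over a full run. `model`, the hyperparameter values,
# and the training step below are placeholders chosen for the example.
#
#   model = torch.nn.Linear(10, 2)
#   optimizer = torch.optim.SGD(model.parameters(), lr=7e-3, momentum=0.9)
#   scheduler = GradualWarmupScheduler(optimizer, total_epoch=150,
#                                      eta_min=7e-5, warmup_epoch=10)
#   for epoch in range(150):
#       ...  # one training epoch
#       scheduler.step()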


class SGDRScheduler(_LRScheduler):
    """Cosine annealing with warmup and warm restarts.

    Restarts are proposed in 'SGDR: Stochastic Gradient Descent with Warm Restarts'.
    """
    def __init__(self, optimizer, total_epoch=150, start_cyclical=100, cyclical_base_lr=7e-4,
                 cyclical_epoch=10, eta_min=0, warmup_epoch=10, last_epoch=-1):
        self.total_epoch = total_epoch            # total number of training epochs
        self.start_cyclical = start_cyclical      # epoch at which warm restarts begin
        self.cyclical_epoch = cyclical_epoch      # length of each restart cycle
        self.cyclical_base_lr = cyclical_base_lr  # peak lr at the start of each cycle
        self.eta_min = eta_min                    # minimum learning rate
        self.warmup_epoch = warmup_epoch          # number of linear-warmup epochs
        super(SGDRScheduler, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        if self.last_epoch < self.warmup_epoch:
            # Linear warmup from eta_min up to each group's base lr.
            return [self.eta_min + self.last_epoch * (base_lr - self.eta_min) / self.warmup_epoch
                    for base_lr in self.base_lrs]
        elif self.last_epoch < self.start_cyclical:
            # Cosine annealing from base lr down to eta_min before restarts begin.
            return [self.eta_min + (base_lr - self.eta_min)
                    * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epoch)
                                    / (self.start_cyclical - self.warmup_epoch))) / 2
                    for base_lr in self.base_lrs]
        else:
            # Warm restarts: every cyclical_epoch epochs, jump back to
            # cyclical_base_lr and anneal down to eta_min again.
            return [self.eta_min + (self.cyclical_base_lr - self.eta_min)
                    * (1 + math.cos(math.pi * ((self.last_epoch - self.start_cyclical)
                                               % self.cyclical_epoch) / self.cyclical_epoch)) / 2
                    for base_lr in self.base_lrs]
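
# Worked example (using the demo values below, not values fixed by the class):
# with start_cyclical=100, cyclical_epoch=10, cyclical_base_lr=3.5e-3 and
# eta_min=7e-5, epoch 105 sits halfway through a restart cycle, so
#   lr = 7e-5 + (3.5e-3 - 7e-5) * (1 + cos(pi * 5 / 10)) / 2
#      = 7e-5 + 3.43e-3 * 0.5 ≈ 1.785e-3.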


if __name__ == '__main__':
    import matplotlib.pyplot as plt
    import torch

    # Plot the SGDR schedule for a toy model over 150 epochs.
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(params=model.parameters(), lr=7e-3, momentum=0.9, weight_decay=5e-4)
    scheduler = SGDRScheduler(optimizer, total_epoch=150, eta_min=7e-5, warmup_epoch=10,
                              start_cyclical=100, cyclical_base_lr=3.5e-3, cyclical_epoch=10)
    lr = []
    for epoch in range(150):
        # Passing the epoch to step() is deprecated in newer PyTorch; it is
        # kept here to match the original demo. Newer versions also prefer
        # get_last_lr() over get_lr() outside of step().
        scheduler.step(epoch)
        lr.append(scheduler.get_lr())
    plt.style.use('ggplot')
    plt.plot(list(range(150)), lr)
    plt.show()