PyTorch Slanted Triangular Learning Rate Scheduler
import torch


class STLR(torch.optim.lr_scheduler._LRScheduler):
    """Slanted triangular learning rate scheduler (Howard & Ruder, 2018)."""

    def __init__(self, optimizer, max_mul, ratio, steps_per_cycle, decay=1, last_epoch=-1):
        self.max_mul = max_mul - 1
        self.turning_point = steps_per_cycle // (ratio + 1)
        self.steps_per_cycle = steps_per_cycle
        self.decay = decay
        super().__init__(optimizer, last_epoch)

    def get_lr(self):
        # Position within the current cycle, and a decay factor that
        # shrinks the peak with each completed cycle.
        residual = self.last_epoch % self.steps_per_cycle
        multiplier = self.decay ** (self.last_epoch // self.steps_per_cycle)
        if residual <= self.turning_point:
            # Short linear warm-up phase up to the turning point.
            multiplier *= self.max_mul * (residual / self.turning_point)
        else:
            # Longer linear decay phase back down to the base learning rate.
            multiplier *= self.max_mul * (
                (self.steps_per_cycle - residual) /
                (self.steps_per_cycle - self.turning_point))
        return [lr * (1 + multiplier) for lr in self.base_lrs]
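
A minimal usage sketch: the toy model, the loss, and all hyperparameter values below are illustrative assumptions, not part of the gist. The scheduler is stepped once per optimizer step, since steps_per_cycle counts batches rather than epochs.

# Warm-up ends at steps_per_cycle // (ratio + 1) = 100 steps into the
# cycle; the peak learning rate is base_lr * max_mul = 4e-3.
model = torch.nn.Linear(10, 2)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
scheduler = STLR(optimizer, max_mul=4, ratio=9, steps_per_cycle=1000)

for step in range(1000):
    optimizer.zero_grad()
    loss = model(torch.randn(8, 10)).sum()
    loss.backward()
    optimizer.step()
    scheduler.step()  # advance the schedule once per batch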
king-menin commented Jan 17, 2020

Can you explain what all the parameters mean and how they match the original paper?

ceshine commented Jan 19, 2020

You'll have to be more specific, i.e., point out which part you don't understand. In most cases, simply trying the scheduler and plotting the learning rates should be enough to see how it works.
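
For example, a quick way to plot the schedule (a sketch; matplotlib and all hyperparameter values here are arbitrary, chosen only to make the shape easy to see):

import matplotlib.pyplot as plt

# A single dummy parameter is enough to drive the scheduler.
optimizer = torch.optim.SGD([torch.nn.Parameter(torch.zeros(1))], lr=1e-3)
scheduler = STLR(optimizer, max_mul=4, ratio=9, steps_per_cycle=500, decay=0.5)

lrs = []
for _ in range(1500):  # three cycles; decay=0.5 halves each successive peak
    lrs.append(optimizer.param_groups[0]["lr"])
    optimizer.step()
    scheduler.step()

plt.plot(lrs)
plt.xlabel("step")
plt.ylabel("learning rate")
plt.show()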
