Open-Sora/opensora/utils/lr_scheduler.py
from torch.optim.lr_scheduler import _LRScheduler


class LinearWarmupLR(_LRScheduler):
"""Linearly warmup learning rate and then linearly decay.
Args:
optimizer (:class:`torch.optim.Optimizer`): Wrapped optimizer.
warmup_steps (int, optional): Number of warmup steps, defaults to 0
last_step (int, optional): The index of last step, defaults to -1. When last_step=-1,
the schedule is started from the beginning or When last_step=-1, sets initial lr as lr.
"""
    def __init__(self, optimizer, warmup_steps: int = 0, last_epoch: int = -1):
        self.warmup_steps = warmup_steps
        super().__init__(optimizer, last_epoch=last_epoch)

    def get_lr(self):
        if self.last_epoch < self.warmup_steps:
            # Warmup: scale each base lr linearly, from 1/(warmup_steps + 1)
            # of its value at the first step up to the full value.
            return [(self.last_epoch + 1) / (self.warmup_steps + 1) * lr for lr in self.base_lrs]
        else:
            # After warmup: hold the base learning rates constant.
            return self.base_lrs
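
A minimal usage sketch (illustrative, not part of the file): `scheduler.step()` is called once per training step, so `last_epoch` effectively counts optimizer steps; the model, learning rate, and step counts below are assumed placeholders.

import torch

model = torch.nn.Linear(8, 8)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)
scheduler = LinearWarmupLR(optimizer, warmup_steps=1000)

for step in range(2000):
    optimizer.step()   # update parameters first (PyTorch's expected order)
    scheduler.step()   # then advance the warmup schedule by one step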