mirror of https://github.com/hpcaitech/ColossalAI
[NFC] polish <colossalai/nn/lr_scheduler/poly.py> code style (#1267)
parent: c92f84fcdb
commit: 0cf8e8e91c
@@ -17,7 +17,12 @@ class PolynomialLR(_LRScheduler):
             the schedule is started from the beginning or When last_epoch=-1, sets initial lr as lr.
     """
 
-    def __init__(self, optimizer, total_steps: int, end_lr: float = 0.0001, power: float = 1.0, last_epoch: int = -1,
-                 **kwargs):
+    def __init__(self,
+                 optimizer,
+                 total_steps: int,
+                 end_lr: float = 0.0001,
+                 power: float = 1.0,
+                 last_epoch: int = -1,
+                 **kwargs):
         if end_lr < 0:
             raise ValueError(f'end_lr must >= 0, got {end_lr}')
@@ -30,11 +35,9 @@ class PolynomialLR(_LRScheduler):
         return self._get_closed_form_lr()
 
     def _get_closed_form_lr(self):
-        return [
-            (base_lr - self.end_lr) * ((1 - min(self.last_epoch, self.total_steps) /
-                                        self.total_steps) ** self.power) + self.end_lr
-            for base_lr in self.base_lrs
-        ]
+        return [(base_lr - self.end_lr) *
+                ((1 - min(self.last_epoch, self.total_steps) / self.total_steps)**self.power) + self.end_lr
+                for base_lr in self.base_lrs]
 
 
 @LR_SCHEDULERS.register_module
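The hunk above only reflows the list comprehension; the decay formula itself is unchanged. As a quick sanity check, here is a standalone sketch of the same closed-form schedule (the helper name poly_lr is illustrative and not part of the repository):

    # Reproduces the closed-form polynomial decay used in PolynomialLR._get_closed_form_lr.
    def poly_lr(base_lr, last_epoch, total_steps, end_lr=0.0001, power=1.0):
        # lr = (base_lr - end_lr) * (1 - step / total_steps) ** power + end_lr,
        # with the step clamped to total_steps so the lr never drops below end_lr.
        progress = 1 - min(last_epoch, total_steps) / total_steps
        return (base_lr - end_lr) * progress ** power + end_lr

    # With base_lr=0.1 and power=1.0 the schedule decays linearly towards end_lr:
    # poly_lr(0.1, 0, 100)   -> 0.1
    # poly_lr(0.1, 50, 100)  -> 0.05005
    # poly_lr(0.1, 100, 100) -> 0.0001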
@@ -51,8 +54,13 @@ class PolynomialWarmupLR(WarmupScheduler):
             the schedule is started from the beginning or When last_epoch=-1, sets initial lr as lr.
     """
 
-    def __init__(self, optimizer, total_steps: int, warmup_steps: int = 0, end_lr: float = 0.0001, power: float = 1.0,
-                 last_epoch: int = -1, **kwargs):
-        base_scheduler = PolynomialLR(
-            optimizer, total_steps - warmup_steps, end_lr=end_lr, power=power)
+    def __init__(self,
+                 optimizer,
+                 total_steps: int,
+                 warmup_steps: int = 0,
+                 end_lr: float = 0.0001,
+                 power: float = 1.0,
+                 last_epoch: int = -1,
+                 **kwargs):
+        base_scheduler = PolynomialLR(optimizer, total_steps - warmup_steps, end_lr=end_lr, power=power)
         super().__init__(optimizer, warmup_steps, base_scheduler, last_epoch=last_epoch)
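For context, a minimal usage sketch of the two schedulers touched by this commit, assuming they are exported from colossalai.nn.lr_scheduler (the package containing poly.py) and wrap a standard PyTorch optimizer:

    import torch
    from colossalai.nn.lr_scheduler import PolynomialLR, PolynomialWarmupLR  # assumed import path

    model = torch.nn.Linear(8, 8)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Polynomial decay over 100 steps, with 10 warmup steps handled by the wrapper;
    # PolynomialLR alone gives the same decay without warmup.
    scheduler = PolynomialWarmupLR(optimizer, total_steps=100, warmup_steps=10, end_lr=0.0001, power=1.0)

    for _ in range(100):
        optimizer.step()    # training step omitted for brevity
        scheduler.step()    # advance the schedule once per training step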