# Mirror of https://github.com/hpcaitech/ColossalAI
# lr_scheduler package init: re-exports all learning-rate scheduler classes.
from .cosine import CosineAnnealingLR, CosineAnnealingWarmupLR, FlatAnnealingLR, FlatAnnealingWarmupLR
from .linear import LinearWarmupLR
from .multistep import MultiStepLR, MultiStepWarmupLR
from .onecycle import OneCycleLR
from .poly import PolynomialLR, PolynomialWarmupLR
from .torch import ExponentialLR, LambdaLR, MultiplicativeLR, StepLR

# Explicit public API of this package: every scheduler class re-exported above.
# Keep this list in sync with the imports when adding a new scheduler module.
__all__ = [
    'CosineAnnealingLR', 'CosineAnnealingWarmupLR', 'FlatAnnealingLR', 'FlatAnnealingWarmupLR', 'LinearWarmupLR',
    'MultiStepLR', 'MultiStepWarmupLR', 'OneCycleLR', 'PolynomialLR', 'PolynomialWarmupLR', 'LambdaLR',
    'MultiplicativeLR', 'StepLR', 'ExponentialLR'
]