# Source: ColossalAI (mirror of https://github.com/hpcaitech/ColossalAI)
# Package initializer for the learning-rate scheduler module.
"""Public learning-rate scheduler API for this package.

Re-exports every scheduler implemented in the sibling modules so callers
can simply do ``from <package> import OneCycleLR`` etc.  ``__all__``
declares the complete public surface, mirroring the import list below.
"""

from .cosine import CosineAnnealingLR, CosineAnnealingWarmupLR, FlatAnnealingLR, FlatAnnealingWarmupLR
from .linear import LinearWarmupLR, LinearWarmupDecay
from .multistep import MultiStepLR, MultiStepWarmupLR
from .onecycle import OneCycleLR
from .poly import PolynomialLR, PolynomialWarmupLR
from .torch import LambdaLR, MultiplicativeLR, StepLR, ExponentialLR

__all__ = [
    'CosineAnnealingLR', 'CosineAnnealingWarmupLR', 'FlatAnnealingLR', 'FlatAnnealingWarmupLR', 'LinearWarmupLR',
    # fix: LinearWarmupDecay was imported above but previously omitted from __all__,
    # so `from <package> import *` silently dropped it from the public API.
    'LinearWarmupDecay',
    'MultiStepLR', 'MultiStepWarmupLR', 'OneCycleLR', 'PolynomialLR', 'PolynomialWarmupLR', 'LambdaLR',
    'MultiplicativeLR', 'StepLR',
    'ExponentialLR'
]
|