ColossalAI/colossalai/nn/lr_scheduler/__init__.py

from .cosine import CosineAnnealingLR, CosineAnnealingWarmupLR, FlatAnnealingLR, FlatAnnealingWarmupLR
from .linear import LinearWarmupLR
from .multistep import MultiStepLR, MultiStepWarmupLR
from .onecycle import OneCycleLR
from .poly import PolynomialLR, PolynomialWarmupLR
from .torch import ExponentialLR, LambdaLR, MultiplicativeLR, StepLR
__all__ = [
"CosineAnnealingLR",
"CosineAnnealingWarmupLR",
"FlatAnnealingLR",
"FlatAnnealingWarmupLR",
"LinearWarmupLR",
"MultiStepLR",
"MultiStepWarmupLR",
"OneCycleLR",
"PolynomialLR",
"PolynomialWarmupLR",
"LambdaLR",
"MultiplicativeLR",
"StepLR",
"ExponentialLR",
]
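
For reference, a minimal usage sketch of one of the re-exported schedulers. The constructor arguments shown (total_steps, warmup_steps) are assumed from the class name and common warmup-scheduler conventions; they are not confirmed by this file:

import torch
from colossalai.nn.lr_scheduler import CosineAnnealingWarmupLR

# Toy model and optimizer for illustration only.
model = torch.nn.Linear(8, 8)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Assumed signature: warm the learning rate up for `warmup_steps`,
# then anneal it along a cosine curve over the remaining steps.
scheduler = CosineAnnealingWarmupLR(optimizer, total_steps=100, warmup_steps=10)

for _ in range(100):
    optimizer.step()   # one training step (loss/backward omitted)
    scheduler.step()   # advance the schedule by one step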