ColossalAI/colossalai/nn/lr_scheduler/__init__.py

from .cosine import CosineAnnealingLR, CosineAnnealingWarmupLR, FlatAnnealingLR, FlatAnnealingWarmupLR
from .linear import LinearWarmupLR
from .multistep import MultiStepLR, MultiStepWarmupLR
from .onecycle import OneCycleLR
from .poly import PolynomialLR, PolynomialWarmupLR
from .torch import ExponentialLR, LambdaLR, MultiplicativeLR, StepLR

__all__ = [
"CosineAnnealingLR",
"CosineAnnealingWarmupLR",
"FlatAnnealingLR",
"FlatAnnealingWarmupLR",
"LinearWarmupLR",
"MultiStepLR",
"MultiStepWarmupLR",
"OneCycleLR",
"PolynomialLR",
"PolynomialWarmupLR",
"LambdaLR",
"MultiplicativeLR",
"StepLR",
"ExponentialLR",
]
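
For reference, a minimal usage sketch of one of the exported schedulers. This is an assumption, not taken from the file above: it presumes CosineAnnealingWarmupLR wraps a PyTorch optimizer and accepts total_steps/warmup_steps arguments, and that it is stepped once per optimizer step like other torch LR schedulers.

import torch
from colossalai.nn.lr_scheduler import CosineAnnealingWarmupLR

# Toy model and optimizer, only to have parameters to schedule.
model = torch.nn.Linear(16, 16)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Assumed parameter names (total_steps, warmup_steps); check the class
# definition in colossalai/nn/lr_scheduler/cosine.py for the exact signature.
scheduler = CosineAnnealingWarmupLR(optimizer, total_steps=1000, warmup_steps=100)

for step in range(1000):
    optimizer.step()
    scheduler.step()  # advance the warmup + cosine-annealing schedule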