mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
13 lines
439 B
"""Public entry point for the ColossalAI optimizer package.

Re-exports the optimizer implementations (fused, CPU, and hybrid Adam
variants, LAMB/Lars, and the generic wrapper) plus the CPU Adam instance
counter so callers can simply do ``from colossalai.nn.optimizer import X``.
"""

from .utils import CPU_ADAM_CNT
from .colossalai_optimizer import ColossalaiOptimizer
from .fused_adam import FusedAdam
from .fused_lamb import FusedLAMB
from .fused_sgd import FusedSGD
from .lamb import Lamb
from .lars import Lars
from .cpu_adam import CPUAdam
from .hybrid_adam import HybridAdam

# Explicit public API: everything imported above is re-exported.
__all__ = [
    'ColossalaiOptimizer',
    'FusedLAMB',
    'FusedAdam',
    'FusedSGD',
    'Lamb',
    'Lars',
    'CPUAdam',
    'HybridAdam',
    'CPU_ADAM_CNT',
]