# ColossalAI/colossalai/zero/__init__.py

# Gemini: chunk-based ZeRO-3-style training with heterogeneous (CPU/GPU) memory management
from .gemini import GeminiAdamOptimizer, GeminiDDP, GeminiOptimizer, get_static_torch_model

# Low-level ZeRO: gradient and optimizer-state sharding (ZeRO stages 1 and 2)
from .low_level import LowLevelZeroOptimizer

# Convenience wrappers that apply the chosen ZeRO stage to a model and its optimizer
from .wrapper import zero_model_wrapper, zero_optim_wrapper

__all__ = [
    "GeminiDDP",
    "GeminiOptimizer",
    "GeminiAdamOptimizer",
    "zero_model_wrapper",
    "zero_optim_wrapper",
    "LowLevelZeroOptimizer",
    "get_static_torch_model",
]
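
# Minimal usage sketch (illustrative only, not part of this module): how the
# wrapper exports above are typically combined. The `zero_stage` keyword is an
# assumption inferred from the wrapper names; consult the ColossalAI docs for
# the exact signatures of zero_model_wrapper and zero_optim_wrapper.
#
#   import torch
#   from colossalai.zero import zero_model_wrapper, zero_optim_wrapper
#
#   model = torch.nn.Linear(1024, 1024).cuda()
#   model = zero_model_wrapper(model, zero_stage=3)   # assumed kwarg: ZeRO-3 / Gemini sharding
#   optimizer = torch.optim.Adam(model.parameters())
#   optimizer = zero_optim_wrapper(model, optimizer)  # ZeRO-aware optimizer wrapper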