mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
13 lines
390 B
13 lines
390 B
"""Public API for the ZeRO (zero-redundancy optimizer) package.

Re-exports the Gemini DDP/optimizer classes, the low-level ZeRO
optimizer, and the convenience wrapper helpers so callers can import
them directly from this package instead of its submodules.
"""

from .gemini import GeminiAdamOptimizer, GeminiDDP, GeminiOptimizer, get_static_torch_model
from .low_level import LowLevelZeroOptimizer
from .wrapper import zero_model_wrapper, zero_optim_wrapper

# Names exported on `from <package> import *`; keep in sync with the
# imports above.
__all__ = [
    "GeminiDDP",
    "GeminiOptimizer",
    "GeminiAdamOptimizer",
    "zero_model_wrapper",
    "zero_optim_wrapper",
    "LowLevelZeroOptimizer",
    "get_static_torch_model",
]