mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
from .chunk import ChunkManager, TensorInfo, TensorState, search_chunk_configuration
from .gemini_ddp import GeminiDDP
from .gemini_mgr import GeminiManager
from .gemini_optimizer import GeminiAdamOptimizer, GeminiOptimizer
from .utils import get_static_torch_model

__all__ = [
    "GeminiManager",
    "TensorInfo",
    "TensorState",
    "ChunkManager",
    "search_chunk_configuration",
    "GeminiDDP",
    "get_static_torch_model",
    "GeminiAdamOptimizer",
    "GeminiOptimizer",
]
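
This `__init__.py` re-exports the public Gemini API: chunk management utilities, the `GeminiDDP` model wrapper, and the Gemini optimizers. Below is a minimal usage sketch of how these exported names are typically combined. The keyword arguments, default settings, and launch call shown are assumptions for illustration and may differ between ColossalAI versions; they are not taken from this file.

# Hedged sketch: wrapping a model with GeminiDDP and optimizing it with
# GeminiAdamOptimizer. Arguments and the launch helper are assumptions;
# consult the ColossalAI documentation for the exact API of your version.
import torch
import colossalai
from colossalai.zero.gemini import GeminiDDP, GeminiAdamOptimizer

colossalai.launch_from_torch(config={})  # assumed: torchrun-style launch; newer versions may not take `config`

model = torch.nn.Linear(1024, 1024).cuda()
model = GeminiDDP(model)                         # shard parameters into Gemini-managed chunks (defaults assumed)
optimizer = GeminiAdamOptimizer(model, lr=1e-3)  # Adam over the chunked (ZeRO-style) parameters

inputs = torch.randn(8, 1024, device="cuda")
loss = model(inputs).sum()
optimizer.backward(loss)  # assumed: the Gemini optimizer drives backward so gradients stay chunked
optimizer.step()
optimizer.zero_grad()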