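# Package-level exports for the tensor submodule: ColoTensor/ColoParameter
# types, collective communication specs, parameter-op hooks, and helper
# utilities.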
from .colo_parameter import ColoParameter
from .colo_tensor import ColoTensor
from .comm_spec import CollectiveCommPattern, CommSpec
from .param_op_hook import ColoParamOpHook, ColoParamOpHookManager
from .utils import convert_dim_partition_dict, convert_parameter, merge_same_dim_mesh_list, named_params_with_colotensor

__all__ = [
    "ColoTensor",
    "convert_parameter",
    "named_params_with_colotensor",
    "ColoParameter",
    "ColoParamOpHook",
    "ColoParamOpHookManager",
    "CommSpec",
    "CollectiveCommPattern",
    "convert_dim_partition_dict",
    "merge_same_dim_mesh_list",
]