ColossalAI/colossalai/tensor/__init__.py

from .colo_parameter import ColoParameter
from .colo_tensor import ColoTensor
from .comm_spec import CollectiveCommPattern, CommSpec
from .param_op_hook import ColoParamOpHook, ColoParamOpHookManager
from .utils import convert_dim_partition_dict, convert_parameter, merge_same_dim_mesh_list, named_params_with_colotensor

__all__ = [
"ColoTensor",
"convert_parameter",
"named_params_with_colotensor",
"ColoParameter",
"ColoParamOpHook",
"ColoParamOpHookManager",
"CommSpec",
"CollectiveCommPattern",
"convert_dim_partition_dict",
"merge_same_dim_mesh_list",
]
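
A minimal, hypothetical usage sketch (not part of the repository): it relies only on the names re-exported above and simply checks that every entry in __all__ can be resolved on the colossalai.tensor package.

import colossalai.tensor as ct

# Every public name listed in __all__ should be importable from the package.
for name in ct.__all__:
    assert hasattr(ct, name), f"{name} is not exported by colossalai.tensor"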