ColossalAI/colossalai/tensor/d_tensor/__init__.py

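"""Public API for ColossalAI's distributed tensor (DTensor) utilities.

This package re-exports the tensor sharding and inspection helpers defined in
``.api``, together with the ``Layout`` and ``ShardingSpec`` abstractions that
describe how a tensor is partitioned across a device mesh.
"""
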
from .api import (
    compute_global_numel,
    customized_distributed_tensor_to_param,
    distribute_tensor,
    distribute_tensor_with_customization,
    get_device_mesh,
    get_global_shape,
    get_layout,
    get_shard_dim_1d,
    get_sharding_spec,
    init_as_dtensor,
    init_tensor_as_customization_distributed,
    is_customized_distributed_tensor,
    is_distributed_tensor,
    is_sharded,
    redistribute,
    shard_colwise,
    shard_rowwise,
    sharded_tensor_to_param,
    to_global,
    to_global_for_customized_distributed_tensor,
)
from .layout import Layout
from .sharding_spec import ShardingSpec

__all__ = [
    "is_distributed_tensor",
    "distribute_tensor",
    "init_as_dtensor",
    "to_global",
    "is_sharded",
    "shard_rowwise",
    "shard_colwise",
    "sharded_tensor_to_param",
    "compute_global_numel",
    "get_sharding_spec",
    "get_global_shape",
    "get_device_mesh",
    "redistribute",
    "get_layout",
    "get_shard_dim_1d",
    "is_customized_distributed_tensor",
    "distribute_tensor_with_customization",
    "init_tensor_as_customization_distributed",
    "to_global_for_customized_distributed_tensor",
    "customized_distributed_tensor_to_param",
    "Layout",
    "ShardingSpec",
]
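
# A minimal usage sketch (illustrative, not executed by this module; assumes
# the distributed environment has been initialized, e.g. via colossalai.launch,
# and that exact call signatures may differ across versions):
#
#   import torch
#   from colossalai.tensor.d_tensor import shard_rowwise, to_global
#
#   weight = torch.randn(1024, 1024)
#   d_weight = shard_rowwise(weight)  # shard along dim 0 across the default group
#   full = to_global(d_weight)        # gather shards back into the full tensor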