ColossalAI/colossalai/tensor/d_tensor/__init__.py

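"""Public entry point for ColossalAI's distributed tensor (d_tensor) utilities.

Re-exports the tensor distribution and sharding helpers defined in ``.api``
together with the ``Layout`` and ``ShardingSpec`` building blocks, so callers
can import everything from ``colossalai.tensor.d_tensor`` directly.
"""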

from .api import (
    compute_global_numel,
    customized_distributed_tensor_to_param,
    distribute_tensor,
    init_as_dtensor,
    distribute_tensor_with_customization,
    init_tensor_as_customization_distributed,
    get_device_mesh,
    get_global_shape,
    get_layout,
    get_sharding_spec,
    is_customized_distributed_tensor,
    is_distributed_tensor,
    is_sharded,
    redistribute,
    shard_colwise,
    shard_rowwise,
    sharded_tensor_to_param,
    to_global,
    to_global_for_customized_distributed_tensor,
)
from .layout import Layout
from .sharding_spec import ShardingSpec

__all__ = [
    "is_distributed_tensor",
    "distribute_tensor",
    "init_as_dtensor",
    "to_global",
    "is_sharded",
    "shard_rowwise",
    "shard_colwise",
    "sharded_tensor_to_param",
    "compute_global_numel",
    "get_sharding_spec",
    "get_global_shape",
    "get_device_mesh",
    "redistribute",
    "get_layout",
    "is_customized_distributed_tensor",
    "distribute_tensor_with_customization",
    "init_tensor_as_customization_distributed",
    "to_global_for_customized_distributed_tensor",
    "customized_distributed_tensor_to_param",
    "Layout",
    "ShardingSpec",
]
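
A minimal usage sketch of these exports (assumptions: shard_rowwise accepts a plain torch.Tensor and defaults to the global process group, and to_global gathers the shards back into a full tensor; the authoritative signatures live in colossalai/tensor/d_tensor/api.py):

# Hypothetical sketch; run under an initialized torch.distributed /
# colossalai.launch environment, and check api.py for exact signatures.
import torch

from colossalai.tensor.d_tensor import (
    get_sharding_spec,
    is_distributed_tensor,
    shard_rowwise,
    to_global,
)

weight = torch.randn(1024, 1024)

# Shard along dim 0 across the default process group (assumed default behavior).
d_weight = shard_rowwise(weight)

assert is_distributed_tensor(d_weight)
print(get_sharding_spec(d_weight))  # how the tensor is partitioned
full_weight = to_global(d_weight)   # gather back to the unsharded tensor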