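"""Public re-exports for the tensor package: the ColoTensor type, its
parameter and optimizer wrappers, the op-dispatch helper, and the
distribution-spec utilities."""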
from .spec import ComputePattern, ParallelAction, TensorSpec
from .op_wrapper import colo_op_impl
from .colo_tensor import ColoTensor
from .colo_parameter import ColoParameter
from .utils import convert_parameter, named_params_with_colotensor
from ._ops import *
from .optim.colo_optimizer import ColoOptimizer
from . import distspec
from .dist_spec_mgr import DistSpecManager

__all__ = [
    'ColoTensor', 'convert_parameter', 'colo_op_impl', 'ComputePattern', 'TensorSpec', 'ParallelAction',
    'named_params_with_colotensor', 'ColoOptimizer', 'ColoParameter', 'distspec', 'DistSpecManager'
]
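
# --- Usage sketch (illustrative) -------------------------------------------
# A minimal sketch of consuming this module's public API, assuming the
# package is importable as `colossalai.tensor` (inferred from the relative
# imports above, not stated in this file). Only names listed in __all__
# are referenced:
#
#   import colossalai.tensor as colo
#
#   # every re-exported public name is reachable from the package root
#   assert all(hasattr(colo, name) for name in colo.__all__)
#
#   from colossalai.tensor import ColoTensor, TensorSpec, DistSpecManager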