Making large AI models cheaper, faster and more accessible
from .colo_init_context import ColoInitContext, post_process_colo_init_ctx
from .ophooks import BaseOpHook, register_ophooks_recursively
from .stateful_tensor import StatefulTensor
from .stateful_tensor_mgr import StatefulTensorMgr
from .tensor_placement_policy import AutoTensorPlacementPolicy, CPUTensorPlacementPolicy, CUDATensorPlacementPolicy

__all__ = [
    "StatefulTensorMgr",
    "StatefulTensor",
    "CPUTensorPlacementPolicy",
    "CUDATensorPlacementPolicy",
    "AutoTensorPlacementPolicy",
    "register_ophooks_recursively",
    "BaseOpHook",
    "ColoInitContext",
    "post_process_colo_init_ctx",
]
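
A minimal sketch of how the exported pieces might fit together: build a model inside `ColoInitContext` and attach a custom `BaseOpHook` to it with `register_ophooks_recursively`. The parent package path (written here as `colossalai.gemini`) is an assumption, since this `__init__.py`'s location is not shown above, and the `BaseOpHook` method names and `ColoInitContext(device=...)` signature may differ between ColossalAI versions.

```python
import torch
import torch.nn as nn

# NOTE: assumed import path -- adjust to wherever this __init__.py actually lives.
from colossalai.gemini import (
    BaseOpHook,
    ColoInitContext,
    register_ophooks_recursively,
)


class PrintOpHook(BaseOpHook):
    """Hypothetical hook that logs each submodule's forward pass."""

    def pre_fwd_exec(self, module, *args):
        print(f"forward -> {module.__class__.__name__}")

    def post_fwd_exec(self, module, *args):
        pass

    def pre_bwd_exec(self, module, input, output):
        pass

    def post_bwd_exec(self, module, input):
        pass

    def post_iter(self):
        pass


# Create the model's parameters under ColoInitContext, then register the
# hook on every submodule so it fires during the forward pass.
with ColoInitContext(device=torch.device("cpu")):
    model = nn.Sequential(nn.Linear(8, 8), nn.ReLU(), nn.Linear(8, 2))

register_ophooks_recursively(model, [PrintOpHook()])
model(torch.randn(4, 8))
```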