You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
ColossalAI/colossalai/utils/__init__.py

31 lines
660 B

from .common import (
_cast_float,
conditional_context,
disposable,
ensure_path_exists,
free_storage,
get_current_device,
get_non_persistent_buffers_set,
is_ddp_ignored,
set_seed,
)
from .multi_tensor_apply import multi_tensor_applier
from .tensor_detector import TensorDetector
from .timer import MultiTimer, Timer
3 years ago
# Public re-export list for `colossalai.utils`: this is exactly what
# `from colossalai.utils import *` exposes. Keep in sync with the
# imports above (order preserved for backward compatibility).
__all__ = [
    "conditional_context",              # .common
    "Timer",                            # .timer
    "MultiTimer",                       # .timer
    "multi_tensor_applier",             # .multi_tensor_apply
    "TensorDetector",                   # .tensor_detector
    "ensure_path_exists",               # .common
    "disposable",                       # .common
    "_cast_float",                      # .common (private name kept exported for compat)
    "free_storage",                     # .common
    "set_seed",                         # .common
    "get_current_device",               # .common
    "is_ddp_ignored",                   # .common
    "get_non_persistent_buffers_set",   # .common
]