ColossalAI/colossalai/utils/__init__.py


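"""Package-level re-exports for ``colossalai.utils``.

Gathers the commonly used helpers from the submodules so callers can
import them directly from ``colossalai.utils``: generic utilities from
``common`` (seeding, context managers, path and storage helpers), CUDA
device helpers from ``cuda``, the fused ``multi_tensor_applier``, the
``TensorDetector`` debugging tool, and the ``Timer``/``MultiTimer``
timing utilities.
"""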
from .common import (
_cast_float,
conditional_context,
disposable,
ensure_path_exists,
free_storage,
is_ddp_ignored,
set_seed,
)
from .cuda import empty_cache, get_current_device, set_device, set_to_cuda, synchronize
from .multi_tensor_apply import multi_tensor_applier
from .tensor_detector import TensorDetector
from .timer import MultiTimer, Timer

__all__ = [
'conditional_context',
'get_current_device',
'synchronize',
'empty_cache',
'set_to_cuda',
'Timer',
'MultiTimer',
'multi_tensor_applier',
'TensorDetector',
'ensure_path_exists',
'disposable',
'_cast_float',
'free_storage',
'set_seed',
'is_ddp_ignored',
'set_device',
]
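
A minimal usage sketch of a few of the re-exported helpers follows. The call patterns (e.g. ``Timer`` exposing ``start()``/``stop()``, ``get_current_device`` returning a ``torch.device``, ``set_seed`` seeding the RNGs) are inferred from the names alone and are assumptions, not something this file itself confirms.

# Hypothetical usage sketch -- APIs inferred from the names exported above.
import torch

from colossalai.utils import Timer, get_current_device, set_seed

set_seed(42)                   # assumed: seeds the RNGs for reproducibility
device = get_current_device()  # assumed: current CUDA device when available

timer = Timer()
timer.start()
x = torch.randn(1024, 1024, device=device)
y = x @ x                      # small workload to time
elapsed = timer.stop()         # assumed: returns the elapsed time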