You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
ColossalAI/colossalai/legacy/utils/__init__.py

54 lines
1.4 KiB

from .checkpointing import load_checkpoint, save_checkpoint
from .common import (
clip_grad_norm_fp32,
copy_tensor_parallel_attributes,
count_zeros_fp32,
is_dp_rank_0,
is_model_parallel_parameter,
is_no_pp_or_last_stage,
is_tp_rank_0,
is_using_ddp,
is_using_pp,
is_using_sequence,
param_is_not_tensor_parallel_duplicate,
print_rank_0,
switch_virtual_pipeline_parallel_rank,
sync_model_param,
)
from .data_sampler import DataParallelSampler, get_dataloader
from .memory import (
colo_device_memory_capacity,
colo_device_memory_used,
colo_get_cpu_memory_capacity,
colo_set_cpu_memory_capacity,
colo_set_process_memory_fraction,
report_memory_usage,
)
# Public API of the legacy utils package. Mirrors the imports above:
# every name imported from the submodules is re-exported here.
__all__ = [
    # data sampling / loading
    "DataParallelSampler",
    "get_dataloader",
    # checkpointing
    "save_checkpoint",
    "load_checkpoint",
    # memory utilities
    "colo_device_memory_capacity",
    "colo_device_memory_used",
    "colo_get_cpu_memory_capacity",
    "colo_set_cpu_memory_capacity",
    "colo_set_process_memory_fraction",
    "report_memory_usage",
    # common parallel/distributed helpers
    "clip_grad_norm_fp32",
    "copy_tensor_parallel_attributes",
    "count_zeros_fp32",
    "is_dp_rank_0",
    "is_model_parallel_parameter",
    "is_no_pp_or_last_stage",
    "is_tp_rank_0",
    "is_using_ddp",
    "is_using_pp",
    "is_using_sequence",
    "param_is_not_tensor_parallel_duplicate",
    "print_rank_0",
    "switch_virtual_pipeline_parallel_rank",
    "sync_model_param",
]