mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
39 lines · 865 B
from .comparison import (
    assert_close,
    assert_close_loose,
    assert_equal,
    assert_equal_in_group,
    assert_hf_output_close,
    assert_not_equal,
    check_state_dict_equal,
)
from .pytest_wrapper import run_on_environment_flag
from .utils import (
    DummyDataloader,
    clear_cache_before_run,
    free_port,
    parameterize,
    rerun_if_address_is_in_use,
    rerun_on_exception,
    skip_if_not_enough_gpus,
    spawn,
)

__all__ = [
    "assert_equal",
    "assert_not_equal",
    "assert_close",
    "assert_close_loose",
    "assert_equal_in_group",
    "parameterize",
    "rerun_on_exception",
    "rerun_if_address_is_in_use",
    "skip_if_not_enough_gpus",
    "free_port",
    "spawn",
    "clear_cache_before_run",
    "run_on_environment_flag",
    "check_state_dict_equal",
    "assert_hf_output_close",
    "DummyDataloader",
]
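These re-exports form the public surface of ColossalAI's testing helpers: tensor comparison assertions, pytest gating, and utilities for launching multi-process distributed tests. Below is a minimal sketch of how a few of them are commonly combined in a distributed unit test. The exact call conventions are assumptions, not guaranteed by this file alone: it assumes spawn(func, nprocs) forwards (rank, world_size, port) to each worker, and that parameterize, rerun_if_address_is_in_use, and clear_cache_before_run are used as test decorators.

# Sketch only: worker signature and decorator forms are assumed, see note above.
import os

import torch
import torch.distributed as dist

from colossalai.testing import (
    assert_close_loose,
    clear_cache_before_run,
    parameterize,
    rerun_if_address_is_in_use,
    spawn,
)


def run_allreduce(rank, world_size, port):
    # Assumed worker signature: spawn passes the rank, the world size, and a free port.
    os.environ["MASTER_ADDR"] = "localhost"
    os.environ["MASTER_PORT"] = str(port)
    dist.init_process_group("gloo", rank=rank, world_size=world_size)

    x = torch.ones(4)
    dist.all_reduce(x)  # each rank should now hold a tensor filled with world_size
    assert_close_loose(x, torch.full((4,), float(world_size)))

    dist.destroy_process_group()


@parameterize("world_size", [1, 2])      # run the test once per world size
@rerun_if_address_is_in_use()            # retry if the chosen port is taken
@clear_cache_before_run()                # start from a clean CUDA/allocator state
def test_allreduce(world_size):
    spawn(run_allreduce, world_size)


if __name__ == "__main__":
    test_allreduce()

The design keeps each test self-contained: parameterize expands one test function over several world sizes, while spawn together with free ports lets the same test run repeatedly on shared CI machines without port collisions.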