mirror of https://github.com/hpcaitech/ColossalAI
ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
29 lines
1.1 KiB
29 lines
1.1 KiB
import pytest |
|
from lazy_init_utils import SUPPORT_LAZY, check_lazy_init |
|
|
|
from tests.kit.model_zoo import COMMON_MODELS, IS_FAST_TEST, model_zoo |
|
|
|
|
|
@pytest.mark.skipif(not SUPPORT_LAZY, reason="requires torch >= 1.12.0")
@pytest.mark.parametrize(
    "subset",
    (
        [COMMON_MODELS]
        if IS_FAST_TEST
        else ["torchvision", "diffusers", "timm", "transformers", "torchaudio", "deepfm", "dlrm"]
    ),
)
@pytest.mark.parametrize("default_device", ["cpu", "cuda"])
def test_models_lazy_init(subset, default_device):
    """Check lazy initialization against eager init for every model in *subset*.

    For each registry entry, ``check_lazy_init`` verifies that materializing a
    lazily-initialized model matches a normally-constructed one on the given
    ``default_device`` ("cpu" or "cuda").
    """
    # TODO(ver217): lazy init does not support weight norm, skip these models
    skip_exact = ("torchaudio_wav2vec2_base", "torchaudio_hubert_base")
    skip_prefixes = ("transformers_vit", "transformers_blip2", "transformers_whisper")

    registry = model_zoo.get_sub_registry(subset, allow_empty=True)
    for model_name, model_entry in registry.items():
        if model_name in skip_exact or model_name.startswith(skip_prefixes):
            continue
        check_lazy_init(model_entry, verbose=True, default_device=default_device)
|
|
|
|
|
if __name__ == "__main__":
    # Manual smoke run: exercise only the transformers subset on CPU.
    test_models_lazy_init("transformers", "cpu")
|
|
|