import pytest

from lazy_init_utils import SUPPORT_LAZY, check_lazy_init

from tests.kit.model_zoo import model_zoo


@pytest.mark.skipif(not SUPPORT_LAZY, reason='requires torch >= 1.12.0')
@pytest.mark.parametrize('subset', ['torchvision', 'diffusers', 'timm', 'transformers', 'torchaudio', 'deepfm', 'dlrm'])
def test_torchvision_models_lazy_init(subset):
    sub_model_zoo = model_zoo.get_sub_registry(subset)
    for name, entry in sub_model_zoo.items():
        # TODO(ver217): lazy init does not support weight norm, skip these models
        if name in ('torchaudio_wav2vec2_base', 'torchaudio_hubert_base'):
            continue
        check_lazy_init(entry, verbose=True)
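

# --- Illustrative sketch (not part of the original test file) ----------------
# `check_lazy_init` comes from the sibling `lazy_init_utils` test helper and
# relies on ColossalAI's lazy-init machinery. The function below is only a
# rough sketch of the property being checked, using plain PyTorch's 'meta'
# device (requires torch >= 2.0 for the default-device context manager):
# construct the model without allocating real storage, then confirm its
# parameter structure matches an eagerly constructed copy.
import torch


def sketch_check_lazy_init(model_fn):
    eager_model = model_fn()  # ordinary eager construction allocates real tensors
    with torch.device('meta'):  # parameters created here get no real storage
        lazy_model = model_fn()
    for (name_e, p_e), (name_l, p_l) in zip(eager_model.named_parameters(), lazy_model.named_parameters()):
        assert name_e == name_l and p_e.shape == p_l.shape  # structure must match eager init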


if __name__ == '__main__':
    test_torchvision_models_lazy_init('torchvision')