[test] fix gemini checkpoint and gpt test (#4620)

Hongxin Liu 2023-09-05 16:02:23 +08:00 committed by GitHub
parent e71d245293
commit bd18678478
2 changed files with 2 additions and 3 deletions

Changed file 1 of 2:

@@ -32,7 +32,7 @@ def exam_from_pretrained(plugin_type: str, model_name: str, shard=True, size_per
     elif plugin_type == 'zero':
         plugin = LowLevelZeroPlugin(stage=2, max_norm=1.0, initial_scale=32)
     elif plugin_type == 'gemini':
-        plugin = GeminiPlugin(placement_policy='cuda', precision="fp16", initial_scale=32)
+        plugin = GeminiPlugin(precision="fp16", initial_scale=32)
     else:
         raise ValueError(f"Plugin with type {plugin_type} is invalid, please check your argument.")
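For context, this hunk only changes how the Gemini plugin is constructed inside exam_from_pretrained; the rest of the checkpoint round-trip sits outside the diff. Below is a minimal sketch of how such a plugin is typically wired into ColossalAI's Booster for a save/load check. The tiny model, the HybridAdam optimizer, the single-process launch, and the "model.pt" path are illustrative assumptions, not taken from this commit.

# Minimal sketch (assumptions noted above), not part of this commit.
import torch
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import GeminiPlugin
from colossalai.nn.optimizer import HybridAdam

# Assumes a single-process run; the real tests launch workers via spawn().
colossalai.launch(config={}, rank=0, world_size=1, host='localhost', port=29500, backend='nccl')

plugin = GeminiPlugin(precision="fp16", initial_scale=32)  # same constructor as the added line above
booster = Booster(plugin=plugin)

model = torch.nn.Linear(8, 8).cuda()
optimizer = HybridAdam(model.parameters(), lr=1e-3)
model, optimizer, *_ = booster.boost(model, optimizer)

# Save a checkpoint and load it back into the boosted model.
booster.save_model(model, "model.pt")
booster.load_model(model, "model.pt")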

Changed file 2 of 2:

@@ -102,7 +102,6 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
     torch.cuda.empty_cache()
 
 
-@pytest.mark.skip(reason="This test will hang in CI")
 @parameterize('test_config', [{
     'tp_size': 2,
     'pp_size': 2,
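For reference, @parameterize in this hunk comes from colossalai.testing and invokes the decorated function once per config dict when the function is called; the config list above is truncated by the diff context. A minimal sketch of that mechanism, with made-up config values and a hypothetical function name:

# Sketch of the @parameterize mechanism; these config values and the function
# name are illustrative only, not taken from this commit.
from colossalai.testing import parameterize


@parameterize('test_config', [{'tp_size': 2, 'pp_size': 2}, {'tp_size': 4, 'pp_size': 1}])
def run_config_check(test_config):
    # Called once per dict in the list, with test_config bound to that dict.
    print(test_config)


run_config_check()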
@@ -220,7 +219,7 @@ def check_gpt2_3d(rank, world_size, port):
     run_gpt2_3d_test()
 
 
-@pytest.mark.skip(reason="This test will hang in CI")
 @pytest.mark.dist
 @rerun_if_address_is_in_use()
 @clear_cache_before_run()
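The last hunk ends inside a decorator stack; the decorated test function itself falls outside the diff context. As a rough sketch of the spawn-based pattern these decorators sit on (the world size and the stubbed worker body are assumptions, not taken from this commit):

# Sketch of the decorator/spawn pattern around these hunks; the world size and
# the stubbed worker body are illustrative, not from this commit.
import pytest
from colossalai.testing import clear_cache_before_run, rerun_if_address_is_in_use, spawn


def check_gpt2_3d(rank, world_size, port):
    # The real worker launches colossalai and calls run_gpt2_3d_test();
    # stubbed here because only the decorators appear in the diff.
    pass


@pytest.mark.dist
@rerun_if_address_is_in_use()
@clear_cache_before_run()
def test_gpt2_3d():
    # spawn() runs check_gpt2_3d(rank, world_size, port) in N worker processes.
    spawn(check_gpt2_3d, 4)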