@@ -82,8 +82,8 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
     atol, rtol = 5e-3, 5e-3

     if stage_manager is None or stage_manager.is_first_stage():
-        check_grad(whisper, sharded_whisper, row_layer_for_check, tp_group, atol=atol, rtol=rtol, dim=0)
-        check_grad(whisper, sharded_whisper, col_layer_for_check, tp_group, atol=atol, rtol=rtol, dim=1)
+        check_grad(whisper, sharded_whisper, row_layer_for_check, tp_group, atol=atol, rtol=rtol, dim=1)
+        check_grad(whisper, sharded_whisper, col_layer_for_check, tp_group, atol=atol, rtol=rtol, dim=0)

     # check weights after optimizer.step()
     org_optimizer.step()
@@ -99,7 +99,7 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
                      tp_group,
                      atol=atol,
                      rtol=rtol,
-                     dim=0,
+                     dim=1,
                      verbose=False)
         check_weight(whisper,
                      sharded_whisper,
@@ -155,12 +155,39 @@ def run_whisper_test(test_config):
     torch.cuda.empty_cache()


+@parameterize('test_config', [
+    {
+        'tp_size': 2,
+        'pp_size': 2,
+        'num_microbatches': 4,
+        'enable_all_optimization': False,
+        'use_lazy_init': False,
+        'precision': 'fp32',
+        'initial_scale': 1,
+    },
+])
+def run_whisper_3d_test(test_config):
+    sub_model_zoo = model_zoo.get_sub_registry('transformers_whisper')
+
+    for name, (model_fn, data_gen_fn, output_transform_fn, loss_fn, _) in sub_model_zoo.items():
+        check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn, test_config)
+
+    clear_layout_converter()
+    torch.cuda.empty_cache()
+
+
 def check_whisper(rank, world_size, port):
     disable_existing_loggers()
     colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
     run_whisper_test()


+def check_whisper_3d(rank, world_size, port):
+    disable_existing_loggers()
+    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
+    run_whisper_3d_test()
+
+
 @pytest.mark.dist
 @rerun_if_address_is_in_use()
 @clear_cache_before_run()
@@ -168,5 +195,13 @@ def test_whisper():
     spawn(check_whisper, 4)


+@pytest.mark.largedist
+@rerun_if_address_is_in_use()
+@clear_cache_before_run()
+def test_whisper_3d():
+    spawn(check_whisper_3d, 8)
+
+
 if __name__ == "__main__":
     test_whisper()
+    test_whisper_3d()
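Note on the `dim` swap: PyTorch stores a `Linear` weight as `(out_features, in_features)`, so a Megatron-style column-parallel layer is sharded along dim 0 and a row-parallel layer along dim 1. The patch swaps the gather dimensions in `check_grad`/`check_weight` so shards are concatenated along the dimension they were actually split on. Below is a minimal, self-contained sketch of that invariant; `gather_shards` is a hypothetical stand-in for the all-gather over the tensor-parallel group, not the ColossalAI helper.

```python
# Sketch of why the gather dimension depends on the sharding scheme.
# `gather_shards` is a hypothetical stand-in, NOT the ColossalAI implementation.
import torch


def gather_shards(shards, dim):
    """Concatenate per-rank shards along the dimension they were split on."""
    return torch.cat(shards, dim=dim)


full_weight = torch.randn(8, 4)  # (out_features, in_features)

# Column-parallel: the output dimension (dim 0) is split across ranks.
col_shards = torch.chunk(full_weight, 2, dim=0)
assert torch.equal(gather_shards(col_shards, dim=0), full_weight)

# Row-parallel: the input dimension (dim 1) is split across ranks.
row_shards = torch.chunk(full_weight, 2, dim=1)
assert torch.equal(gather_shards(row_shards, dim=1), full_weight)

# Gathering along the wrong dimension (what this patch fixes) yields a tensor
# of the wrong shape, so comparison against the unsharded model cannot pass.
assert gather_shards(row_shards, dim=0).shape != full_weight.shape
```

On the rank counts: with `tp_size=2` and `pp_size=2`, spawning `check_whisper_3d` on 8 processes presumably leaves a data-parallel factor of 2, which is why the 3-D test needs twice as many ranks as `test_whisper`'s 4.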