#!/usr/bin/env python
# -*- encoding: utf-8 -*-
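"""Distributed tests for ColossalAI's 2D tensor parallel operations and layers (run on 4 GPUs)."""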
import pytest
import torch

from checks_2d.check_layer_2d import (
    check_classifier_given_embed_weight,
    check_classifier_no_given_weight,
    check_embed,
    check_layernorm,
    check_linear,
    check_loss,
    check_patch_embed,
    check_vocab_parallel_classifier_given_embed_weight,
    check_vocab_parallel_classifier_no_given_weight,
    check_vocab_parallel_embed,
    check_vocab_parallel_loss,
)
from checks_2d.check_operation_2d import check_AB, check_ABT, check_ATB
from colossalai.core import global_context as gpc
from colossalai.initialize import launch
from colossalai.logging import disable_existing_loggers
from colossalai.testing import rerun_if_address_is_in_use, spawn

# Single pipeline stage with a tensor parallel group of size 4 in '2d' mode,
# i.e. the four workers form a 2x2 device mesh.
CONFIG = dict(parallel=dict(pipeline=dict(size=1), tensor=dict(size=4, mode='2d')),)


def check_operations():
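    # Verify the 2D parallel matrix multiplication kernels: AB, AB^T and A^T B.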
    check_AB()
    check_ABT()
    check_ATB()


def check_layer():
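    # Run every 2D tensor parallel layer check imported from checks_2d.check_layer_2d.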
    check_linear()
    check_layernorm()
    check_embed()
    check_patch_embed()
    check_vocab_parallel_embed()
    check_classifier_no_given_weight()
    check_vocab_parallel_classifier_no_given_weight()
    check_classifier_given_embed_weight()
    check_vocab_parallel_classifier_given_embed_weight()
    check_loss()
    check_vocab_parallel_loss()


def check_layer_and_operation(rank, world_size, port):
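    # Runs in every spawned worker: initialize the distributed environment, run the checks, then tear down.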
    disable_existing_loggers()
    launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')

    # Disable TF32 and force deterministic cuDNN algorithms so the numerical checks are reproducible.
    torch.backends.cuda.matmul.allow_tf32 = False
    torch.backends.cudnn.allow_tf32 = False
    torch.backends.cudnn.deterministic = True
    # check_operations()
    check_layer()
    gpc.destroy()
    torch.cuda.empty_cache()


@pytest.mark.dist
@rerun_if_address_is_in_use()
def test_2d():
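    # Spawn 4 worker processes, each running check_layer_and_operation on its own GPU.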
    spawn(check_layer_and_operation, 4)


if __name__ == '__main__':
    test_2d()