ColossalAI/tests/test_layers/test_2d/test_2d.py

#!/usr/bin/env python
# -*- encoding: utf-8 -*-
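"""Integration test for Colossal-AI's 2D (SUMMA-based) tensor parallelism.

Runs operation-level and layer-level checks on 4 GPUs. It is skipped under a
plain pytest invocation and is meant to be launched by test.sh in this folder.
"""
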
import pytest
from colossalai.core import global_context as gpc
from colossalai.initialize import init_dist
from test_layer import check_linear, check_layernorm, check_attention, check_mlp, check_transformerlayer
from test_operation import check_AB, check_ABT, check_ATB
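
# 4-way tensor parallelism in '2d' mode: the four GPUs form a 2x2 process
# mesh (2D parallelism needs a perfect-square tensor-parallel size), while
# pipeline size 1 leaves pipeline parallelism disabled for this test.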
CONFIG = dict(
    parallel=dict(
        pipeline=dict(size=1),
        tensor=dict(
            size=4,
            mode='2d'
        )
    ),
)
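

# The names suggest these exercise the three distributed matrix products a
# 2D-parallel (SUMMA) linear layer needs: C = A @ B for the forward pass,
# and C = A @ B^T / C = A^T @ B for the two matmul gradients in backward.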
def check_operations():
    check_AB()
    check_ABT()
    check_ATB()
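

# Layer-level checks imported from test_layer; each presumably runs the
# 2D-parallel version of a building block (linear, layernorm, attention,
# MLP, full transformer layer) and compares it against a reference output.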
def check_layer():
    check_linear()
    check_layernorm()
    check_attention()
    check_mlp()
    check_transformerlayer()
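

# `dist` marks this as a multi-process test; it is skipped under a plain
# `pytest` run because init_dist expects one process per participating GPU.
# test.sh is the authoritative entry point; a hypothetical equivalent launch
# (an assumption, not taken from this repo) would be something like:
#   python -m torch.distributed.launch --nproc_per_node=4 test_2d.py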
@pytest.mark.dist
@pytest.mark.skip("This test should be invoked by test.sh in the same folder, as it runs on multiple GPUs")
def test_2d():
    init_dist(config=CONFIG)
    gpc.set_seed()
    check_operations()
    check_layer()
    gpc.destroy()


if __name__ == '__main__':
    test_2d()