from functools import partial

import pytest
import torch.multiprocessing as mp

from colossalai.device import AlphaBetaProfiler
from colossalai.initialize import launch
from colossalai.logging import disable_existing_loggers
from colossalai.testing import parameterize, rerun_if_address_is_in_use
from colossalai.utils import free_port


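# Per-process worker: initializes the NCCL process group, profiles the
# communication alpha/beta (latency/bandwidth) values among the given physical
# devices, and checks the logical mesh returned by the search.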
def check_alpha_beta(rank, physical_devices, world_size, port):
    disable_existing_loggers()
    launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    profiler = AlphaBetaProfiler(physical_devices)
    best_logical_mesh = profiler.search_best_logical_mesh()

    # The expected meshes depend on the interconnect topology of the machine,
    # which is why the test below carries a skip marker for CI devices.
    if physical_devices == [0, 1, 2, 3]:
        assert best_logical_mesh == [[0, 1], [2, 3]]
    elif physical_devices == [0, 3]:
        assert best_logical_mesh == [[0, 3]]


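# Driver: spawns `world_size` processes on a free port; each process runs
# `check_alpha_beta` with its rank passed as the first argument.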
@pytest.mark.skip(reason="Skip because assertion may fail for CI devices")
@pytest.mark.dist
@parameterize('physical_devices', [[0, 1, 2, 3], [0, 3]])
@rerun_if_address_is_in_use()
def test_profile_alpha_beta(physical_devices):
    world_size = 4
    run_func = partial(check_alpha_beta, physical_devices=physical_devices, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)


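# Entry point for running the profiler check manually, outside of pytest.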
if __name__ == '__main__':
    test_profile_alpha_beta()