# Mirror of https://github.com/hpcaitech/ColossalAI
import pytest

from colossalai.device import profile_alpha_beta


@pytest.mark.skip(reason="Skip because assertion fails for CI devices")
def test_profile_alpha_beta():
    # Profile communication characteristics across four physical devices.
    physical_devices = [0, 1, 2, 3]
    alpha, beta = profile_alpha_beta(physical_devices)
    # Both coefficients must be positive and very small: alpha below 1e-4
    # and beta below 1e-10.
    assert 0 < alpha < 1e-4 and 0 < beta < 1e-10
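# For context (an explanatory sketch, not part of the original test):
# the function name suggests the classic alpha-beta communication model,
# in which sending a message of n bytes between two devices is estimated
# to take T(n) = alpha + beta * n seconds, with alpha the fixed latency
# and beta the per-byte transfer cost. Under that reading, the assertion
# above demands latency below 100 microseconds and effective bandwidth
# above 10 GB/s. The helper below is hypothetical and only illustrates
# the model's arithmetic.
def estimated_comm_time(alpha: float, beta: float, n_bytes: int) -> float:
    """Estimate transfer time for n_bytes under the alpha-beta model."""
    return alpha + beta * n_bytes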
if __name__ == '__main__':
    # Running as a script bypasses pytest collection, so the skip marker
    # above does not apply and the profiling actually runs.
    test_profile_alpha_beta()