"""Shape propagation tests for colossalai's FX analyzer.

These tests symbolically trace torchvision and timm models under
``MetaTensorMode`` and check that ``shape_prop_pass`` attaches shape
metadata to every node of the resulting ``torch.fx.GraphModule``.
"""

import pytest
import torch
import torchvision.models as tm
from packaging import version

from colossalai.testing.utils import clear_cache_before_run, parameterize
from tests.test_analyzer.test_fx.zoo import tm_models, tmm_models

try:
    from colossalai._analyzer._subclasses import MetaTensorMode
    from colossalai._analyzer.fx import symbolic_trace
    from colossalai._analyzer.fx.passes.shape_prop import shape_prop_pass
    from colossalai._analyzer.fx.symbolic_profile import register_shape_impl

    # Register a custom shape-inference hook for F.linear. The trivial
    # assert only marks that the hook was exercised; it then falls through
    # to the real op so the propagated shapes stay correct.
    @register_shape_impl(torch.nn.functional.linear)
    def linear_impl(*args, **kwargs):
        assert True
        return torch.nn.functional.linear(*args, **kwargs)
except ImportError:
    # The analyzer is optional; silently skip registration when unavailable.
    pass


def _check_gm_validity(gm: torch.fx.GraphModule):
    """Every node must carry propagated output metadata; call-like nodes
    must carry input metadata as well."""
    for node in gm.graph.nodes:
        assert node.meta['info'].outputs, f'In {gm.__class__.__name__}, {node} has no output shape.'
        if node.op in [
                'call_module',      # can apply to params
                'call_function',    # can apply to params
                'call_method',      # can apply to params
        ]:
            assert hasattr(node.meta['info'], 'inputs'), f'In {gm.__class__.__name__}, {node} has no input shape.'
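

# A minimal inspection helper, sketched here for convenience: it reads the
# same ``node.meta['info'].outputs`` field the validity check above asserts
# on. The ``getattr`` hedges against outputs that are not tensor-like.
def _dump_output_shapes(gm: torch.fx.GraphModule):
    for node in gm.graph.nodes:
        shapes = [getattr(t, 'shape', None) for t in node.meta['info'].outputs]
        print(f'{node.op:<15} {node.name:<30} -> {shapes}')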


@pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 1.12.0')
@clear_cache_before_run()
@parameterize('m', tm_models)
def test_torchvision_shape_prop(m):
    # Instantiate the model and a fake batch on the meta device, so no real
    # memory is allocated during tracing.
    with MetaTensorMode():
        model = m()
        data = torch.rand(100, 3, 224, 224)
    meta_args = {
        "x": data,
    }
    gm = symbolic_trace(model, meta_args=meta_args)
    shape_prop_pass(gm, data)
    _check_gm_validity(gm)


@pytest.mark.skipif(version.parse(torch.__version__) < version.parse('1.12.0'), reason='torch version < 1.12.0')
@clear_cache_before_run()
@parameterize('m', tmm_models)
def test_timm_shape_prop(m):
    with MetaTensorMode():
        model = m()
        data = torch.rand(100, 3, 224, 224)
    meta_args = {
        "x": data,
    }
    gm = symbolic_trace(model, meta_args=meta_args)
    shape_prop_pass(gm, data)
    _check_gm_validity(gm)
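

# A hedged end-to-end usage sketch (not exercised by the test suite): trace
# a single torchvision model and print its propagated shapes with the helper
# above. Assumes ``tm.resnet18`` exists in the installed torchvision.
def _demo_resnet18_shape_prop():
    with MetaTensorMode():
        model = tm.resnet18()
        data = torch.rand(2, 3, 224, 224)
    gm = symbolic_trace(model, meta_args={'x': data})
    shape_prop_pass(gm, data)
    _dump_output_shapes(gm)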


if __name__ == "__main__":
    # `parameterize` supplies the model zoos when these are called directly.
    test_torchvision_shape_prop()
    test_timm_shape_prop()