mirror of https://github.com/hpcaitech/ColossalAI
You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
37 lines
952 B
37 lines
952 B
2 years ago
|
from torch.fx import GraphModule
|
||
|
|
||
|
from .passes import ShapeProp, graph_profile_pass, shape_prop_pass
|
||
|
from .passes.graph_profile import FlopProfiler
|
||
|
|
||
|
|
||
|
def register_flop_count_impl(func):
    """Decorator factory for overriding the FLOP count of ``func``.

    Usage::

        @register_flop_count_impl(torch.matmul)
        def matmul_flops(...):
            ...

    Args:
        func: The target callable whose FLOP counting is being customized.

    Returns:
        A decorator that records its argument in
        ``FlopProfiler._custom_flop_count_impl`` keyed by ``func`` and
        returns it unchanged.
    """

    def _register(impl):
        # Store the override in the profiler's dispatch table, then hand the
        # implementation back untouched so it remains usable on its own.
        FlopProfiler._custom_flop_count_impl[func] = impl
        return impl

    return _register
|
||
|
|
||
|
|
||
|
def register_shape_impl(func):
    """Decorator factory for overriding shape propagation of ``func``.

    Usage::

        @register_shape_impl(torch.some_op)
        def some_op_shape(...):
            ...

    Args:
        func: The target callable whose shape-propagation rule is being
            customized.

    Returns:
        A decorator that records its argument in
        ``ShapeProp._custom_dispatch_func`` keyed by ``func`` and returns it
        unchanged.
    """

    def _register(impl):
        # Register the custom rule in ShapeProp's dispatch table and return
        # the implementation untouched.
        ShapeProp._custom_dispatch_func[func] = impl
        return impl

    return _register
|
||
|
|
||
|
|
||
|
def symbolic_profile(module: GraphModule, *args, verbose=False) -> GraphModule:
    """Symbolically profile a model with sample inputs.

    Args:
        module (GraphModule): The module to be profiled
        args (Tuple): The sample inputs
        verbose (bool): Whether to print the profiling result

    Returns:
        GraphModule: The profiled module
    """
    # Shape propagation runs first so that per-node metadata is available
    # to the subsequent profiling pass.
    annotated = shape_prop_pass(module, *args)
    return graph_profile_pass(annotated, *args, verbose=verbose)
|