ColossalAI/tests/components_to_test/__init__.py

from . import (
    beit,
    bert,
    gpt2,
    hanging_param_model,
    inline_op_model,
    nested_model,
    repeated_computed_layers,
    resnet,
    simple_net,
)
from .utils import run_fwd, run_fwd_bwd

from . import albert  # isort:skip

__all__ = [
    'bert', 'gpt2', 'hanging_param_model', 'inline_op_model', 'nested_model', 'repeated_computed_layers', 'resnet',
    'simple_net', 'run_fwd_bwd', 'albert', 'beit', 'run_fwd'
]
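
For context, this `__init__.py` imports each toy-model module so that its import-time registration runs, and re-exports the `run_fwd`/`run_fwd_bwd` helpers from `.utils`. Below is a minimal sketch of how a test might consume one of these components; the `non_distributed_component_funcs` registry, its `get_callable` method, the tuple returned by the component function, and the `run_fwd_bwd` signature are assumptions for illustration and may differ from the actual repository code.

# Hedged usage sketch -- not part of the file above.
# Assumes tests/components_to_test/registry.py exposes a
# `non_distributed_component_funcs` registry, and that each component module
# registers a zero-argument builder returning
# (model_builder, train_dataloader, test_dataloader, optimizer_class, criterion);
# the exact tuple layout is an assumption.
from tests.components_to_test import run_fwd_bwd  # re-exported by this __init__.py
from tests.components_to_test.registry import non_distributed_component_funcs

# Look up the component builder registered by the `simple_net` module on import.
get_components = non_distributed_component_funcs.get_callable('simple_net')
model_builder, train_dataloader, _, _, criterion = get_components()

model = model_builder()
data, label = next(iter(train_dataloader))

# Assumed signature: run one forward + backward pass and return the loss.
loss = run_fwd_bwd(model, data, label, criterion)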