# mirror of https://github.com/hpcaitech/ColossalAI
# Model components exercised by the tests in this package.
from . import (
    beit,
    bert,
    gpt2,
    hanging_param_model,
    inline_op_model,
    nested_model,
    repeated_computed_layers,
    resnet,
    simple_net,
)

# Helper that runs a single forward and backward pass on a model.
from .utils import run_fwd_bwd

# albert is imported separately; the isort:skip directive keeps isort
# from folding it into the import block above.
from . import albert  # isort:skip

__all__ = [
    'bert', 'gpt2', 'hanging_param_model', 'inline_op_model', 'nested_model', 'repeated_computed_layers', 'resnet',
    'simple_net', 'run_fwd_bwd', 'albert', 'beit'
]
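# Usage sketch (not part of this module's API): a test elsewhere in the suite
# might pull one of these components and drive it with run_fwd_bwd roughly as
# follows. The get_training_components() builder and the exact run_fwd_bwd
# signature are assumptions for illustration only; they are not defined in this file.
#
#   import torch
#   from tests.components_to_test import simple_net            # package path assumed
#   from tests.components_to_test.utils import run_fwd_bwd
#
#   # Assumed component contract: builder plus dataloaders and a criterion.
#   model_builder, train_dataloader, _, _, criterion = simple_net.get_training_components()
#   model = model_builder()
#   optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
#   data, label = next(iter(train_dataloader))
#   loss = run_fwd_bwd(model, data, label, criterion, optimizer)  # assumed signature
#   assert torch.isfinite(loss)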