ColossalAI/colossalai/pipeline/__init__.py

from .pipelinable import PipelinableContext, PipelinableModel
from .layer_sepc import LayerSpec

__all__ = ['PipelinableModel', 'PipelinableContext', 'LayerSpec']
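
For context, a minimal usage sketch of the re-exported PipelinableContext, loosely following the pipeline-parallel workflow shown in the ColossalAI documentation. The toy model, the partition arguments, and the surrounding setup are illustrative assumptions and not part of this module; it assumes a ColossalAI distributed context has already been launched with a pipeline-parallel configuration.

# Hypothetical usage sketch, not part of __init__.py.
import torch.nn as nn

from colossalai.core import global_context as gpc
from colossalai.context import ParallelMode
from colossalai.pipeline import PipelinableContext


class ToyNet(nn.Module):
    # Illustrative model used only for this sketch.
    def __init__(self):
        super().__init__()
        self.layers = nn.Sequential(nn.Linear(16, 16), nn.ReLU(), nn.Linear(16, 4))

    def forward(self, x):
        return self.layers(x)


# Trace model construction so it can later be split into pipeline stages.
pipelinable = PipelinableContext()
with pipelinable:
    model = ToyNet()

# Flatten the traced modules into a layer list, choose a partition policy,
# and keep only the stages owned by this process's pipeline rank.
pipelinable.to_layer_list()
pipelinable.policy = "uniform"
model = pipelinable.partition(
    1,                                          # number of chunks
    gpc.pipeline_parallel_size,                 # number of pipeline stages
    gpc.get_local_rank(ParallelMode.PIPELINE),  # this process's stage index
)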