# ColossalAI/colossalai/shardformer/layer/__init__.py
from .dropout import DropoutForParallelInput, DropoutForReplicatedInput
from .embedding import Embedding1D, VocabParallelEmbedding1D
from .linear import Linear1D_Col, Linear1D_Row
from .loss import cross_entropy_1d
from .normalization import FusedLayerNorm, FusedRMSNorm
from .parallel_module import ParallelModule
from .qkv_fused_linear import FusedLinear1D_Col, GPT2FusedLinearConv1D_Col, GPT2FusedLinearConv1D_Row
# Public API of this package, in re-export order; grouped by source submodule.
__all__ = [
    # .embedding
    "Embedding1D",
    "VocabParallelEmbedding1D",
    # .linear
    "Linear1D_Col",
    "Linear1D_Row",
    # .qkv_fused_linear
    "GPT2FusedLinearConv1D_Col",
    "GPT2FusedLinearConv1D_Row",
    # .dropout
    "DropoutForParallelInput",
    "DropoutForReplicatedInput",
    # .loss
    "cross_entropy_1d",
    # .normalization
    "FusedLayerNorm",
    "FusedRMSNorm",
    # .qkv_fused_linear
    "FusedLinear1D_Col",
    # .parallel_module
    "ParallelModule",
]