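"""Tensor-parallel layer primitives, re-exported at package level.

The submodules below provide 1D tensor-parallel implementations of common
transformer building blocks: column/row-parallel and fused-QKV linear layers,
embeddings, normalization, dropout, attention, and a parallel cross-entropy
loss.

Minimal usage sketch (assuming this package is importable as
``colossalai.shardformer.layer`` and that ``tp_group`` is an already-created
tensor-parallel ``torch.distributed`` process group):

    import torch.nn as nn
    from colossalai.shardformer.layer import Linear1D_Col

    dense = nn.Linear(1024, 4096)
    # Shard the weight column-wise across the tensor-parallel ranks.
    sharded = Linear1D_Col.from_native_module(dense, process_group=tp_group)
"""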
from ._operation import all_to_all_comm
from .attn import AttnMaskType, ColoAttention
from .dropout import DropoutForParallelInput, DropoutForReplicatedInput
from .embedding import Embedding1D, PaddingEmbedding, VocabParallelEmbedding1D
from .linear import Linear1D_Col, Linear1D_Row, PaddingLMHead, VocabParallelLMHead1D
from .loss import cross_entropy_1d
from .normalization import BaseLayerNorm, FusedLayerNorm, FusedRMSNorm, LayerNorm, RMSNorm
from .parallel_module import ParallelModule
from .qkv_fused_linear import FusedLinear1D_Col, GPT2FusedLinearConv1D_Col, GPT2FusedLinearConv1D_Row

__all__ = [
    "Embedding1D",
    "VocabParallelEmbedding1D",
    "Linear1D_Col",
    "Linear1D_Row",
    "GPT2FusedLinearConv1D_Col",
    "GPT2FusedLinearConv1D_Row",
    "DropoutForParallelInput",
    "DropoutForReplicatedInput",
    "cross_entropy_1d",
    "BaseLayerNorm",
    "LayerNorm",
    "RMSNorm",
    "FusedLayerNorm",
    "FusedRMSNorm",
    "FusedLinear1D_Col",
    "ParallelModule",
    "PaddingEmbedding",
    "PaddingLMHead",
    "VocabParallelLMHead1D",
    "AttnMaskType",
    "ColoAttention",
    "all_to_all_comm",
]