mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
27 lines
839 B
27 lines
839 B
from .dropout import DropoutForParallelInput, DropoutForReplicatedInput
from .embedding import Embedding1D, VocabParallelEmbedding1D
from .linear import Linear1D_Col, Linear1D_Row
from .loss import cross_entropy_1d
from .normalization import BaseLayerNorm, FusedLayerNorm, FusedRMSNorm, LayerNorm, RMSNorm
from .parallel_module import ParallelModule
from .qkv_fused_linear import FusedLinear1D_Col, GPT2FusedLinearConv1D_Col, GPT2FusedLinearConv1D_Row
# Public API of the parallel-layer package. Every name listed here must be
# imported above, otherwise `from <package> import *` raises AttributeError.
# NOTE(review): the original list exported "BaseLayerNorm" without importing
# it — fixed by adding it to the `.normalization` import.
__all__ = [
    # embeddings
    "Embedding1D",
    "VocabParallelEmbedding1D",
    # linear layers (1D tensor parallelism)
    "Linear1D_Col",
    "Linear1D_Row",
    # fused QKV linear layers
    "GPT2FusedLinearConv1D_Col",
    "GPT2FusedLinearConv1D_Row",
    "FusedLinear1D_Col",
    # dropout variants
    "DropoutForParallelInput",
    "DropoutForReplicatedInput",
    # loss
    "cross_entropy_1d",
    # normalization
    "BaseLayerNorm",
    "LayerNorm",
    "RMSNorm",
    "FusedLayerNorm",
    "FusedRMSNorm",
    # base module
    "ParallelModule",
]