ColossalAI/colossalai/legacy/nn/layer/parallel_2d/__init__.py

from ._operation import reduce_by_batch_2d, split_batch_2d
from .layers import (
    Classifier2D,
    Embedding2D,
    LayerNorm2D,
    Linear2D,
    PatchEmbedding2D,
    VocabParallelClassifier2D,
    VocabParallelEmbedding2D,
)

__all__ = [
    "split_batch_2d",
    "reduce_by_batch_2d",
    "Linear2D",
    "LayerNorm2D",
    "Classifier2D",
    "PatchEmbedding2D",
    "Embedding2D",
    "VocabParallelEmbedding2D",
    "VocabParallelClassifier2D",
]
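
For context (this is not part of the file above), a minimal usage sketch of the names this package re-exports. It assumes a ColossalAI legacy environment with a 2D tensor-parallel process group already initialized; the constructor arguments and the batch-splitting call are illustrative assumptions about the legacy 2D API, not taken from this file.

# Hypothetical sketch only: assumes colossalai.legacy has been launched with a
# 2D tensor-parallel configuration before these layers are constructed;
# argument names below are assumed, not verified against this file.
import torch

from colossalai.legacy.nn.layer.parallel_2d import LayerNorm2D, Linear2D, split_batch_2d

hidden = 1024
linear = Linear2D(in_features=hidden, out_features=4 * hidden)  # assumed signature
norm = LayerNorm2D(normalized_shape=hidden)                     # assumed signature

x = torch.randn(8, 64, hidden)
x = split_batch_2d(x)   # assumed usage: shard the batch across the 2D mesh
out = norm(linear(x))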