mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
9 lines
421 B
9 lines
421 B
# Package public API: re-export the 2-D tensor-parallel ops and layers
# from the internal submodules so callers can import them from this
# package directly.
from ._operation import reduce_by_batch_2d, split_tensor_2d
from .layers import (Classifier2D, Embedding2D, LayerNorm2D, Linear2D, PatchEmbedding2D, VocabParallelClassifier2D,
                     VocabParallelEmbedding2D)

# Explicit export list: the two batch/tensor helper ops plus every layer
# class imported above. Keep in sync with the imports when adding layers.
__all__ = [
    'split_tensor_2d', 'reduce_by_batch_2d', 'Linear2D', 'LayerNorm2D', 'Classifier2D', 'PatchEmbedding2D',
    'Embedding2D', 'VocabParallelEmbedding2D', 'VocabParallelClassifier2D'
]