mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
12 lines
658 B
12 lines
658 B
3 years ago
|
from ._operation import Matmul_AB_2D, Matmul_ABT_2D, Matmul_ATB_2D, Add_Bias_2D, matmul_2d
from ._transformer import TransformerMLP2D, TransformerSelfAttention2D, TransformerLayer2D
from ._vit import ViTMLP2D, ViTSelfAttention2D, ViTHead2D, ViTPatchEmbedding2D, ViTTokenFuser2D, ViTInputSplitter2D
from .layers import Linear2D, LayerNorm2D

# Explicit public API of this package (PEP 8 / import conventions): exactly the
# names re-exported by the relative imports above, grouped per source module.
__all__ = [
    # ._operation
    'Matmul_AB_2D', 'Matmul_ABT_2D', 'Matmul_ATB_2D', 'Add_Bias_2D', 'matmul_2d',
    # ._transformer
    'TransformerMLP2D', 'TransformerSelfAttention2D', 'TransformerLayer2D',
    # ._vit
    'ViTMLP2D', 'ViTSelfAttention2D', 'ViTHead2D', 'ViTPatchEmbedding2D', 'ViTTokenFuser2D', 'ViTInputSplitter2D',
    # .layers
    'Linear2D', 'LayerNorm2D',
]