from .layer_norm import MixedFusedLayerNorm as LayerNorm
from .mha.mha import ColoAttention
from .multihead_attention import MultiHeadAttention
from .scaled_softmax import AttnMaskType, FusedScaleMaskSoftmax, ScaledUpperTriangMaskedSoftmax

__all__ = [
    'LayerNorm', 'MultiHeadAttention', 'FusedScaleMaskSoftmax', 'ScaledUpperTriangMaskedSoftmax', 'ColoAttention',
    'AttnMaskType'
]
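
Since this initializer only re-exports fused CUDA kernels under shorter names, a minimal usage sketch may help. It assumes the file sits at colossalai/kernel/cuda_native/__init__.py (the path is not shown on this page) and that the re-exported LayerNorm keeps torch.nn.LayerNorm's constructor signature; both are assumptions, not facts stated in the file.

# Usage sketch, not part of the original file.
# Assumption: this __init__.py lives at colossalai/kernel/cuda_native/,
# so the package-level re-exports resolve as imported below.
import torch
from colossalai.kernel.cuda_native import LayerNorm

# MixedFusedLayerNorm is exposed under the familiar name LayerNorm; assuming
# it mirrors torch.nn.LayerNorm(normalized_shape, eps=1e-5), it can be used
# as a drop-in replacement backed by a fused CUDA kernel:
norm = LayerNorm(1024).cuda()
out = norm(torch.randn(8, 1024, device='cuda'))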