mirror of https://github.com/hpcaitech/ColossalAI
from .flash_attention_dao_cuda import FlashAttentionDaoCudaExtension
from .flash_attention_npu import FlashAttentionNpuExtension
from .flash_attention_sdpa_cuda import FlashAttentionSdpaCudaExtension

try:
    # TODO: remove this after updating openmoe example
    import flash_attention  # noqa

    HAS_FLASH_ATTN = True
except:
    HAS_FLASH_ATTN = False


__all__ = ["FlashAttentionDaoCudaExtension", "FlashAttentionSdpaCudaExtension", "FlashAttentionNpuExtension"]
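
For context, a minimal caller-side sketch of how downstream code might consume this module. Only HAS_FLASH_ATTN and the three extension classes are defined by the file above; the package import path (`flash_attention_extensions`) and the `is_available()` method used below are assumptions made for illustration, not part of this file.

# Hypothetical usage sketch; not part of the original file.
# Assumes this package is importable as `flash_attention_extensions` and that each
# extension class exposes an `is_available()` method (both are assumptions).
from typing import Optional

from flash_attention_extensions import (
    HAS_FLASH_ATTN,
    FlashAttentionDaoCudaExtension,
    FlashAttentionNpuExtension,
    FlashAttentionSdpaCudaExtension,
)


def pick_flash_attention_extension() -> Optional[object]:
    """Return an instance of the first extension that reports itself usable, else None."""
    # Try the Dao CUDA kernel first, then the SDPA fallback, then the NPU backend.
    candidates = (
        FlashAttentionDaoCudaExtension,
        FlashAttentionSdpaCudaExtension,
        FlashAttentionNpuExtension,
    )
    for ext_cls in candidates:
        ext = ext_cls()
        # `is_available()` is assumed from a common extension interface; it is not
        # defined in the file above, so fall back to "not available" if missing.
        if getattr(ext, "is_available", lambda: False)():
            return ext
    return None


if __name__ == "__main__":
    print("standalone flash_attention importable:", HAS_FLASH_ATTN)
    print("selected extension:", type(pick_flash_attention_extension()).__name__)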