ColossalAI/colossalai/kernel/triton/__init__.py


try:
    import triton

    HAS_TRITON = True
except ImportError:
    HAS_TRITON = False
    print("Triton is not installed. Please install Triton to use Triton kernels.")
# The imports below may still raise ImportError even when triton itself is installed.
if HAS_TRITON:
    from .context_attn_unpad import context_attention_unpadded
    from .flash_decoding import flash_decoding_attention
    from .fused_rotary_embedding import fused_rotary_embedding
    from .gptq_triton import gptq_fused_linear_triton
    from .kvcache_copy import copy_kv_to_blocked_cache
    from .no_pad_rotary_embedding import rotary_embedding
    from .rms_layernorm import rms_layernorm
    from .rotary_cache_copy import get_xine_cache
    from .softmax import softmax

    __all__ = [
        "context_attention_unpadded",
        "flash_decoding_attention",
        "copy_kv_to_blocked_cache",
        "softmax",
        "rms_layernorm",
        "gptq_fused_linear_triton",
        "rotary_embedding",
        "fused_rotary_embedding",
        "get_xine_cache",
    ]
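
A minimal, hypothetical sketch of how a downstream module might consume the HAS_TRITON flag exported above: use the Triton rms_layernorm kernel when the imports succeeded, otherwise fall back to plain PyTorch ops. The apply_rms_norm helper, the rms_layernorm call signature, and the eps default are illustrative assumptions, not taken from this file.

import torch

from colossalai.kernel.triton import HAS_TRITON

if HAS_TRITON:
    from colossalai.kernel.triton import rms_layernorm


def apply_rms_norm(x: torch.Tensor, weight: torch.Tensor, eps: float = 1e-6) -> torch.Tensor:
    # Prefer the Triton kernel when the optional import above succeeded.
    if HAS_TRITON:
        # Call signature assumed for illustration; verify against the actual kernel.
        return rms_layernorm(x, weight, eps)
    # Reference RMSNorm fallback using standard torch ops.
    variance = x.pow(2).mean(dim=-1, keepdim=True)
    return x * torch.rsqrt(variance + eps) * weight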