Browse Source

fix typo under extensions/ (#5330)

pull/5331/head
digger yu 10 months ago committed by GitHub
parent
commit
6a3086a505
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 16
      extensions/README.md
  2. 2
      extensions/cuda_extension.py
  3. 2
      extensions/flash_attention/flash_attention_dao_cuda.py
  4. 2
      extensions/flash_attention/flash_attention_xformers_cuda.py
  5. 2
      extensions/triton_extension.py

16
extensions/README.md

@@ -3,12 +3,12 @@
## 📌 Table of Contents
- [🔌 Extensions](#-extensions)
- [📌 Table of Contents](#-table-of-contents)
- [📚 Introduction](#-introduction)
- [🪅 Design](#-design)
- [🛠 API Usage](#-api-usage)
- [🏗 Write a customized extension](#-write-a-customized-extension)
- [ Acknowledgement](#-acknowledgement)
- [📌 Table of Contents](#-table-of-contents)
- [📚 Introduction](#-introduction)
- [🪅 Design](#-design)
- [🛠 API Usage](#-api-usage)
- [🏗 Write a customized extension](#-write-a-customized-extension)
- [ Acknowledgement](#-acknowledgement)
## 📚 Introduction
@@ -46,12 +46,12 @@ kernel = CPUAdamLoader().load()
- Case 2: Load a specific kernel
This case applies if you are familar with the extensions available.
This case applies if you are familiar with the extensions available.
```python
from colossalai.kernel.kernel_loader import CPUAdamLoader
# load the kernel by giving the kernal name
# load the kernel by giving the kernel name
kernel = CPUAdamLoader().load(ext_name="cpu_adam_arm")
```

2
extensions/cuda_extension.py

@@ -20,7 +20,7 @@ class _CudaExtension(_CppExtension):
"""
def is_hardware_available(self) -> bool:
# cuda extension can only be built if cuda is availabe
# cuda extension can only be built if cuda is available
try:
import torch

2
extensions/flash_attention/flash_attention_dao_cuda.py

@@ -6,7 +6,7 @@ class FlashAttentionDaoCudaExtension(_Extension):
super().__init__(name="flash_attention_dao_cuda", support_aot=False, support_jit=False, priority=10)
def is_hardware_available(self) -> bool:
# cuda extension can only be built if cuda is availabe
# cuda extension can only be built if cuda is available
try:
import torch

2
extensions/flash_attention/flash_attention_xformers_cuda.py

@@ -6,7 +6,7 @@ class FlashAttentionXformersCudaExtension(_Extension):
super().__init__(name="flash_attention_xformers_cuda", support_aot=False, support_jit=False)
def is_hardware_available(self) -> bool:
# cuda extension can only be built if cuda is availabe
# cuda extension can only be built if cuda is available
try:
import torch

2
extensions/triton_extension.py

@@ -8,7 +8,7 @@ class _TritonExtension(_Extension):
super().__init__(name, support_aot=False, support_jit=True, priority=priority)
def is_hardware_compatible(self) -> bool:
# cuda extension can only be built if cuda is availabe
# cuda extension can only be built if cuda is available
try:
import torch

Loading…
Cancel
Save