mirror of https://github.com/hpcaitech/ColossalAI
[workflow] supported test on CUDA 10.2 (#3841)
parent 3229f93e30
commit 54e97ed7ea
@@ -19,26 +19,26 @@ jobs:
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - id: set-matrix
        env:
          TORCH_VERSIONS: ${{ inputs.torch_version }}
          CUDA_VERSIONS: ${{ inputs.cuda_version }}
        run: |
          IFS=','
          DOCKER_IMAGE=()

          for tv in $TORCH_VERSIONS
          do
            for cv in $CUDA_VERSIONS
            do
              DOCKER_IMAGE+=("\"hpcaitech/pytorch-cuda:${tv}-${cv}\"")
            done
          done

          container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
          container="[${container}]"
          echo "$container"
          echo "::set-output name=matrix::{\"container\":$(echo "$container")}"

  build:
    name: Test for PyTorch Compatibility
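As a reading aid, here is a minimal standalone sketch of the matrix-preparation loop above, assuming illustrative inputs (`1.12.1,1.13.1` for torch and `10.2,11.3` for CUDA are example values, not the workflow's actual inputs):

```bash
#!/usr/bin/env bash
# Example inputs; in the workflow these come from inputs.torch_version / inputs.cuda_version.
TORCH_VERSIONS="1.12.1,1.13.1"
CUDA_VERSIONS="10.2,11.3"

IFS=','
DOCKER_IMAGE=()

# build the cross product of every torch version with every CUDA version
for tv in $TORCH_VERSIONS
do
  for cv in $CUDA_VERSIONS
  do
    DOCKER_IMAGE+=("\"hpcaitech/pytorch-cuda:${tv}-${cv}\"")
  done
done

container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
container="[${container}]"
echo "$container"
# ["hpcaitech/pytorch-cuda:1.12.1-10.2","hpcaitech/pytorch-cuda:1.12.1-11.3","hpcaitech/pytorch-cuda:1.13.1-10.2","hpcaitech/pytorch-cuda:1.13.1-11.3"]
```

The bracketed JSON array is what the `set-output` line wraps into `{"container": [...]}`, so the downstream `build` job can fan out one container per torch/CUDA pair.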
@@ -70,6 +70,17 @@ jobs:
      - uses: actions/checkout@v2
        with:
          ssh-key: ${{ secrets.SSH_KEY_FOR_CI }}
      - name: Download cub for CUDA 10.2
        run: |
          CUDA_VERSION=$(cat $CUDA_HOME/version.txt | grep "CUDA Version" | awk '{print $NF}' | cut -d. -f1,2)

          # check if it is CUDA 10.2
          # download cub
          if [ "$CUDA_VERSION" = "10.2" ]; then
            wget https://github.com/NVIDIA/cub/archive/refs/tags/1.8.0.zip
            unzip 1.8.0.zip
            cp -r cub-1.8.0/cub/ colossalai/kernel/cuda_native/csrc/kernels/include/
          fi
      - name: Install Colossal-AI
        run: |
          pip install -r requirements/requirements.txt
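For context, the version-detection pipeline in the cub step can be traced by hand. A small sketch, assuming a typical CUDA 10.2 layout (the sample file contents below are illustrative; the real file lives at `$CUDA_HOME/version.txt`):

```bash
# illustrative stand-in for $CUDA_HOME/version.txt on a CUDA 10.2 toolkit
printf 'CUDA Version 10.2.89\n' > /tmp/version.txt

# grep keeps the "CUDA Version" line, awk prints its last field (10.2.89),
# and cut trims the result to major.minor
cat /tmp/version.txt | grep "CUDA Version" | awk '{print $NF}' | cut -d. -f1,2
# prints: 10.2
```

The cub download only runs when this yields exactly `10.2`, since CUDA 10.2 does not bundle the cub headers that newer toolkits ship with.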
@@ -3,8 +3,8 @@ name: Compatibility Test on PR
on:
  pull_request:
    paths:
-      - 'version.txt'
-      - '.compatibility'
+      - "version.txt"
+      - ".compatibility"

jobs:
  matrix_preparation:
@@ -58,6 +58,18 @@ jobs:
      - uses: actions/checkout@v2
        with:
          ssh-key: ${{ secrets.SSH_KEY_FOR_CI }}
      - name: Download cub for CUDA 10.2
        run: |
          CUDA_VERSION=$(cat $CUDA_HOME/version.txt | grep "CUDA Version" | awk '{print $NF}' | cut -d. -f1,2)

          # check if it is CUDA 10.2
          # download cub
          if [ "$CUDA_VERSION" = "10.2" ]; then
            wget https://github.com/NVIDIA/cub/archive/refs/tags/1.8.0.zip
            unzip 1.8.0.zip
            cp -r cub-1.8.0/cub/ colossalai/kernel/cuda_native/csrc/kernels/include/
          fi

      - name: Install Colossal-AI
        run: |
          pip install -v --no-cache-dir .
README.md (+16)
@@ -362,6 +362,22 @@ If you want to install and enable CUDA kernel fusion (compulsory installation wh
CUDA_EXT=1 pip install .
```

For users with CUDA 10.2, you can still build ColossalAI from source. However, you need to manually download the cub library and copy it to the corresponding directory.

```bash
# clone the repository
git clone https://github.com/hpcaitech/ColossalAI.git
cd ColossalAI

# download the cub library
wget https://github.com/NVIDIA/cub/archive/refs/tags/1.8.0.zip
unzip 1.8.0.zip
cp -r cub-1.8.0/cub/ colossalai/kernel/cuda_native/csrc/kernels/include/

# install
CUDA_EXT=1 pip install .
```

<p align="right">(<a href="#top">back to top</a>)</p>

## Use Docker
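After running the README snippet above, a quick optional sanity check (a hypothetical command, not part of the documented steps) is to confirm the headers landed where the CUDA sources expect them:

```bash
# cub.cuh should now be present among the copied headers
ls colossalai/kernel/cuda_native/csrc/kernels/include/cub/
```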
@@ -48,5 +48,20 @@ If you don't want to install and enable CUDA kernel fusion (compulsory installat
pip install .
```

For users with CUDA 10.2, you can still build ColossalAI from source. However, you need to manually download the cub library and copy it to the corresponding directory.

```bash
# clone the repository
git clone https://github.com/hpcaitech/ColossalAI.git
cd ColossalAI

# download the cub library
wget https://github.com/NVIDIA/cub/archive/refs/tags/1.8.0.zip
unzip 1.8.0.zip
cp -r cub-1.8.0/cub/ colossalai/kernel/cuda_native/csrc/kernels/include/

# install
CUDA_EXT=1 pip install .
```

<!-- doc-test-command: echo "installation.md does not need test" -->
@@ -47,4 +47,20 @@ CUDA_EXT=1 pip install .
pip install .
```

If you are using CUDA 10.2, you can still build ColossalAI from source. However, you need to manually download the cub library and copy it to the corresponding directory.

```bash
# clone the repository
git clone https://github.com/hpcaitech/ColossalAI.git
cd ColossalAI

# download the cub library
wget https://github.com/NVIDIA/cub/archive/refs/tags/1.8.0.zip
unzip 1.8.0.zip
cp -r cub-1.8.0/cub/ colossalai/kernel/cuda_native/csrc/kernels/include/

# install
CUDA_EXT=1 pip install .
```

<!-- doc-test-command: echo "installation.md does not need test" -->