@@ -30,7 +30,7 @@ jobs:
       github.event.repository.full_name == 'hpcaitech/ColossalAI'
     runs-on: [self-hosted, gpu]
     container:
-      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
+      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0
       options: --rm
     timeout-minutes: 5
     defaults:
@@ -54,7 +54,7 @@ jobs:
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
     runs-on: [self-hosted, gpu]
     container:
-      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
+      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0
       options: --rm
     timeout-minutes: 5
     defaults:
@@ -140,7 +140,7 @@ jobs:
     if: needs.detect.outputs.anyLibraryFileChanged == 'true'
     runs-on: [self-hosted, gpu]
     container:
-      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
+      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0
       options: --gpus all --rm -v /data/scratch/cifar-10:/data/scratch/cifar-10 -v /data/scratch/llama-tiny:/data/scratch/llama-tiny
     timeout-minutes: 60
     defaults:
@@ -268,7 +268,7 @@ jobs:
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
     runs-on: [self-hosted, gpu]
     container:
-      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
+      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0
       options: --rm
     timeout-minutes: 5
     defaults:
@@ -299,7 +299,7 @@ jobs:
       github.event.repository.full_name == 'hpcaitech/ColossalAI'
     runs-on: [self-hosted, gpu]
     container:
-      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
+      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0
       options: --rm
     timeout-minutes: 5
     defaults:
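
Every hunk above makes the same one-line change: the job's container image is bumped from hpcaitech/pytorch-cuda:1.12.0-11.3.0 to hpcaitech/pytorch-cuda:2.0.0-11.7.0 (presumably a PyTorch 2.0.0 / CUDA 11.7 build). For context, a minimal sketch of how the updated block reads inside one of the affected jobs; the job name and the "if: |" wrapper are illustrative assumptions, not taken from this diff:

  some-gated-job:   # illustrative job name, not from the diff
    if: |
      github.event.repository.full_name == 'hpcaitech/ColossalAI'
    runs-on: [self-hosted, gpu]
    container:
      image: hpcaitech/pytorch-cuda:2.0.0-11.7.0   # updated image
      options: --rm
    timeout-minutes: 5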