mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
12 lines · 334 B
#!/bin/bash

set -xe

# Path to the CIFAR-10 dataset used by train.py.
export DATA=/data/scratch/cifar-10

pip install -r requirements.txt

# TODO: the CI test below is skipped due to CI time limits; train.py needs to
# be rewritten before the test can be re-enabled.
# for plugin in "torch_ddp" "torch_ddp_fp16" "low_level_zero"; do
#     colossalai run --nproc_per_node 4 train.py --interval 0 --target_acc 0.84 --plugin $plugin
# done
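# For reference, re-enabling the test amounts to one `colossalai run` per
# plugin. A single manual invocation (a sketch, assuming a machine with 4 GPUs
# and the dataset available at $DATA) would look like:
#
#   colossalai run --nproc_per_node 4 train.py --interval 0 --target_acc 0.84 --plugin torch_ddp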
|
|
|