mirror of https://github.com/hpcaitech/ColossalAI
Topics: ai, big-model, data-parallelism, deep-learning, distributed-computing, foundation-models, heterogeneous-training, hpc, inference, large-scale, model-parallelism, pipeline-parallelism
# Echo each command and abort on the first failure.
set -xe

pip install -r requirements.txt

BS=4

# Sweep every booster plugin on 1 GPU and on 4 GPUs.
for PLUGIN in "torch_ddp" "torch_ddp_fp16" "low_level_zero" "gemini"
do
  for GPUNUM in 1 4
  do
    colossalai run \
      --nproc_per_node ${GPUNUM} \
      --master_port 29505 \
      opt_benchmark.py \
      --model_name_or_path "facebook/opt-125m" \
      --plugin ${PLUGIN} \
      --batch_size ${BS}
  done
done
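
The four --plugin values select ColossalAI booster plugins: torch_ddp and torch_ddp_fp16 wrap PyTorch DDP (the latter with fp16 mixed precision), while low_level_zero and gemini are ColossalAI's ZeRO-based memory-saving plugins. For a one-off run outside the sweep, a single invocation looks like the sketch below; the choice of 4 GPUs and the gemini plugin is illustrative, and the port and batch size simply reuse the values from the script above.

# Single illustrative run (assumes 4 visible GPUs; plugin choice is arbitrary).
colossalai run --nproc_per_node 4 --master_port 29505 opt_benchmark.py \
    --model_name_or_path "facebook/opt-125m" --plugin gemini --batch_size 4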