mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
22 lines
300 B
22 lines
300 B
# Benchmark sweep driver for run_gemini.sh: iterates over model size,
# GPU count, batch size, and memory cap, launching one training run per
# combination.

# Defaults. Each of these is overwritten by the sweep loops below; they are
# kept so a single manual run still works if the loops are removed.
export BS=16
export MEMCAP=0
export MODEL="6.7b"
export GPUNUM=1

for MODEL in "6.7b" "13b" "1.3b"; do
  for GPUNUM in 8 1; do
    for BS in 16 24 32 8; do
      for MEMCAP in 0 40; do
        # Kill any leftover trainer processes from the previous iteration so
        # they do not keep holding GPU memory. SIGKILL is intentional for a
        # benchmark harness; pkill exits non-zero when nothing matches, which
        # is harmless here (no `set -e`).
        pkill -9 torchrun
        pkill -9 python

        # NOTE: MEMCAP is exposed to the child script under the name MEM_CAP.
        env BS="$BS" MEM_CAP="$MEMCAP" MODEL="$MODEL" GPUNUM="$GPUNUM" \
          bash ./run_gemini.sh
      done
    done
  done
done