# mirror of https://github.com/hpcaitech/ColossalAI
#!/usr/bin/env bash
# Benchmark sweep for run_clm.sh.
#
# Iterates over every combination of model size, GPU count, batch size,
# and memory cap, killing any stale training processes between runs and
# then launching ./run_clm.sh with the current parameters.
#
# NOTE: deliberately no `set -e` — pkill exits non-zero when no matching
# process exists, and one failed run should not abort the whole sweep.

# Defaults; each variable is overwritten by the sweep loops below, but
# exporting here keeps them visible to run_clm.sh via the environment.
export BS=16
export MEMCAP=0
export MODEL="6.7b"
export GPUNUM=1

for MODEL in "6.7b" "13b" "1.3b"; do
  for GPUNUM in 8 1; do
    for BS in 16 24 32 8; do
      for MEMCAP in 0 40; do
        # Kill leftovers from the previous run so ports/GPUs are free.
        # NOTE(review): SIGKILL as first resort — presumably because
        # torchrun workers may hang on TERM; confirm before softening.
        pkill -9 torchrun
        pkill -9 python

        # Quoted expansions: values are simple here, but quoting keeps
        # the invocation safe if a parameter ever contains whitespace.
        bash ./run_clm.sh "$BS" "$MEMCAP" "$MODEL" "$GPUNUM"
      done
    done
  done
done