mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
16 lines
504 B
16 lines
504 B
#!/usr/bin/env bash
# Benchmark llama2-7b inference on a single GPU.
#
# Runs benchmark_llama.py (located next to this script) across a sweep of
# batch sizes for two sequence lengths (256 and 1024), and tees each run's
# output to logs/<GPU>_<batch>_<seqlen>.txt.
#
# Requirements: nvidia-smi on PATH (used to derive the GPU model name for
# the log file prefix), python3, and benchmark_llama.py in this directory.
set -euo pipefail

# Absolute directory containing this script, so it can be invoked from anywhere.
ROOT=$(realpath "$(dirname "$0")")
readonly ROOT

readonly PY_SCRIPT="${ROOT}/benchmark_llama.py"

# Extract the GPU model token from the first nvidia-smi device line,
# e.g. "GPU 0: NVIDIA A100-SXM4-80GB (...)" -> "A100".
GPU=$(nvidia-smi -L | head -1 | cut -d' ' -f4 | cut -d'-' -f1)
readonly GPU

mkdir -p logs

# benchmark llama2-7b on one single GPU, input length 256
for bsz in 16 32 64; do
  python3 "${PY_SCRIPT}" -m llama2-7b --tp_size 1 --pp_size 1 \
    -b "$bsz" -s 256 --output_len 128 \
    | tee "logs/${GPU}_${bsz}_256.txt"
done

# same sweep at input length 1024 (smaller batches included: longer
# sequences use more memory, so start lower)
for bsz in 4 8 16 32 64; do
  python3 "${PY_SCRIPT}" -m llama2-7b --tp_size 1 --pp_size 1 \
    -b "$bsz" -s 1024 --output_len 128 \
    | tee "logs/${GPU}_${bsz}_1024.txt"
done