# distplan in ["colossalai", "pytorch"]
export DISTPLAN="colossalai"
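
# To run the plain PyTorch baseline instead (assuming train.py accepts
# "pytorch" for --distplan, per the comment above), set:
# export DISTPLAN="pytorch"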

# The following options are only valid when DISTPLAN="colossalai"
export TPDEGREE=1            # tensor parallelism degree
export GPUNUM=1              # number of GPUs launched by torchrun
export PLACEMENT='cpu'       # placement policy for model data (e.g. 'cpu')
export USE_SHARD_INIT=False  # whether to shard parameters at initialization
export BATCH_SIZE=4          # training batch size
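
# A hypothetical multi-GPU variant (commented out; values are illustrative
# and assume 4 GPUs are available and train.py supports 'auto' placement):
# export TPDEGREE=2
# export GPUNUM=4
# export PLACEMENT='auto'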

env OMP_NUM_THREADS=12 torchrun --standalone --nproc_per_node=${GPUNUM} --master_port 29501 \
    train.py --tp_degree=${TPDEGREE} --batch_size=${BATCH_SIZE} --placement ${PLACEMENT} \
    --shardinit ${USE_SHARD_INIT} --distplan ${DISTPLAN} 2>&1 | tee run.log
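
# Example usage (a sketch, assuming this script is saved as run.sh):
#   bash run.sh
# Output streams to the console and is also written to run.log via tee.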