mirror of https://github.com/hpcaitech/ColossalAI
# Supervised fine-tuning of LLaMA-7B on 4 GPUs with the ColossalAI ZeRO-2 strategy
torchrun --standalone --nproc_per_node=4 train_sft.py \
    --pretrain "/path/to/LLaMa-7B/" \
    --model 'llama' \
    --strategy colossalai_zero2 \
    --log_interval 10 \
    --save_path /path/to/Coati-7B \
    --dataset /path/to/data.json \
    --batch_size 4 \
    --accimulation_steps 8 \
    --lr 2e-5 \
    --max_datasets_size 512 \
    --max_epochs 1
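
If you need a starting point for the --dataset file, the sketch below writes a minimal data.json. The Alpaca-style instruction/input/output record schema is an assumption here, not something this snippet confirms; check which fields train_sft.py actually reads before training on real data.

# Minimal sketch of an SFT dataset file, ASSUMING Alpaca-style
# instruction/input/output records; verify against train_sft.py.
cat > /path/to/data.json <<'EOF'
[
  {
    "instruction": "Summarize the following paragraph.",
    "input": "ColossalAI provides parallel training strategies for large models.",
    "output": "ColossalAI offers parallelism strategies that simplify large-model training."
  },
  {
    "instruction": "What does the --strategy flag select?",
    "input": "",
    "output": "It selects the distributed training strategy, e.g. colossalai_zero2."
  }
]
EOF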