use llm partition (#159)

Co-authored-by: qa-caif-cicd <qa-caif-cicd@pjlab.org.cn>
pull/165/head
kkscilife 2023-08-01 17:49:01 +08:00 committed by GitHub
parent fbe6ef1da5
commit 7fbf85eac9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 5 additions and 5 deletions

View File

@@ -53,7 +53,7 @@ jobs:
export PYTHONPATH=$PWD:$PYTHONPATH
sh ./ci_scripts/model/convert_to_hf.sh
cd ./hf_ckpt
srun -p llm2 python ../ci_scripts/model/loaded_as_transformer.py
srun -p llm python ../ci_scripts/model/loaded_as_transformer.py
cd ..
rm -rf $GITHUB_WORKSPACE/hf_ckpt
@@ -65,4 +65,4 @@ jobs:
- name: chat-model-in-hf
run: |
source activate internlm-env-test
srun -p llm2 python ./ci_scripts/model/demo_load_7B_chat_model.py
srun -p llm python ./ci_scripts/model/demo_load_7B_chat_model.py

View File

@@ -1,7 +1,7 @@
#!/bin/bash
rm -rf /mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/result.*
srun -p llm2 python tools/tokenizer.py --text_input_path /mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/raw_data.txt --bin_output_path /mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/result.bin
srun -p llm python tools/tokenizer.py --text_input_path /mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/raw_data.txt --bin_output_path /mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/result.bin
file_one="/mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/result.bin"
file_two="/mnt/petrelfs/qa-caif-cicd/data/lm_data/cn_data/result.bin.meta"

View File

@@ -2,7 +2,7 @@
rm -rf $GITHUB_WORKSPACE/llm_ckpts/20
srun -p llm2 --quotatype=spot -n 8 --ntasks-per-node=8 --gpus-per-task=1 python train.py --config ./ci_scripts/train/ci_7B_sft.py
srun -p llm --quotatype=spot -n 8 --ntasks-per-node=8 --gpus-per-task=1 python train.py --config ./ci_scripts/train/ci_7B_sft.py
file_dir="$GITHUB_WORKSPACE/llm_ckpts/20/*.pt"
source ./ci_scripts/common/basic_func.sh

View File

@@ -1,7 +1,7 @@
#!/bin/bash
rm -rf $GITHUB_WORKSPACE/llm_ckpts/20
srun -p llm2 -N 1 torchrun --nnodes=1 --nproc_per_node=8 --master_port=29501 train.py --config ./ci_scripts/train/ci_7B_sft.py --launcher "torch"
srun -p llm -N 1 torchrun --nnodes=1 --nproc_per_node=8 --master_port=29501 train.py --config ./ci_scripts/train/ci_7B_sft.py --launcher "torch"
file_dir="$GITHUB_WORKSPACE/llm_ckpts/20/*.pt"
source ./ci_scripts/common/basic_func.sh