Update daily_tests.yaml

pull/746/head
zhulinJulia24 2024-06-18 12:35:07 +08:00 committed by GitHub
parent 0fcd87c7ee
commit 9e1881704b
1 changed file with 7 additions and 6 deletions

@@ -27,18 +27,19 @@ jobs:
          conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
          source activate internlm-model-latest
          pip install transformers==${{ matrix.transformers-version }}
          pip install torch==2.2.2 torchvision==0.17.2 --index-url https://download.pytorch.org/whl/cu118
          wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
          pip install flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
          pip install sentencepiece auto-gptq==0.6.0 lmdeploy[all]
          srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py
          conda deactivate
      - name: load_latest_hf_model
        if: matrix.transformers-version == 'latest'
        run: |
          conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
          source activate internlm-model-latest
          pip install transformers
      - name: run_test
        run: |
          conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
          source activate internlm-model-latest
          pip install torch==2.2.2 torchvision==0.17.2 --index-url https://download.pytorch.org/whl/cu118
          wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
          pip install flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
          pip install sentencepiece auto-gptq==0.6.0 lmdeploy[all]
          srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py
          conda deactivate
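
For context, the steps in this hunk key off matrix.transformers-version: the pinned branch installs transformers==${{ matrix.transformers-version }}, while the guard "if: matrix.transformers-version == 'latest'" routes the job to load_latest_hf_model. A minimal sketch of the matrix this implies follows; the job name, runner label, and version list are illustrative assumptions and do not come from this commit.

jobs:
  HF_model:                               # assumed job name
    runs-on: [self-hosted]                # assumed: self-hosted runner with srun/Slurm access
    strategy:
      matrix:
        # 'latest' selects the load_latest_hf_model step via its if: guard;
        # any pinned entry is installed as transformers==${{ matrix.transformers-version }}
        transformers-version: ['4.34.0', 'latest']   # version list is an assumption
    steps:
      - uses: actions/checkout@v3
      # ... environment setup and test steps as in the diff above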