Update daily_tests.yaml

pull/746/head
zhulinJulia24 2024-06-18 14:20:10 +08:00 committed by GitHub
parent a8fd7db00b
commit 91ab284dfd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed files with 2 additions and 3 deletions

View File

@@ -13,7 +13,7 @@ jobs:
runs-on: [t_cluster]
strategy:
matrix:
-        transformers-version: [4.34.0, latest]
+        transformers-version: [4.36.0, latest]
steps:
- name: mask env
run: |
@@ -37,8 +37,7 @@ jobs:
run: |
source activate internlm-model-latest
pip install torch==2.2.2 torchvision==0.17.2 --index-url https://download.pytorch.org/whl/cu118
-        wget https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
-        pip install flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+        pip install /mnt/petrelfs/qa-caif-cicd/resource/flash_attn-2.5.8+cu118torch2.2cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
pip install sentencepiece auto-gptq==0.6.0 lmdeploy[all]
srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py
conda deactivate