[CI] add transformers version coverage (#674)

pull/689/head
zhulinJulia24 2024-01-30 23:07:31 +08:00 committed by GitHub
parent 3599ddd0e4
commit 8194c348c1
1 changed file with 18 additions and 16 deletions
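The change applies the standard GitHub Actions matrix pattern: each entry in transformers-version spawns a separate HF_model run, and the selected value is read back through ${{ matrix.transformers-version }}. A minimal sketch of that pattern, reduced to an echo step (the step name below is illustrative, not part of this commit):

jobs:
  HF_model:
    runs-on: [t_cluster]
    strategy:
      matrix:
        transformers-version: [4.34.0, latest]   # one job run per entry
    steps:
      - name: show selected version   # illustrative step, not in the workflow
        run: echo "testing transformers ${{ matrix.transformers-version }}"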

@@ -11,6 +11,9 @@ env:
jobs:
  HF_model:
    runs-on: [t_cluster]
    strategy:
      matrix:
        transformers-version: [4.34.0, latest]
    steps:
      - name: mask env
        run: |
@@ -19,33 +22,32 @@ jobs:
      - uses: actions/checkout@v3
      - name: load_hf_model
        if: matrix.transformers-version != 'latest'
        run: |
          conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
          source activate internlm-model-latest
          pip install transformers==${{ matrix.transformers-version }}
          pip install sentencepiece
          srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py
          conda deactivate
      - name: load_latest_hf_model
        if: matrix.transformers-version == 'latest'
        run: |
          conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
          source activate internlm-model-latest
          # TODO: test other versions of transformers
          pip install transformers
          pip install sentencepiece
          srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py
          conda deactivate
      - name: remove_env
        if: always()
        run: |
          conda env remove --name internlm-model-latest
  clear_env:
    if: ${{ !cancelled() }}
    needs: [HF_model]
    runs-on: [t_cluster]
    timeout-minutes: 10
    steps:
      - name: mask env
        run: |
          echo "::add-mask::${{env.WORKSPACE_PREFIX}}"
          echo "::add-mask::$path_prefix"
      - name: remove_env
        run: |
          conda env remove --name internlm-model-latest
  notify_to_feishu:
    if: ${{ always() && !cancelled() && contains(needs.*.result, 'failure') && (github.ref_name == 'develop' || github.ref_name == 'main') }}
    needs: [HF_model,clear_env]
    needs: [HF_model]
    runs-on: [t_cluster]
    steps:
      - name: mask env
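For reference, the two load steps above follow the usual pinned-versus-latest branching on the matrix value: a concrete version is installed with ==, while 'latest' falls through to an unpinned install. A reduced sketch of that branching, with the conda environment and srun/pytest plumbing trimmed away (illustrative only, not the committed steps verbatim):

      - name: load_hf_model
        if: matrix.transformers-version != 'latest'
        run: pip install transformers==${{ matrix.transformers-version }} sentencepiece
      - name: load_latest_hf_model
        if: matrix.transformers-version == 'latest'
        run: pip install transformers sentencepiece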