From 8194c348c1e9667e1cf9191750d56afdcf6c56d9 Mon Sep 17 00:00:00 2001 From: zhulinJulia24 <145004780+zhulinJulia24@users.noreply.github.com> Date: Tue, 30 Jan 2024 23:07:31 +0800 Subject: [PATCH] [CI] add transformers version coverage (#674) --- .github/workflows/daily_tests.yaml | 34 ++++++++++++++++-------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/.github/workflows/daily_tests.yaml b/.github/workflows/daily_tests.yaml index d088c96..1e1376f 100644 --- a/.github/workflows/daily_tests.yaml +++ b/.github/workflows/daily_tests.yaml @@ -11,6 +11,9 @@ env: jobs: HF_model: runs-on: [t_cluster] + strategy: + matrix: + transformers-version: [4.34.0, latest] steps: - name: mask env run: | @@ -19,33 +22,32 @@ jobs: - uses: actions/checkout@v3 - name: load_hf_model + if: matrix.transformers-version != 'latest' + run: | + conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV} + source activate internlm-model-latest + pip install transformers==${{ matrix.transformers-version }} + pip install sentencepiece + srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py + conda deactivate + - name: load_latest_hf_model + if: matrix.transformers-version == 'latest' run: | conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV} source activate internlm-model-latest - # TODO:test other version of transformers pip install transformers pip install sentencepiece srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py conda deactivate + - name: remove_env + if: always() + run: | + conda env remove --name internlm-model-latest - clear_env: - if: ${{ !cancelled() }} - needs: [HF_model] - runs-on: [t_cluster] - timeout-minutes: 10 - steps: - - name: mask env - run: | - echo "::add-mask::${{env.WORKSPACE_PREFIX}}" - echo "::add-mask::$path_prefix" - 
- - name: remove_env - run: | - conda env remove --name internlm-model-latest notify_to_feishu: if: ${{ always() && !cancelled() && contains(needs.*.result, 'failure') && (github.ref_name == 'develop' || github.ref_name == 'main') }} - needs: [HF_model,clear_env] + needs: [HF_model] runs-on: [t_cluster] steps: - name: mask env