add moe unit test for e2e

pull/378/head
zhanglei 2023-09-27 19:58:46 +08:00
parent eb4db08477
commit f13aea905b
1 changed file with 11 additions and 0 deletions

@@ -20,3 +20,14 @@ jobs:
         run: |
           source /mnt/petrelfs/share_data/llm_env/env/llm-flash2.0
           srun -p ${SLURM_PARTITION} --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --quotatype=spot -n8 --ntasks-per-node=8 --cpus-per-task=4 --gpus-per-task=1 pytest -s -v --color=yes -m "training_8GPU" ./tests/test_training
+
+  training_moe_8GPU:
+    runs-on: [t_cluster]
+    timeout-minutes: 5
+    steps:
+      - uses: actions/checkout@v3
+      - name: training_moe_8GPU
+        run: |
+          source /mnt/petrelfs/share_data/llm_env/env/llm-flash2.0
+          srun -p ${SLURM_PARTITION} --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --quotatype=spot -n8 --ntasks-per-node=8 --cpus-per-task=4 --gpus-per-task=1 pytest -s -v --color=yes -m "training_moe_8GPU" ./tests/test_training
+
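For context, pytest's -m flag selects tests by marker expression, so the new job only collects tests carrying the training_moe_8GPU marker and skips the dense-model tests. A minimal sketch of how a test in ./tests/test_training might opt in; the test name and body here are hypothetical, not the repository's actual test:

    import pytest

    # Hypothetical test; the marker name must match the -m filter in the job above.
    @pytest.mark.training_moe_8GPU
    def test_moe_training_8gpu():
        # Collected only under -m "training_moe_8GPU"; the dense job's
        # -m "training_8GPU" expression does not match this marker.
        ...

Custom markers like this are normally registered under the markers option in pytest.ini (or an equivalent config) so pytest does not emit PytestUnknownMarkWarning when collecting.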