mirror of https://github.com/InternLM/InternLM
add MoE unit test for e2e
parent eb4db08477
commit f13aea905b
@@ -20,3 +20,14 @@ jobs:
       run: |
         source /mnt/petrelfs/share_data/llm_env/env/llm-flash2.0
         srun -p ${SLURM_PARTITION} --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --quotatype=spot -n8 --ntasks-per-node=8 --cpus-per-task=4 --gpus-per-task=1 pytest -s -v --color=yes -m "training_8GPU" ./tests/test_training
+
+  training_moe_8GPU:
+    runs-on: [t_cluster]
+    timeout-minutes: 5
+    steps:
+    - uses: actions/checkout@v3
+
+    - name: training_moe_8GPU
+      run: |
+        source /mnt/petrelfs/share_data/llm_env/env/llm-flash2.0
+        srun -p ${SLURM_PARTITION} --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --quotatype=spot -n8 --ntasks-per-node=8 --cpus-per-task=4 --gpus-per-task=1 pytest -s -v --color=yes -m "training_moe_8GPU" ./tests/test_training
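The new job mirrors the existing training_8GPU job: srun launches 8 tasks on one node (-n8 --ntasks-per-node=8), each with one GPU and four CPUs, and pytest's -m "training_moe_8GPU" selects only tests carrying that marker. A minimal sketch of such a marked test follows; the marker name comes from the workflow above, but the file path, function name, and marker-registration snippet are assumptions for illustration, not taken from the repository:

```python
# tests/test_training/test_moe_e2e.py (hypothetical path) -- minimal sketch of
# a test that `pytest -m "training_moe_8GPU"` would select. Only the marker
# name is taken from the workflow above; everything else is assumed.
import pytest

# The custom marker would normally be registered (e.g. in pytest.ini or
# conftest.py) to avoid PytestUnknownMarkWarning:
#   [pytest]
#   markers =
#       training_moe_8GPU: MoE end-to-end training test on 8 GPUs


@pytest.mark.training_moe_8GPU
def test_moe_training_smoke():
    # Placeholder standing in for a real 8-GPU MoE training run; an actual
    # e2e test would build the model, run a few steps under distributed
    # launch, and assert that the loss stays finite.
    num_experts = 4
    assert num_experts > 1, "an MoE e2e test needs more than one expert"
```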