change max length

pull/650/head
kkscilife 2024-01-23 19:11:27 +08:00
parent d52fb1670b
commit 1a0bd4154b
2 changed files with 2 additions and 3 deletions

View File

@@ -23,6 +23,7 @@ jobs:
run: |
conda create -n internlm-model-latest --clone ${CONDA_BASE_ENV}
source activate internlm-model-latest
# TODO:test other version of transformers
pip install transformers
pip install sentencepiece
srun -p ${SLURM_PARTITION} --kill-on-bad-exit=1 --job-name=${GITHUB_RUN_ID}-${GITHUB_JOB} --gpus-per-task=2 pytest -s -v --color=yes ./tests/test_hf_model.py

View File

@@ -7,8 +7,6 @@ prompts = ["你好", "what's your name"]
def assert_model(response):
assert len(response) != 0
assert "user" not in response
assert "bot" not in response
assert "UNUSED_TOKEN" not in response
@@ -69,7 +67,7 @@ class TestBase:
for k, v in inputs.items():
inputs[k] = v.cuda()
gen_kwargs = {
-"max_length": 16280,
+"max_length": 128,
"top_p": 10,
"temperature": 1.0,
"do_sample": True,