Update modeling_internlm.py

Fixed the issue where the HF model spontaneously conducted multiple rounds of Q&A and the stream_chat method generated garbled characters.
pull/560/head
djsaber 2023-12-26 16:52:06 +08:00 committed by GitHub
parent 97e7d03d09
commit 508711cc97
1 changed file with 1 addition and 1 deletion


@@ -861,7 +861,7 @@ class InternLMForCausalLM(InternLMPreTrainedModel):
                 self.chche.extend(value.tolist())
                 token = self.tokenizer.decode(self.chche, skip_special_tokens=True)
-                if " " in token and len(token) <= 5:
+                if "�" in token and len(token) <= 5:
                     return
                 if token.strip() != "<eoa>":
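
The changed line relies on a buffered-decode trick: generated token ids are accumulated and decoded together, and if the decoded text contains the Unicode replacement character "�" (U+FFFD), the ids so far end in the middle of a multi-byte character, so nothing is emitted until more tokens arrive. Below is a minimal, hypothetical sketch of that technique as a standalone streamer; it is not the repository's ChatStreamer, and the `BufferedStreamer` name and the usage lines in the comments are illustrative assumptions.

```python
from queue import Queue

from transformers.generation.streamers import BaseStreamer


class BufferedStreamer(BaseStreamer):
    """Buffers generated token ids and emits text only once it decodes cleanly.

    An incomplete multi-byte character decodes to "\ufffd", so emission is
    postponed until enough tokens have arrived to decode without it.
    """

    def __init__(self, tokenizer):
        self.tokenizer = tokenizer
        self.cache = []          # pending token ids not yet emitted
        self.queue = Queue()     # decoded text chunks for the consumer
        self.received_inputs = False

    def put(self, value):
        # The first call during generate() passes the prompt ids; skip them.
        if not self.received_inputs:
            self.received_inputs = True
            return
        self.cache.extend(value.reshape(-1).tolist())
        token = self.tokenizer.decode(self.cache, skip_special_tokens=True)
        # A short decode containing the replacement character means the cache
        # ends mid-character: wait for the next token instead of emitting garbage.
        if "\ufffd" in token and len(token) <= 5:
            return
        if token.strip() != "<eoa>":   # "<eoa>" is InternLM's end-of-answer marker
            self.queue.put(token)
            self.cache = []            # flushed: start a fresh buffer

    def end(self):
        self.queue.put(None)           # signal that generation has finished


# Usage sketch (model id shown for illustration only):
#   tok = AutoTokenizer.from_pretrained("internlm/internlm-chat-7b", trust_remote_code=True)
#   streamer = BufferedStreamer(tok)
#   model.generate(**inputs, streamer=streamer)
```

Decoding the whole cache instead of one token at a time is what avoids the garbled output: a Chinese character can span several tokens, and decoding each token in isolation yields replacement characters that the old code passed straight through to the caller.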