From 88ddfdd5dc1e8b00f892b36ec37d47d337dbfebd Mon Sep 17 00:00:00 2001
From: haofanurusai
Date: Sun, 19 Mar 2023 17:16:35 +0800
Subject: [PATCH] Update web_demo.py

---
 web_demo.py | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/web_demo.py b/web_demo.py
index 2f33949..fa24f89 100644
--- a/web_demo.py
+++ b/web_demo.py
@@ -13,17 +13,27 @@ def predict(input, max_length, top_p, temperature, history=None):
     if history is None:
         history = []
     response = ''
+
+    updates = []
+    for query, response in history:
+        updates.append(gr.update(visible=True, value="用户:" + query))
+        updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
+    flag = True
+
     for delta, seq, history in model.chat_stream(tokenizer, input, history, max_length=max_length,
                                                  top_p=top_p, temperature=temperature):
-        updates = []
         response += delta
-        updates.append(gr.update(visible=True, value="用户:" + input))
-        updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
+        if flag:
+            updates.append(gr.update(visible=True, value="用户:" + input))
+            updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
+            flag = False
+        else:
+            updates[-2] = gr.update(visible=True, value="用户:" + input)
+            updates[-1] = gr.update(visible=True, value="ChatGLM-6B:" + response)
         if len(updates) < MAX_BOXES:
             updates = updates + [gr.Textbox.update(visible=False)] * (MAX_BOXES - len(updates))
         yield [history] + updates
 
-
 with gr.Blocks() as demo:
     state = gr.State([])
     text_boxes = []
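Note: the patch turns the per-token rebuild of `updates` into an incremental update: boxes for past turns are rendered once before streaming begins, the first delta appends two new boxes for the current turn, and every later delta overwrites the last two entries in place. Below is a minimal sketch of that pattern, runnable without Gradio; `fake_chat_stream`, its token list, and the plain-string boxes are hypothetical stand-ins for `model.chat_stream` and `gr.update(...)`.

MAX_BOXES = 8  # stand-in for MAX_BOXES (MAX_TURNS * 2) in web_demo.py


def fake_chat_stream(query):
    """Hypothetical stand-in for model.chat_stream: yields reply deltas."""
    for delta in ["Hello", ", ", "world", "!"]:
        yield delta


def predict(query, history):
    # Render past turns once, before streaming starts (the loop the patch
    # hoists out of the token loop).
    updates = []
    for q, r in history:
        updates.append("用户:" + q)
        updates.append("ChatGLM-6B:" + r)

    response = ""
    first = True
    for delta in fake_chat_stream(query):
        response += delta
        if first:
            # First delta: append two fresh boxes for the current turn.
            updates.append("用户:" + query)
            updates.append("ChatGLM-6B:" + response)
            first = False
        else:
            # Later deltas: overwrite the last two boxes in place instead
            # of rebuilding the whole list on every token.
            updates[-2] = "用户:" + query
            updates[-1] = "ChatGLM-6B:" + response
        # Pad to a fixed box count, as the demo does with hidden textboxes.
        yield updates + ["<hidden>"] * (MAX_BOXES - len(updates))


for frame in predict("Say hello", [("Hi", "Hi there")]):
    print(frame[:4])

Running the sketch prints one frame per delta, with only the final box changing between frames: the same behavior the patch gives the Gradio demo, at the cost of tracking a `flag` for the first token.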