From 0acf1e5851a32c28197ba10bfab1382c43423379 Mon Sep 17 00:00:00 2001
From: haofanurusai
Date: Sun, 19 Mar 2023 17:29:04 +0800
Subject: [PATCH] Update web_demo.py

---
 web_demo.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/web_demo.py b/web_demo.py
index fa24f89..1146219 100644
--- a/web_demo.py
+++ b/web_demo.py
@@ -19,6 +19,7 @@ def predict(input, max_length, top_p, temperature, history=None):
         updates.append(gr.update(visible=True, value="用户:" + query))
         updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
     flag = True
+    update_pos = 0
     for delta, seq, history in model.chat_stream(tokenizer, input, history, max_length=max_length,
                                                  top_p=top_p, temperature=temperature):
@@ -26,10 +27,11 @@ def predict(input, max_length, top_p, temperature, history=None):
         if flag:
             updates.append(gr.update(visible=True, value="用户:" + input))
             updates.append(gr.update(visible=True, value="ChatGLM-6B:" + response))
+            update_pos = len(updates) - 1
             flag = False
         else:
-            updates[-2]=gr.update(visible=True, value="用户:" + input)
-            updates[-1]=gr.update(visible=True, value="ChatGLM-6B:" + response)
+            updates[update_pos-1]=gr.update(visible=True, value="用户:" + input)
+            updates[update_pos]=gr.update(visible=True, value="ChatGLM-6B:" + response)
         if len(updates) < MAX_BOXES:
             updates = updates + [gr.Textbox.update(visible=False)] * (MAX_BOXES - len(updates))
         yield [history] + updates