Delete unused code

Fix model name
pull/157/head
duzx16 2 years ago
parent 45f45668d9
commit f63af3515f

@ -11,7 +11,7 @@ st.set_page_config(
@st.cache_resource
def get_model():
    """Load and cache the ChatGLM-6B tokenizer and model.

    Decorated with ``st.cache_resource`` so the heavyweight model is
    loaded only once per Streamlit server process and shared across
    reruns/sessions.

    Returns:
        tuple: ``(tokenizer, model)`` where the model is cast to
        half precision, moved to CUDA, and switched to eval mode.
    """
    # Use the Hugging Face Hub model id "THUDM/chatglm-6b" — the diff
    # shows the commit fixed an earlier bad local-looking path
    # "/THUDM/chatglm-6b" that would not resolve on the Hub.
    tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
    # trust_remote_code is required because ChatGLM ships custom
    # modeling code on the Hub; NOTE(review): this executes remote code.
    model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
    # eval() disables dropout/batch-norm training behavior for inference.
    model = model.eval()
    return tokenizer, model
@ -27,16 +27,10 @@ def predict(input, history=None):
history = [] history = []
response, history = model.chat(tokenizer, input, history) response, history = model.chat(tokenizer, input, history)
#updates = []
for i, (query, response) in enumerate(history): for i, (query, response) in enumerate(history):
#updates.append("用户:" + query)
message(query, avatar_style="big-smile", key=str(i) + "_user") message(query, avatar_style="big-smile", key=str(i) + "_user")
#updates.append("ChatGLM-6B" + response)
message(response, avatar_style="bottts", key=str(i)) message(response, avatar_style="bottts", key=str(i))
# if len(updates) < MAX_BOXES:
# updates = updates + [""] * (MAX_BOXES - len(updates))
return history return history

Loading…
Cancel
Save