mirror of https://github.com/THUDM/ChatGLM-6B
Fix `clear` command performance on Windows
parent
6a5a6326f9
commit
fdc3e5646a
11
cli_demo.py
11
cli_demo.py
|
@ -1,10 +1,13 @@
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
from transformers import AutoTokenizer, AutoModel
|
from transformers import AutoTokenizer, AutoModel
|
||||||
|
|
||||||
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
|
tokenizer = AutoTokenizer.from_pretrained("chatglm-6b", trust_remote_code=True)
|
||||||
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
|
model = AutoModel.from_pretrained("chatglm-6b", trust_remote_code=True).half().cuda()
|
||||||
model = model.eval()
|
model = model.eval()
|
||||||
|
|
||||||
|
os_name = platform.system()
|
||||||
|
|
||||||
history = []
|
history = []
|
||||||
print("欢迎使用 ChatGLM-6B 模型,输入内容即可进行对话,clear 清空对话历史,stop 终止程序")
|
print("欢迎使用 ChatGLM-6B 模型,输入内容即可进行对话,clear 清空对话历史,stop 终止程序")
|
||||||
while True:
|
while True:
|
||||||
|
@ -13,8 +16,8 @@ while True:
|
||||||
break
|
break
|
||||||
if query == "clear":
|
if query == "clear":
|
||||||
history = []
|
history = []
|
||||||
os.system('clear')
|
command = 'cls' if os_name == 'Windows' else 'clear'
|
||||||
|
os.system(command)
|
||||||
continue
|
continue
|
||||||
response, history = model.chat(tokenizer, query, history=history)
|
response, history = model.chat(tokenizer, query, history=history)
|
||||||
print(f"ChatGLM-6B:{response}")
|
print(f"ChatGLM-6B:{response}")
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue