ChatGLM-6B/cli_demo.py

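"""Command-line chat demo for ChatGLM-6B: type a message to chat, 'clear' resets the history, 'stop' quits."""
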
import os
from transformers import AutoTokenizer, AutoModel

# Load the tokenizer and model from the Hugging Face Hub; the model is cast to
# fp16, moved to the GPU, and switched to inference mode.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
model = model.eval()

# Conversation history, passed back into model.chat on every turn.
history = []
print("欢迎使用 ChatGLM-6B 模型输入内容即可进行对话clear 清空对话历史stop 终止程序")
# Interactive loop: read a query, pass it to model.chat together with the
# accumulated history, and print the model's reply.
while True:
    query = input("\nUser: ")
    if query == "stop":
        break
    if query == "clear":
        history = []
        os.system('clear')
        continue
    response, history = model.chat(tokenizer, query, history=history)
    print(f"ChatGLM-6B: {response}")