ChatGLM-6B/cli_demo.py

import os
import platform
from transformers import AutoTokenizer, AutoModel
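
# Load the ChatGLM-6B tokenizer and model weights from the Hugging Face Hub.
# .half().cuda() runs the model in fp16 on the GPU; this assumes a CUDA-capable
# device with enough free memory is available.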
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
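# Put the model in evaluation mode (disables dropout) before generation.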
model = model.eval()
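
# Record the host OS so the right clear-screen command ('cls' on Windows,
# 'clear' elsewhere) can be issued when the user resets the conversation.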
os_name = platform.system()
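# Running conversation history, passed back into model.chat on every turn.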
history = []
print("欢迎使用 ChatGLM-6B 模型输入内容即可进行对话clear 清空对话历史stop 终止程序")
while True:
    query = input("\nUser: ")
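    # 'stop' ends the session; 'clear' wipes the history and clears the terminal.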
if query == "stop":
break
if query == "clear":
history = []
command = 'cls' if os_name == 'Windows' else 'clear'
        os.system(command)
        print("Welcome to the ChatGLM-6B model. Type your message to chat, enter 'clear' to reset the conversation history, or 'stop' to exit the program.")
        continue
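    # Generate a reply conditioned on the running history; model.chat returns the
    # response text and the updated history.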
    response, history = model.chat(tokenizer, query, history=history)
print(f"ChatGLM-6B{response}")