接入新模型 (Integrate new model)

Author: binary-husky
Date: 2023-10-28 19:23:43 +08:00
Parent: cf085565a7
Commit: 127385b846
18 changed files with 253 additions and 40 deletions


@@ -155,13 +155,13 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         history_feedin.append([history[2*i], history[2*i+1]] )

     # Start receiving the reply from chatglm
-    response = "[Local Message]: 等待ChatGLM响应中 ..."
+    response = "[Local Message] 等待ChatGLM响应中 ..."
     for response in glm_handle.stream_chat(query=inputs, history=history_feedin, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)

     # Summarize the output
-    if response == "[Local Message]: 等待ChatGLM响应中 ...":
-        response = "[Local Message]: ChatGLM响应异常 ..."
+    if response == "[Local Message] 等待ChatGLM响应中 ...":
+    if response == "[Local Message] 等待ChatGLM响应中 ...":
+        response = "[Local Message] ChatGLM响应异常 ..."
     history.extend([inputs, response])
     yield from update_ui(chatbot=chatbot, history=history)
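The hunk depends on exact string equality: `response` is initialized to a waiting-message sentinel, and after the stream ends it is compared against that same literal to detect that the model never answered. If the assignment and the comparison drift apart (here, over nothing more than a colon in the prefix), the failure branch silently stops firing, which is why the commit has to touch all three literals at once. Below is a minimal, self-contained sketch of this sentinel pattern; `fake_stream_chat` and `predict_sketch` are hypothetical stand-ins for glm_handle.stream_chat and the real predict, not part of the project.

    # Sketch of the sentinel-string pattern used in the hunk above.
    # `fake_stream_chat` is a hypothetical stand-in for glm_handle.stream_chat.
    WAITING = "[Local Message] 等待ChatGLM响应中 ..."
    FAILED  = "[Local Message] ChatGLM响应异常 ..."

    def fake_stream_chat(query, chunks=()):
        # Pretend to stream a reply chunk by chunk; an empty `chunks`
        # simulates a model that never responds.
        for partial in chunks:
            yield partial

    def predict_sketch(query):
        response = WAITING                 # sentinel: "no reply arrived yet"
        for response in fake_stream_chat(query):
            pass                           # the real code repaints the chatbot UI here
        if response == WAITING:            # loop never ran -> report the failure
            response = FAILED
        return response

    print(predict_sketch("hello"))         # -> "[Local Message] ChatGLM响应异常 ..."

Naming the literal once (WAITING above) keeps the assignment and the comparison from diverging, which is exactly the class of mismatch this commit had to clean up by hand in three places.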