Fix response message bug in bridge_qianfan.py, bridge_qwen.py, and bridge_skylark2.py
binary-husky
2024-02-15 00:02:24 +08:00
parent 2e9b4a5770
commit e359fff040
4 changed files with 9 additions and 10 deletions


@@ -9,7 +9,7 @@ model_name = '星火认知大模型'
def validate_key():
    XFYUN_APPID = get_conf('XFYUN_APPID')
    if XFYUN_APPID == '00000000' or XFYUN_APPID == '':
        return False
    return True
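
The hunk above touches validate_key(), which refuses to proceed when XFYUN_APPID is empty or still the '00000000' placeholder. The snippet below is a minimal, self-contained sketch (hypothetical names, not the repository's code) of how such a check typically gates a streaming predict function, so the user sees a configuration hint instead of a failed request:

    def validate_key(appid: str) -> bool:
        # Reject empty or placeholder credentials before any network call.
        return appid not in ('', '00000000')

    def predict(inputs, appid):
        if not validate_key(appid):
            # Surface a readable hint instead of attempting the request.
            yield "[Local Message] XFYUN_APPID is not configured."
            return
        yield f"requesting with {inputs!r} ..."  # normal streaming path would follow here
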
@@ -49,9 +49,10 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
    from core_functional import handle_core_functionality
    inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
    # 开始接收回复 (start receiving the reply)
    from .com_sparkapi import SparkRequestInstance
    sri = SparkRequestInstance()
    response = f"[Local Message] 等待{model_name}响应中 ..."
    for response in sri.generate(inputs, llm_kwargs, history, system_prompt, use_image_api=True):
        chatbot[-1] = (inputs, response)
        yield from update_ui(chatbot=chatbot, history=history)
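
For context, the "response message bug" named in the commit title matches a common pitfall with streaming loops like the one above: if the generator yields nothing, the loop variable response is never bound, and any later use of it fails or shows a stale value. Seeding response with a waiting message before the loop avoids that. The sketch below (hypothetical names, not the repository's code) contrasts the broken and the seeded variant:

    def stream_reply(chunks):
        # Stands in for a model backend; may yield zero chunks on failure.
        yield from chunks

    def buggy(chunks):
        for response in stream_reply(chunks):
            pass
        return response                      # NameError when chunks is empty

    def fixed(chunks, waiting_msg="[Local Message] waiting for the model ..."):
        response = waiting_msg               # seed a placeholder so response always exists
        for response in stream_reply(chunks):
            pass
        return response                      # placeholder if nothing arrived, else the last chunk

    print(fixed(["partial", "full reply"]))  # -> "full reply"
    print(fixed([]))                         # -> the waiting placeholder
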