Add support for accessing Volcengine online LLMs (#2165)
* use oai adaptive bridge function to handle vol engine
* add vol engine deepseek v3

---------

Co-authored-by: binary-husky <qingxu.fu@outlook.com>
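The idea behind the "adaptive bridge" is to reuse the project's generic OpenAI-style request builder (`get_predict_function`, changed in the hunks below) rather than writing a dedicated Volcengine client, since Volcengine's service speaks the OpenAI chat-completions protocol. A minimal sketch of how a Volcengine model might be wired through it; the config name `ARK_API_KEY`, the token budget, and the two-function return value are assumptions for illustration, not taken from this diff:

```python
# Hypothetical wiring of Volcengine through the shared OpenAI-style bridge.
# Only get_predict_function and model_remove_prefix appear in the diff below;
# everything else here is an assumed example.
predict_no_ui_long_connection, predict = get_predict_function(
    api_key_conf_name="ARK_API_KEY",      # assumed config entry holding the Volcengine key
    max_output_token=4096,                # assumed output budget
    disable_proxy=False,
    model_remove_prefix=["volcengine-"],  # strip the routing prefix before calling the API
)
```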
@@ -57,7 +57,7 @@ def decode_chunk(chunk):
             finish_reason = chunk["error"]["code"]
         except:
             finish_reason = "API_ERROR"
-        return response, reasoning_content, finish_reason
+        return response, reasoning_content, finish_reason, str(chunk)
 
     try:
         if chunk["choices"][0]["delta"]["content"] is not None:
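This first hunk widens the error-path return of `decode_chunk` from a 3-tuple to a 4-tuple: `str(chunk)` is appended so the caller can surface the provider's raw error payload rather than just an error code. A runnable sketch of that branch in isolation (the surrounding stream-parsing code is assumed away):

```python
def decode_error_branch(chunk: dict):
    # Mirrors the error branch above: pull a provider error code if present,
    # fall back to a generic marker, and hand back the raw payload as text.
    response, reasoning_content = "", ""
    try:
        finish_reason = chunk["error"]["code"]
    except Exception:
        finish_reason = "API_ERROR"
    return response, reasoning_content, finish_reason, str(chunk)

print(decode_error_branch({"error": {"code": "RateLimitExceeded"}}))
# -> ('', '', 'RateLimitExceeded', "{'error': {'code': 'RateLimitExceeded'}}")
```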
@@ -122,7 +122,8 @@ def generate_message(input, model, key, history, max_output_token, system_prompt
 def get_predict_function(
     api_key_conf_name,
     max_output_token,
-    disable_proxy = False
+    disable_proxy = False,
+    model_remove_prefix = [],
 ):
     """
     Generate response functions for an OpenAI-format API. Parameters:
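One detail worth noting in the new signature: `model_remove_prefix = []` is a mutable default, which Python evaluates once and shares across all calls. The helper added below only reads it, so this is harmless here, but the conventional defensive spelling would be:

```python
def get_predict_function(
    api_key_conf_name,
    max_output_token,
    disable_proxy=False,
    model_remove_prefix=None,  # None sentinel instead of a shared [] instance
):
    # Normalize the sentinel to an empty list before use.
    model_remove_prefix = model_remove_prefix or []
    ...
```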
@@ -137,6 +138,16 @@ def get_predict_function(
 
     APIKEY = get_conf(api_key_conf_name)
 
+    def remove_prefix(model_name):
+        # Strip the model-name prefix: e.g. "volcengine-deepseek-r1-250120" becomes "deepseek-r1-250120"
+        if not model_remove_prefix:
+            return model_name
+        model_without_prefix = model_name
+        for prefix in model_remove_prefix:
+            if model_without_prefix.startswith(prefix):
+                model_without_prefix = model_without_prefix[len(prefix):]
+        return model_without_prefix
+
     def predict_no_ui_long_connection(
         inputs,
         llm_kwargs,
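Because the helper loops over every configured prefix, each matching prefix is stripped in turn, and names without a match pass through untouched. Reproducing the helper above as a standalone demo:

```python
model_remove_prefix = ["volcengine-"]

def remove_prefix(model_name):
    # Same logic as the hunk above, closed over the list configured at setup time.
    if not model_remove_prefix:
        return model_name
    model_without_prefix = model_name
    for prefix in model_remove_prefix:
        if model_without_prefix.startswith(prefix):
            model_without_prefix = model_without_prefix[len(prefix):]
    return model_without_prefix

print(remove_prefix("volcengine-deepseek-r1-250120"))  # -> deepseek-r1-250120
print(remove_prefix("gpt-4o"))                         # no prefix match -> gpt-4o
```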
@@ -164,9 +175,11 @@ def get_predict_function(
             raise RuntimeError(f"APIKEY is empty, please check {APIKEY} in the config file")
         if inputs == "":
             inputs = "Hello👋"
+
+
         headers, payload = generate_message(
             input=inputs,
-            model=llm_kwargs["llm_model"],
+            model=remove_prefix(llm_kwargs["llm_model"]),
             key=APIKEY,
             history=history,
             max_output_token=max_output_token,
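Presumably the prefixed name (`volcengine-deepseek-r1-250120` in the helper's comment) stays as the user-facing model id used for routing inside the app, and `remove_prefix` is applied only here, at request-building time, so the upstream API receives the bare model id it expects.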
@@ -302,7 +315,7 @@ def get_predict_function(
 
         headers, payload = generate_message(
             input=inputs,
-            model=llm_kwargs["llm_model"],
+            model=remove_prefix(llm_kwargs["llm_model"]),
             key=APIKEY,
             history=history,
             max_output_token=max_output_token,
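The final hunk applies the same substitution at the second `generate_message` call site (shifted from line 302 to 315 by the inserted helper), presumably the streaming counterpart of `predict_no_ui_long_connection`, so both request builders send the stripped model name.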