Add an Array for Azure configurations

This commit is contained in:
binary-husky
2023-10-27 23:29:18 +08:00
parent d0589209cc
commit 103d05d242
4 changed files with 106 additions and 29 deletions


@@ -48,10 +48,12 @@ class LazyloadTiktoken(object):
        return encoder.decode(*args, **kwargs)
# Endpoint redirection
API_URL_REDIRECT, = get_conf("API_URL_REDIRECT")
API_URL_REDIRECT, AZURE_ENDPOINT, AZURE_ENGINE = get_conf("API_URL_REDIRECT", "AZURE_ENDPOINT", "AZURE_ENGINE")
openai_endpoint = "https://api.openai.com/v1/chat/completions"
api2d_endpoint = "https://openai.api2d.net/v1/chat/completions"
newbing_endpoint = "wss://sydney.bing.com/sydney/ChatHub"
if not AZURE_ENDPOINT.endswith('/'): AZURE_ENDPOINT += '/'
azure_endpoint = AZURE_ENDPOINT + f'openai/deployments/{AZURE_ENGINE}/chat/completions?api-version=2023-05-15'
# Backward compatibility with the legacy config
try:
    API_URL, = get_conf("API_URL")
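For illustration, assuming placeholder values for AZURE_ENDPOINT and AZURE_ENGINE (any real Azure resource URL and deployment name will differ), the endpoint constructed above expands as follows:

# Illustrative sketch with placeholder values -- a real config uses your own resource URL and deployment name
AZURE_ENDPOINT = "https://your-resource-name.openai.azure.com/"
AZURE_ENGINE = "gpt-35-turbo-deployment"
if not AZURE_ENDPOINT.endswith('/'): AZURE_ENDPOINT += '/'
azure_endpoint = AZURE_ENDPOINT + f'openai/deployments/{AZURE_ENGINE}/chat/completions?api-version=2023-05-15'
# azure_endpoint == "https://your-resource-name.openai.azure.com/openai/deployments/gpt-35-turbo-deployment/chat/completions?api-version=2023-05-15"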
@@ -141,6 +143,25 @@ model_info = {
"tokenizer": tokenizer_gpt4,
"token_cnt": get_token_num_gpt4,
},
# azure openai
"azure-gpt-3.5":{
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": azure_endpoint,
"max_token": 4096,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
"azure-gpt-4":{
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": azure_endpoint,
"max_token": 8192,
"tokenizer": tokenizer_gpt35,
"token_cnt": get_token_num_gpt35,
},
# api_2d
"api2d-gpt-3.5-turbo": {
@@ -187,21 +208,6 @@ model_info = {
"token_cnt": get_token_num_gpt35,
},
}
# Azure multi-model support
AZURE_ENDPOINT, AZURE_ENGINE_DICT, AZURE_URL_VERSION = get_conf('AZURE_ENDPOINT', 'AZURE_ENGINE_DICT', 'AZURE_URL_VERSION')
for azure in AZURE_ENGINE_DICT:
    if not AZURE_ENDPOINT.endswith('/'): AZURE_ENDPOINT += '/'
    azure_endpoint = AZURE_ENDPOINT + str(AZURE_URL_VERSION).replace('{v}', azure)
    model_info.update({
        f"azure-{azure}": {
            "fn_with_ui": chatgpt_ui,
            "fn_without_ui": chatgpt_noui,
            "endpoint": azure_endpoint,
            "max_token": AZURE_ENGINE_DICT[azure],
            "tokenizer": tokenizer_gpt35,
            "token_cnt": get_token_num_gpt35,
        },
    })
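The removed loop above read its per-deployment settings from AZURE_ENGINE_DICT and an AZURE_URL_VERSION template whose '{v}' placeholder was substituted with the deployment name. A hypothetical config of that older shape (placeholder values; the real template may have differed) would have looked like:

# Hypothetical legacy-style config consumed by the removed loop (placeholder values only)
AZURE_ENDPOINT = "https://your-resource-name.openai.azure.com/"
AZURE_ENGINE_DICT = {"gpt-35-turbo-deployment": 4096, "gpt-4-deployment": 8192}   # deployment name -> max_token
AZURE_URL_VERSION = "openai/deployments/{v}/chat/completions?api-version=2023-05-15"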
# -=-=-=-=-=-=- The models below were newly added and may carry extra dependencies -=-=-=-=-=-=-
if "claude-1-100k" in AVAIL_LLM_MODELS or "claude-2" in AVAIL_LLM_MODELS:
@@ -469,6 +475,30 @@ if "llama2" in AVAIL_LLM_MODELS: # llama2
    except:
        print(trimmed_format_exc())
# <-- used to define and switch between multiple Azure models -->
AZURE_CFG_ARRAY, = get_conf("AZURE_CFG_ARRAY")
if len(AZURE_CFG_ARRAY) > 0:
    for azure_model_name, azure_cfg_dict in AZURE_CFG_ARRAY.items():
        # this may overwrite an earlier configuration, but that is expected
        if not azure_model_name.startswith('azure'):
            raise ValueError("Models configured in AZURE_CFG_ARRAY must have names starting with 'azure'")
        endpoint_ = azure_cfg_dict["AZURE_ENDPOINT"] + \
            f'openai/deployments/{azure_cfg_dict["AZURE_ENGINE"]}/chat/completions?api-version=2023-05-15'
        model_info.update({
            azure_model_name: {
                "fn_with_ui": chatgpt_ui,
                "fn_without_ui": chatgpt_noui,
                "endpoint": endpoint_,
                "azure_api_key": azure_cfg_dict["AZURE_API_KEY"],
                "max_token": azure_cfg_dict["AZURE_MODEL_MAX_TOKEN"],
                "tokenizer": tokenizer_gpt35,   # the tokenizer is only used to roughly estimate the token count
                "token_cnt": get_token_num_gpt35,
            }
        })
        if azure_model_name not in AVAIL_LLM_MODELS:
            AVAIL_LLM_MODELS += [azure_model_name]
def LLM_CATCH_EXCEPTION(f):
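The loop added above expects AZURE_CFG_ARRAY to map model names that start with "azure" to per-model settings (AZURE_ENDPOINT, AZURE_API_KEY, AZURE_ENGINE, AZURE_MODEL_MAX_TOKEN). A minimal sketch of such a config, with placeholder endpoints, keys, and deployment names, could be:

# Sketch of an AZURE_CFG_ARRAY config (placeholder values only)
AZURE_CFG_ARRAY = {
    "azure-gpt-3.5": {   # the model name must start with "azure"
        "AZURE_ENDPOINT": "https://your-first-resource.openai.azure.com/",
        "AZURE_API_KEY": "cccccccccccccccccccccccccccccccc",
        "AZURE_ENGINE": "gpt-35-turbo-deployment",
        "AZURE_MODEL_MAX_TOKEN": 4096,
    },
    "azure-gpt-4": {     # each model may point at a different Azure resource and key
        "AZURE_ENDPOINT": "https://your-second-resource.openai.azure.com/",
        "AZURE_API_KEY": "dddddddddddddddddddddddddddddddd",
        "AZURE_ENGINE": "gpt-4-deployment",
        "AZURE_MODEL_MAX_TOKEN": 8192,
    },
}

Note that, unlike the single-endpoint path earlier in the file, this loop concatenates AZURE_ENDPOINT directly without adding a trailing slash, so the configured value should already end with '/'.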


@@ -23,8 +23,8 @@ import random
# config_private.py holds your own secrets, such as API keys and proxy URLs
# When the config is read, a private config_private file (not tracked by git) is checked first; if it exists, it overrides the original config file
from toolbox import get_conf, update_ui, is_any_api_key, select_api_key, what_keys, clip_history, trimmed_format_exc, is_the_upload_folder
proxies, TIMEOUT_SECONDS, MAX_RETRY, API_ORG = \
    get_conf('proxies', 'TIMEOUT_SECONDS', 'MAX_RETRY', 'API_ORG')
proxies, TIMEOUT_SECONDS, MAX_RETRY, API_ORG, AZURE_CFG_ARRAY = \
    get_conf('proxies', 'TIMEOUT_SECONDS', 'MAX_RETRY', 'API_ORG', 'AZURE_CFG_ARRAY')
timeout_bot_msg = '[Local Message] Request timeout. Network error. Please check proxy settings in config.py.' + \
                  'Network error: check whether the proxy server is reachable and whether the proxy is configured in the required [protocol]://[address]:[port] format; every part is mandatory.'
@@ -289,7 +289,11 @@ def generate_payload(inputs, llm_kwargs, history, system_prompt, stream):
"Authorization": f"Bearer {api_key}"
}
if API_ORG.startswith('org-'): headers.update({"OpenAI-Organization": API_ORG})
if llm_kwargs['llm_model'].startswith('azure-'): headers.update({"api-key": api_key})
if llm_kwargs['llm_model'].startswith('azure-'):
headers.update({"api-key": api_key})
if llm_kwargs['llm_model'] in AZURE_CFG_ARRAY.keys():
azure_api_key_unshared = AZURE_CFG_ARRAY[llm_kwargs['llm_model']]["AZURE_API_KEY"]
headers.update({"api-key": azure_api_key_unshared})
conversation_cnt = len(history) // 2
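For illustration, a self-contained sketch of the header selection above, using a hypothetical model name and placeholder keys: a model that appears in AZURE_CFG_ARRAY gets its own unshared key written into the "api-key" header, overriding the shared one.

# Sketch only: mirrors the branch above with placeholder data
AZURE_CFG_ARRAY = {"azure-gpt-4": {"AZURE_API_KEY": "dddddddddddddddddddddddddddddddd"}}  # placeholder
llm_model = "azure-gpt-4"          # hypothetical value of llm_kwargs['llm_model']
api_key = "shared-azure-api-key"   # placeholder shared key
headers = {"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}
if llm_model.startswith('azure-'):
    headers.update({"api-key": api_key})   # default: shared key
    if llm_model in AZURE_CFG_ARRAY.keys():
        headers.update({"api-key": AZURE_CFG_ARRAY[llm_model]["AZURE_API_KEY"]})  # per-model key wins
print(headers["api-key"])  # -> the per-model key from AZURE_CFG_ARRAY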