diff --git a/config.py b/config.py
index 6353cb8a..0ddf715d 100644
--- a/config.py
+++ b/config.py
@@ -7,9 +7,11 @@
 Configuration reading priority: environment variable > config_private.py > config.py
 """
 
-# [step 1]>> API_KEY = "sk-123456789xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx123456789". In rare cases you also need to fill in an organization (format: org-123456789abcdefghijklmno); scroll down and look for the API_ORG setting.
-API_KEY = "此处填API密钥"    # Several API keys can be provided at once, separated by commas, e.g. API_KEY = "sk-openaikey1,sk-openaikey2,fkxxxx-api2dkey3,azure-apikey4"
+# [step 1-1]>> (for GPT-family models) API_KEY = "sk-123456789xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx123456789". In rare cases you also need to fill in an organization (format: org-123456789abcdefghijklmno); scroll down and look for the API_ORG setting.
+API_KEY = "在此处填写APIKEY"    # Several API keys can be provided at once, separated by commas, e.g. API_KEY = "sk-openaikey1,sk-openaikey2,fkxxxx-api2dkey3,azure-apikey4"
 
+# [step 1-2]>> (Chinese-native large models are recommended!) Connect to the Tongyi Qianwen (Qwen) online model; an api-key can be obtained at https://dashscope.console.aliyun.com/
+DASHSCOPE_API_KEY = "" # Alibaba DashScope API_KEY
 
 # [step 2]>> Set to True to apply the proxy. Leave unchanged when deploying directly on an overseas server; it also does not need to be changed when using a local or region-unrestricted model.
 USE_PROXY = False
@@ -32,7 +34,8 @@ else:
 
 # [step 3]>> Model selection (note: LLM_MODEL is the model selected by default; it *must* be included in the AVAIL_LLM_MODELS list)
 LLM_MODEL = "gpt-3.5-turbo-16k" # options ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview",
+AVAIL_LLM_MODELS = ["qwen-max", "o1-mini", "o1-mini-2024-09-12", "o1", "o1-2024-12-17", "o1-preview", "o1-preview-2024-09-12",
+                    "gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview",
                     "gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
                     "gpt-3.5-turbo-1106", "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "glm-4", "glm-4v", "glm-3-turbo",
@@ -47,7 +50,7 @@ EMBEDDING_MODEL = "text-embedding-3-small"
 # "glm-4-0520", "glm-4-air", "glm-4-airx", "glm-4-flash",
 # "qianfan", "deepseekcoder",
 # "spark", "sparkv2", "sparkv3", "sparkv3.5", "sparkv4",
-# "qwen-turbo", "qwen-plus", "qwen-max", "qwen-local",
+# "qwen-turbo", "qwen-plus", "qwen-local",
 # "moonshot-v1-128k", "moonshot-v1-32k", "moonshot-v1-8k",
 # "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613", "gpt-3.5-turbo-0125", "gpt-4o-2024-05-13"
 # "claude-3-haiku-20240307","claude-3-sonnet-20240229","claude-3-opus-20240229", "claude-2.1", "claude-instant-1.2",
@@ -133,10 +136,6 @@ MULTI_QUERY_LLM_MODELS = "gpt-3.5-turbo&chatglm3"
 QWEN_LOCAL_MODEL_SELECTION = "Qwen/Qwen-1_8B-Chat-Int8"
 
 
-# Connect to the Tongyi Qianwen (Qwen) online model https://dashscope.console.aliyun.com/
-DASHSCOPE_API_KEY = "" # Alibaba DashScope API_KEY
-
-
 # Baidu Qianfan (LLM_MODEL="qianfan")
 BAIDU_CLOUD_API_KEY = ''
 BAIDU_CLOUD_SECRET_KEY = ''
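
Usage note for this patch: the config rules it relies on (settings resolved with priority environment variable > config_private.py > config.py, several API keys packed into one comma-separated API_KEY string, and LLM_MODEL required to appear in AVAIL_LLM_MODELS) can be sanity-checked with a short script. This is a minimal sketch under those assumptions; the `load_conf` helper below is a hypothetical stand-in for illustration, not the project's actual configuration loader.

```python
import importlib
import os


def load_conf(name, default=None):
    """Hypothetical helper: resolve one setting using the priority documented
    in config.py (environment variable > config_private.py > config.py)."""
    # 1. An environment variable, if set, overrides everything else.
    if name in os.environ:
        return os.environ[name]
    # 2. Otherwise use config_private.py, if the user created one.
    try:
        private = importlib.import_module("config_private")
        if hasattr(private, name):
            return getattr(private, name)
    except ImportError:
        pass
    # 3. Finally fall back to config.py itself.
    return getattr(importlib.import_module("config"), name, default)


if __name__ == "__main__":
    # Several keys may share one string, separated by commas (see the API_KEY comment).
    keys = [k.strip() for k in str(load_conf("API_KEY", "")).split(",") if k.strip()]
    print(f"{len(keys)} API key(s) configured")

    # LLM_MODEL *must* be contained in AVAIL_LLM_MODELS (see the [step 3] comment).
    llm_model = load_conf("LLM_MODEL")
    avail = load_conf("AVAIL_LLM_MODELS", [])
    assert llm_model in avail, f"{llm_model!r} is not listed in AVAIL_LLM_MODELS"
```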