From ae56cab6f47db1570ad492228f569255b3742234 Mon Sep 17 00:00:00 2001
From: Your Name
Date: Wed, 19 Apr 2023 18:07:32 +0800
Subject: [PATCH] huggingface

---
 README.md                             | 16 ++++++++++++----
 config.py                             |  6 +++---
 crazy_functions/询问多个大语言模型.py |  4 ++--
 main.py                               |  5 +++--
 4 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/README.md b/README.md
index d0daf1a6..f2473b62 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,16 @@
+---
+title: academic-chatgpt
+emoji: 😻
+colorFrom: blue
+colorTo: blue
+sdk: gradio
+sdk_version: 3.25.0
+python_version: 3.11
+app_file: main.py
+pinned: false
+---
-
-# ChatGPT 学术优化
+# ChatGPT 学术优化
 
 **如果喜欢这个项目,请给它一个Star;如果你发明了更好用的快捷键或函数插件,欢迎发issue或者pull requests**
 
@@ -276,8 +286,6 @@ docker run --rm -it --net=host --gpus=all gpt-academic bash
 - version 2.0: 引入模块化函数插件
 - version 1.0: 基础功能
 
-chatgpt_academic开发者QQ群:734063350
-
 ## 参考与学习
 
 ```
diff --git a/config.py b/config.py
index e24c68b6..5cb03fb4 100644
--- a/config.py
+++ b/config.py
@@ -43,9 +43,9 @@ WEB_PORT = -1
 # 如果OpenAI不响应(网络卡顿、代理失败、KEY失效),重试的次数限制
 MAX_RETRY = 2
 
-# OpenAI模型选择是(gpt4现在只对申请成功的人开放,体验gpt-4可以试试api2d)
-LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "chatglm"]
+# OpenAI模型选择是(gpt4现在只对申请成功的人开放)
+LLM_MODEL = "gpt-3.5-turbo" # 可选 "chatglm"
+AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "api2d-gpt-3.5-turbo"]
 
 # 本地LLM模型如ChatGLM的执行方式 CPU/GPU
 LOCAL_MODEL_DEVICE = "cpu" # 可选 "cuda"
diff --git a/crazy_functions/询问多个大语言模型.py b/crazy_functions/询问多个大语言模型.py
index fb781458..c28f2aae 100644
--- a/crazy_functions/询问多个大语言模型.py
+++ b/crazy_functions/询问多个大语言模型.py
@@ -13,11 +13,11 @@ def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt
     web_port        当前软件运行的端口号
     """
     history = []    # 清空历史,以免输入溢出
-    chatbot.append((txt, "正在同时咨询ChatGPT和ChatGLM……"))
+    chatbot.append((txt, "正在同时咨询gpt-3.5和gpt-4……"))
     yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 由于请求gpt需要一段时间,我们先及时地做一次界面更新
 
     # llm_kwargs['llm_model'] = 'chatglm&gpt-3.5-turbo&api2d-gpt-3.5-turbo' # 支持任意数量的llm接口,用&符号分隔
-    llm_kwargs['llm_model'] = 'chatglm&gpt-3.5-turbo' # 支持任意数量的llm接口,用&符号分隔
+    llm_kwargs['llm_model'] = 'gpt-3.5-turbo&gpt-4' # 支持任意数量的llm接口,用&符号分隔
     gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
         inputs=txt, inputs_show_user=txt,
         llm_kwargs=llm_kwargs, chatbot=chatbot, history=history,
diff --git a/main.py b/main.py
index 2ee64a97..0e722650 100644
--- a/main.py
+++ b/main.py
@@ -53,6 +53,7 @@ def main():
     cancel_handles = []
     with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=False, css=advanced_css) as demo:
         gr.HTML(title_html)
+        gr.HTML('''
+            请您打开此页面后务必点击上方的“复制空间”(Duplicate Space)按钮!使用时,先在输入框填入API-KEY然后回车。
+            切忌在“复制空间”(Duplicate Space)之前填入API_KEY或进行提问,否则您的API_KEY将极可能被空间所有者攫取!
+            支持任意数量的OpenAI的密钥和API2D的密钥共存,例如输入"OpenAI密钥1,API2D密钥2",然后提交,即可同时使用两种模型接口。
+        ''')
         cookies = gr.State({'api_key': API_KEY, 'llm_model': LLM_MODEL})
         with gr_L1():
             with gr_L2(scale=2):
@@ -62,7 +63,7 @@ def main():
             with gr_L2(scale=1):
                 with gr.Accordion("输入区", open=True) as area_input_primary:
                     with gr.Row():
-                        txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False)
+                        txt = gr.Textbox(show_label=False, lines=2, placeholder="输入问题或API密钥,输入多个密钥时,用英文逗号间隔。支持OpenAI密钥和API2D密钥共存。").style(container=False)
                     with gr.Row():
                         submitBtn = gr.Button("提交", variant="primary")
                     with gr.Row():
@@ -183,7 +184,7 @@ def main():
         threading.Thread(target=warm_up_modules, name="warm-up", daemon=True).start()
 
     auto_opentab_delay()
-    demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION, favicon_path="docs/logo.png")
+    demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=False, favicon_path="docs/logo.png")
 
 if __name__ == "__main__":
     main()
\ No newline at end of file
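
Note on the `&` convention changed in `crazy_functions/询问多个大语言模型.py`: the plugin packs several backend names into a single `llm_model` string and the bridge layer queries each of them in parallel. The snippet below is only a minimal sketch of that idea, not the repository's actual code; `ask_one(model, prompt)` is a hypothetical callable standing in for the project's `request_gpt_model_*` machinery.

```python
# Minimal sketch, not the repository's actual implementation.
# `ask_one(model, prompt)` is a hypothetical stand-in for the real request helper.
from concurrent.futures import ThreadPoolExecutor

def ask_all(prompt, ask_one, llm_model="gpt-3.5-turbo&gpt-4"):
    """Send one prompt to every model named in an '&'-separated string."""
    models = llm_model.split('&')                        # e.g. ["gpt-3.5-turbo", "gpt-4"]
    with ThreadPoolExecutor(max_workers=len(models)) as pool:
        futures = {m: pool.submit(ask_one, m, prompt) for m in models}
    # One reply per model, keyed by model name.
    return {m: f.result() for m, f in futures.items()}
```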
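The new textbox placeholder in `main.py` tells users they may paste several API keys at once, separated by English commas, mixing OpenAI and API2D keys. A rough sketch of how such a string could be split and routed follows; the prefix checks are assumptions for illustration, not the repository's exact validation logic.

```python
# Rough illustration only; the prefix tests are assumptions, not the
# repository's exact key-validation rules.
def split_keys(raw: str):
    """Split a comma-separated key string into OpenAI-style and API2D-style keys."""
    keys = [k.strip() for k in raw.split(',') if k.strip()]
    openai_keys = [k for k in keys if k.startswith('sk-')]   # assumed OpenAI prefix
    api2d_keys = [k for k in keys if k.startswith('fk')]     # assumed API2D prefix
    return openai_keys, api2d_keys

# Example: split_keys("sk-xxxx, fkyyyy") -> (["sk-xxxx"], ["fkyyyy"])
```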
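The `launch()` change in `main.py` drops the hard-coded `server_port` and `auth` arguments because Hugging Face Spaces assigns the externally visible port and serves the app itself; the README front-matter's `app_file: main.py` tells Spaces which script to run. Below is a minimal stand-alone sketch of such a Spaces-friendly launch; the Blocks body and the `CONCURRENT_COUNT` value are placeholders, not the project's real UI or config.

```python
# Minimal stand-alone sketch; the UI body and CONCURRENT_COUNT are placeholders.
import gradio as gr

CONCURRENT_COUNT = 3  # assumed value; the real one comes from config.py

with gr.Blocks(title="ChatGPT 学术优化") as demo:
    gr.Markdown("placeholder UI")

# Spaces chooses the public port and handles access itself,
# so no server_port/auth arguments are passed here.
demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=False)
```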