diff --git a/core_functional.py b/core_functional.py
index 0d773ca0..4a476c60 100644
--- a/core_functional.py
+++ b/core_functional.py
@@ -19,7 +19,7 @@ def get_core_functions():
             # Whether the button is visible (default True, i.e. visible)
             "Visible": True,
             # Whether to clear the history when triggered (default False, i.e. keep the previous conversation history)
-            "AutoClearHistory": True
+            "AutoClearHistory": False
         },
         "中文学术润色": {
             "Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
@@ -83,11 +83,12 @@ def get_core_functions():
     }
 
 
-def handle_core_functionality(additional_fn, inputs, history):
+def handle_core_functionality(additional_fn, inputs, history, chatbot):
     import core_functional
     importlib.reload(core_functional)    # hot-reload the prompts
     core_functional = core_functional.get_core_functions()
     if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # get the pre-processing function (if any)
     inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
-    history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history
+    if core_functional[additional_fn].get("AutoClearHistory", False):
+        history = []
     return inputs, history
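In core_functional.py this lands two behavioral changes: triggering a core function no longer wipes the conversation by default (`AutoClearHistory` flips back to `False`), and `handle_core_functionality()` gains a fourth parameter, `chatbot`, which every bridge below now forwards (it is not used in the body shown here, only threaded through). A minimal sketch of the new control flow; the stub registry entry `"demo_fn"` and its values are illustrative stand-ins for the real `get_core_functions()` table:

```python
# Stub registry standing in for get_core_functions(); the entry name
# and values are illustrative, not from the repo.
core_functional = {
    "demo_fn": {
        "Prefix": "Polish the following text: ",
        "Suffix": "",
        "AutoClearHistory": False,  # new default: keep prior history
    },
}

def handle_core_functionality(additional_fn, inputs, history, chatbot):
    fn = core_functional[additional_fn]
    if "PreProcess" in fn:                 # optional pre-processing hook
        inputs = fn["PreProcess"](inputs)
    inputs = fn["Prefix"] + inputs + fn["Suffix"]
    if fn.get("AutoClearHistory", False):  # clear only when opted in
        history = []
    return inputs, history                 # chatbot is only passed through

inputs, history = handle_core_functionality(
    "demo_fn", "some draft text", history=["q", "a"], chatbot=None)
assert history == ["q", "a"]  # history survives under the new default
```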
diff --git a/request_llm/bridge_chatglm.py b/request_llm/bridge_chatglm.py
index 0fe557c2..6dac8639 100644
--- a/request_llm/bridge_chatglm.py
+++ b/request_llm/bridge_chatglm.py
@@ -145,7 +145,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_chatglmft.py b/request_llm/bridge_chatglmft.py
index dd5d1e91..4e21c989 100644
--- a/request_llm/bridge_chatglmft.py
+++ b/request_llm/bridge_chatglmft.py
@@ -186,7 +186,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index d2a52400..ea48fbaf 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -130,7 +130,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     raw_input = inputs
     logging.info(f'[raw_input] {raw_input}')
diff --git a/request_llm/bridge_claude.py b/request_llm/bridge_claude.py
index e17b0419..6084b1f1 100644
--- a/request_llm/bridge_claude.py
+++ b/request_llm/bridge_claude.py
@@ -117,7 +117,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     raw_input = inputs
     logging.info(f'[raw_input] {raw_input}')
diff --git a/request_llm/bridge_internlm.py b/request_llm/bridge_internlm.py
index 6b8b7526..a0ba3bab 100644
--- a/request_llm/bridge_internlm.py
+++ b/request_llm/bridge_internlm.py
@@ -291,7 +291,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_jittorllms_llama.py b/request_llm/bridge_jittorllms_llama.py
index 552db0f8..d4853578 100644
--- a/request_llm/bridge_jittorllms_llama.py
+++ b/request_llm/bridge_jittorllms_llama.py
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_jittorllms_pangualpha.py b/request_llm/bridge_jittorllms_pangualpha.py
index 4f937d4c..20a30213 100644
--- a/request_llm/bridge_jittorllms_pangualpha.py
+++ b/request_llm/bridge_jittorllms_pangualpha.py
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_jittorllms_rwkv.py b/request_llm/bridge_jittorllms_rwkv.py
index b0e41afd..ee4f592f 100644
--- a/request_llm/bridge_jittorllms_rwkv.py
+++ b/request_llm/bridge_jittorllms_rwkv.py
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_moss.py b/request_llm/bridge_moss.py
index c4c8142f..3c6217d2 100644
--- a/request_llm/bridge_moss.py
+++ b/request_llm/bridge_moss.py
@@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     # Process the conversation history
     history_feedin = []
diff --git a/request_llm/bridge_newbingfree.py b/request_llm/bridge_newbingfree.py
index e99189c1..cc6e9b73 100644
--- a/request_llm/bridge_newbingfree.py
+++ b/request_llm/bridge_newbingfree.py
@@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     history_feedin = []
     for i in range(len(history)//2):
diff --git a/request_llm/bridge_stackclaude.py b/request_llm/bridge_stackclaude.py
index 2a4920c2..3f2ee674 100644
--- a/request_llm/bridge_stackclaude.py
+++ b/request_llm/bridge_stackclaude.py
@@ -249,7 +249,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     history_feedin = []
     for i in range(len(history)//2):
diff --git a/request_llm/bridge_tgui.py b/request_llm/bridge_tgui.py
index 4f9b41cd..3e03f7b3 100644
--- a/request_llm/bridge_tgui.py
+++ b/request_llm/bridge_tgui.py
@@ -97,7 +97,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     """
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
 
     raw_input = "What I would like to say is the following: " + inputs
     history.extend([inputs, ""])
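Every bridge's `predict()` already receives `chatbot`, so the call-site update is the same one-line change throughout. Because the new `chatbot` parameter has no default, any caller still using the old three-argument form fails fast instead of silently dropping the argument; a hypothetical check, assuming `core_functional` is importable from the repo root:

```python
# Hypothetical out-of-tree caller still using the old 3-argument form.
from core_functional import handle_core_functionality

try:
    handle_core_functionality("中文学术润色", "hi", [])  # old call shape
except TypeError as err:
    # handle_core_functionality() missing 1 required positional argument: 'chatbot'
    print(err)
```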