From 6789eaee45efeeaa38a4c6592abff896a141b1ea Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Sat, 1 Apr 2023 04:25:03 +0800
Subject: [PATCH 047/154] Update README.md
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 1ca58a03..69e39aa8 100644
--- a/README.md
+++ b/README.md
@@ -51,7 +51,7 @@ chat分析报告生成 | [实验性功能] 运行后自动生成总结汇报
- 新界面(左:master主分支, 右:dev开发前沿)
+
+### 源代码转译英文
+
-## Todo:
-
-- (Top Priority) 调用另一个开源项目text-generation-webui的web接口,使用其他llm模型
-- 总结大工程源代码时,文本过长、token溢出的问题(目前的方法是直接二分丢弃处理溢出,过于粗暴,有效信息大量丢失)
-
+## Todo 与 版本规划:
+- version 3 (Todo):
+- - 支持gpt4和其他更多llm
+- version 2.3+ (Todo):
+- - 总结大工程源代码时文本过长、token溢出的问题
+- - 实现项目打包部署
+- - 函数插件参数接口优化
+- - 自更新
+- version 2.3: 增强多线程交互性
+- version 2.2: 函数插件支持热重载
+- version 2.1: 可折叠式布局
+- version 2.0: 引入模块化函数插件
+- version 1.0: 基础功能
\ No newline at end of file
diff --git a/version b/version
new file mode 100644
index 00000000..c0943d3e
--- /dev/null
+++ b/version
@@ -0,0 +1 @@
+2.3
\ No newline at end of file
From 99817e904012727a364fe0ff10a1a0f0ea7557fa Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Tue, 4 Apr 2023 22:17:47 +0800
Subject: [PATCH 107/154] Update version
---
version | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/version b/version
index c0943d3e..21ab827c 100644
--- a/version
+++ b/version
@@ -1 +1,5 @@
-2.3
\ No newline at end of file
+{
+ 'version': 2.3,
+ 'show_feature': false,
+ 'new_feature': "修复多线程插件Bug;加入版本检查功能。",
+}
From 7b75422c2646acd111c8025ae11c5daed5ec4804 Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Tue, 4 Apr 2023 22:20:21 +0800
Subject: [PATCH 108/154] Update version
---
version | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/version b/version
index 21ab827c..f799a3f4 100644
--- a/version
+++ b/version
@@ -1,5 +1,5 @@
{
- 'version': 2.3,
- 'show_feature': false,
- 'new_feature': "修复多线程插件Bug;加入版本检查功能。",
+ "version": 2.3,
+ "show_feature": false,
+ "new_feature": "修复多线程插件Bug;加入版本检查功能。",
}
From 1042d28e1fb4dcdb86510cb4902f3d966e81e54a Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Tue, 4 Apr 2023 22:20:39 +0800
Subject: [PATCH 109/154] Update version
---
version | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/version b/version
index f799a3f4..436307a6 100644
--- a/version
+++ b/version
@@ -1,5 +1,5 @@
{
"version": 2.3,
"show_feature": false,
- "new_feature": "修复多线程插件Bug;加入版本检查功能。",
+ "new_feature": "修复多线程插件Bug;加入版本检查功能。"
}
From a239abac501244b202cf1b2dfd73d9266b7bbe5d Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Tue, 4 Apr 2023 22:34:28 +0800
Subject: [PATCH 110/154] Update version
---
version | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/version b/version
index 436307a6..ce7c84e1 100644
--- a/version
+++ b/version
@@ -1,5 +1,5 @@
{
"version": 2.3,
- "show_feature": false,
+ "show_feature": true,
"new_feature": "修复多线程插件Bug;加入版本检查功能。"
}
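
Patches 107–110 turn the plain `2.3` string in `version` into a small JSON document, then fix its quoting and trailing comma so it actually parses. A minimal sketch of how a consumer reads it (assuming the file sits at `./version`, as in the repository root):

```python
import json

# After patches 107-110 the version file is JSON, not a bare "2.3" string.
# Sketch only; assumes the file lives at ./version.
with open('./version', 'r', encoding='utf8') as f:
    meta = json.load(f)

current_version = meta['version']        # a float such as 2.3, not a semver string
if meta.get('show_feature'):
    print('New feature:', meta['new_feature'])
```
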
From c40f6f00bbb3776a8f4b356ce738b4b925b1af2e Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Tue, 4 Apr 2023 22:54:08 +0800
Subject: [PATCH 111/154] check_new_version
---
check_proxy.py | 24 ++++++++++++++++++++++++
main.py | 10 ++++++++--
2 files changed, 32 insertions(+), 2 deletions(-)
diff --git a/check_proxy.py b/check_proxy.py
index a6919dd3..abc75d04 100644
--- a/check_proxy.py
+++ b/check_proxy.py
@@ -19,6 +19,30 @@ def check_proxy(proxies):
return result
+def auto_update():
+ from toolbox import get_conf
+ import requests, time, json
+ proxies, = get_conf('proxies')
+ response = requests.get("https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version",
+ proxies=proxies, timeout=1)
+ remote_json_data = json.loads(response.text)
+ remote_version = remote_json_data['version']
+ if remote_json_data["show_feature"]:
+ new_feature = "新功能:" + remote_json_data["new_feature"]
+ else:
+ new_feature = ""
+ with open('./version', 'r', encoding='utf8') as f:
+ current_version = f.read()
+ current_version = json.loads(current_version)['version']
+ if (remote_version - current_version) >= 0.05:
+ print(f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}。{new_feature}')
+ print('Github更新地址:\nhttps://github.com/binary-husky/chatgpt_academic\n')
+ time.sleep(3)
+ return
+ else:
+ return
+
+
if __name__ == '__main__':
import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染
from toolbox import get_conf
diff --git a/main.py b/main.py
index 973d16fb..48ea670f 100644
--- a/main.py
+++ b/main.py
@@ -37,6 +37,11 @@ gr.Chatbot.postprocess = format_io
from theme import adjust_theme, advanced_css
set_theme = adjust_theme()
+# 代理与自动更新
+from check_proxy import check_proxy, auto_update
+proxy_info = check_proxy(proxies)
+
+
cancel_handles = []
with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as demo:
gr.HTML(title_html)
@@ -54,8 +59,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
resetBtn = gr.Button("重置", variant="secondary"); resetBtn.style(size="sm")
stopBtn = gr.Button("停止", variant="secondary"); stopBtn.style(size="sm")
with gr.Row():
- from check_proxy import check_proxy
- status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {check_proxy(proxies)}")
+ status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {proxy_info}")
with gr.Accordion("基础功能区", open=True) as area_basic_fn:
with gr.Row():
for k in functional:
@@ -139,6 +143,8 @@ def auto_opentab_delay():
print(f"\t(暗色主体): http://localhost:{PORT}/?__dark-theme=true")
def open():
time.sleep(2)
+ try: auto_update() # 检查新版本
+ except: pass
webbrowser.open_new_tab(f"http://localhost:{PORT}/?__dark-theme=true")
threading.Thread(target=open, name="open-browser", daemon=True).start()
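
For reference, the update check added to `check_proxy.py` boils down to the following condensed sketch of `auto_update()` (same raw-GitHub URL and float-based version comparison as the diff above; errors are swallowed by the `try/except` around the call in `main.py`):

```python
import json, time
import requests

VERSION_URL = "https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version"

def check_for_new_version(proxies=None):
    # Fetch the version JSON published on master (short timeout so startup is never blocked for long).
    remote = json.loads(requests.get(VERSION_URL, proxies=proxies, timeout=1).text)
    with open('./version', 'r', encoding='utf8') as f:
        local = json.load(f)
    # Versions are floats (2.3, 2.4, ...), so a gap of 0.05 or more means at least one release behind.
    if remote['version'] - local['version'] >= 0.05:
        feature = "新功能:" + remote['new_feature'] if remote.get('show_feature') else ""
        print(f"New version available: {remote['version']} (current {local['version']}). {feature}")
        print("Update at: https://github.com/binary-husky/chatgpt_academic")
        time.sleep(3)  # give the user a moment to read before the browser tab opens
```
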
From 1da60b7a0c43f56a3b25566156a049c46527b1db Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Tue, 4 Apr 2023 22:56:06 +0800
Subject: [PATCH 112/154] merge
---
main.py | 19 +++++++++++--------
toolbox.py | 16 ++++++++++++++--
2 files changed, 25 insertions(+), 10 deletions(-)
diff --git a/main.py b/main.py
index 48ea670f..123374e0 100644
--- a/main.py
+++ b/main.py
@@ -1,7 +1,7 @@
import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染
import gradio as gr
from predict import predict
-from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf
+from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper
# 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到
proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT = \
@@ -87,8 +87,12 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt)
top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",)
temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
- checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
+ checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "输入区2"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
gr.Markdown(description)
+ with gr.Accordion("输入区", open=True, visible=False) as input_crazy_fn:
+ with gr.Row():
+ txt2 = gr.Textbox(show_label=False, placeholder="Input question here.", label="输入区2").style(container=False)
+
# 功能区显示开关与功能区的互动
def fn_area_visibility(a):
ret = {}
@@ -97,17 +101,16 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
return ret
checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn] )
# 整理反复出现的控件句柄组合
- input_combo = [txt, top_p, temperature, chatbot, history, system_prompt]
+ input_combo = [txt, txt2, top_p, temperature, chatbot, history, system_prompt]
output_combo = [chatbot, history, status]
- predict_args = dict(fn=predict, inputs=input_combo, outputs=output_combo)
- empty_txt_args = dict(fn=lambda: "", inputs=[], outputs=[txt]) # 用于在提交后清空输入栏
+ predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
# 提交按钮、重置按钮
- cancel_handles.append(txt.submit(**predict_args)) #; txt.submit(**empty_txt_args) 在提交后清空输入栏
- cancel_handles.append(submitBtn.click(**predict_args)) #; submitBtn.click(**empty_txt_args) 在提交后清空输入栏
+ cancel_handles.append(txt.submit(**predict_args))
+ cancel_handles.append(submitBtn.click(**predict_args))
resetBtn.click(lambda: ([], [], "已重置"), None, output_combo)
# 基础功能区的回调函数注册
for k in functional:
- click_handle = functional[k]["Button"].click(predict, [*input_combo, gr.State(True), gr.State(k)], output_combo)
+ click_handle = functional[k]["Button"].click(fn=ArgsGeneralWrapper(predict), inputs=[*input_combo, gr.State(True), gr.State(k)], outputs=output_combo)
cancel_handles.append(click_handle)
# 文件上传区,接收文件后与chatbot的互动
file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
diff --git a/toolbox.py b/toolbox.py
index c55a48e5..00bb03e0 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -2,6 +2,18 @@ import markdown, mdtex2html, threading, importlib, traceback, importlib, inspect
from show_math import convert as convert_math
from functools import wraps, lru_cache
+def ArgsGeneralWrapper(f):
+ """
+ 装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
+ """
+ def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
+ txt_passon = txt
+ if txt == "" and txt2 != "": txt_passon = txt2
+ yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
+
+ return decorated
+
+
def get_reduce_token_percent(text):
try:
# text = "maximum context length is 4097 tokens. However, your messages resulted in 4870 tokens"
@@ -116,7 +128,7 @@ def CatchException(f):
from toolbox import get_conf
proxies, = get_conf('proxies')
tb_str = '```\n' + traceback.format_exc() + '```'
- if len(chatbot) == 0: chatbot.append(["插件调度异常","异常原因"])
+ if chatbot is None or len(chatbot) == 0: chatbot = [["插件调度异常","异常原因"]]
chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n{tb_str} \n\n当前代理可用性: \n\n{check_proxy(proxies)}")
yield chatbot, history, f'异常 {e}'
return decorated
@@ -129,7 +141,7 @@ def HotReload(f):
def decorated(*args, **kwargs):
fn_name = f.__name__
f_hot_reload = getattr(importlib.reload(inspect.getmodule(f)), fn_name)
- yield from f_hot_reload(*args, **kwargs)
+ yield from ArgsGeneralWrapper(f_hot_reload)(*args, **kwargs)
return decorated
def report_execption(chatbot, history, a, b):
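
The heart of this patch is `ArgsGeneralWrapper`: the UI now has two text boxes (`txt` and `txt2`), but `predict` and the plugins still take a single prompt, so the wrapper merges the boxes before delegating. A self-contained sketch of the pattern, with a hypothetical `fake_predict` standing in for the real generator:

```python
def ArgsGeneralWrapper(f):
    """Merge the primary and secondary input boxes into one prompt before calling
    the wrapped generator (sketch of the decorator added to toolbox.py)."""
    def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
        txt_passon = txt2 if (txt == "" and txt2 != "") else txt
        yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
    return decorated

def fake_predict(txt, top_p, temperature, chatbot, history, system_prompt):
    # Hypothetical stand-in for predict(); only the argument order matters here.
    yield chatbot + [[txt, f"echo ({system_prompt})"]], history, "ok"

for chatbot, history, status in ArgsGeneralWrapper(fake_predict)("", "hello from txt2", 1.0, 1.0, [], [], "sys"):
    print(chatbot[-1][0])   # -> "hello from txt2": the secondary box fills in for the empty primary one
```
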
From 3648648b3d8dab1784f144ae170eba211a5dfcdf Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Tue, 4 Apr 2023 23:46:47 +0800
Subject: [PATCH 113/154] =?UTF-8?q?=E6=94=AF=E6=8C=81=E6=9B=B4=E5=A4=9A?=
=?UTF-8?q?=E7=95=8C=E9=9D=A2=E5=B8=83=E5=B1=80=E7=9A=84=E5=88=87=E6=8D=A2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
config.py | 3 +++
main.py | 62 +++++++++++++++++++++++++++++++++++-------------------
theme.py | 2 +-
toolbox.py | 6 ++++++
4 files changed, 50 insertions(+), 23 deletions(-)
diff --git a/config.py b/config.py
index d986750a..8c98657a 100644
--- a/config.py
+++ b/config.py
@@ -24,6 +24,9 @@ else:
# 对话窗的高度
CHATBOT_HEIGHT = 1115
+# 窗口布局
+LAYOUT = "LEFT-RIGHT" # "LEFT-RIGHT"(左右布局) # "TOP-DOWN"(上下布局)
+
# 发送请求到OpenAI后,等待多久判定为超时
TIMEOUT_SECONDS = 25
diff --git a/main.py b/main.py
index 123374e0..479acde9 100644
--- a/main.py
+++ b/main.py
@@ -1,11 +1,11 @@
import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染
import gradio as gr
from predict import predict
-from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper
+from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, DummyWith
# 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到
-proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT = \
- get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT')
+proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT = \
+ get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT')
# 如果WEB_PORT是-1, 则随机选取WEB端口
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
@@ -41,25 +41,32 @@ set_theme = adjust_theme()
from check_proxy import check_proxy, auto_update
proxy_info = check_proxy(proxies)
+gr_L1 = lambda: gr.Row().style()
+gr_L2 = lambda scale: gr.Column(scale=scale)
+if LAYOUT == "TOP-DOWN":
+ gr_L1 = lambda: DummyWith()
+ gr_L2 = lambda scale: gr.Row()
+ CHATBOT_HEIGHT /= 2
cancel_handles = []
with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as demo:
gr.HTML(title_html)
- with gr.Row().style(equal_height=True):
- with gr.Column(scale=2):
+ with gr_L1():
+ with gr_L2(scale=2):
chatbot = gr.Chatbot()
chatbot.style(height=CHATBOT_HEIGHT)
history = gr.State([])
- with gr.Column(scale=1):
- with gr.Row():
- txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False)
- with gr.Row():
- submitBtn = gr.Button("提交", variant="primary")
- with gr.Row():
- resetBtn = gr.Button("重置", variant="secondary"); resetBtn.style(size="sm")
- stopBtn = gr.Button("停止", variant="secondary"); stopBtn.style(size="sm")
- with gr.Row():
- status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {proxy_info}")
+ with gr_L2(scale=1):
+ with gr.Accordion("输入区", open=True) as area_input_primary:
+ with gr.Row():
+ txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False)
+ with gr.Row():
+ submitBtn = gr.Button("提交", variant="primary")
+ with gr.Row():
+ resetBtn = gr.Button("重置", variant="secondary"); resetBtn.style(size="sm")
+ stopBtn = gr.Button("停止", variant="secondary"); stopBtn.style(size="sm")
+ with gr.Row():
+ status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {proxy_info}")
with gr.Accordion("基础功能区", open=True) as area_basic_fn:
with gr.Row():
for k in functional:
@@ -67,12 +74,13 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
functional[k]["Button"] = gr.Button(k, variant=variant)
with gr.Accordion("函数插件区", open=True) as area_crazy_fn:
with gr.Row():
- gr.Markdown("注意:以下“红颜色”标识的函数插件需从input区读取路径作为参数.")
+ gr.Markdown("注意:以下“红颜色”标识的函数插件需从输入区读取路径作为参数.")
with gr.Row():
for k in crazy_fns:
if not crazy_fns[k].get("AsButton", True): continue
variant = crazy_fns[k]["Color"] if "Color" in crazy_fns[k] else "secondary"
crazy_fns[k]["Button"] = gr.Button(k, variant=variant)
+ crazy_fns[k]["Button"].style(size="sm")
with gr.Row():
with gr.Accordion("更多函数插件", open=True):
dropdown_fn_list = [k for k in crazy_fns.keys() if not crazy_fns[k].get("AsButton", True)]
@@ -83,31 +91,41 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
with gr.Row():
with gr.Accordion("点击展开“文件上传区”。上传本地文件可供红色函数插件调用。", open=False) as area_file_up:
file_upload = gr.Files(label="任何文件, 但推荐上传压缩文件(zip, tar)", file_count="multiple")
- with gr.Accordion("展开SysPrompt & 交互界面布局 & Github地址", open=False):
+ with gr.Accordion("展开SysPrompt & 交互界面布局 & Github地址", open=(LAYOUT == "TOP-DOWN")):
system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt)
top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",)
temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
- checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "输入区2"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
+ checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "底部输入区"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
gr.Markdown(description)
- with gr.Accordion("输入区", open=True, visible=False) as input_crazy_fn:
+ with gr.Accordion("备选输入区", open=True, visible=False) as area_input_secondary:
with gr.Row():
txt2 = gr.Textbox(show_label=False, placeholder="Input question here.", label="输入区2").style(container=False)
-
+ with gr.Row():
+ submitBtn2 = gr.Button("提交", variant="primary")
+ with gr.Row():
+ resetBtn2 = gr.Button("重置", variant="secondary"); resetBtn.style(size="sm")
+ stopBtn2 = gr.Button("停止", variant="secondary"); stopBtn.style(size="sm")
# 功能区显示开关与功能区的互动
def fn_area_visibility(a):
ret = {}
ret.update({area_basic_fn: gr.update(visible=("基础功能区" in a))})
ret.update({area_crazy_fn: gr.update(visible=("函数插件区" in a))})
+ ret.update({area_input_primary: gr.update(visible=("底部输入区" not in a))})
+ ret.update({area_input_secondary: gr.update(visible=("底部输入区" in a))})
+ if "底部输入区" in a: ret.update({txt: gr.update(value="")})
return ret
- checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn] )
+ checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn, area_input_primary, area_input_secondary, txt, txt2] )
# 整理反复出现的控件句柄组合
input_combo = [txt, txt2, top_p, temperature, chatbot, history, system_prompt]
output_combo = [chatbot, history, status]
predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
# 提交按钮、重置按钮
cancel_handles.append(txt.submit(**predict_args))
+ cancel_handles.append(txt2.submit(**predict_args))
cancel_handles.append(submitBtn.click(**predict_args))
+ cancel_handles.append(submitBtn2.click(**predict_args))
resetBtn.click(lambda: ([], [], "已重置"), None, output_combo)
+ resetBtn2.click(lambda: ([], [], "已重置"), None, output_combo)
# 基础功能区的回调函数注册
for k in functional:
click_handle = functional[k]["Button"].click(fn=ArgsGeneralWrapper(predict), inputs=[*input_combo, gr.State(True), gr.State(k)], outputs=output_combo)
@@ -137,7 +155,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
cancel_handles.append(click_handle)
# 终止按钮的回调函数注册
stopBtn.click(fn=None, inputs=None, outputs=None, cancels=cancel_handles)
-
+ stopBtn2.click(fn=None, inputs=None, outputs=None, cancels=cancel_handles)
# gradio的inbrowser触发不太稳定,回滚代码到原始的浏览器打开函数
def auto_opentab_delay():
import threading, webbrowser, time
diff --git a/theme.py b/theme.py
index 1a186aac..4ddae5a8 100644
--- a/theme.py
+++ b/theme.py
@@ -26,7 +26,7 @@ import gradio as gr
def adjust_theme():
try:
- color_er = gr.themes.utils.colors.pink
+ color_er = gr.themes.utils.colors.fuchsia
set_theme = gr.themes.Default(
primary_hue=gr.themes.utils.colors.orange,
neutral_hue=gr.themes.utils.colors.gray,
diff --git a/toolbox.py b/toolbox.py
index 00bb03e0..3925e832 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -354,3 +354,9 @@ def clear_line_break(txt):
txt = txt.replace(' ', ' ')
txt = txt.replace(' ', ' ')
return txt
+
+class DummyWith():
+ def __enter__(self):
+ return self
+ def __exit__(self, exc_type, exc_value, traceback):
+ return
\ No newline at end of file
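
The layout switch rests on two small tricks: `gr_L1`/`gr_L2` are lambdas that return either real Gradio layout containers or a do-nothing context manager, and `DummyWith` is that null context manager. A sketch of the idea; the standard-library `contextlib.nullcontext` would serve the same role as the hand-rolled `DummyWith`, and the Gradio calls follow the 3.x API used in this patch:

```python
from contextlib import nullcontext
import gradio as gr   # gradio 3.x, matching the API used in the diff

LAYOUT = "TOP-DOWN"   # or "LEFT-RIGHT", as configured in config.py
CHATBOT_HEIGHT = 1115

if LAYOUT == "TOP-DOWN":
    gr_L1 = lambda: nullcontext()            # no outer Row: the two halves stack vertically
    gr_L2 = lambda scale: gr.Row()
    CHATBOT_HEIGHT /= 2                      # halve the chat window so the page still fits on screen
else:
    gr_L1 = lambda: gr.Row().style()         # left-right: one Row holding two scaled Columns
    gr_L2 = lambda scale: gr.Column(scale=scale)

# The component tree under `with gr_L1(): ... with gr_L2(scale): ...` is identical in
# both branches; only the enclosing containers change, which is what makes the layout
# a one-line config option.
```
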
From 7a254c150f34febdbd6b3887fcfcbeeffe52f41e Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 00:07:08 +0800
Subject: [PATCH 114/154] =?UTF-8?q?=E5=8F=82=E6=95=B0=E8=BE=93=E5=85=A5bug?=
=?UTF-8?q?=E4=BF=AE=E5=A4=8D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
main.py | 2 +-
toolbox.py | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/main.py b/main.py
index 479acde9..f9511b04 100644
--- a/main.py
+++ b/main.py
@@ -135,7 +135,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
# 函数插件-固定按钮区
for k in crazy_fns:
if not crazy_fns[k].get("AsButton", True): continue
- click_handle = crazy_fns[k]["Button"].click(crazy_fns[k]["Function"], [*input_combo, gr.State(PORT)], output_combo)
+ click_handle = crazy_fns[k]["Button"].click(ArgsGeneralWrapper(crazy_fns[k]["Function"]), [*input_combo, gr.State(PORT)], output_combo)
click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
cancel_handles.append(click_handle)
# 函数插件-下拉菜单与随变按钮的互动
diff --git a/toolbox.py b/toolbox.py
index 3925e832..011cb44d 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -6,10 +6,10 @@ def ArgsGeneralWrapper(f):
"""
装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
"""
- def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
+ def decorated(txt, txt2, *args, **kwargs):
txt_passon = txt
if txt == "" and txt2 != "": txt_passon = txt2
- yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
+ yield from f(txt_passon, *args, **kwargs)
return decorated
From 9f47d0f714ccdbd3b0e5641a2235d00e0759df23 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 00:09:13 +0800
Subject: [PATCH 115/154] Bug Fix: Hot Reload Wrapper For All
---
functional_crazy.py | 18 +++++++++---------
1 file changed, 9 insertions(+), 9 deletions(-)
diff --git a/functional_crazy.py b/functional_crazy.py
index 66a08db5..3f024008 100644
--- a/functional_crazy.py
+++ b/functional_crazy.py
@@ -18,43 +18,43 @@ def get_crazy_functionals():
function_plugins = {
"请解析并解构此项目本身(源码自译解)": {
"AsButton": False, # 加入下拉菜单中
- "Function": 解析项目本身
+ "Function": HotReload(解析项目本身)
},
"解析整个Py项目": {
"Color": "stop", # 按钮颜色
- "Function": 解析一个Python项目
+ "Function": HotReload(解析一个Python项目)
},
"解析整个C++项目头文件": {
"Color": "stop", # 按钮颜色
- "Function": 解析一个C项目的头文件
+ "Function": HotReload(解析一个C项目的头文件)
},
"解析整个C++项目(.cpp/.h)": {
"Color": "stop", # 按钮颜色
"AsButton": False, # 加入下拉菜单中
- "Function": 解析一个C项目
+ "Function": HotReload(解析一个C项目)
},
"解析整个Go项目": {
"Color": "stop", # 按钮颜色
"AsButton": False, # 加入下拉菜单中
- "Function": 解析一个Golang项目
+ "Function": HotReload(解析一个Golang项目)
},
"解析整个Java项目": {
"Color": "stop", # 按钮颜色
"AsButton": False, # 加入下拉菜单中
- "Function": 解析一个Java项目
+ "Function": HotReload(解析一个Java项目)
},
"解析整个React项目": {
"Color": "stop", # 按钮颜色
"AsButton": False, # 加入下拉菜单中
- "Function": 解析一个Rect项目
+ "Function": HotReload(解析一个Rect项目)
},
"读Tex论文写摘要": {
"Color": "stop", # 按钮颜色
- "Function": 读文章写摘要
+ "Function": HotReload(读文章写摘要)
},
"批量生成函数注释": {
"Color": "stop", # 按钮颜色
- "Function": 批量生成函数注释
+ "Function": HotReload(批量生成函数注释)
},
"[多线程demo] 把本项目源代码切换成全英文": {
# HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
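
Every plugin entry is now registered through `HotReload`, the decorator added to `toolbox.py` earlier in this series. It reloads the plugin's module on each invocation, so edits to a plugin file take effect on the next button click without restarting the app. A sketch of its core:

```python
import importlib, inspect

def HotReload(f):
    """On each call, reload the module that defines f and dispatch to the freshly
    imported function object (sketch; the repository's version also re-applies
    ArgsGeneralWrapper to the reloaded function, see patch 112)."""
    def decorated(*args, **kwargs):
        module = importlib.reload(inspect.getmodule(f))
        f_reloaded = getattr(module, f.__name__)
        yield from f_reloaded(*args, **kwargs)
    return decorated
```
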
From 0c9e18291af1ea1df89524fdd628ef221aad3f60 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 00:10:06 +0800
Subject: [PATCH 116/154] BUG FIX
---
main.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/main.py b/main.py
index f9511b04..fae27a6b 100644
--- a/main.py
+++ b/main.py
@@ -147,7 +147,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
def route(k, *args, **kwargs):
if k in [r"打开插件列表", r"请先从插件列表中选择"]: return
yield from crazy_fns[k]["Function"](*args, **kwargs)
- click_handle = switchy_bt.click(route,[switchy_bt, *input_combo, gr.State(PORT)], output_combo)
+ click_handle = switchy_bt.click(ArgsGeneralWrapper(route),[switchy_bt, *input_combo, gr.State(PORT)], output_combo)
click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
# def expand_file_area(file_upload, area_file_up):
# if len(file_upload)>0: return {area_file_up: gr.update(open=True)}
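
The dropdown dispatch that this patch wraps works roughly as follows; `demo_plugin` and the registry entry are hypothetical stand-ins for the contents of `crazy_fns`:

```python
# Hypothetical plugin registry standing in for crazy_fns from functional_crazy.py.
def demo_plugin(txt, *rest):
    yield [[txt, "demo reply"]], [], "正常"

crazy_fns = {"demo 插件": {"Function": demo_plugin}}

def route(k, *args, **kwargs):
    # k is the dropdown's current value; the placeholder entries do nothing.
    if k in ["打开插件列表", "请先从插件列表中选择"]:
        return
    yield from crazy_fns[k]["Function"](*args, **kwargs)

# Registered as switchy_bt.click(ArgsGeneralWrapper(route), ...), so the txt/txt2
# merging happens once, up front, for every plugin reachable from the dropdown.
for chatbot, history, status in route("demo 插件", "hello"):
    print(status)   # -> 正常
```
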
From 19be0490af2ddcd7f7c4a5f1b1defafac4ac2fa3 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 00:11:12 +0800
Subject: [PATCH 117/154] BUG FIX
---
main.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/main.py b/main.py
index f9511b04..aa29e59b 100644
--- a/main.py
+++ b/main.py
@@ -146,7 +146,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
# 随变按钮的回调函数注册
def route(k, *args, **kwargs):
if k in [r"打开插件列表", r"请先从插件列表中选择"]: return
- yield from crazy_fns[k]["Function"](*args, **kwargs)
+ yield from ArgsGeneralWrapper(crazy_fns[k])["Function"](*args, **kwargs)
click_handle = switchy_bt.click(route,[switchy_bt, *input_combo, gr.State(PORT)], output_combo)
click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
# def expand_file_area(file_upload, area_file_up):
From 0aeb5b28cd48579db983db5460c624ce7ace0182 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 00:25:53 +0800
Subject: [PATCH 118/154] =?UTF-8?q?=E6=94=B9=E8=BF=9B=E6=95=88=E7=8E=87?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
crazy_functions/代码重写为全英文_多线程.py | 25 ++++++++++------------
1 file changed, 11 insertions(+), 14 deletions(-)
diff --git a/crazy_functions/代码重写为全英文_多线程.py b/crazy_functions/代码重写为全英文_多线程.py
index bfcbec3b..7f620880 100644
--- a/crazy_functions/代码重写为全英文_多线程.py
+++ b/crazy_functions/代码重写为全英文_多线程.py
@@ -10,16 +10,13 @@ def extract_code_block_carefully(txt):
txt_out = '```'.join(splitted[1:-1])
return txt_out
-def breakdown_txt_to_satisfy_token_limit(txt, limit, must_break_at_empty_line=True):
- from transformers import GPT2TokenizerFast
- tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
- get_token_cnt = lambda txt: len(tokenizer(txt)["input_ids"])
+def breakdown_txt_to_satisfy_token_limit(txt, get_token_fn, limit, must_break_at_empty_line=True):
def cut(txt_tocut, must_break_at_empty_line): # 递归
- if get_token_cnt(txt_tocut) <= limit:
+ if get_token_fn(txt_tocut) <= limit:
return [txt_tocut]
else:
lines = txt_tocut.split('\n')
- estimated_line_cut = limit / get_token_cnt(txt_tocut) * len(lines)
+ estimated_line_cut = limit / get_token_fn(txt_tocut) * len(lines)
estimated_line_cut = int(estimated_line_cut)
for cnt in reversed(range(estimated_line_cut)):
if must_break_at_empty_line:
@@ -27,7 +24,7 @@ def breakdown_txt_to_satisfy_token_limit(txt, limit, must_break_at_empty_line=Tr
print(cnt)
prev = "\n".join(lines[:cnt])
post = "\n".join(lines[cnt:])
- if get_token_cnt(prev) < limit: break
+ if get_token_fn(prev) < limit: break
if cnt == 0:
print('what the f?')
raise RuntimeError("存在一行极长的文本!")
@@ -86,12 +83,12 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt,
# 第5步:Token限制下的截断与处理
- MAX_TOKEN = 2500
- # from transformers import GPT2TokenizerFast
- # print('加载tokenizer中')
- # tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
- # get_token_cnt = lambda txt: len(tokenizer(txt)["input_ids"])
- # print('加载tokenizer结束')
+ MAX_TOKEN = 3000
+ from transformers import GPT2TokenizerFast
+ print('加载tokenizer中')
+ tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
+ get_token_fn = lambda txt: len(tokenizer(txt)["input_ids"])
+ print('加载tokenizer结束')
# 第6步:任务函数
@@ -107,7 +104,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt,
try:
gpt_say = ""
# 分解代码文件
- file_content_breakdown = breakdown_txt_to_satisfy_token_limit(file_content, MAX_TOKEN)
+ file_content_breakdown = breakdown_txt_to_satisfy_token_limit(file_content, get_token_fn, MAX_TOKEN)
for file_content_partial in file_content_breakdown:
i_say = i_say_template(fp, file_content_partial)
# # ** gpt request **
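
The refactor injects the token counter (`get_token_fn`) instead of building a `GPT2TokenizerFast` inside the splitter, so the expensive tokenizer is loaded once per run rather than once per call, and the limit rises from 2500 to 3000. A simplified, self-contained sketch of the recursive splitter, using a cheap word count as a stand-in tokenizer:

```python
def breakdown_txt_to_satisfy_token_limit(txt, get_token_fn, limit, must_break_at_empty_line=True):
    """Cut txt at (preferably blank) line boundaries until every fragment fits under
    `limit` tokens. Simplified sketch of the function refactored in this patch."""
    def cut(txt_tocut):
        if get_token_fn(txt_tocut) <= limit:
            return [txt_tocut]
        lines = txt_tocut.split('\n')
        estimated_line_cut = int(limit / get_token_fn(txt_tocut) * len(lines))
        for cnt in reversed(range(1, estimated_line_cut)):
            if must_break_at_empty_line and lines[cnt] != "":
                continue                    # only break on blank lines when asked to
            prev, post = "\n".join(lines[:cnt]), "\n".join(lines[cnt:])
            if get_token_fn(prev) < limit:
                return [prev] + cut(post)   # recurse on the remainder
        raise RuntimeError("a single segment exceeds the token limit")
    return cut(txt)

# Word count as a stand-in token counter; the real code injects GPT2TokenizerFast.
get_token_fn = lambda s: len(s.split())
print(breakdown_txt_to_satisfy_token_limit("a b\n\nc d e\n\nf g", get_token_fn, limit=4))
```
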
From dcdc8351e7ad86d09feceb7911fa224e41a3d93c Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Wed, 5 Apr 2023 01:58:34 +0800
Subject: [PATCH 119/154] BUG FIX
---
crazy_functions/解析项目源代码.py | 5 +++--
toolbox.py | 4 +++-
2 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/crazy_functions/解析项目源代码.py b/crazy_functions/解析项目源代码.py
index 172be245..da6d102f 100644
--- a/crazy_functions/解析项目源代码.py
+++ b/crazy_functions/解析项目源代码.py
@@ -119,8 +119,8 @@ def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, s
report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
yield chatbot, history, '正常'
return
- file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] # + \
- # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
+ file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \
+ [f for f in glob.glob(f'{project_folder}/**/*.hpp', recursive=True)] #+ \
# [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
if len(file_manifest) == 0:
report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
@@ -141,6 +141,7 @@ def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptT
return
file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \
[f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
+ [f for f in glob.glob(f'{project_folder}/**/*.hpp', recursive=True)] + \
[f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
if len(file_manifest) == 0:
report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
diff --git a/toolbox.py b/toolbox.py
index c55a48e5..17e5670c 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -303,7 +303,9 @@ def on_file_uploaded(files, chatbot, txt):
def on_report_generated(files, chatbot):
from toolbox import find_recent_files
report_files = find_recent_files('gpt_log')
- if len(report_files) == 0: return files, chatbot
+ if len(report_files) == 0:
+ if files is None: return None, chatbot
+ else: return [], chatbot
# files.extend(report_files)
chatbot.append(['汇总报告如何远程获取?', '汇总报告已经添加到右侧“文件上传区”(可能处于折叠状态),请查收。'])
return report_files, chatbot
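
Two small robustness fixes here: C/C++ analysis now also globs `.hpp` files, and `on_report_generated` no longer assumes the `files` component already holds a list. The manifest construction, in outline (the project path normally comes from the input box; the one below is hypothetical):

```python
import glob

project_folder = "./some_cpp_project"    # hypothetical; normally taken from the input box
file_manifest = (
    glob.glob(f'{project_folder}/**/*.h',   recursive=True) +
    glob.glob(f'{project_folder}/**/*.hpp', recursive=True) +   # newly included by this patch
    glob.glob(f'{project_folder}/**/*.cpp', recursive=True) +
    glob.glob(f'{project_folder}/**/*.c',   recursive=True)
)
if len(file_manifest) == 0:
    print(f"找不到任何C/C++源文件: {project_folder}")
```
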
From 823c136de42bcc6c97dfc95c89c5dca8a223032e Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Thu, 6 Apr 2023 19:24:37 +0800
Subject: [PATCH 146/154] Update README.md
---
README.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 56c4a961..2eb81da8 100644
--- a/README.md
+++ b/README.md
@@ -260,11 +260,12 @@ python check_proxy.py
- version 3 (Todo):
- - 支持gpt4和其他更多llm
-- version 2.3+ (Todo):
+- version 2.4+ (Todo):
- - 总结大工程源代码时文本过长、token溢出的问题
- - 实现项目打包部署
- - 函数插件参数接口优化
- - 自更新
+- version 2.4: (1)新增PDF全文翻译功能; (2)新增输入区切换位置的功能; (3)新增垂直布局选项; (4)多线程函数插件优化。
- version 2.3: 增强多线程交互性
- version 2.2: 函数插件支持热重载
- version 2.1: 可折叠式布局
From b47f69978ec21fe8de44086ea325e1a5c4a8739f Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Fri, 7 Apr 2023 12:45:47 +0800
Subject: [PATCH 147/154] =?UTF-8?q?=E6=9B=B4=E6=96=B0requirements.txt?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.gitignore | 3 ++-
requirements.txt | 12 ++++++++----
2 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/.gitignore b/.gitignore
index d2c0f963..a5b6d85a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -140,4 +140,5 @@ gpt_log
private.md
private_upload
other_llms
-cradle.py
\ No newline at end of file
+cradle*
+debug*
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 80343923..d864593b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,12 @@
gradio>=3.23
requests[socks]
-mdtex2html
-Markdown
-latex2mathml
-openai
transformers
+python-markdown-math
+beautifulsoup4
+latex2mathml
+mdtex2html
+tiktoken
+Markdown
+pymupdf
+openai
numpy
\ No newline at end of file
From 4da7d75ad406eb7f43637c86332ac3c0c23c70d1 Mon Sep 17 00:00:00 2001
From: Your Name