Compare commits

...

5 Commits

Author        SHA1        Message                                                                                 Date
binary-husky  2f83b60fb3  Add a notice when the web search fails                                                  2023-09-06 12:36:59 +08:00
qingxu fu     12c8cd75ee  Merge branch 'master' of https://github.com/binary-husky/chatgpt_academic into master  2023-09-06 10:24:14 +08:00
qingxu fu     0e21e3e2e7  Fix the missing error prompt when the Xfyun APPID is not filled in                     2023-09-06 10:24:11 +08:00
binary-husky  fda1e87278  Update stale.yml                                                                        2023-09-06 10:19:21 +08:00
binary-husky  1092031d77  Create stale.yml                                                                        2023-09-06 10:15:52 +08:00
5 changed files with 50 additions and 4 deletions

.github/workflows/stale.yml (vendored, new file, +25 lines)

@@ -0,0 +1,25 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: 'Close stale issues and PRs'
on:
schedule:
- cron: '*/5 * * * *'
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: read
steps:
- uses: actions/stale@v8
with:
stale-issue-message: 'This issue is stale because it has been open 100 days with no activity. Remove stale label or comment or this will be closed in 1 days.'
days-before-stale: 100
days-before-close: 1
debug-only: true
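Not part of the diff: a rough Python sketch, for illustration only, of the warn-then-close timeline that the days-before-stale and days-before-close values above encode. The helper name issue_state is made up here; the real logic lives inside actions/stale itself.

# Illustrative only: mimics how days-before-stale / days-before-close interact.
from datetime import datetime, timedelta, timezone

DAYS_BEFORE_STALE = 100   # mark as stale after 100 days without activity
DAYS_BEFORE_CLOSE = 1     # close 1 day after the stale warning

def issue_state(last_activity):
    idle = datetime.now(timezone.utc) - last_activity
    if idle < timedelta(days=DAYS_BEFORE_STALE):
        return 'active'
    if idle < timedelta(days=DAYS_BEFORE_STALE + DAYS_BEFORE_CLOSE):
        return 'stale'    # warning comment posted, "stale" label applied
    return 'closed'

# An issue idle for 101 days has passed both thresholds:
print(issue_state(datetime.now(timezone.utc) - timedelta(days=101)))  # -> 'closed'

Note that with debug-only set to true the action performs a dry run, so it should only log what it would do rather than actually labeling or closing anything.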


@@ -75,7 +75,11 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s
    proxies, = get_conf('proxies')
    urls = google(txt, proxies)
    history = []
    if len(urls) == 0:
        chatbot.append((f"结论:{txt}",
                        "[Local Message] 受到google限制无法从google获取信息"))
        yield from update_ui(chatbot=chatbot, history=history)  # Refresh the UI; requesting GPT takes a while, so update the interface promptly first
        return
    # ------------- < Step 2: visit the web pages one by one > -------------
    max_search_result = 5  # include results from at most this many web pages
    for index, url in enumerate(urls[:max_search_result]):
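Not part of the diff: a minimal, self-contained sketch of the early-return guard added above (the bing variant below uses the same pattern). The stub update_ui and the simplified plugin are stand-ins invented for illustration; in the real project the plugin is a generator whose yield from update_ui(...) pushes the current chatbot state to the front end before it returns.

# Stand-in for toolbox.update_ui: yields a snapshot of the chat state.
def update_ui(chatbot, history):
    yield list(chatbot), list(history)

# Simplified shape of the plugin: bail out with one UI refresh when the
# search engine returned no URLs, otherwise continue visiting pages.
def answer_with_search(txt, urls):
    chatbot, history = [], []
    if len(urls) == 0:
        chatbot.append((f"Conclusion: {txt}", "[Local Message] the search engine returned no results"))
        yield from update_ui(chatbot=chatbot, history=history)
        return                      # generator ends here; no pages are visited
    for url in urls[:5]:            # visit at most a handful of results
        chatbot.append((url, "fetching..."))
        yield from update_ui(chatbot=chatbot, history=history)

# With an empty result list the generator produces exactly one refresh:
print(list(answer_with_search("example query", urls=[])))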


@@ -75,7 +75,11 @@ def 连接bing搜索回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, histor
    proxies, = get_conf('proxies')
    urls = bing_search(txt, proxies)
    history = []
    if len(urls) == 0:
        chatbot.append((f"结论:{txt}",
                        "[Local Message] 受到bing限制无法从bing获取信息"))
        yield from update_ui(chatbot=chatbot, history=history)  # Refresh the UI; requesting GPT takes a while, so update the interface promptly first
        return
    # ------------- < Step 2: visit the web pages one by one > -------------
    max_search_result = 8  # include results from at most this many web pages
    for index, url in enumerate(urls[:max_search_result]):


@@ -2,11 +2,17 @@
import time
import threading
import importlib
from toolbox import update_ui, get_conf
from toolbox import update_ui, get_conf, update_ui_lastest_msg
from multiprocessing import Process, Pipe

model_name = '星火认知大模型'

def validate_key():
    XFYUN_APPID, = get_conf('XFYUN_APPID', )
    if XFYUN_APPID == '00000000' or XFYUN_APPID == '':
        return False
    return True

def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", observe_window=[], console_slience=False):
    """
    ⭐ Multi-threaded method
@@ -15,6 +21,9 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
    watch_dog_patience = 5
    response = ""

    if validate_key() is False:
        raise RuntimeError('请配置讯飞星火大模型的XFYUN_APPID, XFYUN_API_KEY, XFYUN_API_SECRET')

    from .com_sparkapi import SparkRequestInstance
    sri = SparkRequestInstance()
    for response in sri.generate(inputs, llm_kwargs, history, sys_prompt):
@@ -32,6 +41,10 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
    chatbot.append((inputs, ""))
    yield from update_ui(chatbot=chatbot, history=history)

    if validate_key() is False:
        yield from update_ui_lastest_msg(lastmsg="[Local Message]: 请配置讯飞星火大模型的XFYUN_APPID, XFYUN_API_KEY, XFYUN_API_SECRET", chatbot=chatbot, history=history, delay=0)
        return

    if additional_fn is not None:
        from core_functional import handle_core_functionality
        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
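Not part of the diff: a small self-contained sketch of what the new validate_key() guard checks. The stubbed get_conf and the placeholder dictionary are assumptions for illustration; in the real project get_conf comes from toolbox (as imported above), and '00000000' is presumably the placeholder APPID shipped in the template config.

# Stand-in configuration source; the real get_conf lives in toolbox.
_DEMO_CONF = {'XFYUN_APPID': '00000000'}   # unchanged template placeholder

def get_conf(*keys):
    return tuple(_DEMO_CONF.get(k, '') for k in keys)

def validate_key():
    XFYUN_APPID, = get_conf('XFYUN_APPID')
    # Both the template placeholder and an empty value mean the Spark
    # credentials were never configured, so the bridge should refuse to run.
    return XFYUN_APPID not in ('00000000', '')

print(validate_key())   # -> False until a real XFYUN_APPID is configured

The two call sites in the diff appear to handle failure differently on purpose: predict_no_ui_long_connection raises, since it runs off the UI thread, while predict reports the problem in the chat view via update_ui_lastest_msg and returns.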


@@ -58,7 +58,7 @@ class Ws_Param(object):
class SparkRequestInstance():
    def __init__(self):
        XFYUN_APPID, XFYUN_API_SECRET, XFYUN_API_KEY = get_conf('XFYUN_APPID', 'XFYUN_API_SECRET', 'XFYUN_API_KEY')
        if XFYUN_APPID == '00000000' or XFYUN_APPID == '': raise RuntimeError('请配置讯飞星火大模型的XFYUN_APPID, XFYUN_API_KEY, XFYUN_API_SECRET')
        self.appid = XFYUN_APPID
        self.api_secret = XFYUN_API_SECRET
        self.api_key = XFYUN_API_KEY