Optimize the use of AutoGen
@@ -36,12 +36,8 @@ class AutoGenGeneral(PluginMultiprocessManager):
        # ⭐⭐ 子进程执行
        input = input.content
        with ProxyNetworkActivate("AutoGen"):
            from autogen import AssistantAgent, UserProxyAgent
            config_list = [{
                'model': self.llm_kwargs['llm_model'],
                'api_key': self.llm_kwargs['api_key'],
            },]
            code_execution_config={"work_dir": self.autogen_work_dir, "use_docker":True}
            config_list = self.get_config_list()
            code_execution_config={"work_dir": self.autogen_work_dir, "use_docker":self.use_docker}
            agents = self.define_agents()
            user_proxy = None
            assistant = None
@@ -67,6 +63,20 @@ class AutoGenGeneral(PluginMultiprocessManager):
            tb_str = '```\n' + trimmed_format_exc() + '```'
            self.child_conn.send(PipeCom("done", "AutoGen 执行失败: \n\n" + tb_str))

    def get_config_list(self):
        model = self.llm_kwargs['llm_model']
        api_base = None
        if self.llm_kwargs['llm_model'].startswith('api2d-'):
            model = self.llm_kwargs['llm_model'][len('api2d-'):]
            api_base = "https://openai.api2d.net/v1"
        config_list = [{
            'model': model,
            'api_key': self.llm_kwargs['api_key'],
        },]
        if api_base is not None:
            config_list[0]['api_base'] = api_base
        return config_list

    def subprocess_worker(self, child_conn):
        # ⭐⭐ 子进程执行
        self.child_conn = child_conn
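For context, a config_list like the one built by get_config_list() is what AutoGen expects inside an agent's llm_config, while code_execution_config is handed to the user-proxy agent. Below is a minimal standalone sketch of that wiring, assuming the pyautogen API of this period; the model, key, agent names and task message are illustrative and not taken from the commit:

from autogen import AssistantAgent, UserProxyAgent

# Illustrative values only; get_config_list() builds the same shape from self.llm_kwargs.
config_list = [{
    'model': 'gpt-3.5-turbo',
    'api_key': 'sk-xxxx',
    # 'api_base': 'https://openai.api2d.net/v1',  # only added for api2d- models
},]
code_execution_config = {"work_dir": "autogen_logs", "use_docker": False}

assistant = AssistantAgent(name="assistant", llm_config={"config_list": config_list})
user_proxy = UserProxyAgent(name="user_proxy",
                            human_input_mode="NEVER",
                            code_execution_config=code_execution_config)
user_proxy.initiate_chat(assistant, message="Write and run code that prints the first 10 primes.")
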
@@ -1,4 +1,5 @@
from toolbox import get_log_folder, update_ui, gen_time_str, trimmed_format_exc, promote_file_to_downloadzone
from toolbox import get_log_folder, update_ui, gen_time_str, get_conf, promote_file_to_downloadzone
from crazy_functions.agent_fns.watchdog import WatchDog
import time, os

class PipeCom():
@@ -19,6 +20,16 @@ class PluginMultiprocessManager():
        self.system_prompt = system_prompt
        self.web_port = web_port
        self.alive = True
        self.use_docker, = get_conf('AUTOGEN_USE_DOCKER')

        # create a thread to monitor self.heartbeat, terminate the instance if no heartbeat for a long time
        timeout_seconds = 5*60
        self.heartbeat_watchdog = WatchDog(timeout=timeout_seconds, bark_fn=self.terminate, interval=5)
        self.heartbeat_watchdog.begin_watch()

    def feed_heartbeat_watchdog(self):
        # feed this `dog`, so the dog will not `bark` (bark_fn will terminate the instance)
        self.heartbeat_watchdog.feed()

    def is_alive(self):
        return self.alive
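Note that `self.use_docker, = get_conf('AUTOGEN_USE_DOCKER')` uses a trailing-comma unpack, which assumes get_conf returns its values packed in a tuple even when only one key is requested. A minimal sketch of that idiom, with a hypothetical read_config standing in for toolbox.get_conf:

# `read_config` is a hypothetical stand-in for toolbox.get_conf, assumed to
# return one value per requested key, packed in a tuple.
def read_config(*keys):
    fake_conf = {'AUTOGEN_USE_DOCKER': False}
    return tuple(fake_conf[k] for k in keys)

use_docker, = read_config('AUTOGEN_USE_DOCKER')  # the comma unpacks the 1-tuple (False,)
print(use_docker)                                # -> False
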
@@ -50,7 +61,7 @@ class PluginMultiprocessManager():
        # 获取fp的拓展名
        file_type = fp.split('.')[-1]
        # 如果是文本文件, 则直接显示文本内容
        if file_type in ['png', 'jpg']:
        if file_type.lower() in ['png', 'jpg']:
            image_path = os.path.abspath(fp)
            self.chatbot.append(['检测到新生图像:', f'本地文件预览: <br/><div align="center"><img src="file={image_path}"></div>'])
        yield from update_ui(chatbot=self.chatbot, history=self.history)
@@ -98,9 +109,17 @@ class PluginMultiprocessManager():
            self.terminate()
            return "terminate"

        # patience = 10

        while True:
            time.sleep(0.5)
            if not self.alive:
                # the heartbeat watchdog might have it killed
                self.terminate()
                return "terminate"

            if self.parent_conn.poll():
                self.feed_heartbeat_watchdog()
                if '[GPT-Academic] 等待中' in self.chatbot[-1][-1]:
                    self.chatbot.pop(-1) # remove the last line
                msg = self.parent_conn.recv() # PipeCom
@@ -124,10 +143,17 @@ class PluginMultiprocessManager():
                    # do not terminate here, leave the subprocess_worker instance alive
                    return "wait_feedback"
            else:
                self.feed_heartbeat_watchdog()
                if '[GPT-Academic] 等待中' not in self.chatbot[-1][-1]:
                    # begin_waiting_time = time.time()
                    self.chatbot.append(["[GPT-Academic] 等待AutoGen执行结果 ...", "[GPT-Academic] 等待中"])
                self.chatbot[-1] = [self.chatbot[-1][0], self.chatbot[-1][1].replace("[GPT-Academic] 等待中", "[GPT-Academic] 等待中.")]
                yield from update_ui(chatbot=self.chatbot, history=self.history)
                # if time.time() - begin_waiting_time > patience:
                #     self.chatbot.append([f"结束", "等待超时, 终止AutoGen程序。"])
                #     yield from update_ui(chatbot=self.chatbot, history=self.history)
                #     self.terminate()
                #     return "terminate"

        self.terminate()
        return "terminate"
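The loop above is the standard multiprocessing duplex-pipe pattern: the parent polls its end of the pipe, treats every incoming message as a heartbeat, and keeps updating the waiting indicator otherwise. A minimal standalone sketch of the same pattern, using a plain string payload instead of the project's PipeCom objects:

import time
from multiprocessing import Process, Pipe

def subprocess_worker(child_conn):
    # child side: do some work, report progress, then signal completion
    for i in range(3):
        time.sleep(1)
        child_conn.send(f"show: step {i} finished")
    child_conn.send("done: all steps finished")

if __name__ == "__main__":
    parent_conn, child_conn = Pipe()
    Process(target=subprocess_worker, args=(child_conn,), daemon=True).start()

    while True:
        time.sleep(0.5)
        if parent_conn.poll():          # a message arrived -> counts as a heartbeat
            msg = parent_conn.recv()
            print(msg)
            if msg.startswith("done"):
                break
        else:
            print("waiting ...")        # corresponds to the '[GPT-Academic] 等待中' UI update
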
crazy_functions/agent_fns/watchdog.py (new file, 28 lines)
@@ -0,0 +1,28 @@
import threading, time

class WatchDog():
    def __init__(self, timeout, bark_fn, interval=3, msg="") -> None:
        self.last_feed = None
        self.timeout = timeout
        self.bark_fn = bark_fn
        self.interval = interval
        self.msg = msg
        self.kill_dog = False

    def watch(self):
        while True:
            if self.kill_dog: break
            if time.time() - self.last_feed > self.timeout:
                if len(self.msg) > 0: print(self.msg)
                self.bark_fn()
                break
            time.sleep(self.interval)

    def begin_watch(self):
        self.last_feed = time.time()
        th = threading.Thread(target=self.watch)
        th.daemon = True
        th.start()

    def feed(self):
        self.last_feed = time.time()
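A short usage sketch for the new WatchDog class: feed() keeps resetting last_feed, and once feeding stops for longer than timeout, the daemon thread calls bark_fn (here a simple print; in the plugin it is self.terminate). The timeout values below are illustrative:

import time
from crazy_functions.agent_fns.watchdog import WatchDog

def on_timeout():
    print("no heartbeat for too long, terminating ...")

dog = WatchDog(timeout=3, bark_fn=on_timeout, interval=1, msg="watchdog timeout")
dog.begin_watch()          # starts the daemon thread running watch()

for _ in range(3):
    time.sleep(1)
    dog.feed()             # regular feeding keeps bark_fn from firing

time.sleep(5)              # stop feeding: bark_fn fires roughly `timeout` seconds later
dog.kill_dog = True        # would stop the watch thread early, if it had not barked yet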