Migrate from the stdlib `logging` module to `loguru` (stage 3)

This commit is contained in:
binary-husky
2024-09-11 08:49:55 +00:00
parent 80acd9c875
commit 0d082327c8
27 changed files with 73 additions and 67 deletions

View File

@@ -12,7 +12,6 @@ import json
import os
import re
import time
import logging
import traceback
import requests
import random
@@ -317,7 +316,6 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
# 前者是API2D的结束条件后者是OPENAI的结束条件
if ('data: [DONE]' in chunk_decoded) or (len(chunkjson['choices'][0]["delta"]) == 0):
# 判定为数据流的结束gpt_replying_buffer也写完了
# logging.info(f'[response] {gpt_replying_buffer}')
log_chat(llm_model=llm_kwargs["llm_model"], input_str=inputs, output_str=gpt_replying_buffer)
break
# 处理数据流的主体
@@ -486,7 +484,6 @@ def generate_payload(inputs:str, llm_kwargs:dict, history:list, system_prompt:st
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-0301",
])
logging.info("Random select model:" + model)
payload = {
"model": model,

View File

@@ -8,15 +8,14 @@
2. predict_no_ui_long_connection支持多线程
"""
import os
import json
import time
import logging
import requests
import base64
import os
import glob
from toolbox import get_conf, update_ui, is_any_api_key, select_api_key, what_keys, clip_history, trimmed_format_exc, is_the_upload_folder, \
update_ui_lastest_msg, get_max_token, encode_image, have_any_recent_upload_image_files
update_ui_lastest_msg, get_max_token, encode_image, have_any_recent_upload_image_files, log_chat
proxies, TIMEOUT_SECONDS, MAX_RETRY, API_ORG, AZURE_CFG_ARRAY = \
@@ -100,7 +99,6 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
raw_input = inputs
logging.info(f'[raw_input] {raw_input}')
def make_media_input(inputs, image_paths):
for image_path in image_paths:
inputs = inputs + f'<br/><br/><div align="center"><img src="file={os.path.abspath(image_path)}"></div>'
@@ -185,7 +183,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
# 判定为数据流的结束gpt_replying_buffer也写完了
lastmsg = chatbot[-1][-1] + f"\n\n\n\n{llm_kwargs['llm_model']}调用结束,该模型不具备上下文对话能力,如需追问,请及时切换模型。」"
yield from update_ui_lastest_msg(lastmsg, chatbot, history, delay=1)
logging.info(f'[response] {gpt_replying_buffer}')
log_chat(llm_model=llm_kwargs["llm_model"], input_str=inputs, output_str=gpt_replying_buffer)
break
# 处理数据流的主体
status_text = f"finish_reason: {chunkjson['choices'][0].get('finish_reason', 'null')}"

View File

@@ -13,10 +13,10 @@
import json
import time
import gradio as gr
import logging
import traceback
import requests
import importlib
from loguru import logger as logging
# config_private.py放自己的秘密如API和代理网址
# 读取时首先看是否存在私密的config_private配置文件不受git管控如果有则覆盖原config文件

View File

@@ -9,13 +9,14 @@
具备多线程调用能力的函数
2. predict_no_ui_long_connection支持多线程
"""
import logging
import os
import time
import traceback
import json
import requests
from loguru import logger as logging
from toolbox import get_conf, update_ui, trimmed_format_exc, encode_image, every_image_file_in_path, log_chat
picture_system_prompt = "\n当回复图像时,必须说明正在回复哪张图像。所有图像仅在最后一个问题中提供,即使它们在历史记录中被提及。请使用'这是第X张图像:'的格式来指明您正在描述的是哪张图像。"
Claude_3_Models = ["claude-3-haiku-20240307", "claude-3-sonnet-20240229", "claude-3-opus-20240229", "claude-3-5-sonnet-20240620"]

View File

@@ -13,11 +13,9 @@
import json
import time
import gradio as gr
import logging
import traceback
import requests
import importlib
import random
from loguru import logger as logging
# config_private.py放自己的秘密如API和代理网址
# 读取时首先看是否存在私密的config_private配置文件不受git管控如果有则覆盖原config文件

View File

@@ -65,10 +65,10 @@ class GetInternlmHandle(LocalLLMHandle):
def llm_stream_generator(self, **kwargs):
import torch
import logging
import copy
import warnings
import torch.nn as nn
from loguru import logger as logging
from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig
# 🏃‍♂️🏃‍♂️🏃‍♂️ 子进程执行
@@ -119,7 +119,7 @@ class GetInternlmHandle(LocalLLMHandle):
elif generation_config.max_new_tokens is not None:
generation_config.max_length = generation_config.max_new_tokens + input_ids_seq_length
if not has_default_max_length:
logging.warn(
logging.warning(
f"Both `max_new_tokens` (={generation_config.max_new_tokens}) and `max_length`(="
f"{generation_config.max_length}) seem to have been set. `max_new_tokens` will take precedence. "
"Please refer to the documentation for more information. "

View File

@@ -5,7 +5,6 @@
import json
import os
import time
import logging
from toolbox import get_conf, update_ui, log_chat
from toolbox import ChatBotWithCookies

View File

@@ -13,11 +13,11 @@
import json
import time
import gradio as gr
import logging
import traceback
import requests
import importlib
import random
from loguru import logger as logging
# config_private.py放自己的秘密如API和代理网址
# 读取时首先看是否存在私密的config_private配置文件不受git管控如果有则覆盖原config文件

View File

@@ -1,12 +1,13 @@
import time
import asyncio
import threading
import importlib
from .bridge_newbingfree import preprocess_newbing_out, preprocess_newbing_out_simple
from multiprocessing import Process, Pipe
from toolbox import update_ui, get_conf, trimmed_format_exc
import threading
import importlib
import logging
import time
from loguru import logger as logging
from toolbox import get_conf
import asyncio
load_message = "正在加载Claude组件请稍候..."

View File

@@ -8,7 +8,6 @@ import json
import random
import string
import websockets
import logging
import time
import threading
import importlib

View File

@@ -1,7 +1,6 @@
from http import HTTPStatus
from toolbox import get_conf
import threading
import logging
timeout_bot_msg = '[Local Message] Request timeout. Network error.'

View File

@@ -1,7 +1,7 @@
from toolbox import get_conf
import threading
import logging
import os
import threading
from toolbox import get_conf
from loguru import logger as logging
timeout_bot_msg = '[Local Message] Request timeout. Network error.'
#os.environ['VOLC_ACCESSKEY'] = ''

View File

@@ -4,7 +4,7 @@
# @Descr : 兼容最新的智谱Ai
from toolbox import get_conf
from toolbox import get_conf, encode_image, get_pictures_list
import logging, os, requests
import requests
import json
class TaichuChatInit:
def __init__(self): ...

View File

@@ -1,8 +1,8 @@
import json
import time
import logging
import traceback
import requests
from loguru import logger as logging
# config_private.py放自己的秘密如API和代理网址
# 读取时首先看是否存在私密的config_private配置文件不受git管控如果有则覆盖原config文件