From 72b2ce9b62f91c31d90e9ea946ac0fa531a7a21c Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Wed, 18 Dec 2024 23:05:55 +0800
Subject: [PATCH] ollama patch

---
 request_llms/bridge_ollama.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/request_llms/bridge_ollama.py b/request_llms/bridge_ollama.py
index 9a2fb97f..c701795f 100644
--- a/request_llms/bridge_ollama.py
+++ b/request_llms/bridge_ollama.py
@@ -75,7 +75,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
             # make a POST request to the API endpoint, stream=False
             from .bridge_all import model_info
             endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except requests.exceptions.ReadTimeout as e:
             retry += 1
@@ -152,10 +152,12 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     history.append(inputs); history.append("")
 
     retry = 0
+    if proxies is not None:
+        logger.error("Ollama will not use a proxy server; the proxies setting has been ignored.")
     while True:
         try:
             # make a POST request to the API endpoint, stream=True
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS);break
         except:
             retry += 1
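
For context, below is a minimal standalone sketch of the call pattern this patch settles on: a streaming POST to a local Ollama endpoint with no per-request proxy dict. The endpoint URL, model name, headers, and timeout are illustrative assumptions, not taken from the repository; the real bridge_ollama.py resolves its endpoint from model_info and builds its own payload and headers.

```python
# Minimal sketch of the patched call pattern, outside the repo's plumbing.
# Assumptions (not from the patch): the endpoint URL, model name, and
# timeout below are illustrative placeholders.
import json
import requests

OLLAMA_ENDPOINT = "http://localhost:11434/api/chat"  # assumed default Ollama address
TIMEOUT_SECONDS = 30  # illustrative; the real value comes from the repo's config

payload = {
    "model": "llama3",  # hypothetical model name
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": True,
}

# proxies=None means no per-request proxy dict is passed, mirroring the patch.
# Note that requests may still honor HTTP(S)_PROXY environment variables
# unless trust_env is disabled on a Session.
response = requests.post(
    OLLAMA_ENDPOINT,
    headers={"Content-Type": "application/json"},
    proxies=None,
    json=payload,
    stream=True,
    timeout=TIMEOUT_SECONDS,
)

# Ollama streams one JSON object per line; print the incremental content.
for line in response.iter_lines():
    if line:
        chunk = json.loads(line)
        print(chunk.get("message", {}).get("content", ""), end="")
```

The rationale behind hardcoding proxies=None appears to be that Ollama typically runs on the same machine as the application, so routing its traffic through the application-wide proxy would break local connectivity; hence the patch also logs a notice when a proxies setting is present but ignored.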