ollama patch

binary-husky
2024-12-18 23:05:55 +08:00
parent f43ef909e2
commit 72b2ce9b62


@@ -75,7 +75,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
             # make a POST request to the API endpoint, stream=False
             from .bridge_all import model_info
             endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except requests.exceptions.ReadTimeout as e:
             retry += 1
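
This hunk hard-codes proxies=None for the blocking call in predict_no_ui_long_connection, so the request to the locally served Ollama endpoint is no longer routed through the proxy configured for the rest of the application. Below is a minimal sketch of that pattern, not the repository's code: the endpoint URL, retry limit, and function name are hypothetical.

```python
import requests

OLLAMA_ENDPOINT = "http://localhost:11434/api/chat"  # hypothetical local endpoint
TIMEOUT_SECONDS = 30
MAX_RETRY = 3

def post_to_ollama(payload: dict, headers: dict | None = None) -> requests.Response:
    """POST to a local Ollama server, ignoring any app-level proxy dict."""
    retry = 0
    while True:
        try:
            # proxies=None means no application-supplied proxy dict is passed,
            # so the configured proxy is not applied to this local request.
            return requests.post(OLLAMA_ENDPOINT, headers=headers, proxies=None,
                                 json=payload, stream=True, timeout=TIMEOUT_SECONDS)
        except requests.exceptions.ReadTimeout:
            retry += 1
            if retry > MAX_RETRY:
                raise
```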
@@ -152,10 +152,12 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     history.append(inputs); history.append("")

     retry = 0
+    if proxies is not None:
+        logger.error("Ollama不会使用代理服务器, 忽略了proxies的设置。")
     while True:
         try:
             # make a POST request to the API endpoint, stream=True
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS);break
         except:
             retry += 1
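
The second hunk adds a warning for users who do have a proxy configured (the Chinese log message says, roughly, "Ollama will not use a proxy server; the proxies setting is ignored") and likewise forces proxies=None on the streaming request in predict. A hedged sketch of that behaviour, with a hypothetical endpoint URL and a stdlib logger standing in for the project's logger, might look like this:

```python
import logging
from typing import Iterator, Optional

import requests

logger = logging.getLogger(__name__)  # stand-in for the project's logger
OLLAMA_ENDPOINT = "http://localhost:11434/api/chat"  # hypothetical local endpoint
TIMEOUT_SECONDS = 30

def stream_from_ollama(payload: dict, proxies: Optional[dict],
                       headers: Optional[dict] = None) -> Iterator[str]:
    """Stream a chat response from Ollama, ignoring any configured proxy."""
    if proxies is not None:
        # Ollama runs locally, so the proxy settings are deliberately ignored.
        logger.error("Ollama will not use a proxy server; the proxies setting is ignored.")
    response = requests.post(OLLAMA_ENDPOINT, headers=headers, proxies=None,
                             json=payload, stream=True, timeout=TIMEOUT_SECONDS)
    for line in response.iter_lines():
        if line:
            yield line.decode("utf-8")
```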