浏览代码

Merge pull request #2799 from mindspawn/proxy-fix

Enable use of http_proxy environment variable in openai and ollama calls
Timothy Jaeryang Baek 11 月之前
父节点
当前提交
8c3a6eb262
共有 2 个文件被更改,包括 5 次插入、5 次删除
  1. +3 -3
      backend/apps/ollama/main.py
  2. +2 -2
      backend/apps/openai/main.py

+ 3 - 3
backend/apps/ollama/main.py

@@ -134,7 +134,7 @@ async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin
 async def fetch_url(url):
     timeout = aiohttp.ClientTimeout(total=5)
     try:
-        async with aiohttp.ClientSession(timeout=timeout) as session:
+        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
             async with session.get(url) as response:
                 return await response.json()
     except Exception as e:
@@ -156,7 +156,7 @@ async def cleanup_response(
 async def post_streaming_url(url: str, payload: str):
     r = None
     try:
-        session = aiohttp.ClientSession()
+        session = aiohttp.ClientSession(trust_env=True)
         r = await session.post(url, data=payload)
         r.raise_for_status()
 
@@ -1045,7 +1045,7 @@ async def download_file_stream(
 
     timeout = aiohttp.ClientTimeout(total=600)  # Set the timeout
 
-    async with aiohttp.ClientSession(timeout=timeout) as session:
+    async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
         async with session.get(file_url, headers=headers) as response:
             total_size = int(response.headers.get("content-length", 0)) + current_size
 

+ 2 - 2
backend/apps/openai/main.py

@@ -186,7 +186,7 @@ async def fetch_url(url, key):
     timeout = aiohttp.ClientTimeout(total=5)
     try:
         headers = {"Authorization": f"Bearer {key}"}
-        async with aiohttp.ClientSession(timeout=timeout) as session:
+        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
             async with session.get(url, headers=headers) as response:
                 return await response.json()
     except Exception as e:
@@ -462,7 +462,7 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
     streaming = False
 
     try:
-        session = aiohttp.ClientSession()
+        session = aiohttp.ClientSession(trust_env=True)
         r = await session.request(
             method=request.method,
             url=target_url,