main.py

from fastapi import FastAPI, Request, Response, HTTPException, Depends, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from fastapi.concurrency import run_in_threadpool
from pydantic import BaseModel

import random
import requests
import json
import uuid
import aiohttp
import asyncio

from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user, get_admin_user
from config import OLLAMA_BASE_URL, WEBUI_AUTH

from typing import Optional, List, Union

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.state.OLLAMA_BASE_URL = OLLAMA_BASE_URL
app.state.OLLAMA_BASE_URLS = [OLLAMA_BASE_URL]
app.state.MODELS = {}

REQUEST_POOL = []
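# NOTE: REQUEST_POOL holds the ids of in-flight streaming requests so that
# /cancel/{request_id} can interrupt them. It is a plain module-level list,
# so it is shared only within a single worker process; under multiple
# workers each process sees its own pool.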


@app.middleware("http")
async def check_url(request: Request, call_next):
    # Lazily populate the model registry on the first request.
    if len(app.state.MODELS) == 0:
        await get_all_models()

    response = await call_next(request)
    return response


@app.get("/urls")
async def get_ollama_api_urls(user=Depends(get_admin_user)):
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


class UrlUpdateForm(BaseModel):
    urls: List[str]


@app.post("/urls/update")
async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
    app.state.OLLAMA_BASE_URLS = form_data.urls
    print(app.state.OLLAMA_BASE_URLS)
    return {"OLLAMA_BASE_URLS": app.state.OLLAMA_BASE_URLS}


@app.get("/cancel/{request_id}")
async def cancel_ollama_request(request_id: str, user=Depends(get_current_user)):
    if user:
        if request_id in REQUEST_POOL:
            REQUEST_POOL.remove(request_id)
        return True
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


async def fetch_url(url):
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                return await response.json()
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")
        return None


def merge_models_lists(model_lists):
    merged_models = {}

    for idx, model_list in enumerate(model_lists):
        for model in model_list:
            digest = model["digest"]
            if digest not in merged_models:
                model["urls"] = [idx]
                merged_models[digest] = model
            else:
                merged_models[digest]["urls"].append(idx)

    return list(merged_models.values())
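# Example (extra fields omitted): given the /api/tags payloads from two servers,
#   merge_models_lists([
#       [{"model": "llama2", "digest": "abc"}],
#       [{"model": "llama2", "digest": "abc"}, {"model": "mistral", "digest": "def"}],
#   ])
# deduplicates by digest and records which base-URL indices serve each model:
#   [{"model": "llama2", "digest": "abc", "urls": [0, 1]},
#    {"model": "mistral", "digest": "def", "urls": [1]}]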


# user=Depends(get_current_user)
async def get_all_models():
    print("get_all_models")
    tasks = [fetch_url(f"{url}/api/tags") for url in app.state.OLLAMA_BASE_URLS]
    responses = await asyncio.gather(*tasks)
    responses = list(filter(lambda x: x is not None, responses))

    models = {
        "models": merge_models_lists(
            map(lambda response: response["models"], responses)
        )
    }

    app.state.MODELS = {model["model"]: model for model in models["models"]}

    return models
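# app.state.MODELS maps each model name (the "model" field of each merged
# entry) to its merged record, including the "urls" list of serving
# base-URL indices used for routing below.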


@app.get("/api/tags")
@app.get("/api/tags/{url_idx}")
async def get_ollama_tags(
    url_idx: Optional[int] = None, user=Depends(get_current_user)
):
    if url_idx is None:
        return await get_all_models()
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]

        r = None
        try:
            r = requests.request(method="GET", url=f"{url}/api/tags")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            print(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except Exception:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )


@app.get("/api/version")
@app.get("/api/version/{url_idx}")
async def get_ollama_versions(url_idx: Optional[int] = None):
    if url_idx is None:
        # returns lowest version
        tasks = [fetch_url(f"{url}/api/version") for url in app.state.OLLAMA_BASE_URLS]
        responses = await asyncio.gather(*tasks)
        responses = list(filter(lambda x: x is not None, responses))

        lowest_version = min(
            responses, key=lambda x: tuple(map(int, x["version"].split(".")))
        )

        return {"version": lowest_version["version"]}
    else:
        url = app.state.OLLAMA_BASE_URLS[url_idx]

        r = None
        try:
            r = requests.request(method="GET", url=f"{url}/api/version")
            r.raise_for_status()

            return r.json()
        except Exception as e:
            print(e)
            error_detail = "Open WebUI: Server Connection Error"
            if r is not None:
                try:
                    res = r.json()
                    if "error" in res:
                        error_detail = f"Ollama: {res['error']}"
                except Exception:
                    error_detail = f"Ollama: {e}"

            raise HTTPException(
                status_code=r.status_code if r else 500,
                detail=error_detail,
            )
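# NOTE: the "lowest version" comparison above assumes plain dotted-integer
# version strings (e.g. "0.1.22"); a suffix such as "0.1.22-rc1" would make
# tuple(map(int, ...)) raise, and min() raises on an empty list if no
# server responded.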


class ModelNameForm(BaseModel):
    name: str


@app.post("/api/pull")
@app.post("/api/pull/{url_idx}")
async def pull_model(
    form_data: ModelNameForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None

    def get_request():
        nonlocal url
        nonlocal r

        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/pull",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        # Pass the callable itself; calling get_request() here would run the
        # blocking request on the event loop before the threadpool gets it.
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
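# The handlers above and below share one pattern: the blocking `requests`
# call is built inside a closure and handed to run_in_threadpool() so it
# does not stall the event loop, while stream_content() re-yields the
# upstream response chunk by chunk through StreamingResponse.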


class PushModelForm(BaseModel):
    name: str
    insecure: Optional[bool] = None
    stream: Optional[bool] = None


@app.delete("/api/push")
@app.delete("/api/push/{url_idx}")
async def push_model(
    form_data: PushModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.name}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/push",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,  # stream the upstream response instead of buffering it
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class CreateModelForm(BaseModel):
    name: str
    modelfile: Optional[str] = None
    stream: Optional[bool] = None
    path: Optional[str] = None


@app.post("/api/create")
@app.post("/api/create/{url_idx}")
async def create_model(
    form_data: CreateModelForm, url_idx: int = 0, user=Depends(get_admin_user)
):
    print(form_data)
    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None

    def get_request():
        nonlocal url
        nonlocal r
        try:

            def stream_content():
                for chunk in r.iter_content(chunk_size=8192):
                    yield chunk

            r = requests.request(
                method="POST",
                url=f"{url}/api/create",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()
            print(r)

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


class CopyModelForm(BaseModel):
    source: str
    destination: str


@app.post("/api/copy")
@app.post("/api/copy/{url_idx}")
async def copy_model(
    form_data: CopyModelForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.source in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.source]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.source}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/copy",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        print(r.text)

        return True
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.delete("/api/delete")
@app.delete("/api/delete/{url_idx}")
async def delete_model(
    form_data: ModelNameForm,
    url_idx: Optional[int] = None,
    user=Depends(get_admin_user),
):
    if url_idx is None:
        if form_data.name in app.state.MODELS:
            url_idx = app.state.MODELS[form_data.name]["urls"][0]
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.name}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None
    try:
        r = requests.request(
            method="DELETE",
            url=f"{url}/api/delete",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        print(r.text)

        return True
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.post("/api/show")
async def show_model_info(form_data: ModelNameForm, user=Depends(get_current_user)):
    if form_data.name not in app.state.MODELS:
        raise HTTPException(
            status_code=400,
            detail=f"model '{form_data.name}' not found",
        )

    url_idx = random.choice(app.state.MODELS[form_data.name]["urls"])
    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/show",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
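# Model-aware handlers pick a server with random.choice over the "urls"
# recorded by merge_models_lists, giving simple random load balancing
# across the Ollama instances that host the requested model.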


class GenerateEmbeddingsForm(BaseModel):
    model: str
    prompt: str
    options: Optional[dict] = None
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/embeddings")
@app.post("/api/embeddings/{url_idx}")
async def generate_embeddings(
    form_data: GenerateEmbeddingsForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):
    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.model}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None
    try:
        r = requests.request(
            method="POST",
            url=f"{url}/api/embeddings",
            data=form_data.model_dump_json(exclude_none=True),
        )
        r.raise_for_status()

        return r.json()
    except Exception as e:
        print(e)
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
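# Example request body for POST /api/embeddings (forwarded to Ollama as-is):
#   {"model": "llama2", "prompt": "Hello world"}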


class GenerateCompletionForm(BaseModel):
    model: str
    prompt: str
    images: Optional[List[str]] = None
    format: Optional[str] = None
    options: Optional[dict] = None
    system: Optional[str] = None
    template: Optional[str] = None
    context: Optional[str] = None
    stream: Optional[bool] = True
    raw: Optional[bool] = None
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/generate")
@app.post("/api/generate/{url_idx}")
async def generate_completion(
    form_data: GenerateCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):
    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.model}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/generate",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
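# Cancellation protocol: when streaming, the first line emitted is a
# synthetic {"id": <request_id>, "done": false} record rather than Ollama
# output. The client reads that id and can GET /cancel/{request_id}, which
# removes the id from REQUEST_POOL and makes stream_content() stop
# forwarding chunks.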


class ChatMessage(BaseModel):
    role: str
    content: str
    images: Optional[List[str]] = None


class GenerateChatCompletionForm(BaseModel):
    model: str
    messages: List[ChatMessage]
    format: Optional[str] = None
    options: Optional[dict] = None
    template: Optional[str] = None
    stream: Optional[bool] = True
    keep_alive: Optional[Union[int, str]] = None


@app.post("/api/chat")
@app.post("/api/chat/{url_idx}")
async def generate_chat_completion(
    form_data: GenerateChatCompletionForm,
    url_idx: Optional[int] = None,
    user=Depends(get_current_user),
):
    if url_idx is None:
        if form_data.model in app.state.MODELS:
            url_idx = random.choice(app.state.MODELS[form_data.model]["urls"])
        else:
            raise HTTPException(
                status_code=400,
                detail=f"model '{form_data.model}' not found",
            )

    url = app.state.OLLAMA_BASE_URLS[url_idx]

    r = None

    print(form_data.model_dump_json(exclude_none=True))

    def get_request():
        nonlocal form_data
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if form_data.stream:
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method="POST",
                url=f"{url}/api/chat",
                data=form_data.model_dump_json(exclude_none=True),
                stream=True,
            )

            r.raise_for_status()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
    url = app.state.OLLAMA_BASE_URLS[0]
    target_url = f"{url}/{path}"

    body = await request.body()
    headers = dict(request.headers)

    if user.role in ["user", "admin"]:
        if path in ["pull", "delete", "push", "copy", "create"]:
            if user.role != "admin":
                raise HTTPException(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
                )
    else:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
        )

    headers.pop("host", None)
    headers.pop("authorization", None)
    headers.pop("origin", None)
    headers.pop("referer", None)

    r = None

    def get_request():
        nonlocal r

        request_id = str(uuid.uuid4())
        try:
            REQUEST_POOL.append(request_id)

            def stream_content():
                try:
                    if path == "generate":
                        data = json.loads(body.decode("utf-8"))

                        if not ("stream" in data and data["stream"] == False):
                            yield json.dumps({"id": request_id, "done": False}) + "\n"

                    elif path == "chat":
                        yield json.dumps({"id": request_id, "done": False}) + "\n"

                    for chunk in r.iter_content(chunk_size=8192):
                        if request_id in REQUEST_POOL:
                            yield chunk
                        else:
                            print("User: canceled request")
                            break
                finally:
                    if hasattr(r, "close"):
                        r.close()
                    if request_id in REQUEST_POOL:
                        REQUEST_POOL.remove(request_id)

            r = requests.request(
                method=request.method,
                url=target_url,
                data=body,
                headers=headers,
                stream=True,
            )

            r.raise_for_status()

            # r.close()

            return StreamingResponse(
                stream_content(),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        except Exception as e:
            raise e

    try:
        return await run_in_threadpool(get_request)
    except Exception as e:
        error_detail = "Open WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"Ollama: {res['error']}"
            except Exception:
                error_detail = f"Ollama: {e}"

        raise HTTPException(
            status_code=r.status_code if r else 500,
            detail=error_detail,
        )
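# The catch-all proxy above forwards any other /api path to the first
# configured Ollama base URL only; unlike the dedicated handlers it does no
# per-model load balancing across OLLAMA_BASE_URLS.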