main.py 31 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037
  1. from contextlib import asynccontextmanager
  2. from bs4 import BeautifulSoup
  3. import json
  4. import markdown
  5. import time
  6. import os
  7. import sys
  8. import logging
  9. import aiohttp
  10. import requests
  11. import mimetypes
  12. import shutil
  13. import os
  14. import asyncio
  15. from fastapi import FastAPI, Request, Depends, status, UploadFile, File, Form
  16. from fastapi.staticfiles import StaticFiles
  17. from fastapi.responses import JSONResponse
  18. from fastapi import HTTPException
  19. from fastapi.middleware.wsgi import WSGIMiddleware
  20. from fastapi.middleware.cors import CORSMiddleware
  21. from starlette.exceptions import HTTPException as StarletteHTTPException
  22. from starlette.middleware.base import BaseHTTPMiddleware
  23. from starlette.responses import StreamingResponse, Response
  24. from apps.socket.main import app as socket_app
  25. from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
  26. from apps.openai.main import app as openai_app, get_all_models as get_openai_models
  27. from apps.audio.main import app as audio_app
  28. from apps.images.main import app as images_app
  29. from apps.rag.main import app as rag_app
  30. from apps.webui.main import app as webui_app
  31. from pydantic import BaseModel
  32. from typing import List, Optional
  33. from apps.webui.models.models import Models, ModelModel
  34. from utils.utils import (
  35. get_admin_user,
  36. get_verified_user,
  37. get_current_user,
  38. get_http_authorization_cred,
  39. )
  40. from apps.rag.utils import rag_messages
  41. from config import (
  42. CONFIG_DATA,
  43. WEBUI_NAME,
  44. WEBUI_URL,
  45. WEBUI_AUTH,
  46. ENV,
  47. VERSION,
  48. CHANGELOG,
  49. FRONTEND_BUILD_DIR,
  50. CACHE_DIR,
  51. STATIC_DIR,
  52. ENABLE_OPENAI_API,
  53. ENABLE_OLLAMA_API,
  54. ENABLE_MODEL_FILTER,
  55. MODEL_FILTER_LIST,
  56. GLOBAL_LOG_LEVEL,
  57. SRC_LOG_LEVELS,
  58. WEBHOOK_URL,
  59. ENABLE_ADMIN_EXPORT,
  60. AppConfig,
  61. WEBUI_BUILD_HASH,
  62. )
  63. from constants import ERROR_MESSAGES
# Root logging goes to stdout at the globally configured level; this
# module's own logger honors the "MAIN" source-specific level.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
  67. class SPAStaticFiles(StaticFiles):
  68. async def get_response(self, path: str, scope):
  69. try:
  70. return await super().get_response(path, scope)
  71. except (HTTPException, StarletteHTTPException) as ex:
  72. if ex.status_code == 404:
  73. return await super().get_response("index.html", scope)
  74. else:
  75. raise ex
# Startup banner printed to stdout; shows the version and, for release
# builds, the commit hash.
print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|

v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
@asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work yet; placeholder for future lifecycle hooks.
    yield


# API docs are only exposed in the dev environment.
app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)

# Mirror persisted configuration onto application state.
app.state.config = AppConfig()
app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
app.state.config.WEBHOOK_URL = WEBHOOK_URL

# Model registry, populated lazily on the first request (see check_url).
app.state.MODELS = {}

origins = ["*"]

# Custom middleware to add security headers
# class SecurityHeadersMiddleware(BaseHTTPMiddleware):
#     async def dispatch(self, request: Request, call_next):
#         response: Response = await call_next(request)
#         response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
#         response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
#         return response
# app.add_middleware(SecurityHeadersMiddleware)
  111. class RAGMiddleware(BaseHTTPMiddleware):
  112. async def dispatch(self, request: Request, call_next):
  113. return_citations = False
  114. if request.method == "POST" and (
  115. "/ollama/api/chat" in request.url.path
  116. or "/chat/completions" in request.url.path
  117. ):
  118. log.debug(f"request.url.path: {request.url.path}")
  119. # Read the original request body
  120. body = await request.body()
  121. # Decode body to string
  122. body_str = body.decode("utf-8")
  123. # Parse string to JSON
  124. data = json.loads(body_str) if body_str else {}
  125. return_citations = data.get("citations", False)
  126. if "citations" in data:
  127. del data["citations"]
  128. # Example: Add a new key-value pair or modify existing ones
  129. # data["modified"] = True # Example modification
  130. if "docs" in data:
  131. data = {**data}
  132. data["messages"], citations = rag_messages(
  133. docs=data["docs"],
  134. messages=data["messages"],
  135. template=rag_app.state.config.RAG_TEMPLATE,
  136. embedding_function=rag_app.state.EMBEDDING_FUNCTION,
  137. k=rag_app.state.config.TOP_K,
  138. reranking_function=rag_app.state.sentence_transformer_rf,
  139. r=rag_app.state.config.RELEVANCE_THRESHOLD,
  140. hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
  141. )
  142. del data["docs"]
  143. log.debug(
  144. f"data['messages']: {data['messages']}, citations: {citations}"
  145. )
  146. modified_body_bytes = json.dumps(data).encode("utf-8")
  147. # Replace the request body with the modified one
  148. request._body = modified_body_bytes
  149. # Set custom header to ensure content-length matches new body length
  150. request.headers.__dict__["_list"] = [
  151. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  152. *[
  153. (k, v)
  154. for k, v in request.headers.raw
  155. if k.lower() != b"content-length"
  156. ],
  157. ]
  158. response = await call_next(request)
  159. if return_citations:
  160. # Inject the citations into the response
  161. if isinstance(response, StreamingResponse):
  162. # If it's a streaming response, inject it as SSE event or NDJSON line
  163. content_type = response.headers.get("Content-Type")
  164. if "text/event-stream" in content_type:
  165. return StreamingResponse(
  166. self.openai_stream_wrapper(response.body_iterator, citations),
  167. )
  168. if "application/x-ndjson" in content_type:
  169. return StreamingResponse(
  170. self.ollama_stream_wrapper(response.body_iterator, citations),
  171. )
  172. return response
  173. async def _receive(self, body: bytes):
  174. return {"type": "http.request", "body": body, "more_body": False}
  175. async def openai_stream_wrapper(self, original_generator, citations):
  176. yield f"data: {json.dumps({'citations': citations})}\n\n"
  177. async for data in original_generator:
  178. yield data
  179. async def ollama_stream_wrapper(self, original_generator, citations):
  180. yield f"{json.dumps({'citations': citations})}\n"
  181. async for data in original_generator:
  182. yield data
  183. app.add_middleware(RAGMiddleware)
  184. class PipelineMiddleware(BaseHTTPMiddleware):
  185. async def dispatch(self, request: Request, call_next):
  186. if request.method == "POST" and (
  187. "/ollama/api/chat" in request.url.path
  188. or "/chat/completions" in request.url.path
  189. ):
  190. log.debug(f"request.url.path: {request.url.path}")
  191. # Read the original request body
  192. body = await request.body()
  193. # Decode body to string
  194. body_str = body.decode("utf-8")
  195. # Parse string to JSON
  196. data = json.loads(body_str) if body_str else {}
  197. model_id = data["model"]
  198. filters = [
  199. model
  200. for model in app.state.MODELS.values()
  201. if "pipeline" in model
  202. and "type" in model["pipeline"]
  203. and model["pipeline"]["type"] == "filter"
  204. and (
  205. model["pipeline"]["pipelines"] == ["*"]
  206. or any(
  207. model_id == target_model_id
  208. for target_model_id in model["pipeline"]["pipelines"]
  209. )
  210. )
  211. ]
  212. sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
  213. user = None
  214. if len(sorted_filters) > 0:
  215. try:
  216. user = get_current_user(
  217. get_http_authorization_cred(
  218. request.headers.get("Authorization")
  219. )
  220. )
  221. user = {"id": user.id, "name": user.name, "role": user.role}
  222. except:
  223. pass
  224. model = app.state.MODELS[model_id]
  225. if "pipeline" in model:
  226. sorted_filters.append(model)
  227. for filter in sorted_filters:
  228. r = None
  229. try:
  230. urlIdx = filter["urlIdx"]
  231. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  232. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  233. if key != "":
  234. headers = {"Authorization": f"Bearer {key}"}
  235. r = requests.post(
  236. f"{url}/{filter['id']}/filter/inlet",
  237. headers=headers,
  238. json={
  239. "user": user,
  240. "body": data,
  241. },
  242. )
  243. r.raise_for_status()
  244. data = r.json()
  245. except Exception as e:
  246. # Handle connection error here
  247. print(f"Connection error: {e}")
  248. if r is not None:
  249. try:
  250. res = r.json()
  251. if "detail" in res:
  252. return JSONResponse(
  253. status_code=r.status_code,
  254. content=res,
  255. )
  256. except:
  257. pass
  258. else:
  259. pass
  260. if "pipeline" not in app.state.MODELS[model_id]:
  261. if "chat_id" in data:
  262. del data["chat_id"]
  263. if "title" in data:
  264. del data["title"]
  265. modified_body_bytes = json.dumps(data).encode("utf-8")
  266. # Replace the request body with the modified one
  267. request._body = modified_body_bytes
  268. # Set custom header to ensure content-length matches new body length
  269. request.headers.__dict__["_list"] = [
  270. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  271. *[
  272. (k, v)
  273. for k, v in request.headers.raw
  274. if k.lower() != b"content-length"
  275. ],
  276. ]
  277. response = await call_next(request)
  278. return response
  279. async def _receive(self, body: bytes):
  280. return {"type": "http.request", "body": body, "more_body": False}
  281. app.add_middleware(PipelineMiddleware)
# Allow cross-origin requests from any origin; credentials are enabled so
# browser clients can send auth cookies/headers.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
  289. @app.middleware("http")
  290. async def check_url(request: Request, call_next):
  291. if len(app.state.MODELS) == 0:
  292. await get_all_models()
  293. else:
  294. pass
  295. start_time = int(time.time())
  296. response = await call_next(request)
  297. process_time = int(time.time()) - start_time
  298. response.headers["X-Process-Time"] = str(process_time)
  299. return response
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    # After any embedding-update endpoint runs, propagate the (possibly
    # replaced) embedding function from the RAG app to the web UI app.
    response = await call_next(request)
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response
# Sub-applications: websocket hub plus the per-domain API apps.
app.mount("/ws", socket_app)
app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

# Share the RAG embedding function with the web UI app.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
  314. async def get_all_models():
  315. openai_models = []
  316. ollama_models = []
  317. if app.state.config.ENABLE_OPENAI_API:
  318. openai_models = await get_openai_models()
  319. openai_models = openai_models["data"]
  320. if app.state.config.ENABLE_OLLAMA_API:
  321. ollama_models = await get_ollama_models()
  322. ollama_models = [
  323. {
  324. "id": model["model"],
  325. "name": model["name"],
  326. "object": "model",
  327. "created": int(time.time()),
  328. "owned_by": "ollama",
  329. "ollama": model,
  330. }
  331. for model in ollama_models["models"]
  332. ]
  333. models = openai_models + ollama_models
  334. custom_models = Models.get_all_models()
  335. for custom_model in custom_models:
  336. if custom_model.base_model_id == None:
  337. for model in models:
  338. if (
  339. custom_model.id == model["id"]
  340. or custom_model.id == model["id"].split(":")[0]
  341. ):
  342. model["name"] = custom_model.name
  343. model["info"] = custom_model.model_dump()
  344. else:
  345. owned_by = "openai"
  346. for model in models:
  347. if (
  348. custom_model.base_model_id == model["id"]
  349. or custom_model.base_model_id == model["id"].split(":")[0]
  350. ):
  351. owned_by = model["owned_by"]
  352. break
  353. models.append(
  354. {
  355. "id": custom_model.id,
  356. "name": custom_model.name,
  357. "object": "model",
  358. "created": custom_model.created_at,
  359. "owned_by": owned_by,
  360. "info": custom_model.model_dump(),
  361. "preset": True,
  362. }
  363. )
  364. app.state.MODELS = {model["id"]: model for model in models}
  365. webui_app.state.MODELS = app.state.MODELS
  366. return models
  367. @app.get("/api/models")
  368. async def get_models(user=Depends(get_verified_user)):
  369. models = await get_all_models()
  370. # Filter out filter pipelines
  371. models = [
  372. model
  373. for model in models
  374. if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
  375. ]
  376. if app.state.config.ENABLE_MODEL_FILTER:
  377. if user.role == "user":
  378. models = list(
  379. filter(
  380. lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
  381. models,
  382. )
  383. )
  384. return {"data": models}
  385. return {"data": models}
  386. @app.post("/api/chat/completed")
  387. async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
  388. data = form_data
  389. model_id = data["model"]
  390. filters = [
  391. model
  392. for model in app.state.MODELS.values()
  393. if "pipeline" in model
  394. and "type" in model["pipeline"]
  395. and model["pipeline"]["type"] == "filter"
  396. and (
  397. model["pipeline"]["pipelines"] == ["*"]
  398. or any(
  399. model_id == target_model_id
  400. for target_model_id in model["pipeline"]["pipelines"]
  401. )
  402. )
  403. ]
  404. sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
  405. print(model_id)
  406. if model_id in app.state.MODELS:
  407. model = app.state.MODELS[model_id]
  408. if "pipeline" in model:
  409. sorted_filters = [model] + sorted_filters
  410. for filter in sorted_filters:
  411. r = None
  412. try:
  413. urlIdx = filter["urlIdx"]
  414. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  415. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  416. if key != "":
  417. headers = {"Authorization": f"Bearer {key}"}
  418. r = requests.post(
  419. f"{url}/{filter['id']}/filter/outlet",
  420. headers=headers,
  421. json={
  422. "user": {"id": user.id, "name": user.name, "role": user.role},
  423. "body": data,
  424. },
  425. )
  426. r.raise_for_status()
  427. data = r.json()
  428. except Exception as e:
  429. # Handle connection error here
  430. print(f"Connection error: {e}")
  431. if r is not None:
  432. try:
  433. res = r.json()
  434. if "detail" in res:
  435. return JSONResponse(
  436. status_code=r.status_code,
  437. content=res,
  438. )
  439. except:
  440. pass
  441. else:
  442. pass
  443. return data
  444. @app.get("/api/pipelines/list")
  445. async def get_pipelines_list(user=Depends(get_admin_user)):
  446. responses = await get_openai_models(raw=True)
  447. print(responses)
  448. urlIdxs = [
  449. idx
  450. for idx, response in enumerate(responses)
  451. if response != None and "pipelines" in response
  452. ]
  453. return {
  454. "data": [
  455. {
  456. "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
  457. "idx": urlIdx,
  458. }
  459. for urlIdx in urlIdxs
  460. ]
  461. }
  462. @app.post("/api/pipelines/upload")
  463. async def upload_pipeline(
  464. urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user)
  465. ):
  466. print("upload_pipeline", urlIdx, file.filename)
  467. # Check if the uploaded file is a python file
  468. if not file.filename.endswith(".py"):
  469. raise HTTPException(
  470. status_code=status.HTTP_400_BAD_REQUEST,
  471. detail="Only Python (.py) files are allowed.",
  472. )
  473. upload_folder = f"{CACHE_DIR}/pipelines"
  474. os.makedirs(upload_folder, exist_ok=True)
  475. file_path = os.path.join(upload_folder, file.filename)
  476. try:
  477. # Save the uploaded file
  478. with open(file_path, "wb") as buffer:
  479. shutil.copyfileobj(file.file, buffer)
  480. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  481. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  482. headers = {"Authorization": f"Bearer {key}"}
  483. with open(file_path, "rb") as f:
  484. files = {"file": f}
  485. r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files)
  486. r.raise_for_status()
  487. data = r.json()
  488. return {**data}
  489. except Exception as e:
  490. # Handle connection error here
  491. print(f"Connection error: {e}")
  492. detail = "Pipeline not found"
  493. if r is not None:
  494. try:
  495. res = r.json()
  496. if "detail" in res:
  497. detail = res["detail"]
  498. except:
  499. pass
  500. raise HTTPException(
  501. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  502. detail=detail,
  503. )
  504. finally:
  505. # Ensure the file is deleted after the upload is completed or on failure
  506. if os.path.exists(file_path):
  507. os.remove(file_path)
class AddPipelineForm(BaseModel):
    """Request body for installing a pipeline from a URL."""

    # Source URL of the pipeline to install.
    url: str
    # Index into OPENAI_API_BASE_URLS selecting the pipelines server.
    urlIdx: int
  511. @app.post("/api/pipelines/add")
  512. async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
  513. r = None
  514. try:
  515. urlIdx = form_data.urlIdx
  516. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  517. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  518. headers = {"Authorization": f"Bearer {key}"}
  519. r = requests.post(
  520. f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
  521. )
  522. r.raise_for_status()
  523. data = r.json()
  524. return {**data}
  525. except Exception as e:
  526. # Handle connection error here
  527. print(f"Connection error: {e}")
  528. detail = "Pipeline not found"
  529. if r is not None:
  530. try:
  531. res = r.json()
  532. if "detail" in res:
  533. detail = res["detail"]
  534. except:
  535. pass
  536. raise HTTPException(
  537. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  538. detail=detail,
  539. )
class DeletePipelineForm(BaseModel):
    """Request body for removing an installed pipeline."""

    # Identifier of the pipeline to delete.
    id: str
    # Index into OPENAI_API_BASE_URLS selecting the pipelines server.
    urlIdx: int
  543. @app.delete("/api/pipelines/delete")
  544. async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
  545. r = None
  546. try:
  547. urlIdx = form_data.urlIdx
  548. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  549. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  550. headers = {"Authorization": f"Bearer {key}"}
  551. r = requests.delete(
  552. f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
  553. )
  554. r.raise_for_status()
  555. data = r.json()
  556. return {**data}
  557. except Exception as e:
  558. # Handle connection error here
  559. print(f"Connection error: {e}")
  560. detail = "Pipeline not found"
  561. if r is not None:
  562. try:
  563. res = r.json()
  564. if "detail" in res:
  565. detail = res["detail"]
  566. except:
  567. pass
  568. raise HTTPException(
  569. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  570. detail=detail,
  571. )
  572. @app.get("/api/pipelines")
  573. async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
  574. r = None
  575. try:
  576. urlIdx
  577. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  578. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  579. headers = {"Authorization": f"Bearer {key}"}
  580. r = requests.get(f"{url}/pipelines", headers=headers)
  581. r.raise_for_status()
  582. data = r.json()
  583. return {**data}
  584. except Exception as e:
  585. # Handle connection error here
  586. print(f"Connection error: {e}")
  587. detail = "Pipeline not found"
  588. if r is not None:
  589. try:
  590. res = r.json()
  591. if "detail" in res:
  592. detail = res["detail"]
  593. except:
  594. pass
  595. raise HTTPException(
  596. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  597. detail=detail,
  598. )
  599. @app.get("/api/pipelines/{pipeline_id}/valves")
  600. async def get_pipeline_valves(
  601. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  602. ):
  603. models = await get_all_models()
  604. r = None
  605. try:
  606. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  607. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  608. headers = {"Authorization": f"Bearer {key}"}
  609. r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)
  610. r.raise_for_status()
  611. data = r.json()
  612. return {**data}
  613. except Exception as e:
  614. # Handle connection error here
  615. print(f"Connection error: {e}")
  616. detail = "Pipeline not found"
  617. if r is not None:
  618. try:
  619. res = r.json()
  620. if "detail" in res:
  621. detail = res["detail"]
  622. except:
  623. pass
  624. raise HTTPException(
  625. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  626. detail=detail,
  627. )
  628. @app.get("/api/pipelines/{pipeline_id}/valves/spec")
  629. async def get_pipeline_valves_spec(
  630. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  631. ):
  632. models = await get_all_models()
  633. r = None
  634. try:
  635. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  636. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  637. headers = {"Authorization": f"Bearer {key}"}
  638. r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)
  639. r.raise_for_status()
  640. data = r.json()
  641. return {**data}
  642. except Exception as e:
  643. # Handle connection error here
  644. print(f"Connection error: {e}")
  645. detail = "Pipeline not found"
  646. if r is not None:
  647. try:
  648. res = r.json()
  649. if "detail" in res:
  650. detail = res["detail"]
  651. except:
  652. pass
  653. raise HTTPException(
  654. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  655. detail=detail,
  656. )
  657. @app.post("/api/pipelines/{pipeline_id}/valves/update")
  658. async def update_pipeline_valves(
  659. urlIdx: Optional[int],
  660. pipeline_id: str,
  661. form_data: dict,
  662. user=Depends(get_admin_user),
  663. ):
  664. models = await get_all_models()
  665. r = None
  666. try:
  667. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  668. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  669. headers = {"Authorization": f"Bearer {key}"}
  670. r = requests.post(
  671. f"{url}/{pipeline_id}/valves/update",
  672. headers=headers,
  673. json={**form_data},
  674. )
  675. r.raise_for_status()
  676. data = r.json()
  677. return {**data}
  678. except Exception as e:
  679. # Handle connection error here
  680. print(f"Connection error: {e}")
  681. detail = "Pipeline not found"
  682. if r is not None:
  683. try:
  684. res = r.json()
  685. if "detail" in res:
  686. detail = res["detail"]
  687. except:
  688. pass
  689. raise HTTPException(
  690. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  691. detail=detail,
  692. )
  693. @app.get("/api/config")
  694. async def get_app_config():
  695. # Checking and Handling the Absence of 'ui' in CONFIG_DATA
  696. default_locale = "en-US"
  697. if "ui" in CONFIG_DATA:
  698. default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")
  699. # The Rest of the Function Now Uses the Variables Defined Above
  700. return {
  701. "status": True,
  702. "name": WEBUI_NAME,
  703. "version": VERSION,
  704. "default_locale": default_locale,
  705. "default_models": webui_app.state.config.DEFAULT_MODELS,
  706. "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
  707. "features": {
  708. "auth": WEBUI_AUTH,
  709. "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
  710. "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
  711. "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
  712. "enable_image_generation": images_app.state.config.ENABLED,
  713. "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
  714. "enable_admin_export": ENABLE_ADMIN_EXPORT,
  715. },
  716. "audio": {
  717. "tts": {
  718. "engine": audio_app.state.config.TTS_ENGINE,
  719. "voice": audio_app.state.config.TTS_VOICE,
  720. },
  721. "stt": {
  722. "engine": audio_app.state.config.STT_ENGINE,
  723. },
  724. },
  725. }
  726. @app.get("/api/config/model/filter")
  727. async def get_model_filter_config(user=Depends(get_admin_user)):
  728. return {
  729. "enabled": app.state.config.ENABLE_MODEL_FILTER,
  730. "models": app.state.config.MODEL_FILTER_LIST,
  731. }
class ModelFilterConfigForm(BaseModel):
    """Request body for updating the admin model filter."""

    # Whether the model whitelist is enforced for non-admin users.
    enabled: bool
    # Model ids that remain visible when filtering is enabled.
    models: List[str]
  735. @app.post("/api/config/model/filter")
  736. async def update_model_filter_config(
  737. form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
  738. ):
  739. app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
  740. app.state.config.MODEL_FILTER_LIST = form_data.models
  741. return {
  742. "enabled": app.state.config.ENABLE_MODEL_FILTER,
  743. "models": app.state.config.MODEL_FILTER_LIST,
  744. }
  745. @app.get("/api/webhook")
  746. async def get_webhook_url(user=Depends(get_admin_user)):
  747. return {
  748. "url": app.state.config.WEBHOOK_URL,
  749. }
class UrlForm(BaseModel):
    """Request body carrying a single URL value."""

    # The webhook URL to store.
    url: str
  752. @app.post("/api/webhook")
  753. async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
  754. app.state.config.WEBHOOK_URL = form_data.url
  755. webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
  756. return {"url": app.state.config.WEBHOOK_URL}
  757. @app.get("/api/version")
  758. async def get_app_config():
  759. return {
  760. "version": VERSION,
  761. }
  762. @app.get("/api/changelog")
  763. async def get_app_changelog():
  764. return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}
  765. @app.get("/api/version/updates")
  766. async def get_app_latest_release_version():
  767. try:
  768. async with aiohttp.ClientSession(trust_env=True) as session:
  769. async with session.get(
  770. "https://api.github.com/repos/open-webui/open-webui/releases/latest"
  771. ) as response:
  772. response.raise_for_status()
  773. data = await response.json()
  774. latest_version = data["tag_name"]
  775. return {"current": VERSION, "latest": latest_version[1:]}
  776. except aiohttp.ClientError as e:
  777. raise HTTPException(
  778. status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
  779. detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
  780. )
  781. @app.get("/manifest.json")
  782. async def get_manifest_json():
  783. return {
  784. "name": WEBUI_NAME,
  785. "short_name": WEBUI_NAME,
  786. "start_url": "/",
  787. "display": "standalone",
  788. "background_color": "#343541",
  789. "theme_color": "#343541",
  790. "orientation": "portrait-primary",
  791. "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
  792. }
@app.get("/opensearch.xml")
async def get_opensearch_xml():
    # Serve an OpenSearch description document so browsers can register
    # this Web UI as a search provider.
    xml_content = rf"""
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>{WEBUI_NAME}</ShortName>
<Description>Search {WEBUI_NAME}</Description>
<InputEncoding>UTF-8</InputEncoding>
<Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
<Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
<moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
</OpenSearchDescription>
"""
    return Response(content=xml_content, media_type="application/xml")
  806. @app.get("/health")
  807. async def healthcheck():
  808. return {"status": True}
# Static assets and the on-disk cache are served directly.
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

if os.path.exists(FRONTEND_BUILD_DIR):
    # Register .js explicitly: some platforms report a wrong MIME type,
    # which breaks ES-module loading in browsers.
    mimetypes.add_type("text/javascript", ".js")
    # Mount the SPA last so API routes above take precedence.
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )