main.py 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979
  1. from contextlib import asynccontextmanager
  2. from bs4 import BeautifulSoup
  3. import json
  4. import markdown
  5. import time
  6. import os
  7. import sys
  8. import logging
  9. import aiohttp
  10. import requests
  11. import mimetypes
  12. from fastapi import FastAPI, Request, Depends, status
  13. from fastapi.staticfiles import StaticFiles
  14. from fastapi.responses import JSONResponse
  15. from fastapi import HTTPException
  16. from fastapi.middleware.wsgi import WSGIMiddleware
  17. from fastapi.middleware.cors import CORSMiddleware
  18. from starlette.exceptions import HTTPException as StarletteHTTPException
  19. from starlette.middleware.base import BaseHTTPMiddleware
  20. from starlette.responses import StreamingResponse, Response
  21. from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
  22. from apps.openai.main import app as openai_app, get_all_models as get_openai_models
  23. from apps.audio.main import app as audio_app
  24. from apps.images.main import app as images_app
  25. from apps.rag.main import app as rag_app
  26. from apps.webui.main import app as webui_app
  27. import asyncio
  28. from pydantic import BaseModel
  29. from typing import List, Optional
  30. from apps.webui.models.models import Models, ModelModel
  31. from utils.utils import (
  32. get_admin_user,
  33. get_verified_user,
  34. get_current_user,
  35. get_http_authorization_cred,
  36. )
  37. from apps.rag.utils import rag_messages
  38. from config import (
  39. CONFIG_DATA,
  40. WEBUI_NAME,
  41. WEBUI_URL,
  42. WEBUI_AUTH,
  43. ENV,
  44. VERSION,
  45. CHANGELOG,
  46. FRONTEND_BUILD_DIR,
  47. CACHE_DIR,
  48. STATIC_DIR,
  49. ENABLE_OPENAI_API,
  50. ENABLE_OLLAMA_API,
  51. ENABLE_MODEL_FILTER,
  52. MODEL_FILTER_LIST,
  53. GLOBAL_LOG_LEVEL,
  54. SRC_LOG_LEVELS,
  55. WEBHOOK_URL,
  56. ENABLE_ADMIN_EXPORT,
  57. AppConfig,
  58. WEBUI_BUILD_HASH,
  59. )
  60. from constants import ERROR_MESSAGES
# Root logging goes to stdout at the globally configured level; this
# module's logger is then pinned to the "MAIN" component log level.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
  64. class SPAStaticFiles(StaticFiles):
  65. async def get_response(self, path: str, scope):
  66. try:
  67. return await super().get_response(path, scope)
  68. except (HTTPException, StarletteHTTPException) as ex:
  69. if ex.status_code == 404:
  70. return await super().get_response("index.html", scope)
  71. else:
  72. raise ex
# Startup banner: version plus commit hash (hash only for real builds,
# not the local "dev-build" placeholder).
print(
    rf"""
 ___ __ __ _ _ _ ___
/ _ \ _ __ ___ _ __ \ \ / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \ \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) | __/ | | | \ V V / __/ |_) | |_| || |
\___/| .__/ \___|_| |_| \_/\_/ \___|_.__/ \___/|___|
|_|
v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
@asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work yet; defined so FastAPI uses the lifespan
    # protocol instead of the deprecated on_event hooks.
    yield
# API docs are only exposed in the dev environment.
app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)

# Mutable runtime configuration, seeded from the static config module.
app.state.config = AppConfig()

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

# Registry of all known models, keyed by model id; populated lazily by
# get_all_models() on the first request.
app.state.MODELS = {}

# CORS: allow every origin.
origins = ["*"]
  100. # Custom middleware to add security headers
  101. # class SecurityHeadersMiddleware(BaseHTTPMiddleware):
  102. # async def dispatch(self, request: Request, call_next):
  103. # response: Response = await call_next(request)
  104. # response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
  105. # response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
  106. # return response
  107. # app.add_middleware(SecurityHeadersMiddleware)
  108. class RAGMiddleware(BaseHTTPMiddleware):
  109. async def dispatch(self, request: Request, call_next):
  110. return_citations = False
  111. if request.method == "POST" and (
  112. "/ollama/api/chat" in request.url.path
  113. or "/chat/completions" in request.url.path
  114. ):
  115. log.debug(f"request.url.path: {request.url.path}")
  116. # Read the original request body
  117. body = await request.body()
  118. # Decode body to string
  119. body_str = body.decode("utf-8")
  120. # Parse string to JSON
  121. data = json.loads(body_str) if body_str else {}
  122. return_citations = data.get("citations", False)
  123. if "citations" in data:
  124. del data["citations"]
  125. # Example: Add a new key-value pair or modify existing ones
  126. # data["modified"] = True # Example modification
  127. if "docs" in data:
  128. data = {**data}
  129. data["messages"], citations = rag_messages(
  130. docs=data["docs"],
  131. messages=data["messages"],
  132. template=rag_app.state.config.RAG_TEMPLATE,
  133. embedding_function=rag_app.state.EMBEDDING_FUNCTION,
  134. k=rag_app.state.config.TOP_K,
  135. reranking_function=rag_app.state.sentence_transformer_rf,
  136. r=rag_app.state.config.RELEVANCE_THRESHOLD,
  137. hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
  138. )
  139. del data["docs"]
  140. log.debug(
  141. f"data['messages']: {data['messages']}, citations: {citations}"
  142. )
  143. modified_body_bytes = json.dumps(data).encode("utf-8")
  144. # Replace the request body with the modified one
  145. request._body = modified_body_bytes
  146. # Set custom header to ensure content-length matches new body length
  147. request.headers.__dict__["_list"] = [
  148. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  149. *[
  150. (k, v)
  151. for k, v in request.headers.raw
  152. if k.lower() != b"content-length"
  153. ],
  154. ]
  155. response = await call_next(request)
  156. if return_citations:
  157. # Inject the citations into the response
  158. if isinstance(response, StreamingResponse):
  159. # If it's a streaming response, inject it as SSE event or NDJSON line
  160. content_type = response.headers.get("Content-Type")
  161. if "text/event-stream" in content_type:
  162. return StreamingResponse(
  163. self.openai_stream_wrapper(response.body_iterator, citations),
  164. )
  165. if "application/x-ndjson" in content_type:
  166. return StreamingResponse(
  167. self.ollama_stream_wrapper(response.body_iterator, citations),
  168. )
  169. return response
  170. async def _receive(self, body: bytes):
  171. return {"type": "http.request", "body": body, "more_body": False}
  172. async def openai_stream_wrapper(self, original_generator, citations):
  173. yield f"data: {json.dumps({'citations': citations})}\n\n"
  174. async for data in original_generator:
  175. yield data
  176. async def ollama_stream_wrapper(self, original_generator, citations):
  177. yield f"{json.dumps({'citations': citations})}\n"
  178. async for data in original_generator:
  179. yield data
  180. app.add_middleware(RAGMiddleware)
  181. class PipelineMiddleware(BaseHTTPMiddleware):
  182. async def dispatch(self, request: Request, call_next):
  183. if request.method == "POST" and (
  184. "/ollama/api/chat" in request.url.path
  185. or "/chat/completions" in request.url.path
  186. ):
  187. log.debug(f"request.url.path: {request.url.path}")
  188. # Read the original request body
  189. body = await request.body()
  190. # Decode body to string
  191. body_str = body.decode("utf-8")
  192. # Parse string to JSON
  193. data = json.loads(body_str) if body_str else {}
  194. model_id = data["model"]
  195. filters = [
  196. model
  197. for model in app.state.MODELS.values()
  198. if "pipeline" in model
  199. and "type" in model["pipeline"]
  200. and model["pipeline"]["type"] == "filter"
  201. and (
  202. model["pipeline"]["pipelines"] == ["*"]
  203. or any(
  204. model_id == target_model_id
  205. for target_model_id in model["pipeline"]["pipelines"]
  206. )
  207. )
  208. ]
  209. sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
  210. user = None
  211. if len(sorted_filters) > 0:
  212. try:
  213. user = get_current_user(
  214. get_http_authorization_cred(
  215. request.headers.get("Authorization")
  216. )
  217. )
  218. user = {"id": user.id, "name": user.name, "role": user.role}
  219. except:
  220. pass
  221. model = app.state.MODELS[model_id]
  222. if "pipeline" in model:
  223. sorted_filters.append(model)
  224. for filter in sorted_filters:
  225. r = None
  226. try:
  227. urlIdx = filter["urlIdx"]
  228. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  229. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  230. if key != "":
  231. headers = {"Authorization": f"Bearer {key}"}
  232. r = requests.post(
  233. f"{url}/{filter['id']}/filter/inlet",
  234. headers=headers,
  235. json={
  236. "user": user,
  237. "body": data,
  238. },
  239. )
  240. r.raise_for_status()
  241. data = r.json()
  242. except Exception as e:
  243. # Handle connection error here
  244. print(f"Connection error: {e}")
  245. if r is not None:
  246. try:
  247. res = r.json()
  248. if "detail" in res:
  249. return JSONResponse(
  250. status_code=r.status_code,
  251. content=res,
  252. )
  253. except:
  254. pass
  255. else:
  256. pass
  257. if "pipeline" not in app.state.MODELS[model_id]:
  258. if "chat_id" in data:
  259. del data["chat_id"]
  260. if "title" in data:
  261. del data["title"]
  262. modified_body_bytes = json.dumps(data).encode("utf-8")
  263. # Replace the request body with the modified one
  264. request._body = modified_body_bytes
  265. # Set custom header to ensure content-length matches new body length
  266. request.headers.__dict__["_list"] = [
  267. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  268. *[
  269. (k, v)
  270. for k, v in request.headers.raw
  271. if k.lower() != b"content-length"
  272. ],
  273. ]
  274. response = await call_next(request)
  275. return response
  276. async def _receive(self, body: bytes):
  277. return {"type": "http.request", "body": body, "more_body": False}
  278. app.add_middleware(PipelineMiddleware)
# Permissive CORS: any origin, method, and header, credentials allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
  286. @app.middleware("http")
  287. async def check_url(request: Request, call_next):
  288. if len(app.state.MODELS) == 0:
  289. await get_all_models()
  290. else:
  291. pass
  292. start_time = int(time.time())
  293. response = await call_next(request)
  294. process_time = int(time.time()) - start_time
  295. response.headers["X-Process-Time"] = str(process_time)
  296. return response
  297. @app.middleware("http")
  298. async def update_embedding_function(request: Request, call_next):
  299. response = await call_next(request)
  300. if "/embedding/update" in request.url.path:
  301. webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
  302. return response
# Mount backend sub-applications under their API prefixes.
app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

# Initial sync; kept up to date by the update_embedding_function middleware.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
  310. async def get_all_models():
  311. openai_models = []
  312. ollama_models = []
  313. if app.state.config.ENABLE_OPENAI_API:
  314. openai_models = await get_openai_models()
  315. openai_models = openai_models["data"]
  316. if app.state.config.ENABLE_OLLAMA_API:
  317. ollama_models = await get_ollama_models()
  318. ollama_models = [
  319. {
  320. "id": model["model"],
  321. "name": model["name"],
  322. "object": "model",
  323. "created": int(time.time()),
  324. "owned_by": "ollama",
  325. "ollama": model,
  326. }
  327. for model in ollama_models["models"]
  328. ]
  329. models = openai_models + ollama_models
  330. custom_models = Models.get_all_models()
  331. for custom_model in custom_models:
  332. if custom_model.base_model_id == None:
  333. for model in models:
  334. if (
  335. custom_model.id == model["id"]
  336. or custom_model.id == model["id"].split(":")[0]
  337. ):
  338. model["name"] = custom_model.name
  339. model["info"] = custom_model.model_dump()
  340. else:
  341. owned_by = "openai"
  342. for model in models:
  343. if (
  344. custom_model.base_model_id == model["id"]
  345. or custom_model.base_model_id == model["id"].split(":")[0]
  346. ):
  347. owned_by = model["owned_by"]
  348. break
  349. models.append(
  350. {
  351. "id": custom_model.id,
  352. "name": custom_model.name,
  353. "object": "model",
  354. "created": custom_model.created_at,
  355. "owned_by": owned_by,
  356. "info": custom_model.model_dump(),
  357. "preset": True,
  358. }
  359. )
  360. app.state.MODELS = {model["id"]: model for model in models}
  361. webui_app.state.MODELS = app.state.MODELS
  362. return models
  363. @app.get("/api/models")
  364. async def get_models(user=Depends(get_verified_user)):
  365. models = await get_all_models()
  366. # Filter out filter pipelines
  367. models = [
  368. model
  369. for model in models
  370. if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
  371. ]
  372. if app.state.config.ENABLE_MODEL_FILTER:
  373. if user.role == "user":
  374. models = list(
  375. filter(
  376. lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
  377. models,
  378. )
  379. )
  380. return {"data": models}
  381. return {"data": models}
  382. @app.post("/api/chat/completed")
  383. async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
  384. data = form_data
  385. model_id = data["model"]
  386. filters = [
  387. model
  388. for model in app.state.MODELS.values()
  389. if "pipeline" in model
  390. and "type" in model["pipeline"]
  391. and model["pipeline"]["type"] == "filter"
  392. and (
  393. model["pipeline"]["pipelines"] == ["*"]
  394. or any(
  395. model_id == target_model_id
  396. for target_model_id in model["pipeline"]["pipelines"]
  397. )
  398. )
  399. ]
  400. sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
  401. print(model_id)
  402. if model_id in app.state.MODELS:
  403. model = app.state.MODELS[model_id]
  404. if "pipeline" in model:
  405. sorted_filters = [model] + sorted_filters
  406. for filter in sorted_filters:
  407. r = None
  408. try:
  409. urlIdx = filter["urlIdx"]
  410. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  411. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  412. if key != "":
  413. headers = {"Authorization": f"Bearer {key}"}
  414. r = requests.post(
  415. f"{url}/{filter['id']}/filter/outlet",
  416. headers=headers,
  417. json={
  418. "user": {"id": user.id, "name": user.name, "role": user.role},
  419. "body": data,
  420. },
  421. )
  422. r.raise_for_status()
  423. data = r.json()
  424. except Exception as e:
  425. # Handle connection error here
  426. print(f"Connection error: {e}")
  427. if r is not None:
  428. try:
  429. res = r.json()
  430. if "detail" in res:
  431. return JSONResponse(
  432. status_code=r.status_code,
  433. content=res,
  434. )
  435. except:
  436. pass
  437. else:
  438. pass
  439. return data
  440. @app.get("/api/pipelines/list")
  441. async def get_pipelines_list(user=Depends(get_admin_user)):
  442. responses = await get_openai_models(raw=True)
  443. print(responses)
  444. urlIdxs = [
  445. idx
  446. for idx, response in enumerate(responses)
  447. if response != None and "pipelines" in response
  448. ]
  449. return {
  450. "data": [
  451. {
  452. "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
  453. "idx": urlIdx,
  454. }
  455. for urlIdx in urlIdxs
  456. ]
  457. }
class AddPipelineForm(BaseModel):
    # Request body for /api/pipelines/add.
    url: str  # source URL of the pipeline to install
    urlIdx: int  # index of the OpenAI-compatible backend that hosts it
  461. @app.post("/api/pipelines/add")
  462. async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
  463. r = None
  464. try:
  465. urlIdx = form_data.urlIdx
  466. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  467. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  468. headers = {"Authorization": f"Bearer {key}"}
  469. r = requests.post(
  470. f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
  471. )
  472. r.raise_for_status()
  473. data = r.json()
  474. return {**data}
  475. except Exception as e:
  476. # Handle connection error here
  477. print(f"Connection error: {e}")
  478. detail = "Pipeline not found"
  479. if r is not None:
  480. try:
  481. res = r.json()
  482. if "detail" in res:
  483. detail = res["detail"]
  484. except:
  485. pass
  486. raise HTTPException(
  487. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  488. detail=detail,
  489. )
class DeletePipelineForm(BaseModel):
    # Request body for /api/pipelines/delete.
    id: str  # id of the pipeline to remove
    urlIdx: int  # index of the backend the pipeline lives on
  493. @app.delete("/api/pipelines/delete")
  494. async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
  495. r = None
  496. try:
  497. urlIdx = form_data.urlIdx
  498. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  499. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  500. headers = {"Authorization": f"Bearer {key}"}
  501. r = requests.delete(
  502. f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
  503. )
  504. r.raise_for_status()
  505. data = r.json()
  506. return {**data}
  507. except Exception as e:
  508. # Handle connection error here
  509. print(f"Connection error: {e}")
  510. detail = "Pipeline not found"
  511. if r is not None:
  512. try:
  513. res = r.json()
  514. if "detail" in res:
  515. detail = res["detail"]
  516. except:
  517. pass
  518. raise HTTPException(
  519. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  520. detail=detail,
  521. )
  522. @app.get("/api/pipelines")
  523. async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
  524. r = None
  525. try:
  526. urlIdx
  527. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  528. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  529. headers = {"Authorization": f"Bearer {key}"}
  530. r = requests.get(f"{url}/pipelines", headers=headers)
  531. r.raise_for_status()
  532. data = r.json()
  533. return {**data}
  534. except Exception as e:
  535. # Handle connection error here
  536. print(f"Connection error: {e}")
  537. detail = "Pipeline not found"
  538. if r is not None:
  539. try:
  540. res = r.json()
  541. if "detail" in res:
  542. detail = res["detail"]
  543. except:
  544. pass
  545. raise HTTPException(
  546. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  547. detail=detail,
  548. )
  549. @app.get("/api/pipelines/{pipeline_id}/valves")
  550. async def get_pipeline_valves(
  551. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  552. ):
  553. models = await get_all_models()
  554. r = None
  555. try:
  556. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  557. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  558. headers = {"Authorization": f"Bearer {key}"}
  559. r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)
  560. r.raise_for_status()
  561. data = r.json()
  562. return {**data}
  563. except Exception as e:
  564. # Handle connection error here
  565. print(f"Connection error: {e}")
  566. detail = "Pipeline not found"
  567. if r is not None:
  568. try:
  569. res = r.json()
  570. if "detail" in res:
  571. detail = res["detail"]
  572. except:
  573. pass
  574. raise HTTPException(
  575. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  576. detail=detail,
  577. )
  578. @app.get("/api/pipelines/{pipeline_id}/valves/spec")
  579. async def get_pipeline_valves_spec(
  580. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  581. ):
  582. models = await get_all_models()
  583. r = None
  584. try:
  585. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  586. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  587. headers = {"Authorization": f"Bearer {key}"}
  588. r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)
  589. r.raise_for_status()
  590. data = r.json()
  591. return {**data}
  592. except Exception as e:
  593. # Handle connection error here
  594. print(f"Connection error: {e}")
  595. detail = "Pipeline not found"
  596. if r is not None:
  597. try:
  598. res = r.json()
  599. if "detail" in res:
  600. detail = res["detail"]
  601. except:
  602. pass
  603. raise HTTPException(
  604. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  605. detail=detail,
  606. )
  607. @app.post("/api/pipelines/{pipeline_id}/valves/update")
  608. async def update_pipeline_valves(
  609. urlIdx: Optional[int],
  610. pipeline_id: str,
  611. form_data: dict,
  612. user=Depends(get_admin_user),
  613. ):
  614. models = await get_all_models()
  615. r = None
  616. try:
  617. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  618. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  619. headers = {"Authorization": f"Bearer {key}"}
  620. r = requests.post(
  621. f"{url}/{pipeline_id}/valves/update",
  622. headers=headers,
  623. json={**form_data},
  624. )
  625. r.raise_for_status()
  626. data = r.json()
  627. return {**data}
  628. except Exception as e:
  629. # Handle connection error here
  630. print(f"Connection error: {e}")
  631. detail = "Pipeline not found"
  632. if r is not None:
  633. try:
  634. res = r.json()
  635. if "detail" in res:
  636. detail = res["detail"]
  637. except:
  638. pass
  639. raise HTTPException(
  640. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  641. detail=detail,
  642. )
  643. @app.get("/api/config")
  644. async def get_app_config():
  645. # Checking and Handling the Absence of 'ui' in CONFIG_DATA
  646. default_locale = "en-US"
  647. if "ui" in CONFIG_DATA:
  648. default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")
  649. # The Rest of the Function Now Uses the Variables Defined Above
  650. return {
  651. "status": True,
  652. "name": WEBUI_NAME,
  653. "version": VERSION,
  654. "default_locale": default_locale,
  655. "default_models": webui_app.state.config.DEFAULT_MODELS,
  656. "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
  657. "features": {
  658. "auth": WEBUI_AUTH,
  659. "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
  660. "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
  661. "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
  662. "enable_image_generation": images_app.state.config.ENABLED,
  663. "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
  664. "enable_admin_export": ENABLE_ADMIN_EXPORT,
  665. },
  666. }
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    # Current model-whitelist state (admin only).
    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }
class ModelFilterConfigForm(BaseModel):
    # Request body for /api/config/model/filter.
    enabled: bool  # whether the whitelist is enforced for plain users
    models: List[str]  # whitelisted model ids
@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    # Replace the model-whitelist state and echo the new values back.
    app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
    app.state.config.MODEL_FILTER_LIST = form_data.models

    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    # Currently configured notification webhook URL (admin only).
    return {
        "url": app.state.config.WEBHOOK_URL,
    }
class UrlForm(BaseModel):
    # Request body carrying a single URL (used by /api/webhook).
    url: str
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    # Update the webhook URL and mirror it onto the webui sub-app's state.
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL

    return {
        "url": app.state.config.WEBHOOK_URL,
    }
@app.get("/api/community_sharing", response_model=bool)
async def get_community_sharing_status(request: Request, user=Depends(get_admin_user)):
    # Whether community sharing is currently enabled (admin only).
    return webui_app.state.config.ENABLE_COMMUNITY_SHARING
@app.get("/api/community_sharing/toggle", response_model=bool)
async def toggle_community_sharing(request: Request, user=Depends(get_admin_user)):
    # Flip the community-sharing flag and return the new value.
    webui_app.state.config.ENABLE_COMMUNITY_SHARING = (
        not webui_app.state.config.ENABLE_COMMUNITY_SHARING
    )
    return webui_app.state.config.ENABLE_COMMUNITY_SHARING
  709. @app.get("/api/version")
  710. async def get_app_config():
  711. return {
  712. "version": VERSION,
  713. }
  714. @app.get("/api/changelog")
  715. async def get_app_changelog():
  716. return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}
@app.get("/api/version/updates")
async def get_app_latest_release_version():
    # Compare the running version against the latest GitHub release tag.
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                # Tags look like "vX.Y.Z"; strip the leading "v".
                latest_version = data["tag_name"]
                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError as e:
        # NOTE(review): every client error (not only 429) is reported as a
        # rate limit — confirm this wording is intentional.
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )
  733. @app.get("/manifest.json")
  734. async def get_manifest_json():
  735. return {
  736. "name": WEBUI_NAME,
  737. "short_name": WEBUI_NAME,
  738. "start_url": "/",
  739. "display": "standalone",
  740. "background_color": "#343541",
  741. "theme_color": "#343541",
  742. "orientation": "portrait-primary",
  743. "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
  744. }
@app.get("/opensearch.xml")
async def get_opensearch_xml():
    # OpenSearch description document so browsers can register this UI
    # as a search provider.
    xml_content = rf"""
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>{WEBUI_NAME}</ShortName>
<Description>Search {WEBUI_NAME}</Description>
<InputEncoding>UTF-8</InputEncoding>
<Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
<Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
<moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
</OpenSearchDescription>
"""
    return Response(content=xml_content, media_type="application/xml")
@app.get("/health")
async def healthcheck():
    # Liveness probe: always returns {"status": True} when the app is up.
    return {"status": True}
# Static assets and the cache directory are always served.
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

if os.path.exists(FRONTEND_BUILD_DIR):
    # Ensure .js files are served with a JavaScript MIME type.
    mimetypes.add_type("text/javascript", ".js")
    # Mount the built frontend at the root with SPA fallback to index.html.
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )