# main.py — open-webui backend entrypoint.
# (Scrape residue removed: original lines held the file size and a
# concatenated line-number gutter from the page the code was copied from.)
  1. from contextlib import asynccontextmanager
  2. from bs4 import BeautifulSoup
  3. import json
  4. import markdown
  5. import time
  6. import os
  7. import sys
  8. import logging
  9. import aiohttp
  10. import requests
  11. import mimetypes
  12. from fastapi import FastAPI, Request, Depends, status
  13. from fastapi.staticfiles import StaticFiles
  14. from fastapi.responses import JSONResponse
  15. from fastapi import HTTPException
  16. from fastapi.middleware.wsgi import WSGIMiddleware
  17. from fastapi.middleware.cors import CORSMiddleware
  18. from starlette.exceptions import HTTPException as StarletteHTTPException
  19. from starlette.middleware.base import BaseHTTPMiddleware
  20. from starlette.responses import StreamingResponse, Response
  21. from apps.socket.main import app as socket_app
  22. from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
  23. from apps.openai.main import app as openai_app, get_all_models as get_openai_models
  24. from apps.audio.main import app as audio_app
  25. from apps.images.main import app as images_app
  26. from apps.rag.main import app as rag_app
  27. from apps.webui.main import app as webui_app
  28. import asyncio
  29. from pydantic import BaseModel
  30. from typing import List, Optional
  31. from apps.webui.models.models import Models, ModelModel
  32. from utils.utils import (
  33. get_admin_user,
  34. get_verified_user,
  35. get_current_user,
  36. get_http_authorization_cred,
  37. )
  38. from apps.rag.utils import rag_messages
  39. from config import (
  40. CONFIG_DATA,
  41. WEBUI_NAME,
  42. WEBUI_URL,
  43. WEBUI_AUTH,
  44. ENV,
  45. VERSION,
  46. CHANGELOG,
  47. FRONTEND_BUILD_DIR,
  48. CACHE_DIR,
  49. STATIC_DIR,
  50. ENABLE_OPENAI_API,
  51. ENABLE_OLLAMA_API,
  52. ENABLE_MODEL_FILTER,
  53. MODEL_FILTER_LIST,
  54. GLOBAL_LOG_LEVEL,
  55. SRC_LOG_LEVELS,
  56. WEBHOOK_URL,
  57. ENABLE_ADMIN_EXPORT,
  58. AppConfig,
  59. WEBUI_BUILD_HASH,
  60. )
  61. from constants import ERROR_MESSAGES
# Route all log output to stdout so container platforms capture it.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
# The "MAIN" component may log at a different level than the global default.
log.setLevel(SRC_LOG_LEVELS["MAIN"])
  65. class SPAStaticFiles(StaticFiles):
  66. async def get_response(self, path: str, scope):
  67. try:
  68. return await super().get_response(path, scope)
  69. except (HTTPException, StarletteHTTPException) as ex:
  70. if ex.status_code == 404:
  71. return await super().get_response("index.html", scope)
  72. else:
  73. raise ex
# Startup banner printed once at import time (version, build hash, repo URL).
print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|

v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
@asyncontextmanager if False else asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work yet; placeholder so hooks can be added later.
    yield


app = FastAPI(
    # Expose interactive API docs only in development builds.
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)

app.state.config = AppConfig()
app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

# id -> model dict; populated lazily on the first request (see check_url).
app.state.MODELS = {}

# CORS: allow requests from any origin.
origins = ["*"]
  101. # Custom middleware to add security headers
  102. # class SecurityHeadersMiddleware(BaseHTTPMiddleware):
  103. # async def dispatch(self, request: Request, call_next):
  104. # response: Response = await call_next(request)
  105. # response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
  106. # response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
  107. # return response
  108. # app.add_middleware(SecurityHeadersMiddleware)
  109. class RAGMiddleware(BaseHTTPMiddleware):
  110. async def dispatch(self, request: Request, call_next):
  111. return_citations = False
  112. if request.method == "POST" and (
  113. "/ollama/api/chat" in request.url.path
  114. or "/chat/completions" in request.url.path
  115. ):
  116. log.debug(f"request.url.path: {request.url.path}")
  117. # Read the original request body
  118. body = await request.body()
  119. # Decode body to string
  120. body_str = body.decode("utf-8")
  121. # Parse string to JSON
  122. data = json.loads(body_str) if body_str else {}
  123. return_citations = data.get("citations", False)
  124. if "citations" in data:
  125. del data["citations"]
  126. # Example: Add a new key-value pair or modify existing ones
  127. # data["modified"] = True # Example modification
  128. if "docs" in data:
  129. data = {**data}
  130. data["messages"], citations = rag_messages(
  131. docs=data["docs"],
  132. messages=data["messages"],
  133. template=rag_app.state.config.RAG_TEMPLATE,
  134. embedding_function=rag_app.state.EMBEDDING_FUNCTION,
  135. k=rag_app.state.config.TOP_K,
  136. reranking_function=rag_app.state.sentence_transformer_rf,
  137. r=rag_app.state.config.RELEVANCE_THRESHOLD,
  138. hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
  139. )
  140. del data["docs"]
  141. log.debug(
  142. f"data['messages']: {data['messages']}, citations: {citations}"
  143. )
  144. modified_body_bytes = json.dumps(data).encode("utf-8")
  145. # Replace the request body with the modified one
  146. request._body = modified_body_bytes
  147. # Set custom header to ensure content-length matches new body length
  148. request.headers.__dict__["_list"] = [
  149. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  150. *[
  151. (k, v)
  152. for k, v in request.headers.raw
  153. if k.lower() != b"content-length"
  154. ],
  155. ]
  156. response = await call_next(request)
  157. if return_citations:
  158. # Inject the citations into the response
  159. if isinstance(response, StreamingResponse):
  160. # If it's a streaming response, inject it as SSE event or NDJSON line
  161. content_type = response.headers.get("Content-Type")
  162. if "text/event-stream" in content_type:
  163. return StreamingResponse(
  164. self.openai_stream_wrapper(response.body_iterator, citations),
  165. )
  166. if "application/x-ndjson" in content_type:
  167. return StreamingResponse(
  168. self.ollama_stream_wrapper(response.body_iterator, citations),
  169. )
  170. return response
  171. async def _receive(self, body: bytes):
  172. return {"type": "http.request", "body": body, "more_body": False}
  173. async def openai_stream_wrapper(self, original_generator, citations):
  174. yield f"data: {json.dumps({'citations': citations})}\n\n"
  175. async for data in original_generator:
  176. yield data
  177. async def ollama_stream_wrapper(self, original_generator, citations):
  178. yield f"{json.dumps({'citations': citations})}\n"
  179. async for data in original_generator:
  180. yield data
  181. app.add_middleware(RAGMiddleware)
class PipelineMiddleware(BaseHTTPMiddleware):
    # Runs every matching "filter"-type pipeline's inlet endpoint over a chat
    # request body (in priority order) before it reaches the model backend.
    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            # NOTE(review): raises KeyError if the client omits "model".
            model_id = data["model"]
            # Filters that apply to every model ("*") or target this model id.
            filters = [
                model
                for model in app.state.MODELS.values()
                if "pipeline" in model
                and "type" in model["pipeline"]
                and model["pipeline"]["type"] == "filter"
                and (
                    model["pipeline"]["pipelines"] == ["*"]
                    or any(
                        model_id == target_model_id
                        for target_model_id in model["pipeline"]["pipelines"]
                    )
                )
            ]
            # Lower "priority" runs first.
            sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

            user = None
            if len(sorted_filters) > 0:
                try:
                    user = get_current_user(
                        get_http_authorization_cred(
                            request.headers.get("Authorization")
                        )
                    )
                    user = {"id": user.id, "name": user.name, "role": user.role}
                except:
                    # NOTE(review): bare except silently drops auth failures;
                    # filters then run with user=None.
                    pass

            # If the target model is itself a pipeline, run its inlet last.
            model = app.state.MODELS[model_id]
            if "pipeline" in model:
                sorted_filters.append(model)

            for filter in sorted_filters:
                r = None
                try:
                    urlIdx = filter["urlIdx"]

                    url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
                    key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

                    if key != "":
                        headers = {"Authorization": f"Bearer {key}"}
                        r = requests.post(
                            f"{url}/{filter['id']}/filter/inlet",
                            headers=headers,
                            json={
                                "user": user,
                                "body": data,
                            },
                        )

                        r.raise_for_status()
                        # Each filter may rewrite the body seen by the next one.
                        data = r.json()
                except Exception as e:
                    # Handle connection error here
                    print(f"Connection error: {e}")
                    if r is not None:
                        try:
                            res = r.json()
                            # A "detail" payload from the filter aborts the chain.
                            if "detail" in res:
                                return JSONResponse(
                                    status_code=r.status_code,
                                    content=res,
                                )
                        except:
                            # NOTE(review): bare except; error body parsing is
                            # deliberately best-effort.
                            pass
                    else:
                        pass

            # Non-pipeline backends do not understand these UI-only fields.
            if "pipeline" not in app.state.MODELS[model_id]:
                if "chat_id" in data:
                    del data["chat_id"]
                if "title" in data:
                    del data["title"]

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        # Minimal ASGI receive() replacement used when replaying a body.
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(PipelineMiddleware)
# origins == ["*"]: permit any origin, with credentials, methods and headers.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
  287. @app.middleware("http")
  288. async def check_url(request: Request, call_next):
  289. if len(app.state.MODELS) == 0:
  290. await get_all_models()
  291. else:
  292. pass
  293. start_time = int(time.time())
  294. response = await call_next(request)
  295. process_time = int(time.time()) - start_time
  296. response.headers["X-Process-Time"] = str(process_time)
  297. return response
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    # After the RAG embedding-config endpoint runs, re-sync the function
    # shared with the webui sub-app.
    response = await call_next(request)
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response
# Sub-applications, each mounted under its own URL prefix.
app.mount("/ws", socket_app)
app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

# Share the RAG embedding function with the webui sub-app.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
  312. async def get_all_models():
  313. openai_models = []
  314. ollama_models = []
  315. if app.state.config.ENABLE_OPENAI_API:
  316. openai_models = await get_openai_models()
  317. openai_models = openai_models["data"]
  318. if app.state.config.ENABLE_OLLAMA_API:
  319. ollama_models = await get_ollama_models()
  320. ollama_models = [
  321. {
  322. "id": model["model"],
  323. "name": model["name"],
  324. "object": "model",
  325. "created": int(time.time()),
  326. "owned_by": "ollama",
  327. "ollama": model,
  328. }
  329. for model in ollama_models["models"]
  330. ]
  331. models = openai_models + ollama_models
  332. custom_models = Models.get_all_models()
  333. for custom_model in custom_models:
  334. if custom_model.base_model_id == None:
  335. for model in models:
  336. if (
  337. custom_model.id == model["id"]
  338. or custom_model.id == model["id"].split(":")[0]
  339. ):
  340. model["name"] = custom_model.name
  341. model["info"] = custom_model.model_dump()
  342. else:
  343. owned_by = "openai"
  344. for model in models:
  345. if (
  346. custom_model.base_model_id == model["id"]
  347. or custom_model.base_model_id == model["id"].split(":")[0]
  348. ):
  349. owned_by = model["owned_by"]
  350. break
  351. models.append(
  352. {
  353. "id": custom_model.id,
  354. "name": custom_model.name,
  355. "object": "model",
  356. "created": custom_model.created_at,
  357. "owned_by": owned_by,
  358. "info": custom_model.model_dump(),
  359. "preset": True,
  360. }
  361. )
  362. app.state.MODELS = {model["id"]: model for model in models}
  363. webui_app.state.MODELS = app.state.MODELS
  364. return models
  365. @app.get("/api/models")
  366. async def get_models(user=Depends(get_verified_user)):
  367. models = await get_all_models()
  368. # Filter out filter pipelines
  369. models = [
  370. model
  371. for model in models
  372. if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
  373. ]
  374. if app.state.config.ENABLE_MODEL_FILTER:
  375. if user.role == "user":
  376. models = list(
  377. filter(
  378. lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
  379. models,
  380. )
  381. )
  382. return {"data": models}
  383. return {"data": models}
  384. @app.post("/api/chat/completed")
  385. async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
  386. data = form_data
  387. model_id = data["model"]
  388. filters = [
  389. model
  390. for model in app.state.MODELS.values()
  391. if "pipeline" in model
  392. and "type" in model["pipeline"]
  393. and model["pipeline"]["type"] == "filter"
  394. and (
  395. model["pipeline"]["pipelines"] == ["*"]
  396. or any(
  397. model_id == target_model_id
  398. for target_model_id in model["pipeline"]["pipelines"]
  399. )
  400. )
  401. ]
  402. sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
  403. print(model_id)
  404. if model_id in app.state.MODELS:
  405. model = app.state.MODELS[model_id]
  406. if "pipeline" in model:
  407. sorted_filters = [model] + sorted_filters
  408. for filter in sorted_filters:
  409. r = None
  410. try:
  411. urlIdx = filter["urlIdx"]
  412. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  413. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  414. if key != "":
  415. headers = {"Authorization": f"Bearer {key}"}
  416. r = requests.post(
  417. f"{url}/{filter['id']}/filter/outlet",
  418. headers=headers,
  419. json={
  420. "user": {"id": user.id, "name": user.name, "role": user.role},
  421. "body": data,
  422. },
  423. )
  424. r.raise_for_status()
  425. data = r.json()
  426. except Exception as e:
  427. # Handle connection error here
  428. print(f"Connection error: {e}")
  429. if r is not None:
  430. try:
  431. res = r.json()
  432. if "detail" in res:
  433. return JSONResponse(
  434. status_code=r.status_code,
  435. content=res,
  436. )
  437. except:
  438. pass
  439. else:
  440. pass
  441. return data
  442. @app.get("/api/pipelines/list")
  443. async def get_pipelines_list(user=Depends(get_admin_user)):
  444. responses = await get_openai_models(raw=True)
  445. print(responses)
  446. urlIdxs = [
  447. idx
  448. for idx, response in enumerate(responses)
  449. if response != None and "pipelines" in response
  450. ]
  451. return {
  452. "data": [
  453. {
  454. "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
  455. "idx": urlIdx,
  456. }
  457. for urlIdx in urlIdxs
  458. ]
  459. }
class AddPipelineForm(BaseModel):
    # Payload for installing a pipeline from a remote URL onto the
    # pipelines server at index urlIdx.
    url: str
    urlIdx: int
  463. @app.post("/api/pipelines/add")
  464. async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
  465. r = None
  466. try:
  467. urlIdx = form_data.urlIdx
  468. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  469. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  470. headers = {"Authorization": f"Bearer {key}"}
  471. r = requests.post(
  472. f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
  473. )
  474. r.raise_for_status()
  475. data = r.json()
  476. return {**data}
  477. except Exception as e:
  478. # Handle connection error here
  479. print(f"Connection error: {e}")
  480. detail = "Pipeline not found"
  481. if r is not None:
  482. try:
  483. res = r.json()
  484. if "detail" in res:
  485. detail = res["detail"]
  486. except:
  487. pass
  488. raise HTTPException(
  489. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  490. detail=detail,
  491. )
class DeletePipelineForm(BaseModel):
    # Payload for removing pipeline `id` from the pipelines server at
    # index urlIdx.
    id: str
    urlIdx: int
  495. @app.delete("/api/pipelines/delete")
  496. async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
  497. r = None
  498. try:
  499. urlIdx = form_data.urlIdx
  500. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  501. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  502. headers = {"Authorization": f"Bearer {key}"}
  503. r = requests.delete(
  504. f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
  505. )
  506. r.raise_for_status()
  507. data = r.json()
  508. return {**data}
  509. except Exception as e:
  510. # Handle connection error here
  511. print(f"Connection error: {e}")
  512. detail = "Pipeline not found"
  513. if r is not None:
  514. try:
  515. res = r.json()
  516. if "detail" in res:
  517. detail = res["detail"]
  518. except:
  519. pass
  520. raise HTTPException(
  521. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  522. detail=detail,
  523. )
  524. @app.get("/api/pipelines")
  525. async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
  526. r = None
  527. try:
  528. urlIdx
  529. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  530. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  531. headers = {"Authorization": f"Bearer {key}"}
  532. r = requests.get(f"{url}/pipelines", headers=headers)
  533. r.raise_for_status()
  534. data = r.json()
  535. return {**data}
  536. except Exception as e:
  537. # Handle connection error here
  538. print(f"Connection error: {e}")
  539. detail = "Pipeline not found"
  540. if r is not None:
  541. try:
  542. res = r.json()
  543. if "detail" in res:
  544. detail = res["detail"]
  545. except:
  546. pass
  547. raise HTTPException(
  548. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  549. detail=detail,
  550. )
  551. @app.get("/api/pipelines/{pipeline_id}/valves")
  552. async def get_pipeline_valves(
  553. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  554. ):
  555. models = await get_all_models()
  556. r = None
  557. try:
  558. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  559. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  560. headers = {"Authorization": f"Bearer {key}"}
  561. r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)
  562. r.raise_for_status()
  563. data = r.json()
  564. return {**data}
  565. except Exception as e:
  566. # Handle connection error here
  567. print(f"Connection error: {e}")
  568. detail = "Pipeline not found"
  569. if r is not None:
  570. try:
  571. res = r.json()
  572. if "detail" in res:
  573. detail = res["detail"]
  574. except:
  575. pass
  576. raise HTTPException(
  577. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  578. detail=detail,
  579. )
  580. @app.get("/api/pipelines/{pipeline_id}/valves/spec")
  581. async def get_pipeline_valves_spec(
  582. urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
  583. ):
  584. models = await get_all_models()
  585. r = None
  586. try:
  587. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  588. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  589. headers = {"Authorization": f"Bearer {key}"}
  590. r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)
  591. r.raise_for_status()
  592. data = r.json()
  593. return {**data}
  594. except Exception as e:
  595. # Handle connection error here
  596. print(f"Connection error: {e}")
  597. detail = "Pipeline not found"
  598. if r is not None:
  599. try:
  600. res = r.json()
  601. if "detail" in res:
  602. detail = res["detail"]
  603. except:
  604. pass
  605. raise HTTPException(
  606. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  607. detail=detail,
  608. )
  609. @app.post("/api/pipelines/{pipeline_id}/valves/update")
  610. async def update_pipeline_valves(
  611. urlIdx: Optional[int],
  612. pipeline_id: str,
  613. form_data: dict,
  614. user=Depends(get_admin_user),
  615. ):
  616. models = await get_all_models()
  617. r = None
  618. try:
  619. url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  620. key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
  621. headers = {"Authorization": f"Bearer {key}"}
  622. r = requests.post(
  623. f"{url}/{pipeline_id}/valves/update",
  624. headers=headers,
  625. json={**form_data},
  626. )
  627. r.raise_for_status()
  628. data = r.json()
  629. return {**data}
  630. except Exception as e:
  631. # Handle connection error here
  632. print(f"Connection error: {e}")
  633. detail = "Pipeline not found"
  634. if r is not None:
  635. try:
  636. res = r.json()
  637. if "detail" in res:
  638. detail = res["detail"]
  639. except:
  640. pass
  641. raise HTTPException(
  642. status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
  643. detail=detail,
  644. )
  645. @app.get("/api/config")
  646. async def get_app_config():
  647. # Checking and Handling the Absence of 'ui' in CONFIG_DATA
  648. default_locale = "en-US"
  649. if "ui" in CONFIG_DATA:
  650. default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")
  651. # The Rest of the Function Now Uses the Variables Defined Above
  652. return {
  653. "status": True,
  654. "name": WEBUI_NAME,
  655. "version": VERSION,
  656. "default_locale": default_locale,
  657. "default_models": webui_app.state.config.DEFAULT_MODELS,
  658. "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
  659. "features": {
  660. "auth": WEBUI_AUTH,
  661. "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
  662. "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
  663. "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
  664. "enable_image_generation": images_app.state.config.ENABLED,
  665. "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
  666. "enable_admin_export": ENABLE_ADMIN_EXPORT,
  667. },
  668. }
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    # Admin-only: report whether the model allow-list filter is active
    # and which model ids it permits.
    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }
class ModelFilterConfigForm(BaseModel):
    # Payload for toggling the model allow-list filter.
    enabled: bool
    models: List[str]
@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    # Admin-only: persist the model allow-list filter settings and echo them.
    app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
    app.state.config.MODEL_FILTER_LIST = form_data.models

    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    # Admin-only: return the currently configured notification webhook URL.
    return {
        "url": app.state.config.WEBHOOK_URL,
    }
class UrlForm(BaseModel):
    # Payload carrying a single URL (used for the webhook endpoint).
    url: str
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    # Admin-only: store the webhook URL and mirror it onto the webui sub-app.
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL

    return {"url": app.state.config.WEBHOOK_URL}
  700. @app.get("/api/version")
  701. async def get_app_config():
  702. return {
  703. "version": VERSION,
  704. }
  705. @app.get("/api/changelog")
  706. async def get_app_changelog():
  707. return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}
@app.get("/api/version/updates")
async def get_app_latest_release_version():
    # Compare the running version against the latest GitHub release tag.
    try:
        async with aiohttp.ClientSession(trust_env=True) as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]
                # Release tags are "v"-prefixed (e.g. "v0.1.0"); strip it.
                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError as e:
        # NOTE(review): every client error is reported as rate-limiting,
        # which may be misleading for plain connectivity failures.
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )
@app.get("/manifest.json")
async def get_manifest_json():
    # PWA web-app manifest so the UI can be installed as a standalone app.
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "theme_color": "#343541",
        "orientation": "portrait-primary",
        "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
    }
@app.get("/opensearch.xml")
async def get_opensearch_xml():
    # OpenSearch description document so browsers can register this UI
    # as a search provider.
    xml_content = rf"""
    <OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
    <ShortName>{WEBUI_NAME}</ShortName>
    <Description>Search {WEBUI_NAME}</Description>
    <InputEncoding>UTF-8</InputEncoding>
    <Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
    <Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
    <moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
    </OpenSearchDescription>
    """
    return Response(content=xml_content, media_type="application/xml")
@app.get("/health")
async def healthcheck():
    # Liveness probe endpoint for orchestrators / load balancers.
    return {"status": True}
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

if os.path.exists(FRONTEND_BUILD_DIR):
    # Some platforms register a wrong MIME type for .js; force the right one.
    mimetypes.add_type("text/javascript", ".js")
    # Mount the SPA last so all API routes above take precedence.
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )