12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037 |
- from contextlib import asynccontextmanager
- from bs4 import BeautifulSoup
- import json
- import markdown
- import time
- import os
- import sys
- import logging
- import aiohttp
- import requests
- import mimetypes
- import shutil
- import os
- import asyncio
- from fastapi import FastAPI, Request, Depends, status, UploadFile, File, Form
- from fastapi.staticfiles import StaticFiles
- from fastapi.responses import JSONResponse
- from fastapi import HTTPException
- from fastapi.middleware.wsgi import WSGIMiddleware
- from fastapi.middleware.cors import CORSMiddleware
- from starlette.exceptions import HTTPException as StarletteHTTPException
- from starlette.middleware.base import BaseHTTPMiddleware
- from starlette.responses import StreamingResponse, Response
- from apps.socket.main import app as socket_app
- from apps.ollama.main import app as ollama_app, get_all_models as get_ollama_models
- from apps.openai.main import app as openai_app, get_all_models as get_openai_models
- from apps.audio.main import app as audio_app
- from apps.images.main import app as images_app
- from apps.rag.main import app as rag_app
- from apps.webui.main import app as webui_app
- from pydantic import BaseModel
- from typing import List, Optional
- from apps.webui.models.models import Models, ModelModel
- from utils.utils import (
- get_admin_user,
- get_verified_user,
- get_current_user,
- get_http_authorization_cred,
- )
- from apps.rag.utils import rag_messages
- from config import (
- CONFIG_DATA,
- WEBUI_NAME,
- WEBUI_URL,
- WEBUI_AUTH,
- ENV,
- VERSION,
- CHANGELOG,
- FRONTEND_BUILD_DIR,
- CACHE_DIR,
- STATIC_DIR,
- ENABLE_OPENAI_API,
- ENABLE_OLLAMA_API,
- ENABLE_MODEL_FILTER,
- MODEL_FILTER_LIST,
- GLOBAL_LOG_LEVEL,
- SRC_LOG_LEVELS,
- WEBHOOK_URL,
- ENABLE_ADMIN_EXPORT,
- AppConfig,
- WEBUI_BUILD_HASH,
- )
- from constants import ERROR_MESSAGES
# Root logger streams to stdout at the globally configured level; this
# module's own logger can be tuned independently via SRC_LOG_LEVELS["MAIN"].
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
class SPAStaticFiles(StaticFiles):
    """Static file server with single-page-app fallback.

    Any path that would 404 is served ``index.html`` instead, so that
    client-side routes resolve to the SPA shell rather than an error.
    """

    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            # Anything other than "not found" is a real error — propagate it.
            if ex.status_code != 404:
                raise
            return await super().get_response("index.html", scope)
# Print the startup banner with version and (non-dev) build commit hash.
print(
    rf"""
  ___                    __        __   _     _   _ ___ 
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || | 
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || | 
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|                                               


v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
@asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work yet; placeholder for future lifecycle hooks.
    yield
app = FastAPI(
    # Interactive API docs are only exposed in development builds.
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)

# Runtime-mutable configuration, seeded from the static config module.
app.state.config = AppConfig()

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

# Cache of known models keyed by id; populated lazily (see check_url middleware).
app.state.MODELS = {}

# CORS: allow every origin.
origins = ["*"]
- # Custom middleware to add security headers
- # class SecurityHeadersMiddleware(BaseHTTPMiddleware):
- # async def dispatch(self, request: Request, call_next):
- # response: Response = await call_next(request)
- # response.headers["Cross-Origin-Opener-Policy"] = "same-origin"
- # response.headers["Cross-Origin-Embedder-Policy"] = "require-corp"
- # return response
- # app.add_middleware(SecurityHeadersMiddleware)
class RAGMiddleware(BaseHTTPMiddleware):
    """Intercepts chat-completion POSTs and injects RAG context.

    When the request body carries a "docs" key, the referenced documents are
    converted into context messages via rag_messages() and the body is
    rewritten before being forwarded downstream. If the client asked for
    citations, they are prepended to a streaming response as an SSE event
    (OpenAI style) or NDJSON line (Ollama style).
    """

    async def dispatch(self, request: Request, call_next):
        return_citations = False
        # Fix: citations must be bound even when "docs" is absent; previously
        # a request with citations=True but no docs hit a NameError below.
        citations = []

        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read and parse the original request body.
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            # "citations" is a UI-only flag; strip it before forwarding.
            return_citations = data.get("citations", False)
            data.pop("citations", None)

            if "docs" in data:
                data = {**data}
                data["messages"], citations = rag_messages(
                    docs=data["docs"],
                    messages=data["messages"],
                    template=rag_app.state.config.RAG_TEMPLATE,
                    embedding_function=rag_app.state.EMBEDDING_FUNCTION,
                    k=rag_app.state.config.TOP_K,
                    reranking_function=rag_app.state.sentence_transformer_rf,
                    r=rag_app.state.config.RELEVANCE_THRESHOLD,
                    hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
                )
                del data["docs"]

                log.debug(
                    f"data['messages']: {data['messages']}, citations: {citations}"
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one.
            request._body = modified_body_bytes
            # Rewrite content-length so it matches the new body length.
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)

        if return_citations:
            # Inject the citations into the response.
            if isinstance(response, StreamingResponse):
                # Fix: Content-Type may be absent; default to "" so the
                # substring checks below cannot raise TypeError on None.
                content_type = response.headers.get("Content-Type") or ""
                if "text/event-stream" in content_type:
                    return StreamingResponse(
                        self.openai_stream_wrapper(response.body_iterator, citations),
                    )
                if "application/x-ndjson" in content_type:
                    return StreamingResponse(
                        self.ollama_stream_wrapper(response.body_iterator, citations),
                    )

        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}

    async def openai_stream_wrapper(self, original_generator, citations):
        # Emit citations first as an SSE data event, then pass chunks through.
        yield f"data: {json.dumps({'citations': citations})}\n\n"
        async for data in original_generator:
            yield data

    async def ollama_stream_wrapper(self, original_generator, citations):
        # Emit citations first as a single NDJSON line, then pass chunks through.
        yield f"{json.dumps({'citations': citations})}\n"
        async for data in original_generator:
            yield data


app.add_middleware(RAGMiddleware)
class PipelineMiddleware(BaseHTTPMiddleware):
    """Runs pipeline "filter" inlets over chat-completion requests.

    Every filter pipeline targeting the requested model (or "*"), plus the
    model itself when it is a pipeline, receives the request body at its
    /filter/inlet endpoint in priority order. Each filter may rewrite the
    body or abort the request with an error detail.
    """

    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read and parse the original request body.
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            model_id = data["model"]

            # Filter pipelines that target every model ("*") or this model.
            filters = [
                model
                for model in app.state.MODELS.values()
                if "pipeline" in model
                and "type" in model["pipeline"]
                and model["pipeline"]["type"] == "filter"
                and (
                    model["pipeline"]["pipelines"] == ["*"]
                    or any(
                        model_id == target_model_id
                        for target_model_id in model["pipeline"]["pipelines"]
                    )
                )
            ]
            sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

            user = None
            if len(sorted_filters) > 0:
                try:
                    user = get_current_user(
                        get_http_authorization_cred(
                            request.headers.get("Authorization")
                        )
                    )
                    user = {"id": user.id, "name": user.name, "role": user.role}
                except Exception:
                    # Unauthenticated requests still pass through with user=None.
                    pass

            # Fix: guard the lookup — an unknown model id previously raised
            # an unhandled KeyError here (chat_completed already guards this).
            model = app.state.MODELS.get(model_id)
            if model is not None and "pipeline" in model:
                sorted_filters.append(model)

            for pipeline_filter in sorted_filters:
                r = None
                try:
                    urlIdx = pipeline_filter["urlIdx"]

                    url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
                    key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

                    if key != "":
                        headers = {"Authorization": f"Bearer {key}"}
                        r = requests.post(
                            f"{url}/{pipeline_filter['id']}/filter/inlet",
                            headers=headers,
                            json={
                                "user": user,
                                "body": data,
                            },
                        )

                        r.raise_for_status()
                        data = r.json()
                except Exception as e:
                    # Handle connection error here
                    print(f"Connection error: {e}")

                    if r is not None:
                        try:
                            res = r.json()
                            if "detail" in res:
                                return JSONResponse(
                                    status_code=r.status_code,
                                    content=res,
                                )
                        except Exception:  # fix: was a bare except
                            pass

            if "pipeline" not in app.state.MODELS.get(model_id, {}):
                # Strip UI-only metadata before forwarding to a plain backend.
                data.pop("chat_id", None)
                data.pop("title", None)

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one.
            request._body = modified_body_bytes
            # Rewrite content-length so it matches the new body length.
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(PipelineMiddleware)
# Permit cross-origin requests from any origin, with credentials and all
# methods/headers allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.middleware("http")
async def check_url(request: Request, call_next):
    """Lazily populate the model cache and record per-request timing."""
    if not app.state.MODELS:
        await get_all_models()

    started = int(time.time())
    response = await call_next(request)
    elapsed = int(time.time()) - started
    # Whole-second processing time, exposed for debugging.
    response.headers["X-Process-Time"] = str(elapsed)

    return response
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    # After any embedding-update request completes, re-sync the webui app's
    # embedding function with the RAG app's (possibly reconfigured) one.
    response = await call_next(request)
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response
# Mount each sub-application under its path prefix.
app.mount("/ws", socket_app)
app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)
app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)
app.mount("/api/v1", webui_app)
# Share the RAG embedding function with the webui app.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
async def get_all_models():
    """Aggregate models from the OpenAI and Ollama backends plus custom presets.

    Ollama models are normalized to the OpenAI model-object shape. Custom
    models either overlay name/info onto their matching base model or, when
    they declare a base_model_id, are appended as standalone presets.
    Refreshes the shared caches app.state.MODELS and webui_app.state.MODELS.

    Returns:
        list[dict]: every known model.
    """
    openai_models = []
    ollama_models = []

    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await get_openai_models()
        openai_models = openai_models["data"]

    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await get_ollama_models()
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    models = openai_models + ollama_models

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        if custom_model.base_model_id is None:  # fix: identity check, was `== None`
            # Overlay name/info onto the matching base model entry
            # (matches either the exact id or the id without its tag suffix).
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    model["name"] = custom_model.name
                    model["info"] = custom_model.model_dump()
        else:
            # Preset: listed as its own model, owned by its base model's backend.
            owned_by = "openai"
            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    break

            models.append(
                {
                    "id": custom_model.id,
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                }
            )

    app.state.MODELS = {model["id"]: model for model in models}
    webui_app.state.MODELS = app.state.MODELS

    return models
@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
    """Return the models visible to the requesting user.

    Filter pipelines are never listed; when the model filter is enabled,
    non-admin users only see the allow-listed models.
    """
    all_models = await get_all_models()

    # Hide filter pipelines from the catalog.
    visible = [
        m
        for m in all_models
        if "pipeline" not in m or m["pipeline"].get("type", None) != "filter"
    ]

    if app.state.config.ENABLE_MODEL_FILTER and user.role == "user":
        allowed = app.state.config.MODEL_FILTER_LIST
        visible = [m for m in visible if m["id"] in allowed]

    return {"data": visible}
@app.post("/api/chat/completed")
async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
    """Run matching pipeline "filter" outlets over a completed chat payload.

    The model itself (when it is a pipeline) runs first, then matching
    filters in priority order; each may rewrite the payload or abort with
    an error detail from the pipelines server.
    """
    data = form_data
    model_id = data["model"]

    # Filter pipelines that target every model ("*") or this model.
    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

    print(model_id)
    if model_id in app.state.MODELS:
        model = app.state.MODELS[model_id]
        if "pipeline" in model:
            sorted_filters = [model] + sorted_filters

    # fix: loop variable renamed from `filter` (shadowed the builtin).
    for pipeline_filter in sorted_filters:
        r = None
        try:
            urlIdx = pipeline_filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{pipeline_filter['id']}/filter/outlet",
                    headers=headers,
                    json={
                        "user": {"id": user.id, "name": user.name, "role": user.role},
                        "body": data,
                    },
                )

                r.raise_for_status()
                data = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                try:
                    res = r.json()
                    if "detail" in res:
                        return JSONResponse(
                            status_code=r.status_code,
                            content=res,
                        )
                except Exception:  # fix: was a bare except
                    pass

    return data
@app.get("/api/pipelines/list")
async def get_pipelines_list(user=Depends(get_admin_user)):
    """List the OpenAI-compatible backends that expose pipelines (admin only)."""
    responses = await get_openai_models(raw=True)

    print(responses)
    # fix: identity comparison with None (was `!= None`).
    urlIdxs = [
        idx
        for idx, response in enumerate(responses)
        if response is not None and "pipelines" in response
    ]

    return {
        "data": [
            {
                "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
                "idx": urlIdx,
            }
            for urlIdx in urlIdxs
        ]
    }
@app.post("/api/pipelines/upload")
async def upload_pipeline(
    urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user)
):
    """Accept a pipeline .py upload and forward it to the selected server.

    The file is staged under CACHE_DIR/pipelines, POSTed to the pipelines
    server at urlIdx, and always removed afterwards (success or failure).
    """
    print("upload_pipeline", urlIdx, file.filename)

    # Check if the uploaded file is a python file
    if not file.filename.endswith(".py"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only Python (.py) files are allowed.",
        )

    upload_folder = f"{CACHE_DIR}/pipelines"
    os.makedirs(upload_folder, exist_ok=True)
    file_path = os.path.join(upload_folder, file.filename)

    # Fix: r must be pre-bound — if saving the file or reading the config
    # raised, the except block's `r is not None` check hit a NameError.
    r = None
    try:
        # Save the uploaded file
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}

        with open(file_path, "rb") as f:
            files = {"file": f}
            r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
    finally:
        # Ensure the file is deleted after the upload is completed or on failure
        if os.path.exists(file_path):
            os.remove(file_path)
class AddPipelineForm(BaseModel):
    # Source URL of the pipeline to install.
    url: str
    # Index into OPENAI_API_BASE_URLS selecting the target pipelines server.
    urlIdx: int
@app.post("/api/pipelines/add")
async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
    """Install a pipeline from a URL on the selected pipelines server (admin only)."""
    r = None
    try:
        urlIdx = form_data.urlIdx

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.post(
            f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
class DeletePipelineForm(BaseModel):
    # Id of the pipeline to remove.
    id: str
    # Index into OPENAI_API_BASE_URLS selecting the target pipelines server.
    urlIdx: int
@app.delete("/api/pipelines/delete")
async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
    """Remove a pipeline by id from the selected pipelines server (admin only)."""
    r = None
    try:
        urlIdx = form_data.urlIdx

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.delete(
            f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.get("/api/pipelines")
async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
    """List pipelines installed on the pipelines server at urlIdx (admin only)."""
    r = None
    try:
        # fix: removed a stray no-op `urlIdx` expression statement.
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/pipelines", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.get("/api/pipelines/{pipeline_id}/valves")
async def get_pipeline_valves(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    """Fetch the current valve values of a pipeline from its server (admin only)."""
    # Refresh the shared model cache so pipeline state is current.
    models = await get_all_models()

    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.get("/api/pipelines/{pipeline_id}/valves/spec")
async def get_pipeline_valves_spec(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    """Fetch the valve schema of a pipeline from its server (admin only)."""
    # Refresh the shared model cache so pipeline state is current.
    models = await get_all_models()

    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.post("/api/pipelines/{pipeline_id}/valves/update")
async def update_pipeline_valves(
    urlIdx: Optional[int],
    pipeline_id: str,
    form_data: dict,
    user=Depends(get_admin_user),
):
    """Push new valve values to a pipeline on its server (admin only)."""
    # Refresh the shared model cache so pipeline state is current.
    models = await get_all_models()

    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.post(
            f"{url}/{pipeline_id}/valves/update",
            headers=headers,
            json={**form_data},
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except Exception:  # fix: was a bare except
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
@app.get("/api/config")
async def get_app_config():
    """Expose the frontend bootstrap configuration (public, unauthenticated)."""
    # Fall back to en-US when the config file has no "ui" section.
    ui_config = CONFIG_DATA.get("ui", {})
    default_locale = ui_config.get("default_locale", "en-US")

    features = {
        "auth": WEBUI_AUTH,
        "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
        "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
        "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
        "enable_image_generation": images_app.state.config.ENABLED,
        "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
        "enable_admin_export": ENABLE_ADMIN_EXPORT,
    }

    audio = {
        "tts": {
            "engine": audio_app.state.config.TTS_ENGINE,
            "voice": audio_app.state.config.TTS_VOICE,
        },
        "stt": {
            "engine": audio_app.state.config.STT_ENGINE,
        },
    }

    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": default_locale,
        "default_models": webui_app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
        "features": features,
        "audio": audio,
    }
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    """Return the current model-filter settings (admin only)."""
    config = app.state.config
    return {
        "enabled": config.ENABLE_MODEL_FILTER,
        "models": config.MODEL_FILTER_LIST,
    }
class ModelFilterConfigForm(BaseModel):
    # Whether the model allow-list is enforced for non-admin users.
    enabled: bool
    # Model ids visible to non-admin users when the filter is enabled.
    models: List[str]
@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    """Replace the model-filter settings and echo the new values (admin only)."""
    config = app.state.config
    config.ENABLE_MODEL_FILTER = form_data.enabled
    config.MODEL_FILTER_LIST = form_data.models

    return {
        "enabled": config.ENABLE_MODEL_FILTER,
        "models": config.MODEL_FILTER_LIST,
    }
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    """Return the currently configured notification webhook URL (admin only)."""
    return {
        "url": app.state.config.WEBHOOK_URL,
    }
class UrlForm(BaseModel):
    # New webhook URL to store.
    url: str
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    """Set the notification webhook URL and propagate it to the webui app."""
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL

    return {"url": app.state.config.WEBHOOK_URL}
@app.get("/api/version")
async def get_app_version():
    """Return the running application version.

    Renamed from get_app_config: the original name shadowed the /api/config
    handler defined earlier in this module at module scope.
    """
    return {
        "version": VERSION,
    }
@app.get("/api/changelog")
async def get_app_changelog():
    """Return the five most recent changelog entries."""
    recent_keys = list(CHANGELOG)[:5]
    return {key: CHANGELOG[key] for key in recent_keys}
@app.get("/api/version/updates")
async def get_app_latest_release_version():
    """Compare the running version against the latest GitHub release tag."""
    release_url = "https://api.github.com/repos/open-webui/open-webui/releases/latest"
    try:
        async with aiohttp.ClientSession(trust_env=True) as session:
            async with session.get(release_url) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]
                # Release tags are "vX.Y.Z" — strip the leading "v".
                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )
@app.get("/manifest.json")
async def get_manifest_json():
    """Serve the PWA web-app manifest."""
    manifest = {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "theme_color": "#343541",
        "orientation": "portrait-primary",
        "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
    }
    return manifest
@app.get("/opensearch.xml")
async def get_opensearch_xml():
    """Serve the OpenSearch description so browsers can register the UI as a search engine."""
    opensearch_doc = rf"""
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>{WEBUI_NAME}</ShortName>
<Description>Search {WEBUI_NAME}</Description>
<InputEncoding>UTF-8</InputEncoding>
<Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
<Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
<moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
</OpenSearchDescription>
"""
    return Response(content=opensearch_doc, media_type="application/xml")
@app.get("/health")
async def healthcheck():
    """Liveness probe for orchestrators; always reports the service as up."""
    return {"status": True}
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

if os.path.exists(FRONTEND_BUILD_DIR):
    # Some platforms register a wrong MIME type for .js, which breaks
    # ES-module loading in browsers.
    mimetypes.add_type("text/javascript", ".js")
    # Catch-all SPA mount goes last so the API routes above take precedence.
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )
|