main.py

from bs4 import BeautifulSoup
import json
import markdown
import time
import os
import sys
import logging
import aiohttp
import requests

from fastapi import FastAPI, Request, Depends, status
from fastapi.staticfiles import StaticFiles
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware

from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.litellm.main import (
    app as litellm_app,
    start_litellm_background,
    shutdown_litellm_background,
)
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app

import asyncio
from pydantic import BaseModel
from typing import List

from utils.utils import get_admin_user
from apps.rag.utils import rag_messages

from config import (
    CONFIG_DATA,
    WEBUI_NAME,
    ENV,
    VERSION,
    CHANGELOG,
    FRONTEND_BUILD_DIR,
    CACHE_DIR,
    STATIC_DIR,
    ENABLE_LITELLM,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    GLOBAL_LOG_LEVEL,
    SRC_LOG_LEVELS,
    WEBHOOK_URL,
    ENABLE_ADMIN_EXPORT,
)
from constants import ERROR_MESSAGES

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
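
# Serve the built frontend as a single-page app: any path that would 404 falls
# back to index.html so client-side routing keeps working.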
class SPAStaticFiles(StaticFiles):
    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code == 404:
                return await super().get_response("index.html", scope)
            else:
                raise ex

print(
    f"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|

v{VERSION} - building the best open-source AI user interface.
https://github.com/open-webui/open-webui
"""
)

app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.WEBHOOK_URL = WEBHOOK_URL

origins = ["*"]
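
# RAGMiddleware intercepts POST requests to the chat endpoints; when the JSON
# body carries a "docs" field, the referenced documents are folded into
# "messages" via rag_messages() and the body (plus content-length) is rewritten
# before the request reaches the downstream app.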
class RAGMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            # Example: Add a new key-value pair or modify existing ones
            # data["modified"] = True  # Example modification
            if "docs" in data:
                data = {**data}
                data["messages"] = rag_messages(
                    data["docs"],
                    data["messages"],
                    rag_app.state.RAG_TEMPLATE,
                    rag_app.state.TOP_K,
                    rag_app.state.RELEVANCE_THRESHOLD,
                    rag_app.state.ENABLE_RAG_HYBRID_SEARCH,
                    rag_app.state.RAG_EMBEDDING_ENGINE,
                    rag_app.state.RAG_EMBEDDING_MODEL,
                    rag_app.state.sentence_transformer_ef,
                    rag_app.state.sentence_transformer_rf,
                    rag_app.state.OPENAI_API_KEY,
                    rag_app.state.OPENAI_API_BASE_URL,
                )
                del data["docs"]

                log.debug(f"data['messages']: {data['messages']}")

            modified_body_bytes = json.dumps(data).encode("utf-8")

            # Replace the request body with the modified one
            request._body = modified_body_bytes

            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}

app.add_middleware(RAGMiddleware)

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
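
# Lightweight timing middleware: records the elapsed wall-clock time (whole
# seconds) for each request and exposes it as an X-Process-Time response header.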
@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response
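
# If LiteLLM support is enabled, launch it as a background task when the server
# starts; the matching shutdown hook at the bottom of this file stops it again.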
@app.on_event("startup")
async def on_startup():
    if ENABLE_LITELLM:
        asyncio.create_task(start_litellm_background())
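
# Mount the sub-applications (web UI backend, LiteLLM, Ollama, OpenAI, images,
# audio, RAG) under their respective API prefixes.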
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)

app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)
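
# Frontend-facing configuration: application name, version, default locale,
# default models, prompt suggestions and feature flags.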
@app.get("/api/config")
async def get_app_config():
    # Fall back to "en-US" unless a default locale is set under the optional
    # "ui" section of CONFIG_DATA.
    default_locale = "en-US"
    if "ui" in CONFIG_DATA:
        default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")

    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": default_locale,
        "images": images_app.state.ENABLED,
        "default_models": webui_app.state.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
        "trusted_header_auth": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
        "admin_export_enabled": ENABLE_ADMIN_EXPORT,
    }
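
# Admin-only endpoints for reading and updating the model filter, which
# restricts the set of models exposed through the model-listing sub-apps.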
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    return {
        "enabled": app.state.ENABLE_MODEL_FILTER,
        "models": app.state.MODEL_FILTER_LIST,
    }


class ModelFilterConfigForm(BaseModel):
    enabled: bool
    models: List[str]
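
# Illustrative request body for POST /api/config/model/filter (model names are
# placeholders):
#   {"enabled": true, "models": ["llama3:latest", "mistral:7b"]}
# The new settings are mirrored onto the ollama, openai and litellm sub-apps so
# they all apply the same filter.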
@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    app.state.ENABLE_MODEL_FILTER = form_data.enabled
    app.state.MODEL_FILTER_LIST = form_data.models

    ollama_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    openai_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    litellm_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    litellm_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    return {
        "enabled": app.state.ENABLE_MODEL_FILTER,
        "models": app.state.MODEL_FILTER_LIST,
    }
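
# Admin-only endpoints for reading and updating the notification webhook URL;
# updates are propagated to the web UI sub-app.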
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    return {
        "url": app.state.WEBHOOK_URL,
    }


class UrlForm(BaseModel):
    url: str


@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    app.state.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.WEBHOOK_URL

    return {
        "url": app.state.WEBHOOK_URL,
    }
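
# Version, changelog and update-check endpoints.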
@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/changelog")
async def get_app_changelog():
    # Return only the first five entries of the changelog.
    return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]

                # Strip the leading "v" from the release tag, e.g. "v0.1.0" -> "0.1.0".
                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )
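
# Minimal PWA manifest so the UI can be installed as a standalone app.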
@app.get("/manifest.json")
async def get_manifest_json():
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "theme_color": "#343541",
        "orientation": "portrait-primary",
        "icons": [{"src": "/favicon.png", "type": "image/png", "sizes": "844x884"}],
    }
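
# Static assets and cached files, then the SPA catch-all; the "/" mount comes
# last so the API routes and sub-apps registered above take precedence.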
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

app.mount(
    "/",
    SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
    name="spa-static-files",
)
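
# Stop the background LiteLLM service when the server shuts down.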
@app.on_event("shutdown")
async def shutdown_event():
    if ENABLE_LITELLM:
        await shutdown_litellm_background()