main.py

from bs4 import BeautifulSoup
import json
import markdown
import time
import os
import sys

import requests

from fastapi import FastAPI, Request, Depends, status
from fastapi.staticfiles import StaticFiles
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware

from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app
from apps.litellm.main import app as litellm_app, startup as litellm_app_startup
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app

from pydantic import BaseModel
from typing import List

from utils.utils import get_admin_user
from apps.rag.utils import rag_messages

from config import (
    WEBUI_NAME,
    ENV,
    VERSION,
    CHANGELOG,
    FRONTEND_BUILD_DIR,
    MODEL_FILTER_ENABLED,
    MODEL_FILTER_LIST,
)
from constants import ERROR_MESSAGES


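# Serve the built frontend as a single-page app: any path that 404s falls
# back to index.html so client-side routing keeps working.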
class SPAStaticFiles(StaticFiles):
    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code == 404:
                return await super().get_response("index.html", scope)
            else:
                raise ex


app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

app.state.MODEL_FILTER_ENABLED = MODEL_FILTER_ENABLED
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

origins = ["*"]


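# Intercept POST chat requests and, when the payload includes "docs", rewrite
# the messages with retrieved document context before the request reaches the
# model backends.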
class RAGMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
        ):
            print(request.url.path)

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            # Example: Add a new key-value pair or modify existing ones
            # data["modified"] = True  # Example modification
            if "docs" in data:
                data = {**data}
                data["messages"] = rag_messages(
                    data["docs"],
                    data["messages"],
                    rag_app.state.RAG_TEMPLATE,
                    rag_app.state.TOP_K,
                    rag_app.state.sentence_transformer_ef,
                )
                del data["docs"]

                print(data["messages"])

            modified_body_bytes = json.dumps(data).encode("utf-8")

            # Replace the request body with the modified one
            request._body = modified_body_bytes

            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(RAGMiddleware)

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


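# Record the wall-clock time spent handling each request (whole seconds) in
# an X-Process-Time response header.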
@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response


@app.on_event("startup")
async def on_startup():
    await litellm_app_startup()


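# Mount the sub-applications under their own URL prefixes.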
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)

app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)


@app.get("/api/config")
async def get_app_config():
    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "images": images_app.state.ENABLED,
        "default_models": webui_app.state.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
    }


@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    return {
        "enabled": app.state.MODEL_FILTER_ENABLED,
        "models": app.state.MODEL_FILTER_LIST,
    }


class ModelFilterConfigForm(BaseModel):
    enabled: bool
    models: List[str]


@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    app.state.MODEL_FILTER_ENABLED = form_data.enabled
    app.state.MODEL_FILTER_LIST = form_data.models

    ollama_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
    ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    openai_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED
    openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    return {
        "enabled": app.state.MODEL_FILTER_ENABLED,
        "models": app.state.MODEL_FILTER_LIST,
    }


@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/changelog")
async def get_app_changelog():
    return CHANGELOG


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    try:
        response = requests.get(
            "https://api.github.com/repos/open-webui/open-webui/releases/latest"
        )
        response.raise_for_status()
        latest_version = response.json()["tag_name"]

        return {"current": VERSION, "latest": latest_version[1:]}
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )


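# Static assets, cached files, and the SPA frontend (mounted at "/" last so it
# only serves paths no API route or sub-app handled).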
app.mount("/static", StaticFiles(directory="static"), name="static")
app.mount("/cache", StaticFiles(directory="data/cache"), name="cache")

app.mount(
    "/",
    SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
    name="spa-static-files",
)