# main.py

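"""FastAPI entrypoint for Open WebUI.

Mounts the socket, Ollama, OpenAI, audio, image, RAG, and WebUI sub-apps, and
wires up the middleware that handles tool calling, RAG context injection, and
pipeline filters around chat completion requests.
"""
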
from contextlib import asynccontextmanager
from bs4 import BeautifulSoup
import json
import markdown
import time
import os
import sys
import logging
import aiohttp
import requests
import mimetypes
import shutil
import uuid
import inspect
import asyncio

from fastapi.concurrency import run_in_threadpool
from fastapi import FastAPI, Request, Depends, status, UploadFile, File, Form
from fastapi.staticfiles import StaticFiles
from fastapi.responses import JSONResponse
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import StreamingResponse, Response

from apps.socket.main import app as socket_app
from apps.ollama.main import (
    app as ollama_app,
    OpenAIChatCompletionForm,
    get_all_models as get_ollama_models,
    generate_openai_chat_completion as generate_ollama_chat_completion,
)
from apps.openai.main import (
    app as openai_app,
    get_all_models as get_openai_models,
    generate_chat_completion as generate_openai_chat_completion,
)
from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.webui.main import app as webui_app, get_pipe_models

from pydantic import BaseModel
from typing import List, Optional, Iterator, Generator, Union

from apps.webui.models.models import Models, ModelModel
from apps.webui.models.tools import Tools
from apps.webui.models.functions import Functions
from apps.webui.utils import load_toolkit_module_by_id, load_function_module_by_id

from utils.utils import (
    get_admin_user,
    get_verified_user,
    get_current_user,
    get_http_authorization_cred,
)
from utils.task import (
    title_generation_template,
    search_query_generation_template,
    tools_function_calling_generation_template,
)
from utils.misc import (
    get_last_user_message,
    add_or_update_system_message,
    stream_message_template,
)

from apps.rag.utils import get_rag_context, rag_template

from config import (
    CONFIG_DATA,
    WEBUI_NAME,
    WEBUI_URL,
    WEBUI_AUTH,
    ENV,
    VERSION,
    CHANGELOG,
    FRONTEND_BUILD_DIR,
    UPLOAD_DIR,
    CACHE_DIR,
    STATIC_DIR,
    ENABLE_OPENAI_API,
    ENABLE_OLLAMA_API,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    GLOBAL_LOG_LEVEL,
    SRC_LOG_LEVELS,
    WEBHOOK_URL,
    ENABLE_ADMIN_EXPORT,
    WEBUI_BUILD_HASH,
    TASK_MODEL,
    TASK_MODEL_EXTERNAL,
    TITLE_GENERATION_PROMPT_TEMPLATE,
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    AppConfig,
)
from constants import ERROR_MESSAGES

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


class SPAStaticFiles(StaticFiles):
    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code == 404:
                return await super().get_response("index.html", scope)
            else:
                raise ex


print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|


v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)

@asynccontextmanager
async def lifespan(app: FastAPI):
    yield


app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)

app.state.config = AppConfig()

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

app.state.config.TASK_MODEL = TASK_MODEL
app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL
app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
)
app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
)
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
)

app.state.MODELS = {}

origins = ["*"]


##################################
#
# ChatCompletion Middleware
#
##################################
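

# Note: get_function_call_response drives one round of tool calling. It asks
# the task model to pick a function from the toolkit's JSON specs, then calls
# the chosen Python function, filling in any reserved parameters the function
# declares (__user__, __messages__, __files__, __model__, __id__).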
async def get_function_call_response(
    messages, files, tool_id, template, task_model_id, user
):
    tool = Tools.get_tool_by_id(tool_id)
    tools_specs = json.dumps(tool.specs, indent=2)
    content = tools_function_calling_generation_template(template, tools_specs)

    user_message = get_last_user_message(messages)
    prompt = (
        "History:\n"
        + "\n".join(
            [
                f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\""
                for message in messages[::-1][:4]
            ]
        )
        + f"\nQuery: {user_message}"
    )

    print(prompt)

    payload = {
        "model": task_model_id,
        "messages": [
            {"role": "system", "content": content},
            {"role": "user", "content": f"Query: {prompt}"},
        ],
        "stream": False,
    }

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        raise e

    model = app.state.MODELS[task_model_id]

    response = None
    try:
        if model["owned_by"] == "ollama":
            response = await generate_ollama_chat_completion(payload, user=user)
        else:
            response = await generate_openai_chat_completion(payload, user=user)

        content = None

        if hasattr(response, "body_iterator"):
            async for chunk in response.body_iterator:
                data = json.loads(chunk.decode("utf-8"))
                content = data["choices"][0]["message"]["content"]

            # Cleanup any remaining background tasks if necessary
            if response.background is not None:
                await response.background()
        else:
            content = response["choices"][0]["message"]["content"]

        # Parse the function response
        if content is not None:
            print(f"content: {content}")
            result = json.loads(content)
            print(result)

            # Call the function
            if "name" in result:
                if tool_id in webui_app.state.TOOLS:
                    toolkit_module = webui_app.state.TOOLS[tool_id]
                else:
                    toolkit_module = load_toolkit_module_by_id(tool_id)
                    webui_app.state.TOOLS[tool_id] = toolkit_module

                file_handler = False
                # check if toolkit_module has file_handler self variable
                if hasattr(toolkit_module, "file_handler"):
                    file_handler = True

                print("file_handler: ", file_handler)

                function = getattr(toolkit_module, result["name"])
                function_result = None
                try:
                    # Get the signature of the function
                    sig = inspect.signature(function)
                    params = result["parameters"]

                    if "__user__" in sig.parameters:
                        # Call the function with the '__user__' parameter included
                        params = {
                            **params,
                            "__user__": {
                                "id": user.id,
                                "email": user.email,
                                "name": user.name,
                                "role": user.role,
                            },
                        }

                    if "__messages__" in sig.parameters:
                        # Call the function with the '__messages__' parameter included
                        params = {
                            **params,
                            "__messages__": messages,
                        }

                    if "__files__" in sig.parameters:
                        # Call the function with the '__files__' parameter included
                        params = {
                            **params,
                            "__files__": files,
                        }

                    if "__model__" in sig.parameters:
                        # Call the function with the '__model__' parameter included
                        params = {
                            **params,
                            "__model__": model,
                        }

                    if "__id__" in sig.parameters:
                        # Call the function with the '__id__' parameter included
                        params = {
                            **params,
                            "__id__": tool_id,
                        }

                    function_result = function(**params)
                except Exception as e:
                    print(e)

                # Add the function result to the system prompt
                if function_result is not None:
                    return function_result, file_handler
    except Exception as e:
        print(f"Error: {e}")

    return None, False
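

# ChatCompletionMiddleware intercepts chat completion POSTs before they reach
# the mounted apps: it runs filter functions' inlet hooks, resolves any
# requested tools, injects RAG context into the system prompt, and re-emits
# citations as the first event on the response stream.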
class ChatCompletionMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        data_items = []

        if request.method == "POST" and any(
            endpoint in request.url.path
            for endpoint in ["/ollama/api/chat", "/chat/completions"]
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers.get("Authorization")),
            )

            # Flag to skip RAG completions if file_handler is present in tools/functions
            skip_files = False

            model_id = data["model"]
            if model_id not in app.state.MODELS:
                raise HTTPException(
                    status_code=status.HTTP_404_NOT_FOUND,
                    detail="Model not found",
                )
            model = app.state.MODELS[model_id]

            # Check if the model has any filters
            if "info" in model and "meta" in model["info"]:
                for filter_id in model["info"]["meta"].get("filterIds", []):
                    filter = Functions.get_function_by_id(filter_id)
                    if filter:
                        if filter_id in webui_app.state.FUNCTIONS:
                            function_module = webui_app.state.FUNCTIONS[filter_id]
                        else:
                            function_module, function_type = load_function_module_by_id(
                                filter_id
                            )
                            webui_app.state.FUNCTIONS[filter_id] = function_module

                        # Check if the function has a file_handler variable
                        if getattr(function_module, "file_handler", False):
                            skip_files = True

                        try:
                            if hasattr(function_module, "inlet"):
                                data = function_module.inlet(
                                    data,
                                    {
                                        "id": user.id,
                                        "email": user.email,
                                        "name": user.name,
                                        "role": user.role,
                                    },
                                )
                        except Exception as e:
                            print(f"Error: {e}")
                            return JSONResponse(
                                status_code=status.HTTP_400_BAD_REQUEST,
                                content={"detail": str(e)},
                            )

            # Set the task model
            task_model_id = data["model"]
            # Check if the user has a custom task model and use that model
            if app.state.MODELS[task_model_id]["owned_by"] == "ollama":
                if (
                    app.state.config.TASK_MODEL
                    and app.state.config.TASK_MODEL in app.state.MODELS
                ):
                    task_model_id = app.state.config.TASK_MODEL
            else:
                if (
                    app.state.config.TASK_MODEL_EXTERNAL
                    and app.state.config.TASK_MODEL_EXTERNAL in app.state.MODELS
                ):
                    task_model_id = app.state.config.TASK_MODEL_EXTERNAL

            prompt = get_last_user_message(data["messages"])
            context = ""

            # If tool_ids field is present, call the functions
            if "tool_ids" in data:
                print(data["tool_ids"])
                for tool_id in data["tool_ids"]:
                    print(tool_id)
                    try:
                        response, file_handler = await get_function_call_response(
                            messages=data["messages"],
                            files=data.get("files", []),
                            tool_id=tool_id,
                            template=app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
                            task_model_id=task_model_id,
                            user=user,
                        )

                        print(file_handler)
                        if isinstance(response, str):
                            context += ("\n" if context != "" else "") + response

                        if file_handler:
                            skip_files = True
                    except Exception as e:
                        print(f"Error: {e}")

                del data["tool_ids"]
                print(f"tool_context: {context}")

            # If files field is present, generate RAG completions
            # If skip_files is True, skip the RAG completions
            if "files" in data:
                if not skip_files:
                    data = {**data}
                    rag_context, citations = get_rag_context(
                        files=data["files"],
                        messages=data["messages"],
                        embedding_function=rag_app.state.EMBEDDING_FUNCTION,
                        k=rag_app.state.config.TOP_K,
                        reranking_function=rag_app.state.sentence_transformer_rf,
                        r=rag_app.state.config.RELEVANCE_THRESHOLD,
                        hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
                    )
                    if rag_context:
                        context += ("\n" if context != "" else "") + rag_context

                    log.debug(f"rag_context: {rag_context}, citations: {citations}")

                    if citations and data.get("citations"):
                        data_items.append({"citations": citations})

                del data["files"]

            if data.get("citations"):
                del data["citations"]

            if context != "":
                system_prompt = rag_template(
                    rag_app.state.config.RAG_TEMPLATE, context, prompt
                )
                print(system_prompt)
                data["messages"] = add_or_update_system_message(
                    system_prompt, data["messages"]
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

            response = await call_next(request)
            if isinstance(response, StreamingResponse):
                # If it's a streaming response, inject it as an SSE event or NDJSON line
                content_type = response.headers.get("Content-Type", "")
                if "text/event-stream" in content_type:
                    return StreamingResponse(
                        self.openai_stream_wrapper(response.body_iterator, data_items),
                    )
                if "application/x-ndjson" in content_type:
                    return StreamingResponse(
                        self.ollama_stream_wrapper(response.body_iterator, data_items),
                    )

            return response

        # If it's not a chat completion request, just pass it through
        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}

    async def openai_stream_wrapper(self, original_generator, data_items):
        for item in data_items:
            yield f"data: {json.dumps(item)}\n\n"

        async for data in original_generator:
            yield data

    async def ollama_stream_wrapper(self, original_generator, data_items):
        for item in data_items:
            yield f"{json.dumps(item)}\n"

        async for data in original_generator:
            yield data

app.add_middleware(ChatCompletionMiddleware)


##################################
#
# Pipeline Middleware
#
##################################
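

# filter_pipeline forwards the payload through every matching pipeline
# filter's /filter/inlet endpoint in priority order, letting external
# pipeline servers rewrite the request body before it reaches the model.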
def filter_pipeline(payload, user):
    user = {"id": user.id, "email": user.email, "name": user.name, "role": user.role}
    model_id = payload["model"]
    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

    model = app.state.MODELS[model_id]

    if "pipeline" in model:
        sorted_filters.append(model)

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{filter['id']}/filter/inlet",
                    headers=headers,
                    json={
                        "user": user,
                        "body": payload,
                    },
                )

                r.raise_for_status()
                payload = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                res = {}
                try:
                    res = r.json()
                except Exception:
                    pass
                if "detail" in res:
                    raise Exception(r.status_code, res["detail"])

    if "pipeline" not in app.state.MODELS[model_id]:
        if "chat_id" in payload:
            del payload["chat_id"]

        if "title" in payload:
            del payload["title"]

        if "task" in payload:
            del payload["task"]

    return payload

class PipelineMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers.get("Authorization")),
            )

            try:
                data = filter_pipeline(data, user)
            except Exception as e:
                return JSONResponse(
                    status_code=e.args[0],
                    content={"detail": e.args[1]},
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(PipelineMiddleware)

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.middleware("http")
async def check_url(request: Request, call_next):
    if len(app.state.MODELS) == 0:
        await get_all_models()

    start_time = int(time.time())
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response


@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    response = await call_next(request)
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response


app.mount("/ws", socket_app)

app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
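

# get_all_models merges pipe functions, OpenAI API models, and Ollama models
# with the custom models stored in the database, then caches the combined map
# on app.state.MODELS for the middleware and task endpoints to consult.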
async def get_all_models():
    pipe_models = []
    openai_models = []
    ollama_models = []

    pipe_models = await get_pipe_models()

    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await get_openai_models()
        openai_models = openai_models["data"]

    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await get_ollama_models()
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    models = pipe_models + openai_models + ollama_models

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        if custom_model.base_model_id is None:
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    model["name"] = custom_model.name
                    model["info"] = custom_model.model_dump()
        else:
            owned_by = "openai"
            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    break

            models.append(
                {
                    "id": custom_model.id,
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                }
            )

    app.state.MODELS = {model["id"]: model for model in models}
    webui_app.state.MODELS = app.state.MODELS

    return models

@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
    models = await get_all_models()

    # Filter out filter pipelines
    models = [
        model
        for model in models
        if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
    ]

    if app.state.config.ENABLE_MODEL_FILTER:
        if user.role == "user":
            models = list(
                filter(
                    lambda model: model["id"] in app.state.config.MODEL_FILTER_LIST,
                    models,
                )
            )
            return {"data": models}

    return {"data": models}

@app.post("/api/chat/completions")
async def generate_chat_completions(form_data: dict, user=Depends(get_verified_user)):
    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )
    model = app.state.MODELS[model_id]
    print(model)

    pipe = model.get("pipe")
    if pipe:

        def job():
            pipe_id = form_data["model"]
            if "." in pipe_id:
                pipe_id, sub_pipe_id = pipe_id.split(".", 1)
            print(pipe_id)

            pipe = webui_app.state.FUNCTIONS[pipe_id].pipe

            if form_data.get("stream"):

                def stream_content():
                    res = pipe(body=form_data)

                    if isinstance(res, str):
                        message = stream_message_template(form_data["model"], res)
                        yield f"data: {json.dumps(message)}\n\n"

                    if isinstance(res, Iterator):
                        for line in res:
                            if isinstance(line, BaseModel):
                                line = line.model_dump_json()
                                line = f"data: {line}"

                            try:
                                line = line.decode("utf-8")
                            except:
                                pass

                            if line.startswith("data:"):
                                yield f"{line}\n\n"
                            else:
                                line = stream_message_template(form_data["model"], line)
                                yield f"data: {json.dumps(line)}\n\n"

                    if isinstance(res, str) or isinstance(res, Generator):
                        finish_message = {
                            "id": f"{form_data['model']}-{str(uuid.uuid4())}",
                            "object": "chat.completion.chunk",
                            "created": int(time.time()),
                            "model": form_data["model"],
                            "choices": [
                                {
                                    "index": 0,
                                    "delta": {},
                                    "logprobs": None,
                                    "finish_reason": "stop",
                                }
                            ],
                        }

                        yield f"data: {json.dumps(finish_message)}\n\n"
                        yield "data: [DONE]\n\n"

                return StreamingResponse(
                    stream_content(), media_type="text/event-stream"
                )
            else:
                res = pipe(body=form_data)

                if isinstance(res, dict):
                    return res
                elif isinstance(res, BaseModel):
                    return res.model_dump()
                else:
                    message = ""
                    if isinstance(res, str):
                        message = res
                    if isinstance(res, Generator):
                        for stream in res:
                            message = f"{message}{stream}"

                    return {
                        "id": f"{form_data['model']}-{str(uuid.uuid4())}",
                        "object": "chat.completion",
                        "created": int(time.time()),
                        "model": form_data["model"],
                        "choices": [
                            {
                                "index": 0,
                                "message": {
                                    "role": "assistant",
                                    "content": message,
                                },
                                "logprobs": None,
                                "finish_reason": "stop",
                            }
                        ],
                    }

        return await run_in_threadpool(job)

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(form_data, user=user)
    else:
        return await generate_openai_chat_completion(form_data, user=user)
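

# /api/chat/completed is the outlet counterpart to the inlet filtering above:
# the finished conversation is posted to each matching pipeline filter's
# /filter/outlet endpoint, then passed through any filter functions' outlet
# hooks, before the (possibly modified) data is returned to the client.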
@app.post("/api/chat/completed")
async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
    data = form_data
    model_id = data["model"]

    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )
    model = app.state.MODELS[model_id]

    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
    if "pipeline" in model:
        sorted_filters = [model] + sorted_filters

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{filter['id']}/filter/outlet",
                    headers=headers,
                    json={
                        "user": {"id": user.id, "name": user.name, "role": user.role},
                        "body": data,
                    },
                )

                r.raise_for_status()
                data = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                try:
                    res = r.json()
                    if "detail" in res:
                        return JSONResponse(
                            status_code=r.status_code,
                            content=res,
                        )
                except:
                    pass

    # Check if the model has any filters
    if "info" in model and "meta" in model["info"]:
        for filter_id in model["info"]["meta"].get("filterIds", []):
            filter = Functions.get_function_by_id(filter_id)
            if filter:
                if filter_id in webui_app.state.FUNCTIONS:
                    function_module = webui_app.state.FUNCTIONS[filter_id]
                else:
                    function_module, function_type = load_function_module_by_id(
                        filter_id
                    )
                    webui_app.state.FUNCTIONS[filter_id] = function_module

                try:
                    if hasattr(function_module, "outlet"):
                        data = function_module.outlet(
                            data,
                            {
                                "id": user.id,
                                "email": user.email,
                                "name": user.name,
                                "role": user.role,
                            },
                        )
                except Exception as e:
                    print(f"Error: {e}")
                    return JSONResponse(
                        status_code=status.HTTP_400_BAD_REQUEST,
                        content={"detail": str(e)},
                    )

    return data

##################################
#
# Task Endpoints
#
##################################


# TODO: Refactor task API endpoints below into a separate file


@app.get("/api/task/config")
async def get_task_config(user=Depends(get_verified_user)):
    return {
        "TASK_MODEL": app.state.config.TASK_MODEL,
        "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL,
        "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
        "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    }

class TaskConfigForm(BaseModel):
    TASK_MODEL: Optional[str]
    TASK_MODEL_EXTERNAL: Optional[str]
    TITLE_GENERATION_PROMPT_TEMPLATE: str
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: str
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: int
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE: str


@app.post("/api/task/config/update")
async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_user)):
    app.state.config.TASK_MODEL = form_data.TASK_MODEL
    app.state.config.TASK_MODEL_EXTERNAL = form_data.TASK_MODEL_EXTERNAL
    app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = (
        form_data.TITLE_GENERATION_PROMPT_TEMPLATE
    )
    app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
        form_data.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
    )
    app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
        form_data.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
    )
    app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
        form_data.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
    )

    return {
        "TASK_MODEL": app.state.config.TASK_MODEL,
        "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL,
        "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
        "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
        "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    }
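

# Each task endpoint below swaps the requested model for the configured task
# model (TASK_MODEL for Ollama-owned models, TASK_MODEL_EXTERNAL otherwise)
# so lightweight jobs like titling don't have to run on the chat model.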
@app.post("/api/task/title/completions")
async def generate_title(form_data: dict, user=Depends(get_verified_user)):
    print("generate_title")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        if app.state.config.TASK_MODEL:
            task_model_id = app.state.config.TASK_MODEL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id
    else:
        if app.state.config.TASK_MODEL_EXTERNAL:
            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id

    print(model_id)
    model = app.state.MODELS[model_id]

    template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE

    content = title_generation_template(
        template,
        form_data["prompt"],
        {
            "name": user.name,
            "location": user.info.get("location") if user.info else None,
        },
    )

    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 50,
        "chat_id": form_data.get("chat_id", None),
        "title": True,
    }

    log.debug(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)

@app.post("/api/task/query/completions")
async def generate_search_query(form_data: dict, user=Depends(get_verified_user)):
    print("generate_search_query")

    if len(form_data["prompt"]) < app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=f"Skip search query generation for short prompts (< {app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD} characters)",
        )

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        if app.state.config.TASK_MODEL:
            task_model_id = app.state.config.TASK_MODEL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id
    else:
        if app.state.config.TASK_MODEL_EXTERNAL:
            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id

    print(model_id)
    model = app.state.MODELS[model_id]

    template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE

    content = search_query_generation_template(
        template, form_data["prompt"], {"name": user.name}
    )

    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 30,
        "task": True,
    }

    print(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)

@app.post("/api/task/emoji/completions")
async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
    print("generate_emoji")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        if app.state.config.TASK_MODEL:
            task_model_id = app.state.config.TASK_MODEL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id
    else:
        if app.state.config.TASK_MODEL_EXTERNAL:
            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id

    print(model_id)
    model = app.state.MODELS[model_id]

    template = '''
Your task is to reflect the speaker's likely facial expression through a fitting emoji. Interpret emotions from the message and reflect their facial expression using fitting, diverse emojis (e.g., 😊, 😢, 😡, 😱).

Message: """{{prompt}}"""
'''

    content = title_generation_template(
        template,
        form_data["prompt"],
        {
            "name": user.name,
            "location": user.info.get("location") if user.info else None,
        },
    )

    payload = {
        "model": model_id,
        "messages": [{"role": "user", "content": content}],
        "stream": False,
        "max_tokens": 4,
        "chat_id": form_data.get("chat_id", None),
        "task": True,
    }

    log.debug(payload)

    try:
        payload = filter_pipeline(payload, user)
    except Exception as e:
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(payload, user=user)
    else:
        return await generate_openai_chat_completion(payload, user=user)

@app.post("/api/task/tools/completions")
async def get_tools_function_calling(form_data: dict, user=Depends(get_verified_user)):
    print("get_tools_function_calling")

    model_id = form_data["model"]
    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    # Check if the user has a custom task model
    # If the user has a custom task model, use that model
    if app.state.MODELS[model_id]["owned_by"] == "ollama":
        if app.state.config.TASK_MODEL:
            task_model_id = app.state.config.TASK_MODEL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id
    else:
        if app.state.config.TASK_MODEL_EXTERNAL:
            task_model_id = app.state.config.TASK_MODEL_EXTERNAL
            if task_model_id in app.state.MODELS:
                model_id = task_model_id

    print(model_id)
    template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE

    try:
        context, file_handler = await get_function_call_response(
            form_data["messages"],
            form_data.get("files", []),
            form_data["tool_id"],
            template,
            model_id,
            user,
        )
        return context
    except Exception as e:
        return JSONResponse(
            status_code=e.args[0],
            content={"detail": e.args[1]},
        )

##################################
#
# Pipelines Endpoints
#
##################################


# TODO: Refactor pipelines API endpoints below into a separate file
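

# The admin endpoints below proxy pipeline management (listing, upload, add,
# delete, and valve inspection/updates) to a pipelines server reachable at
# the OpenAI-compatible base URL selected by urlIdx.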
@app.get("/api/pipelines/list")
async def get_pipelines_list(user=Depends(get_admin_user)):
    responses = await get_openai_models(raw=True)
    print(responses)

    urlIdxs = [
        idx
        for idx, response in enumerate(responses)
        if response is not None and "pipelines" in response
    ]

    return {
        "data": [
            {
                "url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
                "idx": urlIdx,
            }
            for urlIdx in urlIdxs
        ]
    }

@app.post("/api/pipelines/upload")
async def upload_pipeline(
    urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user)
):
    print("upload_pipeline", urlIdx, file.filename)
    # Check if the uploaded file is a python file
    if not file.filename.endswith(".py"):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only Python (.py) files are allowed.",
        )

    upload_folder = f"{CACHE_DIR}/pipelines"
    os.makedirs(upload_folder, exist_ok=True)
    file_path = os.path.join(upload_folder, file.filename)

    r = None
    try:
        # Save the uploaded file
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}

        with open(file_path, "rb") as f:
            files = {"file": f}
            r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )
    finally:
        # Ensure the file is deleted after the upload is completed or on failure
        if os.path.exists(file_path):
            os.remove(file_path)

class AddPipelineForm(BaseModel):
    url: str
    urlIdx: int


@app.post("/api/pipelines/add")
async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
    r = None
    try:
        urlIdx = form_data.urlIdx

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.post(
            f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )


class DeletePipelineForm(BaseModel):
    id: str
    urlIdx: int


@app.delete("/api/pipelines/delete")
async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
    r = None
    try:
        urlIdx = form_data.urlIdx

        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.delete(
            f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )

@app.get("/api/pipelines")
async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/pipelines", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )

@app.get("/api/pipelines/{pipeline_id}/valves")
async def get_pipeline_valves(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    models = await get_all_models()
    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )


@app.get("/api/pipelines/{pipeline_id}/valves/spec")
async def get_pipeline_valves_spec(
    urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
    models = await get_all_models()
    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )


@app.post("/api/pipelines/{pipeline_id}/valves/update")
async def update_pipeline_valves(
    urlIdx: Optional[int],
    pipeline_id: str,
    form_data: dict,
    user=Depends(get_admin_user),
):
    models = await get_all_models()
    r = None
    try:
        url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
        key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

        headers = {"Authorization": f"Bearer {key}"}
        r = requests.post(
            f"{url}/{pipeline_id}/valves/update",
            headers=headers,
            json={**form_data},
        )

        r.raise_for_status()
        data = r.json()

        return {**data}
    except Exception as e:
        # Handle connection error here
        print(f"Connection error: {e}")

        detail = "Pipeline not found"
        if r is not None:
            try:
                res = r.json()
                if "detail" in res:
                    detail = res["detail"]
            except:
                pass

        raise HTTPException(
            status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
            detail=detail,
        )

##################################
#
# Config Endpoints
#
##################################

@app.get("/api/config")
async def get_app_config():
    # Fall back to "en-US" when CONFIG_DATA has no 'ui' section
    default_locale = "en-US"
    if "ui" in CONFIG_DATA:
        default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")

    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": default_locale,
        "default_models": webui_app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
        "features": {
            "auth": WEBUI_AUTH,
            "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
            "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
            "enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
            "enable_image_generation": images_app.state.config.ENABLED,
            "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
            "enable_admin_export": ENABLE_ADMIN_EXPORT,
        },
        "audio": {
            "tts": {
                "engine": audio_app.state.config.TTS_ENGINE,
                "voice": audio_app.state.config.TTS_VOICE,
            },
            "stt": {
                "engine": audio_app.state.config.STT_ENGINE,
            },
        },
    }

@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }


class ModelFilterConfigForm(BaseModel):
    enabled: bool
    models: List[str]


@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
    app.state.config.MODEL_FILTER_LIST = form_data.models

    return {
        "enabled": app.state.config.ENABLE_MODEL_FILTER,
        "models": app.state.config.MODEL_FILTER_LIST,
    }


# TODO: webhook endpoint should be under config endpoints
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    return {
        "url": app.state.config.WEBHOOK_URL,
    }


class UrlForm(BaseModel):
    url: str


@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
    return {"url": app.state.config.WEBHOOK_URL}

@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }

@app.get("/api/changelog")
async def get_app_changelog():
    return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    try:
        async with aiohttp.ClientSession(trust_env=True) as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]

                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )

@app.get("/manifest.json")
async def get_manifest_json():
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "theme_color": "#343541",
        "orientation": "portrait-primary",
        "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
    }


@app.get("/opensearch.xml")
async def get_opensearch_xml():
    xml_content = rf"""
    <OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
    <ShortName>{WEBUI_NAME}</ShortName>
    <Description>Search {WEBUI_NAME}</Description>
    <InputEncoding>UTF-8</InputEncoding>
    <Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
    <Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
    <moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
    </OpenSearchDescription>
    """
    return Response(content=xml_content, media_type="application/xml")

@app.get("/health")
async def healthcheck():
    return {"status": True}


app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")

if os.path.exists(FRONTEND_BUILD_DIR):
    mimetypes.add_type("text/javascript", ".js")
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )