main.py 82 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523
  1. import asyncio
  2. import inspect
  3. import json
  4. import logging
  5. import mimetypes
  6. import os
  7. import shutil
  8. import sys
  9. import time
  10. import random
  11. from typing import AsyncGenerator, Generator, Iterator
  12. from contextlib import asynccontextmanager
  13. from urllib.parse import urlencode, parse_qs, urlparse
  14. from pydantic import BaseModel
  15. from sqlalchemy import text
  16. from typing import Optional
  17. from aiocache import cached
  18. import aiohttp
  19. import requests
  20. from fastapi import (
  21. Depends,
  22. FastAPI,
  23. File,
  24. Form,
  25. HTTPException,
  26. Request,
  27. UploadFile,
  28. status,
  29. )
  30. from fastapi.middleware.cors import CORSMiddleware
  31. from fastapi.responses import JSONResponse, RedirectResponse
  32. from fastapi.staticfiles import StaticFiles
  33. from starlette.exceptions import HTTPException as StarletteHTTPException
  34. from starlette.middleware.base import BaseHTTPMiddleware
  35. from starlette.middleware.sessions import SessionMiddleware
  36. from starlette.responses import Response, StreamingResponse
  37. from open_webui.socket.main import (
  38. app as socket_app,
  39. periodic_usage_pool_cleanup,
  40. get_event_call,
  41. get_event_emitter,
  42. )
  43. from open_webui.routers import (
  44. audio,
  45. images,
  46. ollama,
  47. openai,
  48. retrieval,
  49. pipelines,
  50. tasks,
  51. auths,
  52. chats,
  53. folders,
  54. configs,
  55. groups,
  56. files,
  57. functions,
  58. memories,
  59. models,
  60. knowledge,
  61. prompts,
  62. evaluations,
  63. tools,
  64. users,
  65. utils,
  66. )
  67. from open_webui.routers.retrieval import (
  68. get_embedding_function,
  69. get_ef,
  70. get_rf,
  71. )
  72. from open_webui.routers.pipelines import (
  73. process_pipeline_inlet_filter,
  74. process_pipeline_outlet_filter,
  75. )
  76. from open_webui.retrieval.utils import get_sources_from_files
  77. from open_webui.internal.db import Session
  78. from open_webui.models.functions import Functions
  79. from open_webui.models.models import Models
  80. from open_webui.models.users import UserModel, Users
  81. from open_webui.constants import TASKS
  82. from open_webui.config import (
  83. # Ollama
  84. ENABLE_OLLAMA_API,
  85. OLLAMA_BASE_URLS,
  86. OLLAMA_API_CONFIGS,
  87. # OpenAI
  88. ENABLE_OPENAI_API,
  89. OPENAI_API_BASE_URLS,
  90. OPENAI_API_KEYS,
  91. OPENAI_API_CONFIGS,
  92. # Image
  93. AUTOMATIC1111_API_AUTH,
  94. AUTOMATIC1111_BASE_URL,
  95. AUTOMATIC1111_CFG_SCALE,
  96. AUTOMATIC1111_SAMPLER,
  97. AUTOMATIC1111_SCHEDULER,
  98. COMFYUI_BASE_URL,
  99. COMFYUI_WORKFLOW,
  100. COMFYUI_WORKFLOW_NODES,
  101. ENABLE_IMAGE_GENERATION,
  102. IMAGE_GENERATION_ENGINE,
  103. IMAGE_GENERATION_MODEL,
  104. IMAGE_SIZE,
  105. IMAGE_STEPS,
  106. IMAGES_OPENAI_API_BASE_URL,
  107. IMAGES_OPENAI_API_KEY,
  108. # Audio
  109. AUDIO_STT_ENGINE,
  110. AUDIO_STT_MODEL,
  111. AUDIO_STT_OPENAI_API_BASE_URL,
  112. AUDIO_STT_OPENAI_API_KEY,
  113. AUDIO_TTS_API_KEY,
  114. AUDIO_TTS_ENGINE,
  115. AUDIO_TTS_MODEL,
  116. AUDIO_TTS_OPENAI_API_BASE_URL,
  117. AUDIO_TTS_OPENAI_API_KEY,
  118. AUDIO_TTS_SPLIT_ON,
  119. AUDIO_TTS_VOICE,
  120. AUDIO_TTS_AZURE_SPEECH_REGION,
  121. AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT,
  122. WHISPER_MODEL,
  123. WHISPER_MODEL_AUTO_UPDATE,
  124. WHISPER_MODEL_DIR,
  125. # Retrieval
  126. RAG_TEMPLATE,
  127. DEFAULT_RAG_TEMPLATE,
  128. RAG_EMBEDDING_MODEL,
  129. RAG_EMBEDDING_MODEL_AUTO_UPDATE,
  130. RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
  131. RAG_RERANKING_MODEL,
  132. RAG_RERANKING_MODEL_AUTO_UPDATE,
  133. RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
  134. RAG_EMBEDDING_ENGINE,
  135. RAG_EMBEDDING_BATCH_SIZE,
  136. RAG_RELEVANCE_THRESHOLD,
  137. RAG_FILE_MAX_COUNT,
  138. RAG_FILE_MAX_SIZE,
  139. RAG_OPENAI_API_BASE_URL,
  140. RAG_OPENAI_API_KEY,
  141. RAG_OLLAMA_BASE_URL,
  142. RAG_OLLAMA_API_KEY,
  143. CHUNK_OVERLAP,
  144. CHUNK_SIZE,
  145. CONTENT_EXTRACTION_ENGINE,
  146. TIKA_SERVER_URL,
  147. RAG_TOP_K,
  148. RAG_TEXT_SPLITTER,
  149. TIKTOKEN_ENCODING_NAME,
  150. PDF_EXTRACT_IMAGES,
  151. YOUTUBE_LOADER_LANGUAGE,
  152. YOUTUBE_LOADER_PROXY_URL,
  153. # Retrieval (Web Search)
  154. RAG_WEB_SEARCH_ENGINE,
  155. RAG_WEB_SEARCH_RESULT_COUNT,
  156. RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
  157. RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
  158. JINA_API_KEY,
  159. SEARCHAPI_API_KEY,
  160. SEARCHAPI_ENGINE,
  161. SEARXNG_QUERY_URL,
  162. SERPER_API_KEY,
  163. SERPLY_API_KEY,
  164. SERPSTACK_API_KEY,
  165. SERPSTACK_HTTPS,
  166. TAVILY_API_KEY,
  167. BING_SEARCH_V7_ENDPOINT,
  168. BING_SEARCH_V7_SUBSCRIPTION_KEY,
  169. BRAVE_SEARCH_API_KEY,
  170. KAGI_SEARCH_API_KEY,
  171. MOJEEK_SEARCH_API_KEY,
  172. GOOGLE_PSE_API_KEY,
  173. GOOGLE_PSE_ENGINE_ID,
  174. ENABLE_RAG_HYBRID_SEARCH,
  175. ENABLE_RAG_LOCAL_WEB_FETCH,
  176. ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
  177. ENABLE_RAG_WEB_SEARCH,
  178. UPLOAD_DIR,
  179. # WebUI
  180. WEBUI_AUTH,
  181. WEBUI_NAME,
  182. WEBUI_BANNERS,
  183. WEBHOOK_URL,
  184. ADMIN_EMAIL,
  185. SHOW_ADMIN_DETAILS,
  186. JWT_EXPIRES_IN,
  187. ENABLE_SIGNUP,
  188. ENABLE_LOGIN_FORM,
  189. ENABLE_API_KEY,
  190. ENABLE_COMMUNITY_SHARING,
  191. ENABLE_MESSAGE_RATING,
  192. ENABLE_EVALUATION_ARENA_MODELS,
  193. USER_PERMISSIONS,
  194. DEFAULT_USER_ROLE,
  195. DEFAULT_PROMPT_SUGGESTIONS,
  196. DEFAULT_MODELS,
  197. DEFAULT_ARENA_MODEL,
  198. MODEL_ORDER_LIST,
  199. EVALUATION_ARENA_MODELS,
  200. # WebUI (OAuth)
  201. ENABLE_OAUTH_ROLE_MANAGEMENT,
  202. OAUTH_ROLES_CLAIM,
  203. OAUTH_EMAIL_CLAIM,
  204. OAUTH_PICTURE_CLAIM,
  205. OAUTH_USERNAME_CLAIM,
  206. OAUTH_ALLOWED_ROLES,
  207. OAUTH_ADMIN_ROLES,
  208. # WebUI (LDAP)
  209. ENABLE_LDAP,
  210. LDAP_SERVER_LABEL,
  211. LDAP_SERVER_HOST,
  212. LDAP_SERVER_PORT,
  213. LDAP_ATTRIBUTE_FOR_USERNAME,
  214. LDAP_SEARCH_FILTERS,
  215. LDAP_SEARCH_BASE,
  216. LDAP_APP_DN,
  217. LDAP_APP_PASSWORD,
  218. LDAP_USE_TLS,
  219. LDAP_CA_CERT_FILE,
  220. LDAP_CIPHERS,
  221. # Misc
  222. ENV,
  223. CACHE_DIR,
  224. STATIC_DIR,
  225. FRONTEND_BUILD_DIR,
  226. CORS_ALLOW_ORIGIN,
  227. DEFAULT_LOCALE,
  228. OAUTH_PROVIDERS,
  229. # Admin
  230. ENABLE_ADMIN_CHAT_ACCESS,
  231. ENABLE_ADMIN_EXPORT,
  232. # Tasks
  233. TASK_MODEL,
  234. TASK_MODEL_EXTERNAL,
  235. ENABLE_TAGS_GENERATION,
  236. ENABLE_SEARCH_QUERY_GENERATION,
  237. ENABLE_RETRIEVAL_QUERY_GENERATION,
  238. ENABLE_AUTOCOMPLETE_GENERATION,
  239. TITLE_GENERATION_PROMPT_TEMPLATE,
  240. TAGS_GENERATION_PROMPT_TEMPLATE,
  241. TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
  242. QUERY_GENERATION_PROMPT_TEMPLATE,
  243. AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE,
  244. AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH,
  245. AppConfig,
  246. reset_config,
  247. )
  248. from open_webui.env import (
  249. CHANGELOG,
  250. GLOBAL_LOG_LEVEL,
  251. SAFE_MODE,
  252. SRC_LOG_LEVELS,
  253. VERSION,
  254. WEBUI_URL,
  255. WEBUI_BUILD_HASH,
  256. WEBUI_SECRET_KEY,
  257. WEBUI_SESSION_COOKIE_SAME_SITE,
  258. WEBUI_SESSION_COOKIE_SECURE,
  259. WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
  260. WEBUI_AUTH_TRUSTED_NAME_HEADER,
  261. BYPASS_MODEL_ACCESS_CONTROL,
  262. RESET_CONFIG_ON_START,
  263. OFFLINE_MODE,
  264. )
  265. from open_webui.utils.plugin import load_function_module_by_id
  266. from open_webui.utils.misc import (
  267. add_or_update_system_message,
  268. get_last_user_message,
  269. prepend_to_first_user_message_content,
  270. openai_chat_chunk_message_template,
  271. openai_chat_completion_message_template,
  272. )
  273. from open_webui.utils.payload import (
  274. apply_model_params_to_body_openai,
  275. apply_model_system_prompt_to_body,
  276. )
  277. from open_webui.utils.payload import convert_payload_openai_to_ollama
  278. from open_webui.utils.response import (
  279. convert_response_ollama_to_openai,
  280. convert_streaming_response_ollama_to_openai,
  281. )
  282. from open_webui.utils.task import (
  283. get_task_model_id,
  284. rag_template,
  285. tools_function_calling_generation_template,
  286. )
  287. from open_webui.utils.tools import get_tools
  288. from open_webui.utils.access_control import has_access
  289. from open_webui.utils.auth import (
  290. decode_token,
  291. get_admin_user,
  292. get_current_user,
  293. get_http_authorization_cred,
  294. get_verified_user,
  295. )
  296. from open_webui.utils.oauth import oauth_manager
  297. from open_webui.utils.security_headers import SecurityHeadersMiddleware
  298. if SAFE_MODE:
  299. print("SAFE MODE ENABLED")
  300. Functions.deactivate_all_functions()
  301. logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
  302. log = logging.getLogger(__name__)
  303. log.setLevel(SRC_LOG_LEVELS["MAIN"])
  304. class SPAStaticFiles(StaticFiles):
  305. async def get_response(self, path: str, scope):
  306. try:
  307. return await super().get_response(path, scope)
  308. except (HTTPException, StarletteHTTPException) as ex:
  309. if ex.status_code == 404:
  310. return await super().get_response("index.html", scope)
  311. else:
  312. raise ex
  313. print(
  314. rf"""
  315. ___ __ __ _ _ _ ___
  316. / _ \ _ __ ___ _ __ \ \ / /__| |__ | | | |_ _|
  317. | | | | '_ \ / _ \ '_ \ \ \ /\ / / _ \ '_ \| | | || |
  318. | |_| | |_) | __/ | | | \ V V / __/ |_) | |_| || |
  319. \___/| .__/ \___|_| |_| \_/\_/ \___|_.__/ \___/|___|
  320. |_|
  321. v{VERSION} - building the best open-source AI user interface.
  322. {f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
  323. https://github.com/open-webui/open-webui
  324. """
  325. )
  326. @asynccontextmanager
  327. async def lifespan(app: FastAPI):
  328. if RESET_CONFIG_ON_START:
  329. reset_config()
  330. asyncio.create_task(periodic_usage_pool_cleanup())
  331. yield
  332. app = FastAPI(
  333. docs_url="/docs" if ENV == "dev" else None,
  334. openapi_url="/openapi.json" if ENV == "dev" else None,
  335. redoc_url=None,
  336. lifespan=lifespan,
  337. )
  338. app.state.config = AppConfig()
  339. ########################################
  340. #
  341. # OLLAMA
  342. #
  343. ########################################
  344. app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
  345. app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
  346. app.state.config.OLLAMA_API_CONFIGS = OLLAMA_API_CONFIGS
  347. app.state.OLLAMA_MODELS = {}
  348. ########################################
  349. #
  350. # OPENAI
  351. #
  352. ########################################
  353. app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
  354. app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
  355. app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
  356. app.state.config.OPENAI_API_CONFIGS = OPENAI_API_CONFIGS
  357. app.state.OPENAI_MODELS = {}
  358. ########################################
  359. #
  360. # WEBUI
  361. #
  362. ########################################
  363. app.state.config.ENABLE_SIGNUP = ENABLE_SIGNUP
  364. app.state.config.ENABLE_LOGIN_FORM = ENABLE_LOGIN_FORM
  365. app.state.config.ENABLE_API_KEY = ENABLE_API_KEY
  366. app.state.config.JWT_EXPIRES_IN = JWT_EXPIRES_IN
  367. app.state.config.SHOW_ADMIN_DETAILS = SHOW_ADMIN_DETAILS
  368. app.state.config.ADMIN_EMAIL = ADMIN_EMAIL
  369. app.state.config.DEFAULT_MODELS = DEFAULT_MODELS
  370. app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
  371. app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE
  372. app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
  373. app.state.config.WEBHOOK_URL = WEBHOOK_URL
  374. app.state.config.BANNERS = WEBUI_BANNERS
  375. app.state.config.MODEL_ORDER_LIST = MODEL_ORDER_LIST
  376. app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING
  377. app.state.config.ENABLE_MESSAGE_RATING = ENABLE_MESSAGE_RATING
  378. app.state.config.ENABLE_EVALUATION_ARENA_MODELS = ENABLE_EVALUATION_ARENA_MODELS
  379. app.state.config.EVALUATION_ARENA_MODELS = EVALUATION_ARENA_MODELS
  380. app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
  381. app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
  382. app.state.config.OAUTH_EMAIL_CLAIM = OAUTH_EMAIL_CLAIM
  383. app.state.config.ENABLE_OAUTH_ROLE_MANAGEMENT = ENABLE_OAUTH_ROLE_MANAGEMENT
  384. app.state.config.OAUTH_ROLES_CLAIM = OAUTH_ROLES_CLAIM
  385. app.state.config.OAUTH_ALLOWED_ROLES = OAUTH_ALLOWED_ROLES
  386. app.state.config.OAUTH_ADMIN_ROLES = OAUTH_ADMIN_ROLES
  387. app.state.config.ENABLE_LDAP = ENABLE_LDAP
  388. app.state.config.LDAP_SERVER_LABEL = LDAP_SERVER_LABEL
  389. app.state.config.LDAP_SERVER_HOST = LDAP_SERVER_HOST
  390. app.state.config.LDAP_SERVER_PORT = LDAP_SERVER_PORT
  391. app.state.config.LDAP_ATTRIBUTE_FOR_USERNAME = LDAP_ATTRIBUTE_FOR_USERNAME
  392. app.state.config.LDAP_APP_DN = LDAP_APP_DN
  393. app.state.config.LDAP_APP_PASSWORD = LDAP_APP_PASSWORD
  394. app.state.config.LDAP_SEARCH_BASE = LDAP_SEARCH_BASE
  395. app.state.config.LDAP_SEARCH_FILTERS = LDAP_SEARCH_FILTERS
  396. app.state.config.LDAP_USE_TLS = LDAP_USE_TLS
  397. app.state.config.LDAP_CA_CERT_FILE = LDAP_CA_CERT_FILE
  398. app.state.config.LDAP_CIPHERS = LDAP_CIPHERS
  399. app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
  400. app.state.AUTH_TRUSTED_NAME_HEADER = WEBUI_AUTH_TRUSTED_NAME_HEADER
  401. app.state.TOOLS = {}
  402. app.state.FUNCTIONS = {}
  403. ########################################
  404. #
  405. # RETRIEVAL
  406. #
  407. ########################################
  408. app.state.config.TOP_K = RAG_TOP_K
  409. app.state.config.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD
  410. app.state.config.FILE_MAX_SIZE = RAG_FILE_MAX_SIZE
  411. app.state.config.FILE_MAX_COUNT = RAG_FILE_MAX_COUNT
  412. app.state.config.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
  413. app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
  414. ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
  415. )
  416. app.state.config.CONTENT_EXTRACTION_ENGINE = CONTENT_EXTRACTION_ENGINE
  417. app.state.config.TIKA_SERVER_URL = TIKA_SERVER_URL
  418. app.state.config.TEXT_SPLITTER = RAG_TEXT_SPLITTER
  419. app.state.config.TIKTOKEN_ENCODING_NAME = TIKTOKEN_ENCODING_NAME
  420. app.state.config.CHUNK_SIZE = CHUNK_SIZE
  421. app.state.config.CHUNK_OVERLAP = CHUNK_OVERLAP
  422. app.state.config.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
  423. app.state.config.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
  424. app.state.config.RAG_EMBEDDING_BATCH_SIZE = RAG_EMBEDDING_BATCH_SIZE
  425. app.state.config.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
  426. app.state.config.RAG_TEMPLATE = RAG_TEMPLATE
  427. app.state.config.RAG_OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
  428. app.state.config.RAG_OPENAI_API_KEY = RAG_OPENAI_API_KEY
  429. app.state.config.RAG_OLLAMA_BASE_URL = RAG_OLLAMA_BASE_URL
  430. app.state.config.RAG_OLLAMA_API_KEY = RAG_OLLAMA_API_KEY
  431. app.state.config.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES
  432. app.state.config.YOUTUBE_LOADER_LANGUAGE = YOUTUBE_LOADER_LANGUAGE
  433. app.state.config.YOUTUBE_LOADER_PROXY_URL = YOUTUBE_LOADER_PROXY_URL
  434. app.state.config.ENABLE_RAG_WEB_SEARCH = ENABLE_RAG_WEB_SEARCH
  435. app.state.config.RAG_WEB_SEARCH_ENGINE = RAG_WEB_SEARCH_ENGINE
  436. app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = RAG_WEB_SEARCH_DOMAIN_FILTER_LIST
  437. app.state.config.SEARXNG_QUERY_URL = SEARXNG_QUERY_URL
  438. app.state.config.GOOGLE_PSE_API_KEY = GOOGLE_PSE_API_KEY
  439. app.state.config.GOOGLE_PSE_ENGINE_ID = GOOGLE_PSE_ENGINE_ID
  440. app.state.config.BRAVE_SEARCH_API_KEY = BRAVE_SEARCH_API_KEY
  441. app.state.config.KAGI_SEARCH_API_KEY = KAGI_SEARCH_API_KEY
  442. app.state.config.MOJEEK_SEARCH_API_KEY = MOJEEK_SEARCH_API_KEY
  443. app.state.config.SERPSTACK_API_KEY = SERPSTACK_API_KEY
  444. app.state.config.SERPSTACK_HTTPS = SERPSTACK_HTTPS
  445. app.state.config.SERPER_API_KEY = SERPER_API_KEY
  446. app.state.config.SERPLY_API_KEY = SERPLY_API_KEY
  447. app.state.config.TAVILY_API_KEY = TAVILY_API_KEY
  448. app.state.config.SEARCHAPI_API_KEY = SEARCHAPI_API_KEY
  449. app.state.config.SEARCHAPI_ENGINE = SEARCHAPI_ENGINE
  450. app.state.config.JINA_API_KEY = JINA_API_KEY
  451. app.state.config.BING_SEARCH_V7_ENDPOINT = BING_SEARCH_V7_ENDPOINT
  452. app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = BING_SEARCH_V7_SUBSCRIPTION_KEY
  453. app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = RAG_WEB_SEARCH_RESULT_COUNT
  454. app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = RAG_WEB_SEARCH_CONCURRENT_REQUESTS
  455. app.state.EMBEDDING_FUNCTION = None
  456. app.state.ef = None
  457. app.state.rf = None
  458. app.state.YOUTUBE_LOADER_TRANSLATION = None
  459. app.state.EMBEDDING_FUNCTION = get_embedding_function(
  460. app.state.config.RAG_EMBEDDING_ENGINE,
  461. app.state.config.RAG_EMBEDDING_MODEL,
  462. app.state.ef,
  463. (
  464. app.state.config.RAG_OPENAI_API_BASE_URL
  465. if app.state.config.RAG_EMBEDDING_ENGINE == "openai"
  466. else app.state.config.RAG_OLLAMA_BASE_URL
  467. ),
  468. (
  469. app.state.config.RAG_OPENAI_API_KEY
  470. if app.state.config.RAG_EMBEDDING_ENGINE == "openai"
  471. else app.state.config.RAG_OLLAMA_API_KEY
  472. ),
  473. app.state.config.RAG_EMBEDDING_BATCH_SIZE,
  474. )
  475. try:
  476. app.state.ef = get_ef(
  477. app.state.config.RAG_EMBEDDING_ENGINE,
  478. app.state.config.RAG_EMBEDDING_MODEL,
  479. RAG_EMBEDDING_MODEL_AUTO_UPDATE,
  480. )
  481. app.state.rf = get_rf(
  482. app.state.config.RAG_RERANKING_MODEL,
  483. RAG_RERANKING_MODEL_AUTO_UPDATE,
  484. )
  485. except Exception as e:
  486. log.error(f"Error updating models: {e}")
  487. pass
  488. ########################################
  489. #
  490. # IMAGES
  491. #
  492. ########################################
  493. app.state.config.IMAGE_GENERATION_ENGINE = IMAGE_GENERATION_ENGINE
  494. app.state.config.ENABLE_IMAGE_GENERATION = ENABLE_IMAGE_GENERATION
  495. app.state.config.IMAGES_OPENAI_API_BASE_URL = IMAGES_OPENAI_API_BASE_URL
  496. app.state.config.IMAGES_OPENAI_API_KEY = IMAGES_OPENAI_API_KEY
  497. app.state.config.IMAGE_GENERATION_MODEL = IMAGE_GENERATION_MODEL
  498. app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
  499. app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH
  500. app.state.config.AUTOMATIC1111_CFG_SCALE = AUTOMATIC1111_CFG_SCALE
  501. app.state.config.AUTOMATIC1111_SAMPLER = AUTOMATIC1111_SAMPLER
  502. app.state.config.AUTOMATIC1111_SCHEDULER = AUTOMATIC1111_SCHEDULER
  503. app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL
  504. app.state.config.COMFYUI_WORKFLOW = COMFYUI_WORKFLOW
  505. app.state.config.COMFYUI_WORKFLOW_NODES = COMFYUI_WORKFLOW_NODES
  506. app.state.config.IMAGE_SIZE = IMAGE_SIZE
  507. app.state.config.IMAGE_STEPS = IMAGE_STEPS
  508. ########################################
  509. #
  510. # AUDIO
  511. #
  512. ########################################
  513. app.state.config.STT_OPENAI_API_BASE_URL = AUDIO_STT_OPENAI_API_BASE_URL
  514. app.state.config.STT_OPENAI_API_KEY = AUDIO_STT_OPENAI_API_KEY
  515. app.state.config.STT_ENGINE = AUDIO_STT_ENGINE
  516. app.state.config.STT_MODEL = AUDIO_STT_MODEL
  517. app.state.config.WHISPER_MODEL = WHISPER_MODEL
  518. app.state.config.TTS_OPENAI_API_BASE_URL = AUDIO_TTS_OPENAI_API_BASE_URL
  519. app.state.config.TTS_OPENAI_API_KEY = AUDIO_TTS_OPENAI_API_KEY
  520. app.state.config.TTS_ENGINE = AUDIO_TTS_ENGINE
  521. app.state.config.TTS_MODEL = AUDIO_TTS_MODEL
  522. app.state.config.TTS_VOICE = AUDIO_TTS_VOICE
  523. app.state.config.TTS_API_KEY = AUDIO_TTS_API_KEY
  524. app.state.config.TTS_SPLIT_ON = AUDIO_TTS_SPLIT_ON
  525. app.state.config.TTS_AZURE_SPEECH_REGION = AUDIO_TTS_AZURE_SPEECH_REGION
  526. app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT = AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT
  527. app.state.faster_whisper_model = None
  528. app.state.speech_synthesiser = None
  529. app.state.speech_speaker_embeddings_dataset = None
  530. ########################################
  531. #
  532. # TASKS
  533. #
  534. ########################################
  535. app.state.config.TASK_MODEL = TASK_MODEL
  536. app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL
  537. app.state.config.ENABLE_SEARCH_QUERY_GENERATION = ENABLE_SEARCH_QUERY_GENERATION
  538. app.state.config.ENABLE_RETRIEVAL_QUERY_GENERATION = ENABLE_RETRIEVAL_QUERY_GENERATION
  539. app.state.config.ENABLE_AUTOCOMPLETE_GENERATION = ENABLE_AUTOCOMPLETE_GENERATION
  540. app.state.config.ENABLE_TAGS_GENERATION = ENABLE_TAGS_GENERATION
  541. app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
  542. app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE = TAGS_GENERATION_PROMPT_TEMPLATE
  543. app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
  544. TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
  545. )
  546. app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE = QUERY_GENERATION_PROMPT_TEMPLATE
  547. app.state.config.AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE = (
  548. AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE
  549. )
  550. app.state.config.AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH = (
  551. AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH
  552. )
  553. ########################################
  554. #
  555. # WEBUI
  556. #
  557. ########################################
  558. app.state.MODELS = {}
  559. ##################################
  560. #
  561. # ChatCompletion Middleware
  562. #
  563. ##################################
  564. async def chat_completion_filter_functions_handler(body, model, extra_params):
  565. skip_files = None
  566. def get_filter_function_ids(model):
  567. def get_priority(function_id):
  568. function = Functions.get_function_by_id(function_id)
  569. if function is not None and hasattr(function, "valves"):
  570. # TODO: Fix FunctionModel
  571. return (function.valves if function.valves else {}).get("priority", 0)
  572. return 0
  573. filter_ids = [
  574. function.id for function in Functions.get_global_filter_functions()
  575. ]
  576. if "info" in model and "meta" in model["info"]:
  577. filter_ids.extend(model["info"]["meta"].get("filterIds", []))
  578. filter_ids = list(set(filter_ids))
  579. enabled_filter_ids = [
  580. function.id
  581. for function in Functions.get_functions_by_type("filter", active_only=True)
  582. ]
  583. filter_ids = [
  584. filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids
  585. ]
  586. filter_ids.sort(key=get_priority)
  587. return filter_ids
  588. filter_ids = get_filter_function_ids(model)
  589. for filter_id in filter_ids:
  590. filter = Functions.get_function_by_id(filter_id)
  591. if not filter:
  592. continue
  593. if filter_id in app.state.FUNCTIONS:
  594. function_module = app.state.FUNCTIONS[filter_id]
  595. else:
  596. function_module, _, _ = load_function_module_by_id(filter_id)
  597. app.state.FUNCTIONS[filter_id] = function_module
  598. # Check if the function has a file_handler variable
  599. if hasattr(function_module, "file_handler"):
  600. skip_files = function_module.file_handler
  601. if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
  602. valves = Functions.get_function_valves_by_id(filter_id)
  603. function_module.valves = function_module.Valves(
  604. **(valves if valves else {})
  605. )
  606. if not hasattr(function_module, "inlet"):
  607. continue
  608. try:
  609. inlet = function_module.inlet
  610. # Get the signature of the function
  611. sig = inspect.signature(inlet)
  612. params = {"body": body} | {
  613. k: v
  614. for k, v in {
  615. **extra_params,
  616. "__model__": model,
  617. "__id__": filter_id,
  618. }.items()
  619. if k in sig.parameters
  620. }
  621. if "__user__" in params and hasattr(function_module, "UserValves"):
  622. try:
  623. params["__user__"]["valves"] = function_module.UserValves(
  624. **Functions.get_user_valves_by_id_and_user_id(
  625. filter_id, params["__user__"]["id"]
  626. )
  627. )
  628. except Exception as e:
  629. print(e)
  630. if inspect.iscoroutinefunction(inlet):
  631. body = await inlet(**params)
  632. else:
  633. body = inlet(**params)
  634. except Exception as e:
  635. print(f"Error: {e}")
  636. raise e
  637. if skip_files and "files" in body.get("metadata", {}):
  638. del body["metadata"]["files"]
  639. return body, {}
  640. def get_tools_function_calling_payload(messages, task_model_id, content):
  641. user_message = get_last_user_message(messages)
  642. history = "\n".join(
  643. f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\""
  644. for message in messages[::-1][:4]
  645. )
  646. prompt = f"History:\n{history}\nQuery: {user_message}"
  647. return {
  648. "model": task_model_id,
  649. "messages": [
  650. {"role": "system", "content": content},
  651. {"role": "user", "content": f"Query: {prompt}"},
  652. ],
  653. "stream": False,
  654. "metadata": {"task": str(TASKS.FUNCTION_CALLING)},
  655. }
  656. async def get_content_from_response(response) -> Optional[str]:
  657. content = None
  658. if hasattr(response, "body_iterator"):
  659. async for chunk in response.body_iterator:
  660. data = json.loads(chunk.decode("utf-8"))
  661. content = data["choices"][0]["message"]["content"]
  662. # Cleanup any remaining background tasks if necessary
  663. if response.background is not None:
  664. await response.background()
  665. else:
  666. content = response["choices"][0]["message"]["content"]
  667. return content
  668. async def chat_completion_tools_handler(
  669. body: dict, user: UserModel, models, extra_params: dict
  670. ) -> tuple[dict, dict]:
  671. # If tool_ids field is present, call the functions
  672. metadata = body.get("metadata", {})
  673. tool_ids = metadata.get("tool_ids", None)
  674. log.debug(f"{tool_ids=}")
  675. if not tool_ids:
  676. return body, {}
  677. skip_files = False
  678. sources = []
  679. task_model_id = get_task_model_id(
  680. body["model"],
  681. app.state.config.TASK_MODEL,
  682. app.state.config.TASK_MODEL_EXTERNAL,
  683. models,
  684. )
  685. tools = get_tools(
  686. app,
  687. tool_ids,
  688. user,
  689. {
  690. **extra_params,
  691. "__model__": models[task_model_id],
  692. "__messages__": body["messages"],
  693. "__files__": metadata.get("files", []),
  694. },
  695. )
  696. log.info(f"{tools=}")
  697. specs = [tool["spec"] for tool in tools.values()]
  698. tools_specs = json.dumps(specs)
  699. if app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE != "":
  700. template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
  701. else:
  702. template = """Available Tools: {{TOOLS}}\nReturn an empty string if no tools match the query. If a function tool matches, construct and return a JSON object in the format {\"name\": \"functionName\", \"parameters\": {\"requiredFunctionParamKey\": \"requiredFunctionParamValue\"}} using the appropriate tool and its parameters. Only return the object and limit the response to the JSON object without additional text."""
  703. tools_function_calling_prompt = tools_function_calling_generation_template(
  704. template, tools_specs
  705. )
  706. log.info(f"{tools_function_calling_prompt=}")
  707. payload = get_tools_function_calling_payload(
  708. body["messages"], task_model_id, tools_function_calling_prompt
  709. )
  710. try:
  711. payload = process_pipeline_inlet_filter(request, payload, user, models)
  712. except Exception as e:
  713. raise e
  714. try:
  715. response = await generate_chat_completions(form_data=payload, user=user)
  716. log.debug(f"{response=}")
  717. content = await get_content_from_response(response)
  718. log.debug(f"{content=}")
  719. if not content:
  720. return body, {}
  721. try:
  722. content = content[content.find("{") : content.rfind("}") + 1]
  723. if not content:
  724. raise Exception("No JSON object found in the response")
  725. result = json.loads(content)
  726. tool_function_name = result.get("name", None)
  727. if tool_function_name not in tools:
  728. return body, {}
  729. tool_function_params = result.get("parameters", {})
  730. try:
  731. required_params = (
  732. tools[tool_function_name]
  733. .get("spec", {})
  734. .get("parameters", {})
  735. .get("required", [])
  736. )
  737. tool_function = tools[tool_function_name]["callable"]
  738. tool_function_params = {
  739. k: v
  740. for k, v in tool_function_params.items()
  741. if k in required_params
  742. }
  743. tool_output = await tool_function(**tool_function_params)
  744. except Exception as e:
  745. tool_output = str(e)
  746. if isinstance(tool_output, str):
  747. if tools[tool_function_name]["citation"]:
  748. sources.append(
  749. {
  750. "source": {
  751. "name": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
  752. },
  753. "document": [tool_output],
  754. "metadata": [
  755. {
  756. "source": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
  757. }
  758. ],
  759. }
  760. )
  761. else:
  762. sources.append(
  763. {
  764. "source": {},
  765. "document": [tool_output],
  766. "metadata": [
  767. {
  768. "source": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
  769. }
  770. ],
  771. }
  772. )
  773. if tools[tool_function_name]["file_handler"]:
  774. skip_files = True
  775. except Exception as e:
  776. log.exception(f"Error: {e}")
  777. content = None
  778. except Exception as e:
  779. log.exception(f"Error: {e}")
  780. content = None
  781. log.debug(f"tool_contexts: {sources}")
  782. if skip_files and "files" in body.get("metadata", {}):
  783. del body["metadata"]["files"]
  784. return body, {"sources": sources}
  785. async def chat_completion_files_handler(
  786. body: dict, user: UserModel
  787. ) -> tuple[dict, dict[str, list]]:
  788. sources = []
  789. if files := body.get("metadata", {}).get("files", None):
  790. try:
  791. queries_response = await generate_queries(
  792. {
  793. "model": body["model"],
  794. "messages": body["messages"],
  795. "type": "retrieval",
  796. },
  797. user,
  798. )
  799. queries_response = queries_response["choices"][0]["message"]["content"]
  800. try:
  801. bracket_start = queries_response.find("{")
  802. bracket_end = queries_response.rfind("}") + 1
  803. if bracket_start == -1 or bracket_end == -1:
  804. raise Exception("No JSON object found in the response")
  805. queries_response = queries_response[bracket_start:bracket_end]
  806. queries_response = json.loads(queries_response)
  807. except Exception as e:
  808. queries_response = {"queries": [queries_response]}
  809. queries = queries_response.get("queries", [])
  810. except Exception as e:
  811. queries = []
  812. if len(queries) == 0:
  813. queries = [get_last_user_message(body["messages"])]
  814. sources = get_sources_from_files(
  815. files=files,
  816. queries=queries,
  817. embedding_function=app.state.EMBEDDING_FUNCTION,
  818. k=app.state.config.TOP_K,
  819. reranking_function=app.state.rf,
  820. r=app.state.config.RELEVANCE_THRESHOLD,
  821. hybrid_search=app.state.config.ENABLE_RAG_HYBRID_SEARCH,
  822. )
  823. log.debug(f"rag_contexts:sources: {sources}")
  824. return body, {"sources": sources}
  825. async def get_body_and_model_and_user(request, models):
  826. # Read the original request body
  827. body = await request.body()
  828. body_str = body.decode("utf-8")
  829. body = json.loads(body_str) if body_str else {}
  830. model_id = body["model"]
  831. if model_id not in models:
  832. raise Exception("Model not found")
  833. model = models[model_id]
  834. user = get_current_user(
  835. request,
  836. get_http_authorization_cred(request.headers.get("Authorization")),
  837. )
  838. return body, model, user
  839. class ChatCompletionMiddleware(BaseHTTPMiddleware):
  840. async def dispatch(self, request: Request, call_next):
  841. if not request.method == "POST" and any(
  842. endpoint in request.url.path
  843. for endpoint in ["/ollama/api/chat", "/chat/completions"]
  844. ):
  845. return await call_next(request)
  846. log.debug(f"request.url.path: {request.url.path}")
  847. model_list = await get_all_models()
  848. models = {model["id"]: model for model in model_list}
  849. try:
  850. body, model, user = await get_body_and_model_and_user(request, models)
  851. except Exception as e:
  852. return JSONResponse(
  853. status_code=status.HTTP_400_BAD_REQUEST,
  854. content={"detail": str(e)},
  855. )
  856. model_info = Models.get_model_by_id(model["id"])
  857. if user.role == "user" and not BYPASS_MODEL_ACCESS_CONTROL:
  858. if model.get("arena"):
  859. if not has_access(
  860. user.id,
  861. type="read",
  862. access_control=model.get("info", {})
  863. .get("meta", {})
  864. .get("access_control", {}),
  865. ):
  866. raise HTTPException(
  867. status_code=403,
  868. detail="Model not found",
  869. )
  870. else:
  871. if not model_info:
  872. return JSONResponse(
  873. status_code=status.HTTP_404_NOT_FOUND,
  874. content={"detail": "Model not found"},
  875. )
  876. elif not (
  877. user.id == model_info.user_id
  878. or has_access(
  879. user.id, type="read", access_control=model_info.access_control
  880. )
  881. ):
  882. return JSONResponse(
  883. status_code=status.HTTP_403_FORBIDDEN,
  884. content={"detail": "User does not have access to the model"},
  885. )
  886. metadata = {
  887. "chat_id": body.pop("chat_id", None),
  888. "message_id": body.pop("id", None),
  889. "session_id": body.pop("session_id", None),
  890. "tool_ids": body.get("tool_ids", None),
  891. "files": body.get("files", None),
  892. }
  893. body["metadata"] = metadata
  894. extra_params = {
  895. "__event_emitter__": get_event_emitter(metadata),
  896. "__event_call__": get_event_call(metadata),
  897. "__user__": {
  898. "id": user.id,
  899. "email": user.email,
  900. "name": user.name,
  901. "role": user.role,
  902. },
  903. "__metadata__": metadata,
  904. }
  905. # Initialize data_items to store additional data to be sent to the client
  906. # Initialize contexts and citation
  907. data_items = []
  908. sources = []
  909. try:
  910. body, flags = await chat_completion_filter_functions_handler(
  911. body, model, extra_params
  912. )
  913. except Exception as e:
  914. return JSONResponse(
  915. status_code=status.HTTP_400_BAD_REQUEST,
  916. content={"detail": str(e)},
  917. )
  918. tool_ids = body.pop("tool_ids", None)
  919. files = body.pop("files", None)
  920. metadata = {
  921. **metadata,
  922. "tool_ids": tool_ids,
  923. "files": files,
  924. }
  925. body["metadata"] = metadata
  926. try:
  927. body, flags = await chat_completion_tools_handler(
  928. body, user, models, extra_params
  929. )
  930. sources.extend(flags.get("sources", []))
  931. except Exception as e:
  932. log.exception(e)
  933. try:
  934. body, flags = await chat_completion_files_handler(body, user)
  935. sources.extend(flags.get("sources", []))
  936. except Exception as e:
  937. log.exception(e)
  938. # If context is not empty, insert it into the messages
  939. if len(sources) > 0:
  940. context_string = ""
  941. for source_idx, source in enumerate(sources):
  942. source_id = source.get("source", {}).get("name", "")
  943. if "document" in source:
  944. for doc_idx, doc_context in enumerate(source["document"]):
  945. metadata = source.get("metadata")
  946. doc_source_id = None
  947. if metadata:
  948. doc_source_id = metadata[doc_idx].get("source", source_id)
  949. if source_id:
  950. context_string += f"<source><source_id>{doc_source_id if doc_source_id is not None else source_id}</source_id><source_context>{doc_context}</source_context></source>\n"
  951. else:
  952. # If there is no source_id, then do not include the source_id tag
  953. context_string += f"<source><source_context>{doc_context}</source_context></source>\n"
  954. context_string = context_string.strip()
  955. prompt = get_last_user_message(body["messages"])
  956. if prompt is None:
  957. raise Exception("No user message found")
  958. if (
  959. app.state.config.RELEVANCE_THRESHOLD == 0
  960. and context_string.strip() == ""
  961. ):
  962. log.debug(
  963. f"With a 0 relevancy threshold for RAG, the context cannot be empty"
  964. )
  965. # Workaround for Ollama 2.0+ system prompt issue
  966. # TODO: replace with add_or_update_system_message
  967. if model["owned_by"] == "ollama":
  968. body["messages"] = prepend_to_first_user_message_content(
  969. rag_template(app.state.config.RAG_TEMPLATE, context_string, prompt),
  970. body["messages"],
  971. )
  972. else:
  973. body["messages"] = add_or_update_system_message(
  974. rag_template(app.state.config.RAG_TEMPLATE, context_string, prompt),
  975. body["messages"],
  976. )
  977. # If there are citations, add them to the data_items
  978. sources = [
  979. source for source in sources if source.get("source", {}).get("name", "")
  980. ]
  981. if len(sources) > 0:
  982. data_items.append({"sources": sources})
  983. modified_body_bytes = json.dumps(body).encode("utf-8")
  984. # Replace the request body with the modified one
  985. request._body = modified_body_bytes
  986. # Set custom header to ensure content-length matches new body length
  987. request.headers.__dict__["_list"] = [
  988. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  989. *[(k, v) for k, v in request.headers.raw if k.lower() != b"content-length"],
  990. ]
  991. response = await call_next(request)
  992. if not isinstance(response, StreamingResponse):
  993. return response
  994. content_type = response.headers["Content-Type"]
  995. is_openai = "text/event-stream" in content_type
  996. is_ollama = "application/x-ndjson" in content_type
  997. if not is_openai and not is_ollama:
  998. return response
  999. def wrap_item(item):
  1000. return f"data: {item}\n\n" if is_openai else f"{item}\n"
  1001. async def stream_wrapper(original_generator, data_items):
  1002. for item in data_items:
  1003. yield wrap_item(json.dumps(item))
  1004. async for data in original_generator:
  1005. yield data
  1006. return StreamingResponse(
  1007. stream_wrapper(response.body_iterator, data_items),
  1008. headers=dict(response.headers),
  1009. )
  1010. async def _receive(self, body: bytes):
  1011. return {"type": "http.request", "body": body, "more_body": False}
  1012. app.add_middleware(ChatCompletionMiddleware)
  1013. class PipelineMiddleware(BaseHTTPMiddleware):
  1014. async def dispatch(self, request: Request, call_next):
  1015. if not request.method == "POST" and any(
  1016. endpoint in request.url.path
  1017. for endpoint in ["/ollama/api/chat", "/chat/completions"]
  1018. ):
  1019. return await call_next(request)
  1020. log.debug(f"request.url.path: {request.url.path}")
  1021. # Read the original request body
  1022. body = await request.body()
  1023. # Decode body to string
  1024. body_str = body.decode("utf-8")
  1025. # Parse string to JSON
  1026. data = json.loads(body_str) if body_str else {}
  1027. try:
  1028. user = get_current_user(
  1029. request,
  1030. get_http_authorization_cred(request.headers["Authorization"]),
  1031. )
  1032. except KeyError as e:
  1033. if len(e.args) > 1:
  1034. return JSONResponse(
  1035. status_code=e.args[0],
  1036. content={"detail": e.args[1]},
  1037. )
  1038. else:
  1039. return JSONResponse(
  1040. status_code=status.HTTP_401_UNAUTHORIZED,
  1041. content={"detail": "Not authenticated"},
  1042. )
  1043. except HTTPException as e:
  1044. return JSONResponse(
  1045. status_code=e.status_code,
  1046. content={"detail": e.detail},
  1047. )
  1048. await get_all_models()
  1049. models = app.state.MODELS
  1050. try:
  1051. data = process_pipeline_inlet_filter(request, data, user, models)
  1052. except Exception as e:
  1053. if len(e.args) > 1:
  1054. return JSONResponse(
  1055. status_code=e.args[0],
  1056. content={"detail": e.args[1]},
  1057. )
  1058. else:
  1059. return JSONResponse(
  1060. status_code=status.HTTP_400_BAD_REQUEST,
  1061. content={"detail": str(e)},
  1062. )
  1063. modified_body_bytes = json.dumps(data).encode("utf-8")
  1064. # Replace the request body with the modified one
  1065. request._body = modified_body_bytes
  1066. # Set custom header to ensure content-length matches new body length
  1067. request.headers.__dict__["_list"] = [
  1068. (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
  1069. *[(k, v) for k, v in request.headers.raw if k.lower() != b"content-length"],
  1070. ]
  1071. response = await call_next(request)
  1072. return response
  1073. async def _receive(self, body: bytes):
  1074. return {"type": "http.request", "body": body, "more_body": False}
  1075. app.add_middleware(PipelineMiddleware)
  1076. class RedirectMiddleware(BaseHTTPMiddleware):
  1077. async def dispatch(self, request: Request, call_next):
  1078. # Check if the request is a GET request
  1079. if request.method == "GET":
  1080. path = request.url.path
  1081. query_params = dict(parse_qs(urlparse(str(request.url)).query))
  1082. # Check for the specific watch path and the presence of 'v' parameter
  1083. if path.endswith("/watch") and "v" in query_params:
  1084. video_id = query_params["v"][0] # Extract the first 'v' parameter
  1085. encoded_video_id = urlencode({"youtube": video_id})
  1086. redirect_url = f"/?{encoded_video_id}"
  1087. return RedirectResponse(url=redirect_url)
  1088. # Proceed with the normal flow of other requests
  1089. response = await call_next(request)
  1090. return response
  1091. # Add the middleware to the app
  1092. app.add_middleware(RedirectMiddleware)
  1093. app.add_middleware(SecurityHeadersMiddleware)
  1094. @app.middleware("http")
  1095. async def commit_session_after_request(request: Request, call_next):
  1096. response = await call_next(request)
  1097. # log.debug("Commit session after request")
  1098. Session.commit()
  1099. return response
  1100. @app.middleware("http")
  1101. async def check_url(request: Request, call_next):
  1102. start_time = int(time.time())
  1103. request.state.enable_api_key = app.state.config.ENABLE_API_KEY
  1104. response = await call_next(request)
  1105. process_time = int(time.time()) - start_time
  1106. response.headers["X-Process-Time"] = str(process_time)
  1107. return response
  1108. @app.middleware("http")
  1109. async def inspect_websocket(request: Request, call_next):
  1110. if (
  1111. "/ws/socket.io" in request.url.path
  1112. and request.query_params.get("transport") == "websocket"
  1113. ):
  1114. upgrade = (request.headers.get("Upgrade") or "").lower()
  1115. connection = (request.headers.get("Connection") or "").lower().split(",")
  1116. # Check that there's the correct headers for an upgrade, else reject the connection
  1117. # This is to work around this upstream issue: https://github.com/miguelgrinberg/python-engineio/issues/367
  1118. if upgrade != "websocket" or "upgrade" not in connection:
  1119. return JSONResponse(
  1120. status_code=status.HTTP_400_BAD_REQUEST,
  1121. content={"detail": "Invalid WebSocket upgrade request"},
  1122. )
  1123. return await call_next(request)
  1124. app.add_middleware(
  1125. CORSMiddleware,
  1126. allow_origins=CORS_ALLOW_ORIGIN,
  1127. allow_credentials=True,
  1128. allow_methods=["*"],
  1129. allow_headers=["*"],
  1130. )
  1131. app.mount("/ws", socket_app)
  1132. app.include_router(ollama.router, prefix="/ollama", tags=["ollama"])
  1133. app.include_router(openai.router, prefix="/openai", tags=["openai"])
  1134. app.include_router(pipelines.router, prefix="/api/pipelines", tags=["pipelines"])
  1135. app.include_router(tasks.router, prefix="/api/tasks", tags=["tasks"])
  1136. app.include_router(images.router, prefix="/api/v1/images", tags=["images"])
  1137. app.include_router(audio.router, prefix="/api/v1/audio", tags=["audio"])
  1138. app.include_router(retrieval.router, prefix="/api/v1/retrieval", tags=["retrieval"])
  1139. app.include_router(configs.router, prefix="/api/v1/configs", tags=["configs"])
  1140. app.include_router(auths.router, prefix="/api/v1/auths", tags=["auths"])
  1141. app.include_router(users.router, prefix="/api/v1/users", tags=["users"])
  1142. app.include_router(chats.router, prefix="/api/v1/chats", tags=["chats"])
  1143. app.include_router(models.router, prefix="/api/v1/models", tags=["models"])
  1144. app.include_router(knowledge.router, prefix="/api/v1/knowledge", tags=["knowledge"])
  1145. app.include_router(prompts.router, prefix="/api/v1/prompts", tags=["prompts"])
  1146. app.include_router(tools.router, prefix="/api/v1/tools", tags=["tools"])
  1147. app.include_router(memories.router, prefix="/api/v1/memories", tags=["memories"])
  1148. app.include_router(folders.router, prefix="/api/v1/folders", tags=["folders"])
  1149. app.include_router(groups.router, prefix="/api/v1/groups", tags=["groups"])
  1150. app.include_router(files.router, prefix="/api/v1/files", tags=["files"])
  1151. app.include_router(functions.router, prefix="/api/v1/functions", tags=["functions"])
  1152. app.include_router(
  1153. evaluations.router, prefix="/api/v1/evaluations", tags=["evaluations"]
  1154. )
  1155. app.include_router(utils.router, prefix="/api/v1/utils", tags=["utils"])
  1156. ##################################
  1157. #
  1158. # Chat Endpoints
  1159. #
  1160. ##################################
  1161. def get_function_module(pipe_id: str):
  1162. # Check if function is already loaded
  1163. if pipe_id not in app.state.FUNCTIONS:
  1164. function_module, _, _ = load_function_module_by_id(pipe_id)
  1165. app.state.FUNCTIONS[pipe_id] = function_module
  1166. else:
  1167. function_module = app.state.FUNCTIONS[pipe_id]
  1168. if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
  1169. valves = Functions.get_function_valves_by_id(pipe_id)
  1170. function_module.valves = function_module.Valves(**(valves if valves else {}))
  1171. return function_module


async def get_function_models():
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        function_module = get_function_module(pipe.id)

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            sub_pipes = []

            # Check if pipes is a function or a list
            try:
                if callable(function_module.pipes):
                    sub_pipes = function_module.pipes()
                else:
                    sub_pipes = function_module.pipes
            except Exception as e:
                log.exception(e)
                sub_pipes = []

            log.debug(
                f"get_function_models: function '{pipe.id}' is a manifold of {sub_pipes}"
            )

            for p in sub_pipes:
                sub_pipe_id = f'{pipe.id}.{p["id"]}'
                sub_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                pipe_flag = {"type": pipe.type}

                pipe_models.append(
                    {
                        "id": sub_pipe_id,
                        "name": sub_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        else:
            pipe_flag = {"type": "pipe"}
            log.debug(
                f"get_function_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
            )

            pipe_models.append(
                {
                    "id": pipe.id,
                    "name": pipe.name,
                    "object": "model",
                    "created": pipe.created_at,
                    "owned_by": "openai",
                    "pipe": pipe_flag,
                }
            )

    return pipe_models
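

# generate_function_chat_completion() below runs a pipe function as a chat
# backend: it resolves the pipe module, builds its parameters from the request
# body plus whichever __user__/__tools__/__event_*__ extras appear in the
# pipe's signature, and normalises whatever the pipe returns (str, dict,
# BaseModel, (async) generator, or StreamingResponse) into OpenAI-style output,
# streamed as SSE "data: ..." chunks when "stream" is requested.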


async def generate_function_chat_completion(form_data, user, models: dict = {}):
    async def execute_pipe(pipe, params):
        if inspect.iscoroutinefunction(pipe):
            return await pipe(**params)
        else:
            return pipe(**params)

    async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
        if isinstance(res, str):
            return res
        if isinstance(res, Generator):
            return "".join(map(str, res))
        if isinstance(res, AsyncGenerator):
            return "".join([str(stream) async for stream in res])

    def process_line(form_data: dict, line):
        if isinstance(line, BaseModel):
            line = line.model_dump_json()
            line = f"data: {line}"
        if isinstance(line, dict):
            line = f"data: {json.dumps(line)}"

        try:
            line = line.decode("utf-8")
        except Exception:
            pass

        if line.startswith("data:"):
            return f"{line}\n\n"
        else:
            line = openai_chat_chunk_message_template(form_data["model"], line)
            return f"data: {json.dumps(line)}\n\n"

    def get_pipe_id(form_data: dict) -> str:
        pipe_id = form_data["model"]
        if "." in pipe_id:
            pipe_id, _ = pipe_id.split(".", 1)
        return pipe_id

    def get_function_params(function_module, form_data, user, extra_params=None):
        if extra_params is None:
            extra_params = {}

        pipe_id = get_pipe_id(form_data)

        # Get the signature of the function
        sig = inspect.signature(function_module.pipe)
        params = {"body": form_data} | {
            k: v for k, v in extra_params.items() if k in sig.parameters
        }

        if "__user__" in params and hasattr(function_module, "UserValves"):
            user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
            try:
                params["__user__"]["valves"] = function_module.UserValves(**user_valves)
            except Exception as e:
                log.exception(e)
                params["__user__"]["valves"] = function_module.UserValves()

        return params

    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", {})

    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])
    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
        __task__ = metadata.get("task", None)
        __task_body__ = metadata.get("task_body", None)

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        },
        "__metadata__": metadata,
    }

    extra_params["__tools__"] = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": models.get(form_data["model"], None),
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()
        form_data = apply_model_params_to_body_openai(params, form_data)
        form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data.get("stream", False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return

            except Exception as e:
                log.error(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail':str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"
                yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            log.error(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)
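

# get_all_base_models() below aggregates the raw model list from three sources:
# pipe-function models, OpenAI-compatible backends (when ENABLE_OPENAI_API is
# set) and Ollama (when ENABLE_OLLAMA_API is set), plus the optional
# evaluation "arena" entries.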


async def get_all_base_models():
    function_models = []
    openai_models = []
    ollama_models = []

    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await openai.get_all_models()
        openai_models = openai_models["data"]

    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await ollama.get_all_models()
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    function_models = await get_function_models()
    models = function_models + openai_models + ollama_models

    # Add arena models
    if app.state.config.ENABLE_EVALUATION_ARENA_MODELS:
        arena_models = []
        if len(app.state.config.EVALUATION_ARENA_MODELS) > 0:
            arena_models = [
                {
                    "id": model["id"],
                    "name": model["name"],
                    "info": {
                        "meta": model["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
                for model in app.state.config.EVALUATION_ARENA_MODELS
            ]
        else:
            # Add default arena model
            arena_models = [
                {
                    "id": DEFAULT_ARENA_MODEL["id"],
                    "name": DEFAULT_ARENA_MODEL["name"],
                    "info": {
                        "meta": DEFAULT_ARENA_MODEL["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
            ]
        models = models + arena_models

    return models
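

# get_all_models() below is cached briefly (ttl=3) so bursts of requests do not
# refetch every backend. It layers workspace model customisations and action
# functions on top of the base list and mirrors the result into
# app.state.MODELS, keyed by model id.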


@cached(ttl=3)
async def get_all_models():
    models = await get_all_base_models()

    # If there are no non-arena models, return an empty list
    if len([model for model in models if not model.get("arena", False)]) == 0:
        return []

    global_action_ids = [
        function.id for function in Functions.get_global_action_functions()
    ]
    enabled_action_ids = [
        function.id
        for function in Functions.get_functions_by_type("action", active_only=True)
    ]

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        if custom_model.base_model_id is None:
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    if custom_model.is_active:
                        model["name"] = custom_model.name
                        model["info"] = custom_model.model_dump()

                        action_ids = []
                        if "info" in model and "meta" in model["info"]:
                            action_ids.extend(
                                model["info"]["meta"].get("actionIds", [])
                            )

                        model["action_ids"] = action_ids
                    else:
                        models.remove(model)

        elif custom_model.is_active and (
            custom_model.id not in [model["id"] for model in models]
        ):
            owned_by = "openai"
            pipe = None
            action_ids = []

            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    if "pipe" in model:
                        pipe = model["pipe"]
                    break

            if custom_model.meta:
                meta = custom_model.meta.model_dump()
                if "actionIds" in meta:
                    action_ids.extend(meta["actionIds"])

            models.append(
                {
                    "id": f"{custom_model.id}",
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                    **({"pipe": pipe} if pipe is not None else {}),
                    "action_ids": action_ids,
                }
            )

    # Process action_ids to get the actions
    def get_action_items_from_module(function, module):
        actions = []
        if hasattr(module, "actions"):
            actions = module.actions
            return [
                {
                    "id": f"{function.id}.{action['id']}",
                    "name": action.get("name", f"{function.name} ({action['id']})"),
                    "description": function.meta.description,
                    "icon_url": action.get(
                        "icon_url", function.meta.manifest.get("icon_url", None)
                    ),
                }
                for action in actions
            ]
        else:
            return [
                {
                    "id": function.id,
                    "name": function.name,
                    "description": function.meta.description,
                    "icon_url": function.meta.manifest.get("icon_url", None),
                }
            ]

    def get_function_module_by_id(function_id):
        if function_id in app.state.FUNCTIONS:
            function_module = app.state.FUNCTIONS[function_id]
        else:
            function_module, _, _ = load_function_module_by_id(function_id)
            app.state.FUNCTIONS[function_id] = function_module
        return function_module

    for model in models:
        action_ids = [
            action_id
            for action_id in list(set(model.pop("action_ids", []) + global_action_ids))
            if action_id in enabled_action_ids
        ]

        model["actions"] = []
        for action_id in action_ids:
            action_function = Functions.get_function_by_id(action_id)
            if action_function is None:
                raise Exception(f"Action not found: {action_id}")

            function_module = get_function_module_by_id(action_id)
            model["actions"].extend(
                get_action_items_from_module(action_function, function_module)
            )

    log.debug(f"get_all_models() returned {len(models)} models")

    app.state.MODELS = {model["id"]: model for model in models}
    return models
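

# /api/models below is the list the UI consumes: filter pipelines are hidden,
# models are ordered by MODEL_ORDER_LIST when configured (ids not in the list
# sort last, then by name), and users with the "user" role only see models they
# own or can read via access_control unless BYPASS_MODEL_ACCESS_CONTROL is set.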


@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
    models = await get_all_models()

    # Filter out filter pipelines
    models = [
        model
        for model in models
        if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
    ]

    model_order_list = app.state.config.MODEL_ORDER_LIST
    if model_order_list:
        model_order_dict = {model_id: i for i, model_id in enumerate(model_order_list)}
        # Sort models by order list priority, with fallback for those not in the list
        models.sort(
            key=lambda x: (model_order_dict.get(x["id"], float("inf")), x["name"])
        )

    # Filter out models that the user does not have access to
    if user.role == "user" and not BYPASS_MODEL_ACCESS_CONTROL:
        filtered_models = []
        for model in models:
            if model.get("arena"):
                if has_access(
                    user.id,
                    type="read",
                    access_control=model.get("info", {})
                    .get("meta", {})
                    .get("access_control", {}),
                ):
                    filtered_models.append(model)
                continue

            model_info = Models.get_model_by_id(model["id"])
            if model_info:
                if user.id == model_info.user_id or has_access(
                    user.id, type="read", access_control=model_info.access_control
                ):
                    filtered_models.append(model)

        models = filtered_models

    log.debug(
        f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}"
    )

    return {"data": models}


@app.get("/api/models/base")
async def get_base_models(user=Depends(get_admin_user)):
    models = await get_all_base_models()

    # Filter out arena models
    models = [model for model in models if not model.get("arena", False)]
    return {"data": models}
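

# /api/chat/completions below is the single entry point for chat generation.
# After the access checks it routes by model type: "arena" models pick a random
# member model and re-enter this handler with bypass_filter=True, "pipe" models
# go through generate_function_chat_completion, Ollama models have their
# payload converted to the /ollama/api/chat format (and the response converted
# back), and everything else is forwarded as an OpenAI chat completion.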


@app.post("/api/chat/completions")
async def generate_chat_completions(
    form_data: dict,
    user=Depends(get_verified_user),
    bypass_filter: bool = False,
):
    if BYPASS_MODEL_ACCESS_CONTROL:
        bypass_filter = True

    # app.state.MODELS is already keyed by model id (see get_all_models)
    models = app.state.MODELS

    model_id = form_data["model"]
    if model_id not in models:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    model = models[model_id]

    # Check if user has access to the model
    if not bypass_filter and user.role == "user":
        if model.get("arena"):
            if not has_access(
                user.id,
                type="read",
                access_control=model.get("info", {})
                .get("meta", {})
                .get("access_control", {}),
            ):
                raise HTTPException(
                    status_code=403,
                    detail="Model not found",
                )
        else:
            model_info = Models.get_model_by_id(model_id)
            if not model_info:
                raise HTTPException(
                    status_code=404,
                    detail="Model not found",
                )
            elif not (
                user.id == model_info.user_id
                or has_access(
                    user.id, type="read", access_control=model_info.access_control
                )
            ):
                raise HTTPException(
                    status_code=403,
                    detail="Model not found",
                )

    if model["owned_by"] == "arena":
        model_ids = model.get("info", {}).get("meta", {}).get("model_ids")
        filter_mode = model.get("info", {}).get("meta", {}).get("filter_mode")
        if model_ids and filter_mode == "exclude":
            model_ids = [
                model["id"]
                for model in await get_all_models()
                if model.get("owned_by") != "arena" and model["id"] not in model_ids
            ]

        selected_model_id = None
        if isinstance(model_ids, list) and model_ids:
            selected_model_id = random.choice(model_ids)
        else:
            model_ids = [
                model["id"]
                for model in await get_all_models()
                if model.get("owned_by") != "arena"
            ]
            selected_model_id = random.choice(model_ids)

        form_data["model"] = selected_model_id

        if form_data.get("stream") == True:

            async def stream_wrapper(stream):
                yield f"data: {json.dumps({'selected_model_id': selected_model_id})}\n\n"
                async for chunk in stream:
                    yield chunk

            response = await generate_chat_completions(
                form_data, user, bypass_filter=True
            )
            return StreamingResponse(
                stream_wrapper(response.body_iterator), media_type="text/event-stream"
            )
        else:
            return {
                **(
                    await generate_chat_completions(form_data, user, bypass_filter=True)
                ),
                "selected_model_id": selected_model_id,
            }

    if model.get("pipe"):
        # The call below does not require bypass_filter because this is the only
        # route that uses this function and the filter has already been applied.
        return await generate_function_chat_completion(
            form_data, user=user, models=models
        )

    if model["owned_by"] == "ollama":
        # Using /ollama/api/chat endpoint
        form_data = convert_payload_openai_to_ollama(form_data)
        response = await generate_ollama_chat_completion(
            form_data=form_data, user=user, bypass_filter=bypass_filter
        )
        if form_data.get("stream"):
            response.headers["content-type"] = "text/event-stream"
            return StreamingResponse(
                convert_streaming_response_ollama_to_openai(response),
                headers=dict(response.headers),
            )
        else:
            return convert_response_ollama_to_openai(response)
    else:
        return await generate_openai_chat_completion(
            form_data, user=user, bypass_filter=bypass_filter
        )
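

# /api/chat/completed below runs after a response has been generated: the body
# is passed through the pipeline outlet filter and then through the outlet()
# hook of every enabled "filter" function (global filters plus the model's
# filterIds), in ascending valve priority order.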


@app.post("/api/chat/completed")
async def chat_completed(
    request: Request, form_data: dict, user=Depends(get_verified_user)
):
    model_list = await get_all_models()
    models = {model["id"]: model for model in model_list}

    data = form_data
    model_id = data["model"]
    if model_id not in models:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    model = models[model_id]

    try:
        data = process_pipeline_outlet_filter(request, data, user, models)
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        )

    __event_emitter__ = get_event_emitter(
        {
            "chat_id": data["chat_id"],
            "message_id": data["id"],
            "session_id": data["session_id"],
        }
    )

    __event_call__ = get_event_call(
        {
            "chat_id": data["chat_id"],
            "message_id": data["id"],
            "session_id": data["session_id"],
        }
    )

    def get_priority(function_id):
        function = Functions.get_function_by_id(function_id)
        if function is not None and hasattr(function, "valves"):
            # TODO: Fix FunctionModel to include valves
            return (function.valves if function.valves else {}).get("priority", 0)
        return 0

    filter_ids = [function.id for function in Functions.get_global_filter_functions()]
    if "info" in model and "meta" in model["info"]:
        filter_ids.extend(model["info"]["meta"].get("filterIds", []))
        filter_ids = list(set(filter_ids))

    enabled_filter_ids = [
        function.id
        for function in Functions.get_functions_by_type("filter", active_only=True)
    ]
    filter_ids = [
        filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids
    ]

    # Sort filter_ids by priority, using the get_priority function
    filter_ids.sort(key=get_priority)

    for filter_id in filter_ids:
        filter = Functions.get_function_by_id(filter_id)
        if not filter:
            continue

        if filter_id in app.state.FUNCTIONS:
            function_module = app.state.FUNCTIONS[filter_id]
        else:
            function_module, _, _ = load_function_module_by_id(filter_id)
            app.state.FUNCTIONS[filter_id] = function_module

        if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
            valves = Functions.get_function_valves_by_id(filter_id)
            function_module.valves = function_module.Valves(
                **(valves if valves else {})
            )

        if not hasattr(function_module, "outlet"):
            continue

        try:
            outlet = function_module.outlet

            # Get the signature of the function
            sig = inspect.signature(outlet)
            params = {"body": data}

            # Extra parameters to be passed to the function
            extra_params = {
                "__model__": model,
                "__id__": filter_id,
                "__event_emitter__": __event_emitter__,
                "__event_call__": __event_call__,
            }

            # Add the extra params that are declared in the function signature
            for key, value in extra_params.items():
                if key in sig.parameters:
                    params[key] = value

            if "__user__" in sig.parameters:
                __user__ = {
                    "id": user.id,
                    "email": user.email,
                    "name": user.name,
                    "role": user.role,
                }

                try:
                    if hasattr(function_module, "UserValves"):
                        __user__["valves"] = function_module.UserValves(
                            **Functions.get_user_valves_by_id_and_user_id(
                                filter_id, user.id
                            )
                        )
                except Exception as e:
                    print(e)

                params = {**params, "__user__": __user__}

            if inspect.iscoroutinefunction(outlet):
                data = await outlet(**params)
            else:
                data = outlet(**params)

        except Exception as e:
            print(f"Error: {e}")
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content={"detail": str(e)},
            )

    return data
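

# /api/chat/actions/{action_id} below invokes a single action function against
# a completed message. An id of the form "<action_id>.<sub_action_id>" selects
# a sub-action; the module's action() hook receives the body plus whichever
# __model__/__id__/__event_*__/__user__ extras appear in its signature.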


@app.post("/api/chat/actions/{action_id}")
async def chat_action(action_id: str, form_data: dict, user=Depends(get_verified_user)):
    if "." in action_id:
        action_id, sub_action_id = action_id.split(".")
    else:
        sub_action_id = None

    action = Functions.get_function_by_id(action_id)
    if not action:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Action not found",
        )

    model_list = await get_all_models()
    models = {model["id"]: model for model in model_list}

    data = form_data
    model_id = data["model"]

    if model_id not in models:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )
    model = models[model_id]

    __event_emitter__ = get_event_emitter(
        {
            "chat_id": data["chat_id"],
            "message_id": data["id"],
            "session_id": data["session_id"],
        }
    )
    __event_call__ = get_event_call(
        {
            "chat_id": data["chat_id"],
            "message_id": data["id"],
            "session_id": data["session_id"],
        }
    )

    if action_id in app.state.FUNCTIONS:
        function_module = app.state.FUNCTIONS[action_id]
    else:
        function_module, _, _ = load_function_module_by_id(action_id)
        app.state.FUNCTIONS[action_id] = function_module

    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        valves = Functions.get_function_valves_by_id(action_id)
        function_module.valves = function_module.Valves(**(valves if valves else {}))

    if hasattr(function_module, "action"):
        try:
            action = function_module.action

            # Get the signature of the function
            sig = inspect.signature(action)
            params = {"body": data}

            # Extra parameters to be passed to the function
            extra_params = {
                "__model__": model,
                "__id__": sub_action_id if sub_action_id is not None else action_id,
                "__event_emitter__": __event_emitter__,
                "__event_call__": __event_call__,
            }

            # Add the extra params that are declared in the function signature
            for key, value in extra_params.items():
                if key in sig.parameters:
                    params[key] = value

            if "__user__" in sig.parameters:
                __user__ = {
                    "id": user.id,
                    "email": user.email,
                    "name": user.name,
                    "role": user.role,
                }

                try:
                    if hasattr(function_module, "UserValves"):
                        __user__["valves"] = function_module.UserValves(
                            **Functions.get_user_valves_by_id_and_user_id(
                                action_id, user.id
                            )
                        )
                except Exception as e:
                    print(e)

                params = {**params, "__user__": __user__}

            if inspect.iscoroutinefunction(action):
                data = await action(**params)
            else:
                data = action(**params)

        except Exception as e:
            print(f"Error: {e}")
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content={"detail": str(e)},
            )

    return data


##################################
#
# Config Endpoints
#
##################################
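

# /api/config below is the unauthenticated bootstrap endpoint the frontend
# calls on load. Anonymous callers only receive the public fields (name,
# version, locale, OAuth providers and the basic auth/signup flags); callers
# with a valid token cookie additionally get the feature flags, default
# models/prompts, audio, file-size and permission settings.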


@app.get("/api/config")
async def get_app_config(request: Request):
    user = None
    if "token" in request.cookies:
        token = request.cookies.get("token")
        try:
            data = decode_token(token)
        except Exception as e:
            log.debug(e)
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid token",
            )
        if data is not None and "id" in data:
            user = Users.get_user_by_id(data["id"])

    onboarding = False
    if user is None:
        user_count = Users.get_num_users()
        onboarding = user_count == 0

    return {
        **({"onboarding": True} if onboarding else {}),
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": str(DEFAULT_LOCALE),
        "oauth": {
            "providers": {
                name: config.get("name", name)
                for name, config in OAUTH_PROVIDERS.items()
            }
        },
        "features": {
            "auth": WEBUI_AUTH,
            "auth_trusted_header": bool(app.state.AUTH_TRUSTED_EMAIL_HEADER),
            "enable_ldap": app.state.config.ENABLE_LDAP,
            "enable_api_key": app.state.config.ENABLE_API_KEY,
            "enable_signup": app.state.config.ENABLE_SIGNUP,
            "enable_login_form": app.state.config.ENABLE_LOGIN_FORM,
            **(
                {
                    "enable_web_search": app.state.config.ENABLE_RAG_WEB_SEARCH,
                    "enable_image_generation": app.state.config.ENABLE_IMAGE_GENERATION,
                    "enable_community_sharing": app.state.config.ENABLE_COMMUNITY_SHARING,
                    "enable_message_rating": app.state.config.ENABLE_MESSAGE_RATING,
                    "enable_admin_export": ENABLE_ADMIN_EXPORT,
                    "enable_admin_chat_access": ENABLE_ADMIN_CHAT_ACCESS,
                }
                if user is not None
                else {}
            ),
        },
        **(
            {
                "default_models": app.state.config.DEFAULT_MODELS,
                "default_prompt_suggestions": app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
                "audio": {
                    "tts": {
                        "engine": app.state.config.TTS_ENGINE,
                        "voice": app.state.config.TTS_VOICE,
                        "split_on": app.state.config.TTS_SPLIT_ON,
                    },
                    "stt": {
                        "engine": app.state.config.STT_ENGINE,
                    },
                },
                "file": {
                    "max_size": app.state.config.FILE_MAX_SIZE,
                    "max_count": app.state.config.FILE_MAX_COUNT,
                },
                "permissions": {**app.state.config.USER_PERMISSIONS},
            }
            if user is not None
            else {}
        ),
    }


class UrlForm(BaseModel):
    url: str


@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    return {
        "url": app.state.config.WEBHOOK_URL,
    }


@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    app.state.config.WEBHOOK_URL = form_data.url
    app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
    return {"url": app.state.config.WEBHOOK_URL}


@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    if OFFLINE_MODE:
        log.debug(
            "Offline mode is enabled, returning current version as latest version"
        )
        return {"current": VERSION, "latest": VERSION}

    try:
        timeout = aiohttp.ClientTimeout(total=1)
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]

                return {"current": VERSION, "latest": latest_version[1:]}
    except Exception as e:
        log.debug(e)
        return {"current": VERSION, "latest": VERSION}


@app.get("/api/changelog")
async def get_app_changelog():
    return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}


############################
# OAuth Login & Callback
############################


# SessionMiddleware is used by authlib for oauth
if len(OAUTH_PROVIDERS) > 0:
    app.add_middleware(
        SessionMiddleware,
        secret_key=WEBUI_SECRET_KEY,
        session_cookie="oui-session",
        same_site=WEBUI_SESSION_COOKIE_SAME_SITE,
        https_only=WEBUI_SESSION_COOKIE_SECURE,
    )


@app.get("/oauth/{provider}/login")
async def oauth_login(provider: str, request: Request):
    return await oauth_manager.handle_login(provider, request)


# OAuth login logic is as follows:
# 1. Attempt to find a user with matching subject ID, tied to the provider
# 2. If OAUTH_MERGE_ACCOUNTS_BY_EMAIL is true, find a user with the email address provided via OAuth
#    - This is considered insecure in general, as OAuth providers do not always verify email addresses
# 3. If there is no user, and ENABLE_OAUTH_SIGNUP is true, create a user
#    - Email addresses are considered unique, so we fail registration if the email address is already taken
@app.get("/oauth/{provider}/callback")
async def oauth_callback(provider: str, request: Request, response: Response):
    return await oauth_manager.handle_callback(provider, request, response)


@app.get("/manifest.json")
async def get_manifest_json():
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "description": "Open WebUI is an open, extensible, user-friendly interface for AI that adapts to your workflow.",
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "orientation": "natural",
        "icons": [
            {
                "src": "/static/logo.png",
                "type": "image/png",
                "sizes": "500x500",
                "purpose": "any",
            },
            {
                "src": "/static/logo.png",
                "type": "image/png",
                "sizes": "500x500",
                "purpose": "maskable",
            },
        ],
    }


@app.get("/opensearch.xml")
async def get_opensearch_xml():
    xml_content = rf"""
    <OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
    <ShortName>{WEBUI_NAME}</ShortName>
    <Description>Search {WEBUI_NAME}</Description>
    <InputEncoding>UTF-8</InputEncoding>
    <Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/static/favicon.png</Image>
    <Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
    <moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
    </OpenSearchDescription>
    """
    return Response(content=xml_content, media_type="application/xml")


@app.get("/health")
async def healthcheck():
    return {"status": True}


@app.get("/health/db")
async def healthcheck_with_db():
    Session.execute(text("SELECT 1;")).all()
    return {"status": True}
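

# Static assets are served by the same app: /static and /cache are plain
# StaticFiles mounts, and when FRONTEND_BUILD_DIR exists the built frontend is
# mounted at "/" via SPAStaticFiles (html=True), presumably so client-side
# routes fall back to index.html; otherwise only the API is served.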


app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")


if os.path.exists(FRONTEND_BUILD_DIR):
    mimetypes.add_type("text/javascript", ".js")
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )