import asyncio
import inspect
import json
import logging
import mimetypes
import os
import shutil
import sys
import time
import random

from typing import AsyncGenerator, Generator, Iterator
from contextlib import asynccontextmanager
from urllib.parse import urlencode, parse_qs, urlparse

from pydantic import BaseModel
from sqlalchemy import text
from typing import Optional
from aiocache import cached

import aiohttp
import requests
from fastapi import (
    Depends,
    FastAPI,
    File,
    Form,
    HTTPException,
    Request,
    UploadFile,
    status,
)
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.responses import Response, StreamingResponse
from open_webui.socket.main import (
    app as socket_app,
    periodic_usage_pool_cleanup,
    get_event_call,
    get_event_emitter,
)
from open_webui.routers import (
    audio,
    images,
    ollama,
    openai,
    retrieval,
    pipelines,
    tasks,
    auths,
    chats,
    folders,
    configs,
    groups,
    files,
    functions,
    memories,
    models,
    knowledge,
    prompts,
    evaluations,
    tools,
    users,
    utils,
)
from open_webui.routers.retrieval import (
    get_embedding_function,
    get_ef,
    get_rf,
)

from open_webui.retrieval.utils import get_sources_from_files

from open_webui.internal.db import Session

from open_webui.models.functions import Functions
from open_webui.models.models import Models
from open_webui.models.users import UserModel, Users

from open_webui.constants import TASKS
from open_webui.config import (
    # Ollama
    ENABLE_OLLAMA_API,
    OLLAMA_BASE_URLS,
    OLLAMA_API_CONFIGS,
    # OpenAI
    ENABLE_OPENAI_API,
    OPENAI_API_BASE_URLS,
    OPENAI_API_KEYS,
    OPENAI_API_CONFIGS,
    # Image
    AUTOMATIC1111_API_AUTH,
    AUTOMATIC1111_BASE_URL,
    AUTOMATIC1111_CFG_SCALE,
    AUTOMATIC1111_SAMPLER,
    AUTOMATIC1111_SCHEDULER,
    COMFYUI_BASE_URL,
    COMFYUI_WORKFLOW,
    COMFYUI_WORKFLOW_NODES,
    ENABLE_IMAGE_GENERATION,
    IMAGE_GENERATION_ENGINE,
    IMAGE_GENERATION_MODEL,
    IMAGE_SIZE,
    IMAGE_STEPS,
    IMAGES_OPENAI_API_BASE_URL,
    IMAGES_OPENAI_API_KEY,
    # Audio
    AUDIO_STT_ENGINE,
    AUDIO_STT_MODEL,
    AUDIO_STT_OPENAI_API_BASE_URL,
    AUDIO_STT_OPENAI_API_KEY,
    AUDIO_TTS_API_KEY,
    AUDIO_TTS_ENGINE,
    AUDIO_TTS_MODEL,
    AUDIO_TTS_OPENAI_API_BASE_URL,
    AUDIO_TTS_OPENAI_API_KEY,
    AUDIO_TTS_SPLIT_ON,
    AUDIO_TTS_VOICE,
    AUDIO_TTS_AZURE_SPEECH_REGION,
    AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT,
    WHISPER_MODEL,
    WHISPER_MODEL_AUTO_UPDATE,
    WHISPER_MODEL_DIR,
    # Retrieval
    RAG_TEMPLATE,
    DEFAULT_RAG_TEMPLATE,
    RAG_EMBEDDING_MODEL,
    RAG_EMBEDDING_MODEL_AUTO_UPDATE,
    RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
    RAG_RERANKING_MODEL,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
    RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
    RAG_EMBEDDING_ENGINE,
    RAG_EMBEDDING_BATCH_SIZE,
    RAG_RELEVANCE_THRESHOLD,
    RAG_FILE_MAX_COUNT,
    RAG_FILE_MAX_SIZE,
    RAG_OPENAI_API_BASE_URL,
    RAG_OPENAI_API_KEY,
    RAG_OLLAMA_BASE_URL,
    RAG_OLLAMA_API_KEY,
    CHUNK_OVERLAP,
    CHUNK_SIZE,
    CONTENT_EXTRACTION_ENGINE,
    TIKA_SERVER_URL,
    RAG_TOP_K,
    RAG_TEXT_SPLITTER,
    TIKTOKEN_ENCODING_NAME,
    PDF_EXTRACT_IMAGES,
    YOUTUBE_LOADER_LANGUAGE,
    YOUTUBE_LOADER_PROXY_URL,
    # Retrieval (Web Search)
    RAG_WEB_SEARCH_ENGINE,
    RAG_WEB_SEARCH_RESULT_COUNT,
    RAG_WEB_SEARCH_CONCURRENT_REQUESTS,
    RAG_WEB_SEARCH_DOMAIN_FILTER_LIST,
    JINA_API_KEY,
    SEARCHAPI_API_KEY,
    SEARCHAPI_ENGINE,
    SEARXNG_QUERY_URL,
    SERPER_API_KEY,
    SERPLY_API_KEY,
    SERPSTACK_API_KEY,
    SERPSTACK_HTTPS,
    TAVILY_API_KEY,
    BING_SEARCH_V7_ENDPOINT,
    BING_SEARCH_V7_SUBSCRIPTION_KEY,
    BRAVE_SEARCH_API_KEY,
    KAGI_SEARCH_API_KEY,
    MOJEEK_SEARCH_API_KEY,
    GOOGLE_PSE_API_KEY,
    GOOGLE_PSE_ENGINE_ID,
    ENABLE_RAG_HYBRID_SEARCH,
    ENABLE_RAG_LOCAL_WEB_FETCH,
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    ENABLE_RAG_WEB_SEARCH,
    UPLOAD_DIR,
    # WebUI
    WEBUI_AUTH,
    WEBUI_NAME,
    WEBUI_BANNERS,
    WEBHOOK_URL,
    ADMIN_EMAIL,
    SHOW_ADMIN_DETAILS,
    JWT_EXPIRES_IN,
    ENABLE_SIGNUP,
    ENABLE_LOGIN_FORM,
    ENABLE_API_KEY,
    ENABLE_COMMUNITY_SHARING,
    ENABLE_MESSAGE_RATING,
    ENABLE_EVALUATION_ARENA_MODELS,
    USER_PERMISSIONS,
    DEFAULT_USER_ROLE,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_MODELS,
    DEFAULT_ARENA_MODEL,
    MODEL_ORDER_LIST,
    EVALUATION_ARENA_MODELS,
    # WebUI (OAuth)
    ENABLE_OAUTH_ROLE_MANAGEMENT,
    OAUTH_ROLES_CLAIM,
    OAUTH_EMAIL_CLAIM,
    OAUTH_PICTURE_CLAIM,
    OAUTH_USERNAME_CLAIM,
    OAUTH_ALLOWED_ROLES,
    OAUTH_ADMIN_ROLES,
    # WebUI (LDAP)
    ENABLE_LDAP,
    LDAP_SERVER_LABEL,
    LDAP_SERVER_HOST,
    LDAP_SERVER_PORT,
    LDAP_ATTRIBUTE_FOR_USERNAME,
    LDAP_SEARCH_FILTERS,
    LDAP_SEARCH_BASE,
    LDAP_APP_DN,
    LDAP_APP_PASSWORD,
    LDAP_USE_TLS,
    LDAP_CA_CERT_FILE,
    LDAP_CIPHERS,
    # Misc
    ENV,
    CACHE_DIR,
    STATIC_DIR,
    FRONTEND_BUILD_DIR,
    CORS_ALLOW_ORIGIN,
    DEFAULT_LOCALE,
    OAUTH_PROVIDERS,
    # Admin
    ENABLE_ADMIN_CHAT_ACCESS,
    ENABLE_ADMIN_EXPORT,
    # Tasks
    TASK_MODEL,
    TASK_MODEL_EXTERNAL,
    ENABLE_TAGS_GENERATION,
    ENABLE_SEARCH_QUERY_GENERATION,
    ENABLE_RETRIEVAL_QUERY_GENERATION,
    ENABLE_AUTOCOMPLETE_GENERATION,
    TITLE_GENERATION_PROMPT_TEMPLATE,
    TAGS_GENERATION_PROMPT_TEMPLATE,
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
    QUERY_GENERATION_PROMPT_TEMPLATE,
    AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE,
    AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH,
    AppConfig,
    reset_config,
)
from open_webui.env import (
    CHANGELOG,
    GLOBAL_LOG_LEVEL,
    SAFE_MODE,
    SRC_LOG_LEVELS,
    VERSION,
    WEBUI_URL,
    WEBUI_BUILD_HASH,
    WEBUI_SECRET_KEY,
    WEBUI_SESSION_COOKIE_SAME_SITE,
    WEBUI_SESSION_COOKIE_SECURE,
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
    BYPASS_MODEL_ACCESS_CONTROL,
    RESET_CONFIG_ON_START,
    OFFLINE_MODE,
)

from open_webui.utils.plugin import load_function_module_by_id
from open_webui.utils.misc import (
    add_or_update_system_message,
    get_last_user_message,
    prepend_to_first_user_message_content,
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)
from open_webui.utils.payload import convert_payload_openai_to_ollama
from open_webui.utils.response import (
    convert_response_ollama_to_openai,
    convert_streaming_response_ollama_to_openai,
)
from open_webui.utils.task import (
    rag_template,
    tools_function_calling_generation_template,
)
from open_webui.utils.tools import get_tools
from open_webui.utils.access_control import has_access

from open_webui.utils.auth import (
    decode_token,
    get_admin_user,
    get_current_user,
    get_http_authorization_cred,
    get_verified_user,
)
from open_webui.utils.oauth import oauth_manager
from open_webui.utils.security_headers import SecurityHeadersMiddleware

if SAFE_MODE:
    print("SAFE MODE ENABLED")
    Functions.deactivate_all_functions()

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])

class SPAStaticFiles(StaticFiles):
    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code == 404:
                return await super().get_response("index.html", scope)
            else:
                raise ex
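
# NOTE: SPAStaticFiles falls back to serving index.html whenever a requested
# path is not found, so deep links handled by the frontend router resolve to
# the single-page app instead of returning a bare 404.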

print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|


v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)

@asynccontextmanager
async def lifespan(app: FastAPI):
    if RESET_CONFIG_ON_START:
        reset_config()

    asyncio.create_task(periodic_usage_pool_cleanup())
    yield


app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None,
    openapi_url="/openapi.json" if ENV == "dev" else None,
    redoc_url=None,
    lifespan=lifespan,
)

app.state.config = AppConfig()

########################################
#
# OLLAMA
#
########################################

app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API
app.state.config.OLLAMA_BASE_URLS = OLLAMA_BASE_URLS
app.state.config.OLLAMA_API_CONFIGS = OLLAMA_API_CONFIGS

app.state.OLLAMA_MODELS = {}

########################################
#
# OPENAI
#
########################################

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
app.state.config.OPENAI_API_CONFIGS = OPENAI_API_CONFIGS

app.state.OPENAI_MODELS = {}

########################################
#
# WEBUI
#
########################################

app.state.config.ENABLE_SIGNUP = ENABLE_SIGNUP
app.state.config.ENABLE_LOGIN_FORM = ENABLE_LOGIN_FORM
app.state.config.ENABLE_API_KEY = ENABLE_API_KEY

app.state.config.JWT_EXPIRES_IN = JWT_EXPIRES_IN

app.state.config.SHOW_ADMIN_DETAILS = SHOW_ADMIN_DETAILS
app.state.config.ADMIN_EMAIL = ADMIN_EMAIL

app.state.config.DEFAULT_MODELS = DEFAULT_MODELS
app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE

app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS
app.state.config.MODEL_ORDER_LIST = MODEL_ORDER_LIST

app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING
app.state.config.ENABLE_MESSAGE_RATING = ENABLE_MESSAGE_RATING

app.state.config.ENABLE_EVALUATION_ARENA_MODELS = ENABLE_EVALUATION_ARENA_MODELS
app.state.config.EVALUATION_ARENA_MODELS = EVALUATION_ARENA_MODELS

app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
app.state.config.OAUTH_EMAIL_CLAIM = OAUTH_EMAIL_CLAIM

app.state.config.ENABLE_OAUTH_ROLE_MANAGEMENT = ENABLE_OAUTH_ROLE_MANAGEMENT
app.state.config.OAUTH_ROLES_CLAIM = OAUTH_ROLES_CLAIM
app.state.config.OAUTH_ALLOWED_ROLES = OAUTH_ALLOWED_ROLES
app.state.config.OAUTH_ADMIN_ROLES = OAUTH_ADMIN_ROLES

app.state.config.ENABLE_LDAP = ENABLE_LDAP
app.state.config.LDAP_SERVER_LABEL = LDAP_SERVER_LABEL
app.state.config.LDAP_SERVER_HOST = LDAP_SERVER_HOST
app.state.config.LDAP_SERVER_PORT = LDAP_SERVER_PORT
app.state.config.LDAP_ATTRIBUTE_FOR_USERNAME = LDAP_ATTRIBUTE_FOR_USERNAME
app.state.config.LDAP_APP_DN = LDAP_APP_DN
app.state.config.LDAP_APP_PASSWORD = LDAP_APP_PASSWORD
app.state.config.LDAP_SEARCH_BASE = LDAP_SEARCH_BASE
app.state.config.LDAP_SEARCH_FILTERS = LDAP_SEARCH_FILTERS
app.state.config.LDAP_USE_TLS = LDAP_USE_TLS
app.state.config.LDAP_CA_CERT_FILE = LDAP_CA_CERT_FILE
app.state.config.LDAP_CIPHERS = LDAP_CIPHERS

app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
app.state.AUTH_TRUSTED_NAME_HEADER = WEBUI_AUTH_TRUSTED_NAME_HEADER

app.state.TOOLS = {}
app.state.FUNCTIONS = {}

########################################
#
# RETRIEVAL
#
########################################

app.state.config.TOP_K = RAG_TOP_K
app.state.config.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD
app.state.config.FILE_MAX_SIZE = RAG_FILE_MAX_SIZE
app.state.config.FILE_MAX_COUNT = RAG_FILE_MAX_COUNT

app.state.config.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
app.state.config.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
)

app.state.config.CONTENT_EXTRACTION_ENGINE = CONTENT_EXTRACTION_ENGINE
app.state.config.TIKA_SERVER_URL = TIKA_SERVER_URL

app.state.config.TEXT_SPLITTER = RAG_TEXT_SPLITTER
app.state.config.TIKTOKEN_ENCODING_NAME = TIKTOKEN_ENCODING_NAME

app.state.config.CHUNK_SIZE = CHUNK_SIZE
app.state.config.CHUNK_OVERLAP = CHUNK_OVERLAP

app.state.config.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
app.state.config.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
app.state.config.RAG_EMBEDDING_BATCH_SIZE = RAG_EMBEDDING_BATCH_SIZE
app.state.config.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
app.state.config.RAG_TEMPLATE = RAG_TEMPLATE

app.state.config.RAG_OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
app.state.config.RAG_OPENAI_API_KEY = RAG_OPENAI_API_KEY

app.state.config.RAG_OLLAMA_BASE_URL = RAG_OLLAMA_BASE_URL
app.state.config.RAG_OLLAMA_API_KEY = RAG_OLLAMA_API_KEY

app.state.config.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES

app.state.config.YOUTUBE_LOADER_LANGUAGE = YOUTUBE_LOADER_LANGUAGE
app.state.config.YOUTUBE_LOADER_PROXY_URL = YOUTUBE_LOADER_PROXY_URL

app.state.config.ENABLE_RAG_WEB_SEARCH = ENABLE_RAG_WEB_SEARCH
app.state.config.RAG_WEB_SEARCH_ENGINE = RAG_WEB_SEARCH_ENGINE
app.state.config.RAG_WEB_SEARCH_DOMAIN_FILTER_LIST = RAG_WEB_SEARCH_DOMAIN_FILTER_LIST

app.state.config.SEARXNG_QUERY_URL = SEARXNG_QUERY_URL
app.state.config.GOOGLE_PSE_API_KEY = GOOGLE_PSE_API_KEY
app.state.config.GOOGLE_PSE_ENGINE_ID = GOOGLE_PSE_ENGINE_ID
app.state.config.BRAVE_SEARCH_API_KEY = BRAVE_SEARCH_API_KEY
app.state.config.KAGI_SEARCH_API_KEY = KAGI_SEARCH_API_KEY
app.state.config.MOJEEK_SEARCH_API_KEY = MOJEEK_SEARCH_API_KEY
app.state.config.SERPSTACK_API_KEY = SERPSTACK_API_KEY
app.state.config.SERPSTACK_HTTPS = SERPSTACK_HTTPS
app.state.config.SERPER_API_KEY = SERPER_API_KEY
app.state.config.SERPLY_API_KEY = SERPLY_API_KEY
app.state.config.TAVILY_API_KEY = TAVILY_API_KEY
app.state.config.SEARCHAPI_API_KEY = SEARCHAPI_API_KEY
app.state.config.SEARCHAPI_ENGINE = SEARCHAPI_ENGINE
app.state.config.JINA_API_KEY = JINA_API_KEY
app.state.config.BING_SEARCH_V7_ENDPOINT = BING_SEARCH_V7_ENDPOINT
app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = BING_SEARCH_V7_SUBSCRIPTION_KEY

app.state.config.RAG_WEB_SEARCH_RESULT_COUNT = RAG_WEB_SEARCH_RESULT_COUNT
app.state.config.RAG_WEB_SEARCH_CONCURRENT_REQUESTS = RAG_WEB_SEARCH_CONCURRENT_REQUESTS

app.state.EMBEDDING_FUNCTION = None
app.state.ef = None
app.state.rf = None

app.state.YOUTUBE_LOADER_TRANSLATION = None

app.state.EMBEDDING_FUNCTION = get_embedding_function(
    app.state.config.RAG_EMBEDDING_ENGINE,
    app.state.config.RAG_EMBEDDING_MODEL,
    app.state.ef,
    (
        app.state.config.RAG_OPENAI_API_BASE_URL
        if app.state.config.RAG_EMBEDDING_ENGINE == "openai"
        else app.state.config.RAG_OLLAMA_BASE_URL
    ),
    (
        app.state.config.RAG_OPENAI_API_KEY
        if app.state.config.RAG_EMBEDDING_ENGINE == "openai"
        else app.state.config.RAG_OLLAMA_API_KEY
    ),
    app.state.config.RAG_EMBEDDING_BATCH_SIZE,
)

try:
    app.state.ef = get_ef(
        app.state.config.RAG_EMBEDDING_ENGINE,
        app.state.config.RAG_EMBEDDING_MODEL,
        RAG_EMBEDDING_MODEL_AUTO_UPDATE,
    )

    app.state.rf = get_rf(
        app.state.config.RAG_RERANKING_MODEL,
        RAG_RERANKING_MODEL_AUTO_UPDATE,
    )
except Exception as e:
    log.error(f"Error updating models: {e}")
    pass

########################################
#
# IMAGES
#
########################################

app.state.config.IMAGE_GENERATION_ENGINE = IMAGE_GENERATION_ENGINE
app.state.config.ENABLE_IMAGE_GENERATION = ENABLE_IMAGE_GENERATION

app.state.config.IMAGES_OPENAI_API_BASE_URL = IMAGES_OPENAI_API_BASE_URL
app.state.config.IMAGES_OPENAI_API_KEY = IMAGES_OPENAI_API_KEY

app.state.config.IMAGE_GENERATION_MODEL = IMAGE_GENERATION_MODEL

app.state.config.AUTOMATIC1111_BASE_URL = AUTOMATIC1111_BASE_URL
app.state.config.AUTOMATIC1111_API_AUTH = AUTOMATIC1111_API_AUTH
app.state.config.AUTOMATIC1111_CFG_SCALE = AUTOMATIC1111_CFG_SCALE
app.state.config.AUTOMATIC1111_SAMPLER = AUTOMATIC1111_SAMPLER
app.state.config.AUTOMATIC1111_SCHEDULER = AUTOMATIC1111_SCHEDULER
app.state.config.COMFYUI_BASE_URL = COMFYUI_BASE_URL
app.state.config.COMFYUI_WORKFLOW = COMFYUI_WORKFLOW
app.state.config.COMFYUI_WORKFLOW_NODES = COMFYUI_WORKFLOW_NODES

app.state.config.IMAGE_SIZE = IMAGE_SIZE
app.state.config.IMAGE_STEPS = IMAGE_STEPS

########################################
#
# AUDIO
#
########################################

app.state.config.STT_OPENAI_API_BASE_URL = AUDIO_STT_OPENAI_API_BASE_URL
app.state.config.STT_OPENAI_API_KEY = AUDIO_STT_OPENAI_API_KEY
app.state.config.STT_ENGINE = AUDIO_STT_ENGINE
app.state.config.STT_MODEL = AUDIO_STT_MODEL
app.state.config.WHISPER_MODEL = WHISPER_MODEL

app.state.config.TTS_OPENAI_API_BASE_URL = AUDIO_TTS_OPENAI_API_BASE_URL
app.state.config.TTS_OPENAI_API_KEY = AUDIO_TTS_OPENAI_API_KEY
app.state.config.TTS_ENGINE = AUDIO_TTS_ENGINE
app.state.config.TTS_MODEL = AUDIO_TTS_MODEL
app.state.config.TTS_VOICE = AUDIO_TTS_VOICE
app.state.config.TTS_API_KEY = AUDIO_TTS_API_KEY
app.state.config.TTS_SPLIT_ON = AUDIO_TTS_SPLIT_ON

app.state.config.TTS_AZURE_SPEECH_REGION = AUDIO_TTS_AZURE_SPEECH_REGION
app.state.config.TTS_AZURE_SPEECH_OUTPUT_FORMAT = AUDIO_TTS_AZURE_SPEECH_OUTPUT_FORMAT

app.state.faster_whisper_model = None
app.state.speech_synthesiser = None
app.state.speech_speaker_embeddings_dataset = None

########################################
#
# TASKS
#
########################################

app.state.config.TASK_MODEL = TASK_MODEL
app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL

app.state.config.ENABLE_SEARCH_QUERY_GENERATION = ENABLE_SEARCH_QUERY_GENERATION
app.state.config.ENABLE_RETRIEVAL_QUERY_GENERATION = ENABLE_RETRIEVAL_QUERY_GENERATION
app.state.config.ENABLE_AUTOCOMPLETE_GENERATION = ENABLE_AUTOCOMPLETE_GENERATION
app.state.config.ENABLE_TAGS_GENERATION = ENABLE_TAGS_GENERATION

app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE = TAGS_GENERATION_PROMPT_TEMPLATE
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
)
app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE = QUERY_GENERATION_PROMPT_TEMPLATE
app.state.config.AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE = (
    AUTOCOMPLETE_GENERATION_PROMPT_TEMPLATE
)
app.state.config.AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH = (
    AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH
)

########################################
#
# WEBUI
#
########################################

app.state.MODELS = {}

##################################
#
# ChatCompletion Middleware
#
##################################


def get_filter_function_ids(model):
    def get_priority(function_id):
        function = Functions.get_function_by_id(function_id)
        if function is not None and hasattr(function, "valves"):
            # TODO: Fix FunctionModel
            return (function.valves if function.valves else {}).get("priority", 0)
        return 0

    filter_ids = [function.id for function in Functions.get_global_filter_functions()]
    if "info" in model and "meta" in model["info"]:
        filter_ids.extend(model["info"]["meta"].get("filterIds", []))
        filter_ids = list(set(filter_ids))

    enabled_filter_ids = [
        function.id
        for function in Functions.get_functions_by_type("filter", active_only=True)
    ]

    filter_ids = [
        filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids
    ]

    filter_ids.sort(key=get_priority)
    return filter_ids
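
# The inlet handler below runs every enabled filter function returned by
# get_filter_function_ids against the request body before it is sent to the
# model, in ascending valve "priority" order.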
async def chat_completion_filter_functions_handler(body, model, extra_params):
    skip_files = None

    filter_ids = get_filter_function_ids(model)
    for filter_id in filter_ids:
        filter = Functions.get_function_by_id(filter_id)
        if not filter:
            continue

        if filter_id in app.state.FUNCTIONS:
            function_module = app.state.FUNCTIONS[filter_id]
        else:
            function_module, _, _ = load_function_module_by_id(filter_id)
            app.state.FUNCTIONS[filter_id] = function_module

        # Check if the function has a file_handler variable
        if hasattr(function_module, "file_handler"):
            skip_files = function_module.file_handler

        if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
            valves = Functions.get_function_valves_by_id(filter_id)
            function_module.valves = function_module.Valves(
                **(valves if valves else {})
            )

        if not hasattr(function_module, "inlet"):
            continue

        try:
            inlet = function_module.inlet

            # Get the signature of the function
            sig = inspect.signature(inlet)
            params = {"body": body} | {
                k: v
                for k, v in {
                    **extra_params,
                    "__model__": model,
                    "__id__": filter_id,
                }.items()
                if k in sig.parameters
            }

            if "__user__" in params and hasattr(function_module, "UserValves"):
                try:
                    params["__user__"]["valves"] = function_module.UserValves(
                        **Functions.get_user_valves_by_id_and_user_id(
                            filter_id, params["__user__"]["id"]
                        )
                    )
                except Exception as e:
                    print(e)

            if inspect.iscoroutinefunction(inlet):
                body = await inlet(**params)
            else:
                body = inlet(**params)

        except Exception as e:
            print(f"Error: {e}")
            raise e

    if skip_files and "files" in body.get("metadata", {}):
        del body["metadata"]["files"]

    return body, {}


def get_tools_function_calling_payload(messages, task_model_id, content):
    user_message = get_last_user_message(messages)
    history = "\n".join(
        f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\""
        for message in messages[::-1][:4]
    )

    prompt = f"History:\n{history}\nQuery: {user_message}"

    return {
        "model": task_model_id,
        "messages": [
            {"role": "system", "content": content},
            {"role": "user", "content": f"Query: {prompt}"},
        ],
        "stream": False,
        "metadata": {"task": str(TASKS.FUNCTION_CALLING)},
    }


async def get_content_from_response(response) -> Optional[str]:
    content = None
    if hasattr(response, "body_iterator"):
        async for chunk in response.body_iterator:
            data = json.loads(chunk.decode("utf-8"))
            content = data["choices"][0]["message"]["content"]

        # Cleanup any remaining background tasks if necessary
        if response.background is not None:
            await response.background()
    else:
        content = response["choices"][0]["message"]["content"]
    return content


def get_task_model_id(
    default_model_id: str, task_model: str, task_model_external: str, models
) -> str:
    # Set the task model
    task_model_id = default_model_id

    # Check if the user has a custom task model and use that model
    if models[task_model_id]["owned_by"] == "ollama":
        if task_model and task_model in models:
            task_model_id = task_model
    else:
        if task_model_external and task_model_external in models:
            task_model_id = task_model_external

    return task_model_id
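
# Illustrative example (hypothetical model ids): if the selected chat model
# "llama3" is owned by "ollama" and TASK_MODEL is set to "qwen2.5:3b", task
# requests such as the tool function-calling prompt are routed to "qwen2.5:3b";
# for non-Ollama chat models, TASK_MODEL_EXTERNAL is consulted instead.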
async def chat_completion_tools_handler(
    body: dict, user: UserModel, models, extra_params: dict
) -> tuple[dict, dict]:
    # If tool_ids field is present, call the functions
    metadata = body.get("metadata", {})

    tool_ids = metadata.get("tool_ids", None)
    log.debug(f"{tool_ids=}")
    if not tool_ids:
        return body, {}

    skip_files = False
    sources = []

    task_model_id = get_task_model_id(
        body["model"],
        app.state.config.TASK_MODEL,
        app.state.config.TASK_MODEL_EXTERNAL,
        models,
    )
    tools = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": models[task_model_id],
            "__messages__": body["messages"],
            "__files__": metadata.get("files", []),
        },
    )
    log.info(f"{tools=}")

    specs = [tool["spec"] for tool in tools.values()]
    tools_specs = json.dumps(specs)

    if app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE != "":
        template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
    else:
        template = """Available Tools: {{TOOLS}}\nReturn an empty string if no tools match the query. If a function tool matches, construct and return a JSON object in the format {\"name\": \"functionName\", \"parameters\": {\"requiredFunctionParamKey\": \"requiredFunctionParamValue\"}} using the appropriate tool and its parameters. Only return the object and limit the response to the JSON object without additional text."""

    tools_function_calling_prompt = tools_function_calling_generation_template(
        template, tools_specs
    )
    log.info(f"{tools_function_calling_prompt=}")
    payload = get_tools_function_calling_payload(
        body["messages"], task_model_id, tools_function_calling_prompt
    )

    try:
        payload = filter_pipeline(payload, user, models)
    except Exception as e:
        raise e

    try:
        response = await generate_chat_completions(form_data=payload, user=user)
        log.debug(f"{response=}")
        content = await get_content_from_response(response)
        log.debug(f"{content=}")

        if not content:
            return body, {}

        try:
            content = content[content.find("{") : content.rfind("}") + 1]
            if not content:
                raise Exception("No JSON object found in the response")

            result = json.loads(content)

            tool_function_name = result.get("name", None)
            if tool_function_name not in tools:
                return body, {}

            tool_function_params = result.get("parameters", {})

            try:
                required_params = (
                    tools[tool_function_name]
                    .get("spec", {})
                    .get("parameters", {})
                    .get("required", [])
                )
                tool_function = tools[tool_function_name]["callable"]
                tool_function_params = {
                    k: v
                    for k, v in tool_function_params.items()
                    if k in required_params
                }
                tool_output = await tool_function(**tool_function_params)

            except Exception as e:
                tool_output = str(e)

            if isinstance(tool_output, str):
                if tools[tool_function_name]["citation"]:
                    sources.append(
                        {
                            "source": {
                                "name": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
                            },
                            "document": [tool_output],
                            "metadata": [
                                {
                                    "source": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
                                }
                            ],
                        }
                    )
                else:
                    sources.append(
                        {
                            "source": {},
                            "document": [tool_output],
                            "metadata": [
                                {
                                    "source": f"TOOL:{tools[tool_function_name]['toolkit_id']}/{tool_function_name}"
                                }
                            ],
                        }
                    )

                if tools[tool_function_name]["file_handler"]:
                    skip_files = True

        except Exception as e:
            log.exception(f"Error: {e}")
            content = None
    except Exception as e:
        log.exception(f"Error: {e}")
        content = None

    log.debug(f"tool_contexts: {sources}")

    if skip_files and "files" in body.get("metadata", {}):
        del body["metadata"]["files"]

    return body, {"sources": sources}
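
# With the default template above, the task model is expected to reply with a
# bare JSON object, e.g. (illustrative values only):
#   {"name": "get_current_weather", "parameters": {"city": "Berlin"}}
# Everything outside the outermost {...} is stripped before json.loads(), and
# only parameters listed as "required" in the tool spec are passed through.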

async def chat_completion_files_handler(
    body: dict, user: UserModel
) -> tuple[dict, dict[str, list]]:
    sources = []

    if files := body.get("metadata", {}).get("files", None):
        try:
            queries_response = await generate_queries(
                {
                    "model": body["model"],
                    "messages": body["messages"],
                    "type": "retrieval",
                },
                user,
            )
            queries_response = queries_response["choices"][0]["message"]["content"]

            try:
                bracket_start = queries_response.find("{")
                bracket_end = queries_response.rfind("}") + 1

                if bracket_start == -1 or bracket_end == -1:
                    raise Exception("No JSON object found in the response")

                queries_response = queries_response[bracket_start:bracket_end]
                queries_response = json.loads(queries_response)
            except Exception as e:
                queries_response = {"queries": [queries_response]}

            queries = queries_response.get("queries", [])
        except Exception as e:
            queries = []

        if len(queries) == 0:
            queries = [get_last_user_message(body["messages"])]

        sources = get_sources_from_files(
            files=files,
            queries=queries,
            embedding_function=app.state.EMBEDDING_FUNCTION,
            k=app.state.config.TOP_K,
            reranking_function=app.state.rf,
            r=app.state.config.RELEVANCE_THRESHOLD,
            hybrid_search=app.state.config.ENABLE_RAG_HYBRID_SEARCH,
        )

        log.debug(f"rag_contexts:sources: {sources}")

    return body, {"sources": sources}
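
# generate_queries (not shown in this excerpt) is expected to return a JSON
# object of the form {"queries": ["..."]}; if parsing fails, the raw response
# is used, and if no queries are produced at all, the last user message serves
# as the retrieval query.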

async def get_body_and_model_and_user(request, models):
    # Read the original request body
    body = await request.body()
    body_str = body.decode("utf-8")
    body = json.loads(body_str) if body_str else {}

    model_id = body["model"]
    if model_id not in models:
        raise Exception("Model not found")
    model = models[model_id]

    user = get_current_user(
        request,
        get_http_authorization_cred(request.headers.get("Authorization")),
    )

    return body, model, user


class ChatCompletionMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        # Only intercept POST requests to the chat completion endpoints;
        # everything else passes straight through.
        if not (
            request.method == "POST"
            and any(
                endpoint in request.url.path
                for endpoint in ["/ollama/api/chat", "/chat/completions"]
            )
        ):
            return await call_next(request)
        log.debug(f"request.url.path: {request.url.path}")

        model_list = await get_all_models()
        models = {model["id"]: model for model in model_list}

        try:
            body, model, user = await get_body_and_model_and_user(request, models)
        except Exception as e:
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content={"detail": str(e)},
            )

        model_info = Models.get_model_by_id(model["id"])
        if user.role == "user" and not BYPASS_MODEL_ACCESS_CONTROL:
            if model.get("arena"):
                if not has_access(
                    user.id,
                    type="read",
                    access_control=model.get("info", {})
                    .get("meta", {})
                    .get("access_control", {}),
                ):
                    raise HTTPException(
                        status_code=403,
                        detail="Model not found",
                    )
            else:
                if not model_info:
                    return JSONResponse(
                        status_code=status.HTTP_404_NOT_FOUND,
                        content={"detail": "Model not found"},
                    )
                elif not (
                    user.id == model_info.user_id
                    or has_access(
                        user.id, type="read", access_control=model_info.access_control
                    )
                ):
                    return JSONResponse(
                        status_code=status.HTTP_403_FORBIDDEN,
                        content={"detail": "User does not have access to the model"},
                    )

        metadata = {
            "chat_id": body.pop("chat_id", None),
            "message_id": body.pop("id", None),
            "session_id": body.pop("session_id", None),
            "tool_ids": body.get("tool_ids", None),
            "files": body.get("files", None),
        }
        body["metadata"] = metadata

        extra_params = {
            "__event_emitter__": get_event_emitter(metadata),
            "__event_call__": get_event_call(metadata),
            "__user__": {
                "id": user.id,
                "email": user.email,
                "name": user.name,
                "role": user.role,
            },
            "__metadata__": metadata,
        }
        # Initialize data_items to store additional data to be sent to the client
        # Initialize contexts and citation
        data_items = []
        sources = []

        try:
            body, flags = await chat_completion_filter_functions_handler(
                body, model, extra_params
            )
        except Exception as e:
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content={"detail": str(e)},
            )

        tool_ids = body.pop("tool_ids", None)
        files = body.pop("files", None)

        metadata = {
            **metadata,
            "tool_ids": tool_ids,
            "files": files,
        }
        body["metadata"] = metadata

        try:
            body, flags = await chat_completion_tools_handler(
                body, user, models, extra_params
            )
            sources.extend(flags.get("sources", []))
        except Exception as e:
            log.exception(e)

        try:
            body, flags = await chat_completion_files_handler(body, user)
            sources.extend(flags.get("sources", []))
        except Exception as e:
            log.exception(e)
        # If context is not empty, insert it into the messages
        if len(sources) > 0:
            context_string = ""
            for source_idx, source in enumerate(sources):
                source_id = source.get("source", {}).get("name", "")

                if "document" in source:
                    for doc_idx, doc_context in enumerate(source["document"]):
                        metadata = source.get("metadata")
                        doc_source_id = None

                        if metadata:
                            doc_source_id = metadata[doc_idx].get("source", source_id)

                        if source_id:
                            context_string += f"<source><source_id>{doc_source_id if doc_source_id is not None else source_id}</source_id><source_context>{doc_context}</source_context></source>\n"
                        else:
                            # If there is no source_id, then do not include the source_id tag
                            context_string += f"<source><source_context>{doc_context}</source_context></source>\n"

            context_string = context_string.strip()
            prompt = get_last_user_message(body["messages"])

            if prompt is None:
                raise Exception("No user message found")
            if (
                app.state.config.RELEVANCE_THRESHOLD == 0
                and context_string.strip() == ""
            ):
                log.debug(
                    "With a 0 relevancy threshold for RAG, the context cannot be empty"
                )

            # Workaround for Ollama 2.0+ system prompt issue
            # TODO: replace with add_or_update_system_message
            if model["owned_by"] == "ollama":
                body["messages"] = prepend_to_first_user_message_content(
                    rag_template(
                        app.state.config.RAG_TEMPLATE, context_string, prompt
                    ),
                    body["messages"],
                )
            else:
                body["messages"] = add_or_update_system_message(
                    rag_template(
                        app.state.config.RAG_TEMPLATE, context_string, prompt
                    ),
                    body["messages"],
                )

        # If there are citations, add them to the data_items
        sources = [
            source for source in sources if source.get("source", {}).get("name", "")
        ]
        if len(sources) > 0:
            data_items.append({"sources": sources})
        modified_body_bytes = json.dumps(body).encode("utf-8")
        # Replace the request body with the modified one
        request._body = modified_body_bytes
        # Set custom header to ensure content-length matches new body length
        request.headers.__dict__["_list"] = [
            (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
            *[(k, v) for k, v in request.headers.raw if k.lower() != b"content-length"],
        ]

        response = await call_next(request)
        if not isinstance(response, StreamingResponse):
            return response

        content_type = response.headers["Content-Type"]
        is_openai = "text/event-stream" in content_type
        is_ollama = "application/x-ndjson" in content_type
        if not is_openai and not is_ollama:
            return response

        def wrap_item(item):
            return f"data: {item}\n\n" if is_openai else f"{item}\n"

        async def stream_wrapper(original_generator, data_items):
            for item in data_items:
                yield wrap_item(json.dumps(item))

            async for data in original_generator:
                yield data

        return StreamingResponse(
            stream_wrapper(response.body_iterator, data_items),
            headers=dict(response.headers),
        )

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(ChatCompletionMiddleware)

##################################
#
# Pipeline Middleware
#
##################################


def get_sorted_filters(model_id, models):
    filters = [
        model
        for model in models.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
    return sorted_filters
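
# A pipeline filter applies either globally (its "pipelines" list is ["*"]) or
# only to the model ids it names explicitly; matching filters are applied in
# ascending "priority" order.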
def filter_pipeline(payload, user, models):
    user = {"id": user.id, "email": user.email, "name": user.name, "role": user.role}
    model_id = payload["model"]

    sorted_filters = get_sorted_filters(model_id, models)
    model = models[model_id]

    if "pipeline" in model:
        sorted_filters.append(model)

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = app.state.config.OPENAI_API_KEYS[urlIdx]

            if key == "":
                continue

            headers = {"Authorization": f"Bearer {key}"}
            r = requests.post(
                f"{url}/{filter['id']}/filter/inlet",
                headers=headers,
                json={
                    "user": user,
                    "body": payload,
                },
            )

            r.raise_for_status()
            payload = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                res = r.json()
                if "detail" in res:
                    raise Exception(r.status_code, res["detail"])

    return payload
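
# For each matching filter, the payload is POSTed to the pipeline server's
# {url}/{filter_id}/filter/inlet endpoint and replaced by the JSON it returns;
# a "detail" field in an error response is re-raised so the middleware can
# surface it to the client.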

class PipelineMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        # Only intercept POST requests to the chat completion endpoints;
        # everything else passes straight through.
        if not (
            request.method == "POST"
            and any(
                endpoint in request.url.path
                for endpoint in ["/ollama/api/chat", "/chat/completions"]
            )
        ):
            return await call_next(request)
        log.debug(f"request.url.path: {request.url.path}")

        # Read the original request body
        body = await request.body()
        # Decode body to string
        body_str = body.decode("utf-8")
        # Parse string to JSON
        data = json.loads(body_str) if body_str else {}

        try:
            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers["Authorization"]),
            )
        except KeyError as e:
            if len(e.args) > 1:
                return JSONResponse(
                    status_code=e.args[0],
                    content={"detail": e.args[1]},
                )
            else:
                return JSONResponse(
                    status_code=status.HTTP_401_UNAUTHORIZED,
                    content={"detail": "Not authenticated"},
                )
        except HTTPException as e:
            return JSONResponse(
                status_code=e.status_code,
                content={"detail": e.detail},
            )

        model_list = await get_all_models()
        models = {model["id"]: model for model in model_list}

        try:
            data = filter_pipeline(data, user, models)
        except Exception as e:
            if len(e.args) > 1:
                return JSONResponse(
                    status_code=e.args[0],
                    content={"detail": e.args[1]},
                )
            else:
                return JSONResponse(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    content={"detail": str(e)},
                )

        modified_body_bytes = json.dumps(data).encode("utf-8")
        # Replace the request body with the modified one
        request._body = modified_body_bytes
        # Set custom header to ensure content-length matches new body length
        request.headers.__dict__["_list"] = [
            (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
            *[(k, v) for k, v in request.headers.raw if k.lower() != b"content-length"],
        ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}


app.add_middleware(PipelineMiddleware)


class RedirectMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        # Check if the request is a GET request
        if request.method == "GET":
            path = request.url.path
            query_params = dict(parse_qs(urlparse(str(request.url)).query))

            # Check for the specific watch path and the presence of 'v' parameter
            if path.endswith("/watch") and "v" in query_params:
                video_id = query_params["v"][0]  # Extract the first 'v' parameter
                encoded_video_id = urlencode({"youtube": video_id})
                redirect_url = f"/?{encoded_video_id}"
                return RedirectResponse(url=redirect_url)

        # Proceed with the normal flow of other requests
        response = await call_next(request)
        return response


# Add the middleware to the app
app.add_middleware(RedirectMiddleware)
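
# Illustrative example: a pasted YouTube link such as GET /watch?v=<video_id>
# is redirected to /?youtube=<video_id> so the frontend can pick the video id
# up from the query string.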

app.add_middleware(SecurityHeadersMiddleware)


@app.middleware("http")
async def commit_session_after_request(request: Request, call_next):
    response = await call_next(request)
    # log.debug("Commit session after request")
    Session.commit()
    return response


@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
    request.state.enable_api_key = app.state.config.ENABLE_API_KEY
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response


@app.middleware("http")
async def inspect_websocket(request: Request, call_next):
    if (
        "/ws/socket.io" in request.url.path
        and request.query_params.get("transport") == "websocket"
    ):
        upgrade = (request.headers.get("Upgrade") or "").lower()
        connection = (request.headers.get("Connection") or "").lower().split(",")
        # Check that there's the correct headers for an upgrade, else reject the connection
        # This is to work around this upstream issue:
        # https://github.com/miguelgrinberg/python-engineio/issues/367
        if upgrade != "websocket" or "upgrade" not in connection:
            return JSONResponse(
                status_code=status.HTTP_400_BAD_REQUEST,
                content={"detail": "Invalid WebSocket upgrade request"},
            )
    return await call_next(request)


app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.mount("/ws", socket_app)

app.include_router(ollama.router, prefix="/ollama", tags=["ollama"])
app.include_router(openai.router, prefix="/openai", tags=["openai"])

app.include_router(pipelines.router, prefix="/pipelines", tags=["pipelines"])
app.include_router(tasks.router, prefix="/tasks", tags=["tasks"])

app.include_router(images.router, prefix="/api/v1/images", tags=["images"])
app.include_router(audio.router, prefix="/api/v1/audio", tags=["audio"])
app.include_router(retrieval.router, prefix="/api/v1/retrieval", tags=["retrieval"])

app.include_router(configs.router, prefix="/api/v1/configs", tags=["configs"])
app.include_router(auths.router, prefix="/api/v1/auths", tags=["auths"])
app.include_router(users.router, prefix="/api/v1/users", tags=["users"])
app.include_router(chats.router, prefix="/api/v1/chats", tags=["chats"])
app.include_router(models.router, prefix="/api/v1/models", tags=["models"])
app.include_router(knowledge.router, prefix="/api/v1/knowledge", tags=["knowledge"])
app.include_router(prompts.router, prefix="/api/v1/prompts", tags=["prompts"])
app.include_router(tools.router, prefix="/api/v1/tools", tags=["tools"])
app.include_router(memories.router, prefix="/api/v1/memories", tags=["memories"])
app.include_router(folders.router, prefix="/api/v1/folders", tags=["folders"])
app.include_router(groups.router, prefix="/api/v1/groups", tags=["groups"])
app.include_router(files.router, prefix="/api/v1/files", tags=["files"])
app.include_router(functions.router, prefix="/api/v1/functions", tags=["functions"])
app.include_router(
    evaluations.router, prefix="/api/v1/evaluations", tags=["evaluations"]
)
app.include_router(utils.router, prefix="/api/v1/utils", tags=["utils"])
##################################
#
# Chat Endpoints
#
##################################


def get_function_module(pipe_id: str):
    # Check if function is already loaded
    if pipe_id not in app.state.FUNCTIONS:
        function_module, _, _ = load_function_module_by_id(pipe_id)
        app.state.FUNCTIONS[pipe_id] = function_module
    else:
        function_module = app.state.FUNCTIONS[pipe_id]

    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        valves = Functions.get_function_valves_by_id(pipe_id)
        function_module.valves = function_module.Valves(**(valves if valves else {}))
    return function_module

async def get_function_models():
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        function_module = get_function_module(pipe.id)

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            sub_pipes = []

            # Check if pipes is a function or a list
            try:
                if callable(function_module.pipes):
                    sub_pipes = function_module.pipes()
                else:
                    sub_pipes = function_module.pipes
            except Exception as e:
                log.exception(e)
                sub_pipes = []

            log.debug(
                f"get_function_models: function '{pipe.id}' is a manifold of {sub_pipes}"
            )

            for p in sub_pipes:
                sub_pipe_id = f'{pipe.id}.{p["id"]}'
                sub_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                pipe_flag = {"type": pipe.type}

                pipe_models.append(
                    {
                        "id": sub_pipe_id,
                        "name": sub_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        else:
            pipe_flag = {"type": "pipe"}

            log.debug(
                f"get_function_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
            )

            pipe_models.append(
                {
                    "id": pipe.id,
                    "name": pipe.name,
                    "object": "model",
                    "created": pipe.created_at,
                    "owned_by": "openai",
                    "pipe": pipe_flag,
                }
            )

    return pipe_models

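# Naming sketch for manifold pipes (function and model ids below are invented for
# illustration): a pipe function "anthropic" whose pipes() returns
#   [{"id": "claude-3-haiku", "name": "Claude 3 Haiku"}]
# is exposed as the model id "anthropic.claude-3-haiku"; if the module also defines a
# `name` attribute such as "Anthropic: ", the displayed name becomes
# "Anthropic: Claude 3 Haiku".
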
async def generate_function_chat_completion(form_data, user, models: dict = {}):
    async def execute_pipe(pipe, params):
        if inspect.iscoroutinefunction(pipe):
            return await pipe(**params)
        else:
            return pipe(**params)

    async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
        if isinstance(res, str):
            return res
        if isinstance(res, Generator):
            return "".join(map(str, res))
        if isinstance(res, AsyncGenerator):
            return "".join([str(stream) async for stream in res])

    def process_line(form_data: dict, line):
        if isinstance(line, BaseModel):
            line = line.model_dump_json()
            line = f"data: {line}"
        if isinstance(line, dict):
            line = f"data: {json.dumps(line)}"

        try:
            line = line.decode("utf-8")
        except Exception:
            pass

        if line.startswith("data:"):
            return f"{line}\n\n"
        else:
            line = openai_chat_chunk_message_template(form_data["model"], line)
            return f"data: {json.dumps(line)}\n\n"

    def get_pipe_id(form_data: dict) -> str:
        pipe_id = form_data["model"]
        if "." in pipe_id:
            pipe_id, _ = pipe_id.split(".", 1)
        return pipe_id

    def get_function_params(function_module, form_data, user, extra_params=None):
        if extra_params is None:
            extra_params = {}

        pipe_id = get_pipe_id(form_data)

        # Get the signature of the function
        sig = inspect.signature(function_module.pipe)
        params = {"body": form_data} | {
            k: v for k, v in extra_params.items() if k in sig.parameters
        }

        if "__user__" in params and hasattr(function_module, "UserValves"):
            user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
            try:
                params["__user__"]["valves"] = function_module.UserValves(**user_valves)
            except Exception as e:
                log.exception(e)
                params["__user__"]["valves"] = function_module.UserValves()

        return params

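    # Rough sketch of what process_line yields (the exact field layout comes from
    # openai_chat_chunk_message_template, so treat the keys shown here as an assumption):
    #   data: {"object": "chat.completion.chunk",
    #          "choices": [{"delta": {"content": "Hello"}, "finish_reason": null}]}
    # followed by a blank line, i.e. OpenAI-style server-sent-events framing.
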
    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", {})
    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])

    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
        __task__ = metadata.get("task", None)
        __task_body__ = metadata.get("task_body", None)

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        },
        "__metadata__": metadata,
    }

    extra_params["__tools__"] = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": models.get(form_data["model"], None),
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()
        form_data = apply_model_params_to_body_openai(params, form_data)
        form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data.get("stream", False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return
            except Exception as e:
                log.error(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail':str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"
                yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            log.error(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)

async def get_all_base_models():
    function_models = []
    openai_models = []
    ollama_models = []

    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await openai.get_all_models()
        openai_models = openai_models["data"]

    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await ollama.get_all_models()
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    function_models = await get_function_models()
    models = function_models + openai_models + ollama_models

    # Add arena models
    if app.state.config.ENABLE_EVALUATION_ARENA_MODELS:
        arena_models = []
        if len(app.state.config.EVALUATION_ARENA_MODELS) > 0:
            arena_models = [
                {
                    "id": model["id"],
                    "name": model["name"],
                    "info": {
                        "meta": model["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
                for model in app.state.config.EVALUATION_ARENA_MODELS
            ]
        else:
            # Add default arena model
            arena_models = [
                {
                    "id": DEFAULT_ARENA_MODEL["id"],
                    "name": DEFAULT_ARENA_MODEL["name"],
                    "info": {
                        "meta": DEFAULT_ARENA_MODEL["meta"],
                    },
                    "object": "model",
                    "created": int(time.time()),
                    "owned_by": "arena",
                    "arena": True,
                }
            ]
        models = models + arena_models

    return models

@cached(ttl=3)
async def get_all_models():
    models = await get_all_base_models()

    # If there are no models, return an empty list
    if len([model for model in models if not model.get("arena", False)]) == 0:
        return []

    global_action_ids = [
        function.id for function in Functions.get_global_action_functions()
    ]
    enabled_action_ids = [
        function.id
        for function in Functions.get_functions_by_type("action", active_only=True)
    ]

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        if custom_model.base_model_id is None:
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    if custom_model.is_active:
                        model["name"] = custom_model.name
                        model["info"] = custom_model.model_dump()

                        action_ids = []
                        if "info" in model and "meta" in model["info"]:
                            action_ids.extend(
                                model["info"]["meta"].get("actionIds", [])
                            )

                        model["action_ids"] = action_ids
                    else:
                        models.remove(model)

        elif custom_model.is_active and (
            custom_model.id not in [model["id"] for model in models]
        ):
            owned_by = "openai"
            pipe = None
            action_ids = []

            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    if "pipe" in model:
                        pipe = model["pipe"]
                    break

            if custom_model.meta:
                meta = custom_model.meta.model_dump()
                if "actionIds" in meta:
                    action_ids.extend(meta["actionIds"])

            models.append(
                {
                    "id": f"{custom_model.id}",
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                    **({"pipe": pipe} if pipe is not None else {}),
                    "action_ids": action_ids,
                }
            )

    # Process action_ids to get the actions
    def get_action_items_from_module(function, module):
        actions = []
        if hasattr(module, "actions"):
            actions = module.actions
            return [
                {
                    "id": f"{function.id}.{action['id']}",
                    "name": action.get("name", f"{function.name} ({action['id']})"),
                    "description": function.meta.description,
                    "icon_url": action.get(
                        "icon_url", function.meta.manifest.get("icon_url", None)
                    ),
                }
                for action in actions
            ]
        else:
            return [
                {
                    "id": function.id,
                    "name": function.name,
                    "description": function.meta.description,
                    "icon_url": function.meta.manifest.get("icon_url", None),
                }
            ]

    def get_function_module_by_id(function_id):
        if function_id in webui_app.state.FUNCTIONS:
            function_module = webui_app.state.FUNCTIONS[function_id]
        else:
            function_module, _, _ = load_function_module_by_id(function_id)
            webui_app.state.FUNCTIONS[function_id] = function_module
        return function_module

    for model in models:
        action_ids = [
            action_id
            for action_id in list(set(model.pop("action_ids", []) + global_action_ids))
            if action_id in enabled_action_ids
        ]

        model["actions"] = []
        for action_id in action_ids:
            action_function = Functions.get_function_by_id(action_id)
            if action_function is None:
                raise Exception(f"Action not found: {action_id}")

            function_module = get_function_module_by_id(action_id)
            model["actions"].extend(
                get_action_items_from_module(action_function, function_module)
            )

    log.debug(f"get_all_models() returned {len(models)} models")

    app.state.MODELS = {model["id"]: model for model in models}
    return models

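# Shape of an entry in app.state.MODELS after get_all_models() (values illustrative):
#   {"id": "llama3:latest", "name": "llama3", "object": "model", "created": 1700000000,
#    "owned_by": "ollama", "actions": [], ...}
# plus optional "info", "pipe", "preset" or "arena" keys depending on where the model
# came from above. Results are cached for a few seconds by the @cached(ttl=3) decorator.
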
  1608. @app.get("/api/models")
  1609. async def get_models(user=Depends(get_verified_user)):
  1610. models = await get_all_models()
  1611. # Filter out filter pipelines
  1612. models = [
  1613. model
  1614. for model in models
  1615. if "pipeline" not in model or model["pipeline"].get("type", None) != "filter"
  1616. ]
  1617. model_order_list = webui_app.state.config.MODEL_ORDER_LIST
  1618. if model_order_list:
  1619. model_order_dict = {model_id: i for i, model_id in enumerate(model_order_list)}
  1620. # Sort models by order list priority, with fallback for those not in the list
  1621. models.sort(
  1622. key=lambda x: (model_order_dict.get(x["id"], float("inf")), x["name"])
  1623. )
  1624. # Filter out models that the user does not have access to
  1625. if user.role == "user" and not BYPASS_MODEL_ACCESS_CONTROL:
  1626. filtered_models = []
  1627. for model in models:
  1628. if model.get("arena"):
  1629. if has_access(
  1630. user.id,
  1631. type="read",
  1632. access_control=model.get("info", {})
  1633. .get("meta", {})
  1634. .get("access_control", {}),
  1635. ):
  1636. filtered_models.append(model)
  1637. continue
  1638. model_info = Models.get_model_by_id(model["id"])
  1639. if model_info:
  1640. if user.id == model_info.user_id or has_access(
  1641. user.id, type="read", access_control=model_info.access_control
  1642. ):
  1643. filtered_models.append(model)
  1644. models = filtered_models
  1645. log.debug(
  1646. f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}"
  1647. )
  1648. return {"data": models}
  1649. @app.get("/api/models/base")
  1650. async def get_base_models(user=Depends(get_admin_user)):
  1651. models = await get_all_base_models()
  1652. # Filter out arena models
  1653. models = [model for model in models if not model.get("arena", False)]
  1654. return {"data": models}
  1655. @app.post("/api/chat/completions")
  1656. async def generate_chat_completions(
  1657. form_data: dict,
  1658. user=Depends(get_verified_user),
  1659. bypass_filter: bool = False,
  1660. ):
  1661. if BYPASS_MODEL_ACCESS_CONTROL:
  1662. bypass_filter = True
  1663. model_list = app.state.MODELS
  1664. models = {model["id"]: model for model in model_list}
  1665. model_id = form_data["model"]
  1666. if model_id not in models:
  1667. raise HTTPException(
  1668. status_code=status.HTTP_404_NOT_FOUND,
  1669. detail="Model not found",
  1670. )
  1671. model = models[model_id]
  1672. # Check if user has access to the model
  1673. if not bypass_filter and user.role == "user":
  1674. if model.get("arena"):
  1675. if not has_access(
  1676. user.id,
  1677. type="read",
  1678. access_control=model.get("info", {})
  1679. .get("meta", {})
  1680. .get("access_control", {}),
  1681. ):
  1682. raise HTTPException(
  1683. status_code=403,
  1684. detail="Model not found",
  1685. )
  1686. else:
  1687. model_info = Models.get_model_by_id(model_id)
  1688. if not model_info:
  1689. raise HTTPException(
  1690. status_code=404,
  1691. detail="Model not found",
  1692. )
  1693. elif not (
  1694. user.id == model_info.user_id
  1695. or has_access(
  1696. user.id, type="read", access_control=model_info.access_control
  1697. )
  1698. ):
  1699. raise HTTPException(
  1700. status_code=403,
  1701. detail="Model not found",
  1702. )
  1703. if model["owned_by"] == "arena":
  1704. model_ids = model.get("info", {}).get("meta", {}).get("model_ids")
  1705. filter_mode = model.get("info", {}).get("meta", {}).get("filter_mode")
  1706. if model_ids and filter_mode == "exclude":
  1707. model_ids = [
  1708. model["id"]
  1709. for model in await get_all_models()
  1710. if model.get("owned_by") != "arena" and model["id"] not in model_ids
  1711. ]
  1712. selected_model_id = None
  1713. if isinstance(model_ids, list) and model_ids:
  1714. selected_model_id = random.choice(model_ids)
  1715. else:
  1716. model_ids = [
  1717. model["id"]
  1718. for model in await get_all_models()
  1719. if model.get("owned_by") != "arena"
  1720. ]
  1721. selected_model_id = random.choice(model_ids)
  1722. form_data["model"] = selected_model_id
  1723. if form_data.get("stream") == True:
  1724. async def stream_wrapper(stream):
  1725. yield f"data: {json.dumps({'selected_model_id': selected_model_id})}\n\n"
  1726. async for chunk in stream:
  1727. yield chunk
  1728. response = await generate_chat_completions(
  1729. form_data, user, bypass_filter=True
  1730. )
  1731. return StreamingResponse(
  1732. stream_wrapper(response.body_iterator), media_type="text/event-stream"
  1733. )
  1734. else:
  1735. return {
  1736. **(
  1737. await generate_chat_completions(form_data, user, bypass_filter=True)
  1738. ),
  1739. "selected_model_id": selected_model_id,
  1740. }
  1741. if model.get("pipe"):
  1742. # Below does not require bypass_filter because this is the only route the uses this function and it is already bypassing the filter
  1743. return await generate_function_chat_completion(
  1744. form_data, user=user, models=models
  1745. )
  1746. if model["owned_by"] == "ollama":
  1747. # Using /ollama/api/chat endpoint
  1748. form_data = convert_payload_openai_to_ollama(form_data)
  1749. form_data = GenerateChatCompletionForm(**form_data)
  1750. response = await generate_ollama_chat_completion(
  1751. form_data=form_data, user=user, bypass_filter=bypass_filter
  1752. )
  1753. if form_data.stream:
  1754. response.headers["content-type"] = "text/event-stream"
  1755. return StreamingResponse(
  1756. convert_streaming_response_ollama_to_openai(response),
  1757. headers=dict(response.headers),
  1758. )
  1759. else:
  1760. return convert_response_ollama_to_openai(response)
  1761. else:
  1762. return await generate_openai_chat_completion(
  1763. form_data, user=user, bypass_filter=bypass_filter
  1764. )
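# Minimal request-body sketch for POST /api/chat/completions (model id illustrative):
#   {"model": "llama3:latest",
#    "messages": [{"role": "user", "content": "Hello"}],
#    "stream": true}
# The handler resolves the model, enforces access control, then dispatches to the pipe,
# Ollama, or OpenAI-compatible backend selected above.
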
  1765. @app.post("/api/chat/completed")
  1766. async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
  1767. model_list = await get_all_models()
  1768. models = {model["id"]: model for model in model_list}
  1769. data = form_data
  1770. model_id = data["model"]
  1771. if model_id not in models:
  1772. raise HTTPException(
  1773. status_code=status.HTTP_404_NOT_FOUND,
  1774. detail="Model not found",
  1775. )
  1776. model = models[model_id]
  1777. sorted_filters = get_sorted_filters(model_id, models)
  1778. if "pipeline" in model:
  1779. sorted_filters = [model] + sorted_filters
  1780. for filter in sorted_filters:
  1781. r = None
  1782. try:
  1783. urlIdx = filter["urlIdx"]
  1784. url = app.state.config.OPENAI_API_BASE_URLS[urlIdx]
  1785. key = app.state.config.OPENAI_API_KEYS[urlIdx]
  1786. if key != "":
  1787. headers = {"Authorization": f"Bearer {key}"}
  1788. r = requests.post(
  1789. f"{url}/{filter['id']}/filter/outlet",
  1790. headers=headers,
  1791. json={
  1792. "user": {
  1793. "id": user.id,
  1794. "name": user.name,
  1795. "email": user.email,
  1796. "role": user.role,
  1797. },
  1798. "body": data,
  1799. },
  1800. )
  1801. r.raise_for_status()
  1802. data = r.json()
  1803. except Exception as e:
  1804. # Handle connection error here
  1805. print(f"Connection error: {e}")
  1806. if r is not None:
  1807. try:
  1808. res = r.json()
  1809. if "detail" in res:
  1810. return JSONResponse(
  1811. status_code=r.status_code,
  1812. content=res,
  1813. )
  1814. except Exception:
  1815. pass
  1816. else:
  1817. pass
  1818. __event_emitter__ = get_event_emitter(
  1819. {
  1820. "chat_id": data["chat_id"],
  1821. "message_id": data["id"],
  1822. "session_id": data["session_id"],
  1823. }
  1824. )
  1825. __event_call__ = get_event_call(
  1826. {
  1827. "chat_id": data["chat_id"],
  1828. "message_id": data["id"],
  1829. "session_id": data["session_id"],
  1830. }
  1831. )
  1832. def get_priority(function_id):
  1833. function = Functions.get_function_by_id(function_id)
  1834. if function is not None and hasattr(function, "valves"):
  1835. # TODO: Fix FunctionModel to include vavles
  1836. return (function.valves if function.valves else {}).get("priority", 0)
  1837. return 0
  1838. filter_ids = [function.id for function in Functions.get_global_filter_functions()]
  1839. if "info" in model and "meta" in model["info"]:
  1840. filter_ids.extend(model["info"]["meta"].get("filterIds", []))
  1841. filter_ids = list(set(filter_ids))
  1842. enabled_filter_ids = [
  1843. function.id
  1844. for function in Functions.get_functions_by_type("filter", active_only=True)
  1845. ]
  1846. filter_ids = [
  1847. filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids
  1848. ]
  1849. # Sort filter_ids by priority, using the get_priority function
  1850. filter_ids.sort(key=get_priority)
  1851. for filter_id in filter_ids:
  1852. filter = Functions.get_function_by_id(filter_id)
  1853. if not filter:
  1854. continue
  1855. if filter_id in webui_app.state.FUNCTIONS:
  1856. function_module = webui_app.state.FUNCTIONS[filter_id]
  1857. else:
  1858. function_module, _, _ = load_function_module_by_id(filter_id)
  1859. webui_app.state.FUNCTIONS[filter_id] = function_module
  1860. if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
  1861. valves = Functions.get_function_valves_by_id(filter_id)
  1862. function_module.valves = function_module.Valves(
  1863. **(valves if valves else {})
  1864. )
  1865. if not hasattr(function_module, "outlet"):
  1866. continue
  1867. try:
  1868. outlet = function_module.outlet
  1869. # Get the signature of the function
  1870. sig = inspect.signature(outlet)
  1871. params = {"body": data}
  1872. # Extra parameters to be passed to the function
  1873. extra_params = {
  1874. "__model__": model,
  1875. "__id__": filter_id,
  1876. "__event_emitter__": __event_emitter__,
  1877. "__event_call__": __event_call__,
  1878. }
  1879. # Add extra params in contained in function signature
  1880. for key, value in extra_params.items():
  1881. if key in sig.parameters:
  1882. params[key] = value
  1883. if "__user__" in sig.parameters:
  1884. __user__ = {
  1885. "id": user.id,
  1886. "email": user.email,
  1887. "name": user.name,
  1888. "role": user.role,
  1889. }
  1890. try:
  1891. if hasattr(function_module, "UserValves"):
  1892. __user__["valves"] = function_module.UserValves(
  1893. **Functions.get_user_valves_by_id_and_user_id(
  1894. filter_id, user.id
  1895. )
  1896. )
  1897. except Exception as e:
  1898. print(e)
  1899. params = {**params, "__user__": __user__}
  1900. if inspect.iscoroutinefunction(outlet):
  1901. data = await outlet(**params)
  1902. else:
  1903. data = outlet(**params)
  1904. except Exception as e:
  1905. print(f"Error: {e}")
  1906. return JSONResponse(
  1907. status_code=status.HTTP_400_BAD_REQUEST,
  1908. content={"detail": str(e)},
  1909. )
  1910. return data
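# The /api/chat/completed payload must carry "model", "chat_id", "id" (the message id)
# and "session_id", since those fields are read above to build the event emitter/caller;
# outlet filters then run in ascending "priority" valve order (0 before 5 before 10).
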
  1911. @app.post("/api/chat/actions/{action_id}")
  1912. async def chat_action(action_id: str, form_data: dict, user=Depends(get_verified_user)):
  1913. if "." in action_id:
  1914. action_id, sub_action_id = action_id.split(".")
  1915. else:
  1916. sub_action_id = None
  1917. action = Functions.get_function_by_id(action_id)
  1918. if not action:
  1919. raise HTTPException(
  1920. status_code=status.HTTP_404_NOT_FOUND,
  1921. detail="Action not found",
  1922. )
  1923. model_list = await get_all_models()
  1924. models = {model["id"]: model for model in model_list}
  1925. data = form_data
  1926. model_id = data["model"]
  1927. if model_id not in models:
  1928. raise HTTPException(
  1929. status_code=status.HTTP_404_NOT_FOUND,
  1930. detail="Model not found",
  1931. )
  1932. model = models[model_id]
  1933. __event_emitter__ = get_event_emitter(
  1934. {
  1935. "chat_id": data["chat_id"],
  1936. "message_id": data["id"],
  1937. "session_id": data["session_id"],
  1938. }
  1939. )
  1940. __event_call__ = get_event_call(
  1941. {
  1942. "chat_id": data["chat_id"],
  1943. "message_id": data["id"],
  1944. "session_id": data["session_id"],
  1945. }
  1946. )
  1947. if action_id in webui_app.state.FUNCTIONS:
  1948. function_module = webui_app.state.FUNCTIONS[action_id]
  1949. else:
  1950. function_module, _, _ = load_function_module_by_id(action_id)
  1951. webui_app.state.FUNCTIONS[action_id] = function_module
  1952. if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
  1953. valves = Functions.get_function_valves_by_id(action_id)
  1954. function_module.valves = function_module.Valves(**(valves if valves else {}))
  1955. if hasattr(function_module, "action"):
  1956. try:
  1957. action = function_module.action
  1958. # Get the signature of the function
  1959. sig = inspect.signature(action)
  1960. params = {"body": data}
  1961. # Extra parameters to be passed to the function
  1962. extra_params = {
  1963. "__model__": model,
  1964. "__id__": sub_action_id if sub_action_id is not None else action_id,
  1965. "__event_emitter__": __event_emitter__,
  1966. "__event_call__": __event_call__,
  1967. }
  1968. # Add extra params in contained in function signature
  1969. for key, value in extra_params.items():
  1970. if key in sig.parameters:
  1971. params[key] = value
  1972. if "__user__" in sig.parameters:
  1973. __user__ = {
  1974. "id": user.id,
  1975. "email": user.email,
  1976. "name": user.name,
  1977. "role": user.role,
  1978. }
  1979. try:
  1980. if hasattr(function_module, "UserValves"):
  1981. __user__["valves"] = function_module.UserValves(
  1982. **Functions.get_user_valves_by_id_and_user_id(
  1983. action_id, user.id
  1984. )
  1985. )
  1986. except Exception as e:
  1987. print(e)
  1988. params = {**params, "__user__": __user__}
  1989. if inspect.iscoroutinefunction(action):
  1990. data = await action(**params)
  1991. else:
  1992. data = action(**params)
  1993. except Exception as e:
  1994. print(f"Error: {e}")
  1995. return JSONResponse(
  1996. status_code=status.HTTP_400_BAD_REQUEST,
  1997. content={"detail": str(e)},
  1998. )
  1999. return data
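# Action ids may be namespaced as "<function_id>.<sub_action_id>" (names below invented
# for illustration): POST /api/chat/actions/summary.short loads the "summary" function
# and passes "short" to its action() as __id__.
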
##################################
#
# Config Endpoints
#
##################################


@app.get("/api/config")
async def get_app_config(request: Request):
    user = None
    if "token" in request.cookies:
        token = request.cookies.get("token")
        try:
            data = decode_token(token)
        except Exception as e:
            log.debug(e)
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid token",
            )
        if data is not None and "id" in data:
            user = Users.get_user_by_id(data["id"])

    onboarding = False
    if user is None:
        user_count = Users.get_num_users()
        onboarding = user_count == 0

    return {
        **({"onboarding": True} if onboarding else {}),
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": str(DEFAULT_LOCALE),
        "oauth": {
            "providers": {
                name: config.get("name", name)
                for name, config in OAUTH_PROVIDERS.items()
            }
        },
        "features": {
            "auth": WEBUI_AUTH,
            "auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
            "enable_ldap": webui_app.state.config.ENABLE_LDAP,
            "enable_api_key": webui_app.state.config.ENABLE_API_KEY,
            "enable_signup": webui_app.state.config.ENABLE_SIGNUP,
            "enable_login_form": webui_app.state.config.ENABLE_LOGIN_FORM,
            **(
                {
                    "enable_web_search": retrieval_app.state.config.ENABLE_RAG_WEB_SEARCH,
                    "enable_image_generation": images_app.state.config.ENABLED,
                    "enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
                    "enable_message_rating": webui_app.state.config.ENABLE_MESSAGE_RATING,
                    "enable_admin_export": ENABLE_ADMIN_EXPORT,
                    "enable_admin_chat_access": ENABLE_ADMIN_CHAT_ACCESS,
                }
                if user is not None
                else {}
            ),
        },
        **(
            {
                "default_models": webui_app.state.config.DEFAULT_MODELS,
                "default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
                "audio": {
                    "tts": {
                        "engine": audio_app.state.config.TTS_ENGINE,
                        "voice": audio_app.state.config.TTS_VOICE,
                        "split_on": audio_app.state.config.TTS_SPLIT_ON,
                    },
                    "stt": {
                        "engine": audio_app.state.config.STT_ENGINE,
                    },
                },
                "file": {
                    "max_size": retrieval_app.state.config.FILE_MAX_SIZE,
                    "max_count": retrieval_app.state.config.FILE_MAX_COUNT,
                },
                "permissions": {**webui_app.state.config.USER_PERMISSIONS},
            }
            if user is not None
            else {}
        ),
    }

class UrlForm(BaseModel):
    url: str


@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    return {
        "url": app.state.config.WEBHOOK_URL,
    }


@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    app.state.config.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
    return {"url": app.state.config.WEBHOOK_URL}


@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    if OFFLINE_MODE:
        log.debug(
            "Offline mode is enabled, returning current version as latest version"
        )
        return {"current": VERSION, "latest": VERSION}
    try:
        timeout = aiohttp.ClientTimeout(total=1)
        async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                latest_version = data["tag_name"]

                return {"current": VERSION, "latest": latest_version[1:]}
    except Exception as e:
        log.debug(e)
        return {"current": VERSION, "latest": VERSION}


@app.get("/api/changelog")
async def get_app_changelog():
    return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}

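# Example of the update check (version numbers illustrative): GitHub's latest-release
# API returns a body containing {"tag_name": "v0.4.8"}, and latest_version[1:] strips
# the leading "v", so the endpoint responds with {"current": VERSION, "latest": "0.4.8"}.
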
############################
# OAuth Login & Callback
############################

# SessionMiddleware is used by authlib for oauth
if len(OAUTH_PROVIDERS) > 0:
    app.add_middleware(
        SessionMiddleware,
        secret_key=WEBUI_SECRET_KEY,
        session_cookie="oui-session",
        same_site=WEBUI_SESSION_COOKIE_SAME_SITE,
        https_only=WEBUI_SESSION_COOKIE_SECURE,
    )


@app.get("/oauth/{provider}/login")
async def oauth_login(provider: str, request: Request):
    return await oauth_manager.handle_login(provider, request)


# OAuth login logic is as follows:
# 1. Attempt to find a user with a matching subject ID, tied to the provider
# 2. If OAUTH_MERGE_ACCOUNTS_BY_EMAIL is true, find a user with the email address provided via OAuth
#    - This is considered insecure in general, as OAuth providers do not always verify email addresses
# 3. If there is no user, and ENABLE_OAUTH_SIGNUP is true, create a user
#    - Email addresses are considered unique, so registration fails if the email address is already taken
@app.get("/oauth/{provider}/callback")
async def oauth_callback(provider: str, request: Request, response: Response):
    return await oauth_manager.handle_callback(provider, request, response)

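# Flow sketch, assuming a provider named "google" is configured in OAUTH_PROVIDERS:
#   GET /oauth/google/login     -> oauth_manager redirects to the provider's consent screen
#   GET /oauth/google/callback  -> oauth_manager applies the lookup/signup rules above
# and establishes the session for the matched or newly created user.
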
  2144. @app.get("/manifest.json")
  2145. async def get_manifest_json():
  2146. return {
  2147. "name": WEBUI_NAME,
  2148. "short_name": WEBUI_NAME,
  2149. "description": "Open WebUI is an open, extensible, user-friendly interface for AI that adapts to your workflow.",
  2150. "start_url": "/",
  2151. "display": "standalone",
  2152. "background_color": "#343541",
  2153. "orientation": "natural",
  2154. "icons": [
  2155. {
  2156. "src": "/static/logo.png",
  2157. "type": "image/png",
  2158. "sizes": "500x500",
  2159. "purpose": "any",
  2160. },
  2161. {
  2162. "src": "/static/logo.png",
  2163. "type": "image/png",
  2164. "sizes": "500x500",
  2165. "purpose": "maskable",
  2166. },
  2167. ],
  2168. }
  2169. @app.get("/opensearch.xml")
  2170. async def get_opensearch_xml():
  2171. xml_content = rf"""
  2172. <OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
  2173. <ShortName>{WEBUI_NAME}</ShortName>
  2174. <Description>Search {WEBUI_NAME}</Description>
  2175. <InputEncoding>UTF-8</InputEncoding>
  2176. <Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/static/favicon.png</Image>
  2177. <Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
  2178. <moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
  2179. </OpenSearchDescription>
  2180. """
  2181. return Response(content=xml_content, media_type="application/xml")
  2182. @app.get("/health")
  2183. async def healthcheck():
  2184. return {"status": True}
  2185. @app.get("/health/db")
  2186. async def healthcheck_with_db():
  2187. Session.execute(text("SELECT 1;")).all()
  2188. return {"status": True}
  2189. app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
  2190. app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")
  2191. if os.path.exists(FRONTEND_BUILD_DIR):
  2192. mimetypes.add_type("text/javascript", ".js")
  2193. app.mount(
  2194. "/",
  2195. SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
  2196. name="spa-static-files",
  2197. )
  2198. else:
  2199. log.warning(
  2200. f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
  2201. )