# main.py

from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from fastapi.middleware.cors import CORSMiddleware

from apps.webui.routers import (
    auths,
    users,
    chats,
    documents,
    tools,
    models,
    prompts,
    configs,
    memories,
    utils,
    files,
    functions,
)

from apps.webui.models.functions import Functions
from apps.webui.models.models import Models
from apps.webui.utils import load_function_module_by_id

from utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
    add_or_update_system_message,
)
from utils.task import prompt_template

from config import (
    SHOW_ADMIN_DETAILS,
    ADMIN_EMAIL,
    WEBUI_AUTH,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    ENABLE_SIGNUP,
    ENABLE_LOGIN_FORM,
    USER_PERMISSIONS,
    WEBHOOK_URL,
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
    JWT_EXPIRES_IN,
    WEBUI_BANNERS,
    ENABLE_COMMUNITY_SHARING,
    AppConfig,
    OAUTH_USERNAME_CLAIM,
    OAUTH_PICTURE_CLAIM,
)

from apps.socket.main import get_event_call, get_event_emitter

import inspect
import json

from typing import Iterator, Generator, AsyncGenerator
from pydantic import BaseModel

app = FastAPI()

origins = ["*"]

app.state.config = AppConfig()

app.state.config.ENABLE_SIGNUP = ENABLE_SIGNUP
app.state.config.ENABLE_LOGIN_FORM = ENABLE_LOGIN_FORM
app.state.config.JWT_EXPIRES_IN = JWT_EXPIRES_IN

app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
app.state.AUTH_TRUSTED_NAME_HEADER = WEBUI_AUTH_TRUSTED_NAME_HEADER

app.state.config.SHOW_ADMIN_DETAILS = SHOW_ADMIN_DETAILS
app.state.config.ADMIN_EMAIL = ADMIN_EMAIL

app.state.config.DEFAULT_MODELS = DEFAULT_MODELS
app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE
app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS

app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING

app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM

app.state.MODELS = {}
app.state.TOOLS = {}
app.state.FUNCTIONS = {}

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(configs.router, prefix="/configs", tags=["configs"])
app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(chats.router, prefix="/chats", tags=["chats"])
app.include_router(documents.router, prefix="/documents", tags=["documents"])
app.include_router(models.router, prefix="/models", tags=["models"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(files.router, prefix="/files", tags=["files"])
app.include_router(tools.router, prefix="/tools", tags=["tools"])
app.include_router(functions.router, prefix="/functions", tags=["functions"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])


@app.get("/")
async def get_status():
    return {
        "status": True,
        "auth": WEBUI_AUTH,
        "default_models": app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
    }
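

# Load a pipe function module on first use, cache it in app.state.FUNCTIONS,
# and refresh its valves from Functions.get_function_valves_by_id on every call.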
def get_function_module(pipe_id: str):
    # Check if function is already loaded
    if pipe_id not in app.state.FUNCTIONS:
        function_module, _, _ = load_function_module_by_id(pipe_id)
        app.state.FUNCTIONS[pipe_id] = function_module
    else:
        function_module = app.state.FUNCTIONS[pipe_id]

    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        valves = Functions.get_function_valves_by_id(pipe_id)
        function_module.valves = function_module.Valves(**(valves if valves else {}))

    return function_module
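

# Expose every active pipe function as an OpenAI-style model entry. A manifold
# pipe (one that defines a `pipes` list or callable) is expanded into one entry
# per sub-pipe, with ids of the form "<pipe_id>.<sub_pipe_id>".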
async def get_pipe_models():
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        function_module = get_function_module(pipe.id)

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            manifold_pipes = []

            # Check if pipes is a function or a list
            if callable(function_module.pipes):
                manifold_pipes = function_module.pipes()
            else:
                manifold_pipes = function_module.pipes

            for p in manifold_pipes:
                manifold_pipe_id = f'{pipe.id}.{p["id"]}'
                manifold_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    manifold_pipe_name = f"{function_module.name}{manifold_pipe_name}"

                pipe_flag = {"type": pipe.type}
                if hasattr(function_module, "ChatValves"):
                    pipe_flag["valves_spec"] = function_module.ChatValves.schema()

                pipe_models.append(
                    {
                        "id": manifold_pipe_id,
                        "name": manifold_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        else:
            pipe_flag = {"type": "pipe"}
            if hasattr(function_module, "ChatValves"):
                pipe_flag["valves_spec"] = function_module.ChatValves.schema()

            pipe_models.append(
                {
                    "id": pipe.id,
                    "name": pipe.name,
                    "object": "model",
                    "created": pipe.created_at,
                    "owned_by": "openai",
                    "pipe": pipe_flag,
                }
            )

    return pipe_models
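

# Invoke the pipe handler, awaiting it when it is defined as a coroutine.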
async def execute_pipe(pipe, params):
    if inspect.iscoroutinefunction(pipe):
        return await pipe(**params)
    else:
        return pipe(**params)
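

# Collapse a non-streaming pipe result (plain string, sync generator, or async
# generator) into a single message string.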
async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
    if isinstance(res, str):
        return res
    if isinstance(res, Generator):
        return "".join(map(str, res))
    if isinstance(res, AsyncGenerator):
        return "".join([str(stream) async for stream in res])
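

# Normalize a single streamed item (BaseModel, dict, bytes, or str) into an
# SSE "data: ..." line; plain text is wrapped in an OpenAI chat chunk first.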
def process_line(form_data: dict, line):
    if isinstance(line, BaseModel):
        line = line.model_dump_json()
        line = f"data: {line}"
    if isinstance(line, dict):
        line = f"data: {json.dumps(line)}"

    try:
        line = line.decode("utf-8")
    except Exception:
        pass

    if line.startswith("data:"):
        return f"{line}\n\n"
    else:
        line = openai_chat_chunk_message_template(form_data["model"], line)
        return f"data: {json.dumps(line)}\n\n"
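

# A manifold model id has the form "<pipe_id>.<sub_pipe_id>"; only the prefix
# identifies the function module to load.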
def get_pipe_id(form_data: dict) -> str:
    pipe_id = form_data["model"]
    if "." in pipe_id:
        pipe_id, _ = pipe_id.split(".", 1)
    print(pipe_id)
    return pipe_id
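

# Build the keyword arguments for the pipe handler: always pass the request
# body, pass extra params only if the handler's signature declares them, and
# add a __user__ dict (with UserValves when the module defines them).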
def get_function_params(function_module, form_data, user, extra_params={}):
    pipe_id = get_pipe_id(form_data)

    # Get the signature of the function
    sig = inspect.signature(function_module.pipe)
    params = {"body": form_data}

    for key, value in extra_params.items():
        if key in sig.parameters:
            params[key] = value

    if "__user__" in sig.parameters:
        __user__ = {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        }

        try:
            if hasattr(function_module, "UserValves"):
                __user__["valves"] = function_module.UserValves(
                    **Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
                )
        except Exception as e:
            print(e)

        params["__user__"] = __user__
    return params
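

# Copy per-model parameter overrides onto the request body, casting each value
# to the type the downstream API expects.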
# inplace function: form_data is modified
def apply_model_params_to_body(params: dict, form_data: dict) -> dict:
    if not params:
        return form_data

    mappings = {
        "temperature": float,
        # top_p and frequency_penalty are fractional values; casting them to
        # int would truncate them (e.g. 0.9 -> 0), so coerce to float instead.
        "top_p": float,
        "max_tokens": int,
        "frequency_penalty": float,
        "seed": lambda x: x,
        "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x],
    }

    for key, cast_func in mappings.items():
        if (value := params.get(key)) is not None:
            form_data[key] = cast_func(value)

    return form_data
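

# Render the model's configured system prompt template (filling in the user's
# name and location when available) and attach it to the message list.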
# inplace function: form_data is modified
def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> dict:
    system = params.get("system", None)
    if not system:
        return form_data

    if user:
        template_params = {
            "user_name": user.name,
            "user_location": user.info.get("location") if user.info else None,
        }
    else:
        template_params = {}

    system = prompt_template(system, **template_params)

    form_data["messages"] = add_or_update_system_message(
        system, form_data.get("messages", [])
    )
    return form_data
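

# Main entry point: resolve the target pipe and model settings, build the
# handler parameters, and return either an SSE StreamingResponse or a single
# OpenAI-style chat completion object.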
async def generate_function_chat_completion(form_data, user):
    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", None)

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
        __task__ = metadata.get("task", None)

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()
        form_data = apply_model_params_to_body(params, form_data)
        form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(
        function_module,
        form_data,
        user,
        {
            "__event_emitter__": __event_emitter__,
            "__event_call__": __event_call__,
            "__task__": __task__,
        },
    )

    if form_data["stream"]:

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return

            except Exception as e:
                print(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"
                yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            print(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)