# main.py

import inspect
import json
import logging
from typing import AsyncGenerator, Generator, Iterator

from open_webui.apps.socket.main import get_event_call, get_event_emitter
from open_webui.apps.webui.models.functions import Functions
from open_webui.apps.webui.models.models import Models
from open_webui.apps.webui.routers import (
    auths,
    chats,
    folders,
    configs,
    files,
    functions,
    memories,
    models,
    knowledge,
    prompts,
    tools,
    users,
    utils,
)
from open_webui.apps.webui.utils import load_function_module_by_id
from open_webui.config import (
    ADMIN_EMAIL,
    CORS_ALLOW_ORIGIN,
    DEFAULT_MODELS,
    DEFAULT_PROMPT_SUGGESTIONS,
    DEFAULT_USER_ROLE,
    ENABLE_COMMUNITY_SHARING,
    ENABLE_LOGIN_FORM,
    ENABLE_MESSAGE_RATING,
    ENABLE_SIGNUP,
    JWT_EXPIRES_IN,
    OAUTH_EMAIL_CLAIM,
    OAUTH_PICTURE_CLAIM,
    OAUTH_USERNAME_CLAIM,
    SHOW_ADMIN_DETAILS,
    USER_PERMISSIONS,
    WEBHOOK_URL,
    WEBUI_AUTH,
    WEBUI_BANNERS,
    AppConfig,
)
from open_webui.env import (
    WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
    WEBUI_AUTH_TRUSTED_NAME_HEADER,
)

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

from open_webui.utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)
from open_webui.utils.payload import (
    apply_model_params_to_body_openai,
    apply_model_system_prompt_to_body,
)
from open_webui.utils.tools import get_tools

app = FastAPI()

log = logging.getLogger(__name__)

app.state.config = AppConfig()

app.state.config.ENABLE_SIGNUP = ENABLE_SIGNUP
app.state.config.ENABLE_LOGIN_FORM = ENABLE_LOGIN_FORM
app.state.config.JWT_EXPIRES_IN = JWT_EXPIRES_IN

app.state.AUTH_TRUSTED_EMAIL_HEADER = WEBUI_AUTH_TRUSTED_EMAIL_HEADER
app.state.AUTH_TRUSTED_NAME_HEADER = WEBUI_AUTH_TRUSTED_NAME_HEADER

app.state.config.SHOW_ADMIN_DETAILS = SHOW_ADMIN_DETAILS
app.state.config.ADMIN_EMAIL = ADMIN_EMAIL

app.state.config.DEFAULT_MODELS = DEFAULT_MODELS
app.state.config.DEFAULT_PROMPT_SUGGESTIONS = DEFAULT_PROMPT_SUGGESTIONS
app.state.config.DEFAULT_USER_ROLE = DEFAULT_USER_ROLE
app.state.config.USER_PERMISSIONS = USER_PERMISSIONS
app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS
app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING
app.state.config.ENABLE_MESSAGE_RATING = ENABLE_MESSAGE_RATING

app.state.config.OAUTH_USERNAME_CLAIM = OAUTH_USERNAME_CLAIM
app.state.config.OAUTH_PICTURE_CLAIM = OAUTH_PICTURE_CLAIM
app.state.config.OAUTH_EMAIL_CLAIM = OAUTH_EMAIL_CLAIM

app.state.MODELS = {}
app.state.TOOLS = {}
app.state.FUNCTIONS = {}

app.add_middleware(
    CORSMiddleware,
    allow_origins=CORS_ALLOW_ORIGIN,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(configs.router, prefix="/configs", tags=["configs"])
app.include_router(auths.router, prefix="/auths", tags=["auths"])
app.include_router(users.router, prefix="/users", tags=["users"])
app.include_router(chats.router, prefix="/chats", tags=["chats"])
app.include_router(folders.router, prefix="/folders", tags=["folders"])
app.include_router(models.router, prefix="/models", tags=["models"])
app.include_router(knowledge.router, prefix="/knowledge", tags=["knowledge"])
app.include_router(prompts.router, prefix="/prompts", tags=["prompts"])
app.include_router(files.router, prefix="/files", tags=["files"])
app.include_router(tools.router, prefix="/tools", tags=["tools"])
app.include_router(functions.router, prefix="/functions", tags=["functions"])
app.include_router(memories.router, prefix="/memories", tags=["memories"])
app.include_router(utils.router, prefix="/utils", tags=["utils"])


@app.get("/")
async def get_status():
    return {
        "status": True,
        "auth": WEBUI_AUTH,
        "default_models": app.state.config.DEFAULT_MODELS,
        "default_prompt_suggestions": app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
    }


def get_function_module(pipe_id: str):
    # Check if function is already loaded; load and cache it otherwise
    if pipe_id not in app.state.FUNCTIONS:
        function_module, _, _ = load_function_module_by_id(pipe_id)
        app.state.FUNCTIONS[pipe_id] = function_module
    else:
        function_module = app.state.FUNCTIONS[pipe_id]

    # Apply the stored valves configuration, if the module defines one
    if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
        valves = Functions.get_function_valves_by_id(pipe_id)
        function_module.valves = function_module.Valves(**(valves if valves else {}))

    return function_module
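
# Illustrative sketch (not part of this module): a pipe function loaded by
# load_function_module_by_id is expected to expose a callable `pipe` and may
# optionally define `Valves` / `UserValves` pydantic models, roughly like:
#
#     from pydantic import BaseModel
#
#     class Pipe:
#         class Valves(BaseModel):
#             api_key: str = ""  # hypothetical field, for illustration only
#
#         def __init__(self):
#             self.valves = self.Valves()
#
#         def pipe(self, body: dict):
#             return f"echo: {body['messages'][-1]['content']}"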


async def get_pipe_models():
    pipes = Functions.get_functions_by_type("pipe", active_only=True)
    pipe_models = []

    for pipe in pipes:
        function_module = get_function_module(pipe.id)

        # Check if function is a manifold
        if hasattr(function_module, "pipes"):
            sub_pipes = []

            # Check if pipes is a function or a list
            try:
                if callable(function_module.pipes):
                    sub_pipes = function_module.pipes()
                else:
                    sub_pipes = function_module.pipes
            except Exception as e:
                log.exception(e)
                sub_pipes = []

            log.debug(sub_pipes)

            for p in sub_pipes:
                sub_pipe_id = f'{pipe.id}.{p["id"]}'
                sub_pipe_name = p["name"]

                if hasattr(function_module, "name"):
                    sub_pipe_name = f"{function_module.name}{sub_pipe_name}"

                pipe_flag = {"type": pipe.type}
                pipe_models.append(
                    {
                        "id": sub_pipe_id,
                        "name": sub_pipe_name,
                        "object": "model",
                        "created": pipe.created_at,
                        "owned_by": "openai",
                        "pipe": pipe_flag,
                    }
                )
        else:
            pipe_flag = {"type": "pipe"}
            pipe_models.append(
                {
                    "id": pipe.id,
                    "name": pipe.name,
                    "object": "model",
                    "created": pipe.created_at,
                    "owned_by": "openai",
                    "pipe": pipe_flag,
                }
            )

    return pipe_models
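
# Illustrative note (hypothetical ids): a manifold's `pipes` attribute or
# callable is expected to yield entries like [{"id": "gpt-4o", "name": "GPT-4o"}],
# which the loop above exposes as model entries namespaced as
# "<pipe.id>.<sub id>", e.g. "my_manifold.gpt-4o".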


async def execute_pipe(pipe, params):
    if inspect.iscoroutinefunction(pipe):
        return await pipe(**params)
    else:
        return pipe(**params)


async def get_message_content(res: str | Generator | AsyncGenerator) -> str:
    if isinstance(res, str):
        return res
    if isinstance(res, Generator):
        return "".join(map(str, res))
    if isinstance(res, AsyncGenerator):
        return "".join([str(stream) async for stream in res])


def process_line(form_data: dict, line):
    if isinstance(line, BaseModel):
        line = line.model_dump_json()
        line = f"data: {line}"
    if isinstance(line, dict):
        line = f"data: {json.dumps(line)}"

    try:
        line = line.decode("utf-8")
    except Exception:
        pass

    if line.startswith("data:"):
        return f"{line}\n\n"
    else:
        line = openai_chat_chunk_message_template(form_data["model"], line)
        return f"data: {json.dumps(line)}\n\n"
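
# Illustrative note (output shape assumed, values abbreviated): a plain string
# line is wrapped in an OpenAI-style chunk by openai_chat_chunk_message_template,
# so process_line({"model": "my_pipe"}, "Hello") yields roughly
#     'data: {"model": "my_pipe", "choices": [{"delta": {"content": "Hello"}, ...}], ...}\n\n'
# while a line that already starts with "data:" is passed through with the
# trailing blank line required by the SSE format.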


def get_pipe_id(form_data: dict) -> str:
    pipe_id = form_data["model"]
    if "." in pipe_id:
        pipe_id, _ = pipe_id.split(".", 1)
    log.debug(pipe_id)
    return pipe_id
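
# Example (hypothetical ids): for a manifold model the id is namespaced, so
# get_pipe_id({"model": "my_manifold.gpt-4o"}) returns "my_manifold", while a
# plain pipe id such as "my_pipe" is returned unchanged.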


def get_function_params(function_module, form_data, user, extra_params=None):
    if extra_params is None:
        extra_params = {}

    pipe_id = get_pipe_id(form_data)

    # Get the signature of the function
    sig = inspect.signature(function_module.pipe)
    params = {"body": form_data} | {
        k: v for k, v in extra_params.items() if k in sig.parameters
    }

    if "__user__" in params and hasattr(function_module, "UserValves"):
        user_valves = Functions.get_user_valves_by_id_and_user_id(pipe_id, user.id)
        try:
            params["__user__"]["valves"] = function_module.UserValves(**user_valves)
        except Exception as e:
            log.exception(e)
            params["__user__"]["valves"] = function_module.UserValves()

    return params
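
# Illustrative note (hypothetical pipe signature): only the extra parameters
# that the pipe actually declares are forwarded, so for
#     def pipe(self, body: dict, __user__: dict, __event_emitter__=None): ...
# the resulting params contain "body", "__user__" and "__event_emitter__",
# and __user__["valves"] is populated when the module defines UserValves.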


async def generate_function_chat_completion(form_data, user):
    model_id = form_data.get("model")
    model_info = Models.get_model_by_id(model_id)

    metadata = form_data.pop("metadata", {})
    files = metadata.get("files", [])
    tool_ids = metadata.get("tool_ids", [])

    # Check if tool_ids is None
    if tool_ids is None:
        tool_ids = []

    __event_emitter__ = None
    __event_call__ = None
    __task__ = None
    __task_body__ = None

    if metadata:
        if all(k in metadata for k in ("session_id", "chat_id", "message_id")):
            __event_emitter__ = get_event_emitter(metadata)
            __event_call__ = get_event_call(metadata)
        __task__ = metadata.get("task", None)
        __task_body__ = metadata.get("task_body", None)

    extra_params = {
        "__event_emitter__": __event_emitter__,
        "__event_call__": __event_call__,
        "__task__": __task__,
        "__task_body__": __task_body__,
        "__files__": files,
        "__user__": {
            "id": user.id,
            "email": user.email,
            "name": user.name,
            "role": user.role,
        },
    }
    extra_params["__tools__"] = get_tools(
        app,
        tool_ids,
        user,
        {
            **extra_params,
            "__model__": app.state.MODELS[form_data["model"]],
            "__messages__": form_data["messages"],
            "__files__": files,
        },
    )

    if model_info:
        if model_info.base_model_id:
            form_data["model"] = model_info.base_model_id

        params = model_info.params.model_dump()
        form_data = apply_model_params_to_body_openai(params, form_data)
        form_data = apply_model_system_prompt_to_body(params, form_data, user)

    pipe_id = get_pipe_id(form_data)
    function_module = get_function_module(pipe_id)

    pipe = function_module.pipe
    params = get_function_params(function_module, form_data, user, extra_params)

    if form_data.get("stream", False):

        async def stream_content():
            try:
                res = await execute_pipe(pipe, params)

                # Directly return if the response is a StreamingResponse
                if isinstance(res, StreamingResponse):
                    async for data in res.body_iterator:
                        yield data
                    return
                if isinstance(res, dict):
                    yield f"data: {json.dumps(res)}\n\n"
                    return

            except Exception as e:
                log.exception(f"Error: {e}")
                yield f"data: {json.dumps({'error': {'detail': str(e)}})}\n\n"
                return

            if isinstance(res, str):
                message = openai_chat_chunk_message_template(form_data["model"], res)
                yield f"data: {json.dumps(message)}\n\n"

            if isinstance(res, Iterator):
                for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, AsyncGenerator):
                async for line in res:
                    yield process_line(form_data, line)

            if isinstance(res, str) or isinstance(res, Generator):
                finish_message = openai_chat_chunk_message_template(
                    form_data["model"], ""
                )
                finish_message["choices"][0]["finish_reason"] = "stop"
                yield f"data: {json.dumps(finish_message)}\n\n"
                yield "data: [DONE]"

        return StreamingResponse(stream_content(), media_type="text/event-stream")
    else:
        try:
            res = await execute_pipe(pipe, params)
        except Exception as e:
            log.exception(f"Error: {e}")
            return {"error": {"detail": str(e)}}

        if isinstance(res, StreamingResponse) or isinstance(res, dict):
            return res
        if isinstance(res, BaseModel):
            return res.model_dump()

        message = await get_message_content(res)
        return openai_chat_completion_message_template(form_data["model"], message)
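
# Illustrative usage sketch (hypothetical values): callers are expected to pass
# an OpenAI-style request body, e.g.
#     form_data = {
#         "model": "my_pipe",
#         "messages": [{"role": "user", "content": "Hi"}],
#         "stream": True,
#         "metadata": {"session_id": "...", "chat_id": "...", "message_id": "..."},
#     }
#     response = await generate_function_chat_completion(form_data, user)
# which returns a StreamingResponse of SSE chunks when "stream" is true and an
# OpenAI-style completion dict otherwise.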