# env.py
import importlib.metadata
import json
import logging
import os
import pkgutil
import shutil
import sys
from pathlib import Path

import markdown
from bs4 import BeautifulSoup

from open_webui.constants import ERROR_MESSAGES
  12. ####################################
  13. # Load .env file
  14. ####################################
  15. OPEN_WEBUI_DIR = Path(__file__).parent # the path containing this file
  16. print(OPEN_WEBUI_DIR)
  17. BACKEND_DIR = OPEN_WEBUI_DIR.parent # the path containing this file
  18. BASE_DIR = BACKEND_DIR.parent # the path containing the backend/
  19. print(BACKEND_DIR)
  20. print(BASE_DIR)
  21. try:
  22. from dotenv import find_dotenv, load_dotenv
  23. load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
  24. except ImportError:
  25. print("dotenv not installed, skipping...")
  26. DOCKER = os.environ.get("DOCKER", "False").lower() == "true"
  27. # device type embedding models - "cpu" (default), "cuda" (nvidia gpu required) or "mps" (apple silicon) - choosing this right can lead to better performance
  28. USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
  29. if USE_CUDA.lower() == "true":
  30. try:
  31. import torch
  32. assert torch.cuda.is_available(), "CUDA not available"
  33. DEVICE_TYPE = "cuda"
  34. except Exception as e:
  35. cuda_error = (
  36. "Error when testing CUDA but USE_CUDA_DOCKER is true. "
  37. f"Resetting USE_CUDA_DOCKER to false: {e}"
  38. )
  39. os.environ["USE_CUDA_DOCKER"] = "false"
  40. USE_CUDA = "false"
  41. DEVICE_TYPE = "cpu"
  42. else:
  43. DEVICE_TYPE = "cpu"
  44. try:
  45. import torch
  46. if torch.backends.mps.is_available() and torch.backends.mps.is_built():
  47. DEVICE_TYPE = "mps"
  48. except Exception:
  49. pass
  50. ####################################
  51. # LOGGING
  52. ####################################
  53. log_levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]
  54. GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
  55. if GLOBAL_LOG_LEVEL in log_levels:
  56. logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
  57. else:
  58. GLOBAL_LOG_LEVEL = "INFO"
  59. log = logging.getLogger(__name__)
  60. log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")
  61. if "cuda_error" in locals():
  62. log.exception(cuda_error)
  63. del cuda_error
  64. log_sources = [
  65. "AUDIO",
  66. "COMFYUI",
  67. "CONFIG",
  68. "DB",
  69. "IMAGES",
  70. "MAIN",
  71. "MODELS",
  72. "OLLAMA",
  73. "OPENAI",
  74. "RAG",
  75. "WEBHOOK",
  76. "SOCKET",
  77. "OAUTH",
  78. ]
  79. SRC_LOG_LEVELS = {}
  80. for source in log_sources:
  81. log_env_var = source + "_LOG_LEVEL"
  82. SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
  83. if SRC_LOG_LEVELS[source] not in log_levels:
  84. SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
  85. log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")
  86. log.setLevel(SRC_LOG_LEVELS["CONFIG"])
  87. WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
  88. if WEBUI_NAME != "Open WebUI":
  89. WEBUI_NAME += " (Open WebUI)"
  90. WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
  91. TRUSTED_SIGNATURE_KEY = os.environ.get("TRUSTED_SIGNATURE_KEY", "")
  92. ####################################
  93. # ENV (dev,test,prod)
  94. ####################################
  95. ENV = os.environ.get("ENV", "dev")
  96. FROM_INIT_PY = os.environ.get("FROM_INIT_PY", "False").lower() == "true"
  97. if FROM_INIT_PY:
  98. PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
  99. else:
  100. try:
  101. PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
  102. except Exception:
  103. PACKAGE_DATA = {"version": "0.0.0"}
  104. VERSION = PACKAGE_DATA["version"]
  105. # Function to parse each section
  106. def parse_section(section):
  107. items = []
  108. for li in section.find_all("li"):
  109. # Extract raw HTML string
  110. raw_html = str(li)
  111. # Extract text without HTML tags
  112. text = li.get_text(separator=" ", strip=True)
  113. # Split into title and content
  114. parts = text.split(": ", 1)
  115. title = parts[0].strip() if len(parts) > 1 else ""
  116. content = parts[1].strip() if len(parts) > 1 else text
  117. items.append({"title": title, "content": content, "raw": raw_html})
  118. return items
  119. try:
  120. changelog_path = BASE_DIR / "CHANGELOG.md"
  121. with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
  122. changelog_content = file.read()
  123. except Exception:
  124. changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
  125. # Convert markdown content to HTML
  126. html_content = markdown.markdown(changelog_content)
  127. # Parse the HTML content
  128. soup = BeautifulSoup(html_content, "html.parser")
  129. # Initialize JSON structure
  130. changelog_json = {}
  131. # Iterate over each version
  132. for version in soup.find_all("h2"):
  133. version_number = version.get_text().strip().split(" - ")[0][1:-1] # Remove brackets
  134. date = version.get_text().strip().split(" - ")[1]
  135. version_data = {"date": date}
  136. # Find the next sibling that is a h3 tag (section title)
  137. current = version.find_next_sibling()
  138. while current and current.name != "h2":
  139. if current.name == "h3":
  140. section_title = current.get_text().lower() # e.g., "added", "fixed"
  141. section_items = parse_section(current.find_next_sibling("ul"))
  142. version_data[section_title] = section_items
  143. # Move to the next element
  144. current = current.find_next_sibling()
  145. changelog_json[version_number] = version_data
  146. CHANGELOG = changelog_json
  147. ####################################
  148. # SAFE_MODE
  149. ####################################
  150. SAFE_MODE = os.environ.get("SAFE_MODE", "false").lower() == "true"
  151. ####################################
  152. # ENABLE_FORWARD_USER_INFO_HEADERS
  153. ####################################
  154. ENABLE_FORWARD_USER_INFO_HEADERS = (
  155. os.environ.get("ENABLE_FORWARD_USER_INFO_HEADERS", "False").lower() == "true"
  156. )
  157. ####################################
  158. # WEBUI_BUILD_HASH
  159. ####################################
  160. WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")
  161. ####################################
  162. # DATA/FRONTEND BUILD DIR
  163. ####################################
  164. DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()
  165. if FROM_INIT_PY:
  166. NEW_DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data")).resolve()
  167. NEW_DATA_DIR.mkdir(parents=True, exist_ok=True)
  168. # Check if the data directory exists in the package directory
  169. if DATA_DIR.exists() and DATA_DIR != NEW_DATA_DIR:
  170. log.info(f"Moving {DATA_DIR} to {NEW_DATA_DIR}")
  171. for item in DATA_DIR.iterdir():
  172. dest = NEW_DATA_DIR / item.name
  173. if item.is_dir():
  174. shutil.copytree(item, dest, dirs_exist_ok=True)
  175. else:
  176. shutil.copy2(item, dest)
  177. # Zip the data directory
  178. shutil.make_archive(DATA_DIR.parent / "open_webui_data", "zip", DATA_DIR)
  179. # Remove the old data directory
  180. shutil.rmtree(DATA_DIR)
  181. DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data"))
  182. STATIC_DIR = Path(os.getenv("STATIC_DIR", OPEN_WEBUI_DIR / "static"))
  183. FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts"))
  184. FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()
  185. if FROM_INIT_PY:
  186. FRONTEND_BUILD_DIR = Path(
  187. os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend")
  188. ).resolve()
  189. ####################################
  190. # Database
  191. ####################################
  192. # Check if the file exists
  193. if os.path.exists(f"{DATA_DIR}/ollama.db"):
  194. # Rename the file
  195. os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
  196. log.info("Database migrated from Ollama-WebUI successfully.")
  197. else:
  198. pass
  199. DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db")
  200. # Replace the postgres:// with postgresql://
  201. if "postgres://" in DATABASE_URL:
  202. DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://")
  203. DATABASE_SCHEMA = os.environ.get("DATABASE_SCHEMA", None)
  204. DATABASE_POOL_SIZE = os.environ.get("DATABASE_POOL_SIZE", 0)
  205. if DATABASE_POOL_SIZE == "":
  206. DATABASE_POOL_SIZE = 0
  207. else:
  208. try:
  209. DATABASE_POOL_SIZE = int(DATABASE_POOL_SIZE)
  210. except Exception:
  211. DATABASE_POOL_SIZE = 0
  212. DATABASE_POOL_MAX_OVERFLOW = os.environ.get("DATABASE_POOL_MAX_OVERFLOW", 0)
  213. if DATABASE_POOL_MAX_OVERFLOW == "":
  214. DATABASE_POOL_MAX_OVERFLOW = 0
  215. else:
  216. try:
  217. DATABASE_POOL_MAX_OVERFLOW = int(DATABASE_POOL_MAX_OVERFLOW)
  218. except Exception:
  219. DATABASE_POOL_MAX_OVERFLOW = 0
  220. DATABASE_POOL_TIMEOUT = os.environ.get("DATABASE_POOL_TIMEOUT", 30)
  221. if DATABASE_POOL_TIMEOUT == "":
  222. DATABASE_POOL_TIMEOUT = 30
  223. else:
  224. try:
  225. DATABASE_POOL_TIMEOUT = int(DATABASE_POOL_TIMEOUT)
  226. except Exception:
  227. DATABASE_POOL_TIMEOUT = 30
  228. DATABASE_POOL_RECYCLE = os.environ.get("DATABASE_POOL_RECYCLE", 3600)
  229. if DATABASE_POOL_RECYCLE == "":
  230. DATABASE_POOL_RECYCLE = 3600
  231. else:
  232. try:
  233. DATABASE_POOL_RECYCLE = int(DATABASE_POOL_RECYCLE)
  234. except Exception:
  235. DATABASE_POOL_RECYCLE = 3600
  236. RESET_CONFIG_ON_START = (
  237. os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
  238. )
  239. ENABLE_REALTIME_CHAT_SAVE = (
  240. os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "False").lower() == "true"
  241. )
  242. ####################################
  243. # REDIS
  244. ####################################
  245. REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
  246. ####################################
  247. # WEBUI_AUTH (Required for security)
  248. ####################################
  249. WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true"
  250. WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
  251. "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
  252. )
  253. WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get("WEBUI_AUTH_TRUSTED_NAME_HEADER", None)
  254. BYPASS_MODEL_ACCESS_CONTROL = (
  255. os.environ.get("BYPASS_MODEL_ACCESS_CONTROL", "False").lower() == "true"
  256. )
  257. ####################################
  258. # WEBUI_SECRET_KEY
  259. ####################################
  260. WEBUI_SECRET_KEY = os.environ.get(
  261. "WEBUI_SECRET_KEY",
  262. os.environ.get(
  263. "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
  264. ), # DEPRECATED: remove at next major version
  265. )
  266. WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax")
  267. WEBUI_SESSION_COOKIE_SECURE = (
  268. os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
  269. )
  270. WEBUI_AUTH_COOKIE_SAME_SITE = os.environ.get(
  271. "WEBUI_AUTH_COOKIE_SAME_SITE", WEBUI_SESSION_COOKIE_SAME_SITE
  272. )
  273. WEBUI_AUTH_COOKIE_SECURE = (
  274. os.environ.get(
  275. "WEBUI_AUTH_COOKIE_SECURE",
  276. os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false"),
  277. ).lower()
  278. == "true"
  279. )
  280. if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
  281. raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
  282. ENABLE_WEBSOCKET_SUPPORT = (
  283. os.environ.get("ENABLE_WEBSOCKET_SUPPORT", "True").lower() == "true"
  284. )
  285. WEBSOCKET_MANAGER = os.environ.get("WEBSOCKET_MANAGER", "")
  286. WEBSOCKET_REDIS_URL = os.environ.get("WEBSOCKET_REDIS_URL", REDIS_URL)
  287. WEBSOCKET_REDIS_LOCK_TIMEOUT = os.environ.get("WEBSOCKET_REDIS_LOCK_TIMEOUT", 60)
  288. AIOHTTP_CLIENT_TIMEOUT = os.environ.get("AIOHTTP_CLIENT_TIMEOUT", "")
  289. if AIOHTTP_CLIENT_TIMEOUT == "":
  290. AIOHTTP_CLIENT_TIMEOUT = None
  291. else:
  292. try:
  293. AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT)
  294. except Exception:
  295. AIOHTTP_CLIENT_TIMEOUT = 300
  296. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = os.environ.get(
  297. "AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST",
  298. os.environ.get("AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST", ""),
  299. )
  300. if AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST == "":
  301. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = None
  302. else:
  303. try:
  304. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = int(AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST)
  305. except Exception:
  306. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = 5
  307. ####################################
  308. # OFFLINE_MODE
  309. ####################################
  310. OFFLINE_MODE = os.environ.get("OFFLINE_MODE", "false").lower() == "true"
  311. if OFFLINE_MODE:
  312. os.environ["HF_HUB_OFFLINE"] = "1"
  313. ####################################
  314. # AUDIT LOGGING
  315. ####################################
  316. ENABLE_AUDIT_LOGS = os.getenv("ENABLE_AUDIT_LOGS", "false").lower() == "true"
  317. # Where to store log file
  318. AUDIT_LOGS_FILE_PATH = f"{DATA_DIR}/audit.log"
  319. # Maximum size of a file before rotating into a new log file
  320. AUDIT_LOG_FILE_ROTATION_SIZE = os.getenv("AUDIT_LOG_FILE_ROTATION_SIZE", "10MB")
  321. # METADATA | REQUEST | REQUEST_RESPONSE
  322. AUDIT_LOG_LEVEL = os.getenv("AUDIT_LOG_LEVEL", "REQUEST_RESPONSE").upper()
  323. try:
  324. MAX_BODY_LOG_SIZE = int(os.environ.get("MAX_BODY_LOG_SIZE") or 2048)
  325. except ValueError:
  326. MAX_BODY_LOG_SIZE = 2048
  327. # Comma separated list for urls to exclude from audit
  328. AUDIT_EXCLUDED_PATHS = os.getenv("AUDIT_EXCLUDED_PATHS", "/chats,/chat,/folders").split(
  329. ","
  330. )
  331. AUDIT_EXCLUDED_PATHS = [path.strip() for path in AUDIT_EXCLUDED_PATHS]
  332. AUDIT_EXCLUDED_PATHS = [path.lstrip("/") for path in AUDIT_EXCLUDED_PATHS]