# env.py — Open WebUI backend environment configuration.
  1. import importlib.metadata
  2. import json
  3. import logging
  4. import os
  5. import pkgutil
  6. import sys
  7. import shutil
  8. from pathlib import Path
  9. import markdown
  10. from bs4 import BeautifulSoup
  11. from open_webui.constants import ERROR_MESSAGES
  12. ####################################
  13. # Load .env file
  14. ####################################
  15. OPEN_WEBUI_DIR = Path(__file__).parent # the path containing this file
  16. print(OPEN_WEBUI_DIR)
  17. BACKEND_DIR = OPEN_WEBUI_DIR.parent # the path containing this file
  18. BASE_DIR = BACKEND_DIR.parent # the path containing the backend/
  19. print(BACKEND_DIR)
  20. print(BASE_DIR)
  21. try:
  22. from dotenv import find_dotenv, load_dotenv
  23. load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
  24. except ImportError:
  25. print("dotenv not installed, skipping...")
  26. DOCKER = os.environ.get("DOCKER", "False").lower() == "true"
  27. # device type embedding models - "cpu" (default), "cuda" (nvidia gpu required) or "mps" (apple silicon) - choosing this right can lead to better performance
  28. USE_CUDA = os.environ.get("USE_CUDA_DOCKER", "false")
  29. if USE_CUDA.lower() == "true":
  30. try:
  31. import torch
  32. assert torch.cuda.is_available(), "CUDA not available"
  33. DEVICE_TYPE = "cuda"
  34. except Exception as e:
  35. cuda_error = (
  36. "Error when testing CUDA but USE_CUDA_DOCKER is true. "
  37. f"Resetting USE_CUDA_DOCKER to false: {e}"
  38. )
  39. os.environ["USE_CUDA_DOCKER"] = "false"
  40. USE_CUDA = "false"
  41. DEVICE_TYPE = "cpu"
  42. else:
  43. DEVICE_TYPE = "cpu"
  44. try:
  45. import torch
  46. if torch.backends.mps.is_available() and torch.backends.mps.is_built():
  47. DEVICE_TYPE = "mps"
  48. except Exception:
  49. pass
  50. ####################################
  51. # LOGGING
  52. ####################################
  53. GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
  54. if GLOBAL_LOG_LEVEL in logging.getLevelNamesMapping():
  55. logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
  56. else:
  57. GLOBAL_LOG_LEVEL = "INFO"
  58. log = logging.getLogger(__name__)
  59. log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")
  60. if "cuda_error" in locals():
  61. log.exception(cuda_error)
  62. del cuda_error
  63. log_sources = [
  64. "AUDIO",
  65. "COMFYUI",
  66. "CONFIG",
  67. "DB",
  68. "IMAGES",
  69. "MAIN",
  70. "MODELS",
  71. "OLLAMA",
  72. "OPENAI",
  73. "RAG",
  74. "WEBHOOK",
  75. "SOCKET",
  76. "OAUTH",
  77. ]
  78. SRC_LOG_LEVELS = {}
  79. for source in log_sources:
  80. log_env_var = source + "_LOG_LEVEL"
  81. SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
  82. if SRC_LOG_LEVELS[source] not in logging.getLevelNamesMapping():
  83. SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
  84. log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")
  85. log.setLevel(SRC_LOG_LEVELS["CONFIG"])
  86. WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
  87. if WEBUI_NAME != "Open WebUI":
  88. WEBUI_NAME += " (Open WebUI)"
  89. WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
  90. TRUSTED_SIGNATURE_KEY = os.environ.get("TRUSTED_SIGNATURE_KEY", "")
  91. ####################################
  92. # ENV (dev,test,prod)
  93. ####################################
  94. ENV = os.environ.get("ENV", "dev")
  95. FROM_INIT_PY = os.environ.get("FROM_INIT_PY", "False").lower() == "true"
  96. if FROM_INIT_PY:
  97. PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
  98. else:
  99. try:
  100. PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
  101. except Exception:
  102. PACKAGE_DATA = {"version": "0.0.0"}
  103. VERSION = PACKAGE_DATA["version"]
  104. # Function to parse each section
  105. def parse_section(section):
  106. items = []
  107. for li in section.find_all("li"):
  108. # Extract raw HTML string
  109. raw_html = str(li)
  110. # Extract text without HTML tags
  111. text = li.get_text(separator=" ", strip=True)
  112. # Split into title and content
  113. parts = text.split(": ", 1)
  114. title = parts[0].strip() if len(parts) > 1 else ""
  115. content = parts[1].strip() if len(parts) > 1 else text
  116. items.append({"title": title, "content": content, "raw": raw_html})
  117. return items
  118. try:
  119. changelog_path = BASE_DIR / "CHANGELOG.md"
  120. with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
  121. changelog_content = file.read()
  122. except Exception:
  123. changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
  124. # Convert markdown content to HTML
  125. html_content = markdown.markdown(changelog_content)
  126. # Parse the HTML content
  127. soup = BeautifulSoup(html_content, "html.parser")
  128. # Initialize JSON structure
  129. changelog_json = {}
  130. # Iterate over each version
  131. for version in soup.find_all("h2"):
  132. version_number = version.get_text().strip().split(" - ")[0][1:-1] # Remove brackets
  133. date = version.get_text().strip().split(" - ")[1]
  134. version_data = {"date": date}
  135. # Find the next sibling that is a h3 tag (section title)
  136. current = version.find_next_sibling()
  137. while current and current.name != "h2":
  138. if current.name == "h3":
  139. section_title = current.get_text().lower() # e.g., "added", "fixed"
  140. section_items = parse_section(current.find_next_sibling("ul"))
  141. version_data[section_title] = section_items
  142. # Move to the next element
  143. current = current.find_next_sibling()
  144. changelog_json[version_number] = version_data
  145. CHANGELOG = changelog_json
  146. ####################################
  147. # SAFE_MODE
  148. ####################################
  149. SAFE_MODE = os.environ.get("SAFE_MODE", "false").lower() == "true"
  150. ####################################
  151. # ENABLE_FORWARD_USER_INFO_HEADERS
  152. ####################################
  153. ENABLE_FORWARD_USER_INFO_HEADERS = (
  154. os.environ.get("ENABLE_FORWARD_USER_INFO_HEADERS", "False").lower() == "true"
  155. )
  156. ####################################
  157. # WEBUI_BUILD_HASH
  158. ####################################
  159. WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")
  160. ####################################
  161. # DATA/FRONTEND BUILD DIR
  162. ####################################
  163. DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()
  164. if FROM_INIT_PY:
  165. NEW_DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data")).resolve()
  166. NEW_DATA_DIR.mkdir(parents=True, exist_ok=True)
  167. # Check if the data directory exists in the package directory
  168. if DATA_DIR.exists() and DATA_DIR != NEW_DATA_DIR:
  169. log.info(f"Moving {DATA_DIR} to {NEW_DATA_DIR}")
  170. for item in DATA_DIR.iterdir():
  171. dest = NEW_DATA_DIR / item.name
  172. if item.is_dir():
  173. shutil.copytree(item, dest, dirs_exist_ok=True)
  174. else:
  175. shutil.copy2(item, dest)
  176. # Zip the data directory
  177. shutil.make_archive(DATA_DIR.parent / "open_webui_data", "zip", DATA_DIR)
  178. # Remove the old data directory
  179. shutil.rmtree(DATA_DIR)
  180. DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data"))
  181. STATIC_DIR = Path(os.getenv("STATIC_DIR", OPEN_WEBUI_DIR / "static"))
  182. FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts"))
  183. FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()
  184. if FROM_INIT_PY:
  185. FRONTEND_BUILD_DIR = Path(
  186. os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend")
  187. ).resolve()
  188. ####################################
  189. # Database
  190. ####################################
  191. # Check if the file exists
  192. if os.path.exists(f"{DATA_DIR}/ollama.db"):
  193. # Rename the file
  194. os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
  195. log.info("Database migrated from Ollama-WebUI successfully.")
  196. else:
  197. pass
  198. DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db")
  199. # Replace the postgres:// with postgresql://
  200. if "postgres://" in DATABASE_URL:
  201. DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://")
  202. DATABASE_SCHEMA = os.environ.get("DATABASE_SCHEMA", None)
  203. DATABASE_POOL_SIZE = os.environ.get("DATABASE_POOL_SIZE", 0)
  204. if DATABASE_POOL_SIZE == "":
  205. DATABASE_POOL_SIZE = 0
  206. else:
  207. try:
  208. DATABASE_POOL_SIZE = int(DATABASE_POOL_SIZE)
  209. except Exception:
  210. DATABASE_POOL_SIZE = 0
  211. DATABASE_POOL_MAX_OVERFLOW = os.environ.get("DATABASE_POOL_MAX_OVERFLOW", 0)
  212. if DATABASE_POOL_MAX_OVERFLOW == "":
  213. DATABASE_POOL_MAX_OVERFLOW = 0
  214. else:
  215. try:
  216. DATABASE_POOL_MAX_OVERFLOW = int(DATABASE_POOL_MAX_OVERFLOW)
  217. except Exception:
  218. DATABASE_POOL_MAX_OVERFLOW = 0
  219. DATABASE_POOL_TIMEOUT = os.environ.get("DATABASE_POOL_TIMEOUT", 30)
  220. if DATABASE_POOL_TIMEOUT == "":
  221. DATABASE_POOL_TIMEOUT = 30
  222. else:
  223. try:
  224. DATABASE_POOL_TIMEOUT = int(DATABASE_POOL_TIMEOUT)
  225. except Exception:
  226. DATABASE_POOL_TIMEOUT = 30
  227. DATABASE_POOL_RECYCLE = os.environ.get("DATABASE_POOL_RECYCLE", 3600)
  228. if DATABASE_POOL_RECYCLE == "":
  229. DATABASE_POOL_RECYCLE = 3600
  230. else:
  231. try:
  232. DATABASE_POOL_RECYCLE = int(DATABASE_POOL_RECYCLE)
  233. except Exception:
  234. DATABASE_POOL_RECYCLE = 3600
  235. RESET_CONFIG_ON_START = (
  236. os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
  237. )
  238. ENABLE_REALTIME_CHAT_SAVE = (
  239. os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "False").lower() == "true"
  240. )
  241. ####################################
  242. # REDIS
  243. ####################################
  244. REDIS_URL = os.environ.get("REDIS_URL", "redis://localhost:6379/0")
  245. ####################################
  246. # WEBUI_AUTH (Required for security)
  247. ####################################
  248. WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true"
  249. WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
  250. "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
  251. )
  252. WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get("WEBUI_AUTH_TRUSTED_NAME_HEADER", None)
  253. BYPASS_MODEL_ACCESS_CONTROL = (
  254. os.environ.get("BYPASS_MODEL_ACCESS_CONTROL", "False").lower() == "true"
  255. )
  256. ####################################
  257. # WEBUI_SECRET_KEY
  258. ####################################
  259. WEBUI_SECRET_KEY = os.environ.get(
  260. "WEBUI_SECRET_KEY",
  261. os.environ.get(
  262. "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
  263. ), # DEPRECATED: remove at next major version
  264. )
  265. WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax")
  266. WEBUI_SESSION_COOKIE_SECURE = (
  267. os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
  268. )
  269. WEBUI_AUTH_COOKIE_SAME_SITE = os.environ.get(
  270. "WEBUI_AUTH_COOKIE_SAME_SITE", WEBUI_SESSION_COOKIE_SAME_SITE
  271. )
  272. WEBUI_AUTH_COOKIE_SECURE = (
  273. os.environ.get(
  274. "WEBUI_AUTH_COOKIE_SECURE",
  275. os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false"),
  276. ).lower()
  277. == "true"
  278. )
  279. if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
  280. raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)
  281. ENABLE_WEBSOCKET_SUPPORT = (
  282. os.environ.get("ENABLE_WEBSOCKET_SUPPORT", "True").lower() == "true"
  283. )
  284. WEBSOCKET_MANAGER = os.environ.get("WEBSOCKET_MANAGER", "")
  285. WEBSOCKET_REDIS_URL = os.environ.get("WEBSOCKET_REDIS_URL", REDIS_URL)
  286. WEBSOCKET_REDIS_LOCK_TIMEOUT = os.environ.get("WEBSOCKET_REDIS_LOCK_TIMEOUT", 60)
  287. AIOHTTP_CLIENT_TIMEOUT = os.environ.get("AIOHTTP_CLIENT_TIMEOUT", "")
  288. if AIOHTTP_CLIENT_TIMEOUT == "":
  289. AIOHTTP_CLIENT_TIMEOUT = None
  290. else:
  291. try:
  292. AIOHTTP_CLIENT_TIMEOUT = int(AIOHTTP_CLIENT_TIMEOUT)
  293. except Exception:
  294. AIOHTTP_CLIENT_TIMEOUT = 300
  295. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = os.environ.get(
  296. "AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST",
  297. os.environ.get("AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST", ""),
  298. )
  299. if AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST == "":
  300. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = None
  301. else:
  302. try:
  303. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = int(AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST)
  304. except Exception:
  305. AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = 5
  306. ####################################
  307. # OFFLINE_MODE
  308. ####################################
  309. OFFLINE_MODE = os.environ.get("OFFLINE_MODE", "false").lower() == "true"
  310. if OFFLINE_MODE:
  311. os.environ["HF_HUB_OFFLINE"] = "1"
  312. ####################################
  313. # AUDIT LOGGING
  314. ####################################
  315. ENABLE_AUDIT_LOGS = os.getenv("ENABLE_AUDIT_LOGS", "false").lower() == "true"
  316. # Where to store log file
  317. AUDIT_LOGS_FILE_PATH = f"{DATA_DIR}/audit.log"
  318. # Maximum size of a file before rotating into a new log file
  319. AUDIT_LOG_FILE_ROTATION_SIZE = os.getenv("AUDIT_LOG_FILE_ROTATION_SIZE", "10MB")
  320. # METADATA | REQUEST | REQUEST_RESPONSE
  321. AUDIT_LOG_LEVEL = os.getenv("AUDIT_LOG_LEVEL", "REQUEST_RESPONSE").upper()
  322. try:
  323. MAX_BODY_LOG_SIZE = int(os.environ.get("MAX_BODY_LOG_SIZE") or 2048)
  324. except ValueError:
  325. MAX_BODY_LOG_SIZE = 2048
  326. # Comma separated list for urls to exclude from audit
  327. AUDIT_EXCLUDED_PATHS = os.getenv("AUDIT_EXCLUDED_PATHS", "/chats,/chat,/folders").split(
  328. ","
  329. )
  330. AUDIT_EXCLUDED_PATHS = [path.strip() for path in AUDIT_EXCLUDED_PATHS]
  331. AUDIT_EXCLUDED_PATHS = [path.lstrip("/") for path in AUDIT_EXCLUDED_PATHS]