import importlib.metadata
import json
import logging
import os
import pkgutil
import sys
from pathlib import Path

import markdown
from bs4 import BeautifulSoup

from open_webui.constants import ERROR_MESSAGES

####################################
# Load .env file
####################################
OPEN_WEBUI_DIR = Path(__file__).parent  # the path containing this file
print(OPEN_WEBUI_DIR)

BACKEND_DIR = OPEN_WEBUI_DIR.parent  # the path containing open_webui/
BASE_DIR = BACKEND_DIR.parent  # the path containing backend/

print(BACKEND_DIR)
print(BASE_DIR)

try:
    from dotenv import find_dotenv, load_dotenv

    load_dotenv(find_dotenv(str(BASE_DIR / ".env")))
except ImportError:
    print("dotenv not installed, skipping...")
####################################
# LOGGING
####################################

log_levels = ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]

GLOBAL_LOG_LEVEL = os.environ.get("GLOBAL_LOG_LEVEL", "").upper()
if GLOBAL_LOG_LEVEL in log_levels:
    logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL, force=True)
else:
    GLOBAL_LOG_LEVEL = "INFO"

log = logging.getLogger(__name__)
log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}")
log_sources = [
    "AUDIO",
    "COMFYUI",
    "CONFIG",
    "DB",
    "IMAGES",
    "MAIN",
    "MODELS",
    "OLLAMA",
    "OPENAI",
    "RAG",
    "WEBHOOK",
]
SRC_LOG_LEVELS = {}

for source in log_sources:
    log_env_var = source + "_LOG_LEVEL"
    SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
    if SRC_LOG_LEVELS[source] not in log_levels:
        SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
    log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")

log.setLevel(SRC_LOG_LEVELS["CONFIG"])
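# Each source's verbosity can be overridden independently via its own env var,
# e.g. (illustrative): GLOBAL_LOG_LEVEL=INFO OLLAMA_LOG_LEVEL=DEBUG
# Unset or invalid per-source values fall back to GLOBAL_LOG_LEVEL.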
WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
if WEBUI_NAME != "Open WebUI":
    WEBUI_NAME += " (Open WebUI)"

WEBUI_URL = os.environ.get("WEBUI_URL", "http://localhost:3000")

WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png"
####################################
# ENV (dev,test,prod)
####################################

ENV = os.environ.get("ENV", "dev")

PIP_INSTALL = False
try:
    importlib.metadata.version("open-webui")
    PIP_INSTALL = True
except importlib.metadata.PackageNotFoundError:
    pass

if PIP_INSTALL:
    PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")}
else:
    try:
        PACKAGE_DATA = json.loads((BASE_DIR / "package.json").read_text())
    except Exception:
        PACKAGE_DATA = {"version": "0.0.0"}

VERSION = PACKAGE_DATA["version"]
# Function to parse each section
def parse_section(section):
    items = []
    for li in section.find_all("li"):
        # Extract raw HTML string
        raw_html = str(li)

        # Extract text without HTML tags
        text = li.get_text(separator=" ", strip=True)

        # Split into title and content
        parts = text.split(": ", 1)
        title = parts[0].strip() if len(parts) > 1 else ""
        content = parts[1].strip() if len(parts) > 1 else text

        items.append({"title": title, "content": content, "raw": raw_html})
    return items
try:
    changelog_path = BASE_DIR / "CHANGELOG.md"
    with open(str(changelog_path.absolute()), "r", encoding="utf8") as file:
        changelog_content = file.read()
except Exception:
    changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()

# Convert markdown content to HTML
html_content = markdown.markdown(changelog_content)

# Parse the HTML content
soup = BeautifulSoup(html_content, "html.parser")

# Initialize JSON structure
changelog_json = {}

# Iterate over each version
for version in soup.find_all("h2"):
    version_number = version.get_text().strip().split(" - ")[0][1:-1]  # Remove brackets
    date = version.get_text().strip().split(" - ")[1]

    version_data = {"date": date}

    # Find the next sibling that is a h3 tag (section title)
    current = version.find_next_sibling()

    while current and current.name != "h2":
        if current.name == "h3":
            section_title = current.get_text().lower()  # e.g., "added", "fixed"
            section_items = parse_section(current.find_next_sibling("ul"))
            version_data[section_title] = section_items

        # Move to the next element
        current = current.find_next_sibling()

    changelog_json[version_number] = version_data

CHANGELOG = changelog_json
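# Illustrative shape of CHANGELOG (actual keys depend on CHANGELOG.md contents):
# {
#     "0.1.0": {
#         "date": "2024-01-01",
#         "added": [{"title": "...", "content": "...", "raw": "<li>...</li>"}],
#         "fixed": [...],
#     },
# }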
####################################
# SAFE_MODE
####################################

SAFE_MODE = os.environ.get("SAFE_MODE", "false").lower() == "true"

####################################
# WEBUI_BUILD_HASH
####################################

WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build")
####################################
# DATA/FRONTEND BUILD DIR
####################################

DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve()

if PIP_INSTALL:
    # For pip installs, relocate an existing data directory into the package directory
    if DATA_DIR.exists():
        log.info(f"Moving {DATA_DIR} to {OPEN_WEBUI_DIR / 'data'}")
        DATA_DIR.rename(OPEN_WEBUI_DIR / "data")

    DATA_DIR = OPEN_WEBUI_DIR / "data"

FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve()

RESET_CONFIG_ON_START = (
    os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
)
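# When enabled, the persisted config.json is replaced with an empty object at startup,
# so configuration falls back to environment-variable defaults on the next boot.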
if RESET_CONFIG_ON_START:
    try:
        os.remove(f"{DATA_DIR}/config.json")
        with open(f"{DATA_DIR}/config.json", "w") as f:
            f.write("{}")
    except Exception:
        pass
####################################
# Database
####################################

# Check if a legacy Ollama-WebUI database file exists
if os.path.exists(f"{DATA_DIR}/ollama.db"):
    # Rename the file
    os.rename(f"{DATA_DIR}/ollama.db", f"{DATA_DIR}/webui.db")
    log.info("Database migrated from Ollama-WebUI successfully.")

DATABASE_URL = os.environ.get("DATABASE_URL", f"sqlite:///{DATA_DIR}/webui.db")

# Replace the postgres:// scheme with postgresql://
if "postgres://" in DATABASE_URL:
    DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://")
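# Note: SQLAlchemy rejects the legacy "postgres://" scheme, so it is normalized to
# "postgresql://" above (commonly needed for Heroku-style DATABASE_URL values).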
####################################
# WEBUI_AUTH (Required for security)
####################################

WEBUI_AUTH = os.environ.get("WEBUI_AUTH", "True").lower() == "true"
WEBUI_AUTH_TRUSTED_EMAIL_HEADER = os.environ.get(
    "WEBUI_AUTH_TRUSTED_EMAIL_HEADER", None
)
WEBUI_AUTH_TRUSTED_NAME_HEADER = os.environ.get("WEBUI_AUTH_TRUSTED_NAME_HEADER", None)

####################################
# WEBUI_SECRET_KEY
####################################

WEBUI_SECRET_KEY = os.environ.get(
    "WEBUI_SECRET_KEY",
    os.environ.get(
        "WEBUI_JWT_SECRET_KEY", "t0p-s3cr3t"
    ),  # DEPRECATED: remove at next major version
)
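# The default key is only a placeholder; deployments should supply their own secret,
# for example (illustrative): WEBUI_SECRET_KEY="$(openssl rand -hex 32)"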
WEBUI_SESSION_COOKIE_SAME_SITE = os.environ.get("WEBUI_SESSION_COOKIE_SAME_SITE", "lax")

WEBUI_SESSION_COOKIE_SECURE = (
    os.environ.get("WEBUI_SESSION_COOKIE_SECURE", "false").lower() == "true"
)
if WEBUI_AUTH and WEBUI_SECRET_KEY == "":
    raise ValueError(ERROR_MESSAGES.ENV_VAR_NOT_FOUND)