db.py

import json
import logging
from contextlib import contextmanager
from typing import Any, Optional

from open_webui.internal.wrappers import register_connection
from open_webui.env import (
    OPEN_WEBUI_DIR,
    DATABASE_URL,
    DATABASE_SCHEMA,
    SRC_LOG_LEVELS,
    DATABASE_POOL_MAX_OVERFLOW,
    DATABASE_POOL_RECYCLE,
    DATABASE_POOL_SIZE,
    DATABASE_POOL_TIMEOUT,
)
from peewee_migrate import Router
from sqlalchemy import Dialect, create_engine, MetaData, types
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import QueuePool, NullPool
from sqlalchemy.sql.type_api import _T
from typing_extensions import Self

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])


class JSONField(types.TypeDecorator):
    """Stores Python values as JSON text in a plain Text column."""

    impl = types.Text
    cache_ok = True

    def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any:
        # Serialize on the way into the database
        return json.dumps(value)

    def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any:
        # Deserialize on the way out; NULL stays None
        if value is not None:
            return json.loads(value)

    def copy(self, **kw: Any) -> Self:
        return JSONField(self.impl.length)

    # peewee-style hooks mirroring the SQLAlchemy methods above
    def db_value(self, value):
        return json.dumps(value)

    def python_value(self, value):
        if value is not None:
            return json.loads(value)
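
# Example (illustrative only, not part of this module): JSONField can be used
# as a column type on models declared against the Base defined below. The model
# and column names here are hypothetical and assume
# `from sqlalchemy import Column, Text`.
#
#   class ExampleRecord(Base):
#       __tablename__ = "example_record"
#       id = Column(Text, primary_key=True)
#       meta = Column(JSONField, nullable=True)  # dict/list stored as JSON text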


# Workaround to handle the peewee migration.
# This is required to ensure the peewee migrations run before the alembic migration.
def handle_peewee_migration(DATABASE_URL):
    db = None
    try:
        # Replace the postgresql:// prefix with postgres:// to handle the peewee migration
        db = register_connection(DATABASE_URL.replace("postgresql://", "postgres://"))
        migrate_dir = OPEN_WEBUI_DIR / "internal" / "migrations"
        router = Router(db, logger=log, migrate_dir=migrate_dir)
        router.run()
        db.close()
    except Exception as e:
        log.error(f"Failed to initialize the database connection: {e}")
        raise
    finally:
        # Make sure the peewee connection is closed even if the migration failed
        if db and not db.is_closed():
            db.close()
        assert db is None or db.is_closed(), "Database connection is still open."


handle_peewee_migration(DATABASE_URL)


SQLALCHEMY_DATABASE_URL = DATABASE_URL

if "sqlite" in SQLALCHEMY_DATABASE_URL:
    # SQLite connections are confined to their creating thread by default;
    # disable that check so the connection can be shared across request threads.
    engine = create_engine(
        SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
    )
else:
    if DATABASE_POOL_SIZE > 0:
        # Persistent connection pool sized via the DATABASE_POOL_* settings
        engine = create_engine(
            SQLALCHEMY_DATABASE_URL,
            pool_size=DATABASE_POOL_SIZE,
            max_overflow=DATABASE_POOL_MAX_OVERFLOW,
            pool_timeout=DATABASE_POOL_TIMEOUT,
            pool_recycle=DATABASE_POOL_RECYCLE,
            pool_pre_ping=True,
            poolclass=QueuePool,
        )
    else:
        # No pooling: open and close a connection for every checkout
        engine = create_engine(
            SQLALCHEMY_DATABASE_URL, pool_pre_ping=True, poolclass=NullPool
        )
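
# Note (illustrative): with e.g. DATABASE_POOL_SIZE=10 the engine above keeps a
# QueuePool of 10 persistent connections, allows up to DATABASE_POOL_MAX_OVERFLOW
# temporary overflow connections, and recycles connections older than
# DATABASE_POOL_RECYCLE seconds; with DATABASE_POOL_SIZE=0 it falls back to
# NullPool, which opens and closes a connection per checkout. In both non-SQLite
# branches, pool_pre_ping tests each connection before handing it out.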


SessionLocal = sessionmaker(
    autocommit=False, autoflush=False, bind=engine, expire_on_commit=False
)
metadata_obj = MetaData(schema=DATABASE_SCHEMA)
Base = declarative_base(metadata=metadata_obj)
Session = scoped_session(SessionLocal)


def get_session():
    """Yield a database session and make sure it is closed afterwards."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


get_db = contextmanager(get_session)
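
# Example (illustrative only): because get_session is a generator it can also
# serve as a FastAPI-style dependency, while get_db wraps it as a plain context
# manager for code outside a request. The query below is hypothetical and
# assumes `from sqlalchemy import text`.
#
#   with get_db() as db:
#       db.execute(text("SELECT 1"))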