# provider.py

import os
import shutil

import boto3
from botocore.exceptions import ClientError

from typing import BinaryIO, Optional, Tuple

from open_webui.constants import ERROR_MESSAGES
from open_webui.config import (
    STORAGE_PROVIDER,
    S3_ACCESS_KEY_ID,
    S3_SECRET_ACCESS_KEY,
    S3_BUCKET_NAME,
    S3_REGION_NAME,
    S3_ENDPOINT_URL,
    UPLOAD_DIR,
)

class StorageProvider:
    def __init__(self, provider: Optional[str] = None):
        self.storage_provider: str = provider or STORAGE_PROVIDER
        self.s3_client = None
        self.bucket_name: Optional[str] = None

        if self.storage_provider == "s3":
            self._initialize_s3()

    def _initialize_s3(self) -> None:
        """Initializes the S3 client and bucket name if using S3 storage."""
        self.s3_client = boto3.client(
            "s3",
            region_name=S3_REGION_NAME,
            endpoint_url=S3_ENDPOINT_URL,
            aws_access_key_id=S3_ACCESS_KEY_ID,
            aws_secret_access_key=S3_SECRET_ACCESS_KEY,
        )
        self.bucket_name = S3_BUCKET_NAME
    def _upload_to_s3(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
        """Handles uploading of the file to S3 storage."""
        if not self.s3_client:
            raise RuntimeError("S3 Client is not initialized.")

        try:
            # Read the contents first, then rewind so boto3 uploads the full
            # stream (upload_fileobj consumes the file from its current position).
            file.seek(0)
            contents = file.read()
            file.seek(0)
            self.s3_client.upload_fileobj(file, self.bucket_name, filename)
            return contents, f"s3://{self.bucket_name}/{filename}"
        except ClientError as e:
            raise RuntimeError(f"Error uploading file to S3: {e}")
    def _upload_to_local(self, contents: bytes, filename: str) -> Tuple[bytes, str]:
        """Handles uploading of the file to local storage."""
        file_path = f"{UPLOAD_DIR}/{filename}"
        with open(file_path, "wb") as f:
            f.write(contents)
        return contents, file_path
    def _get_file_from_s3(self, file_path: str) -> str:
        """Handles downloading of the file from S3 storage."""
        if not self.s3_client:
            raise RuntimeError("S3 Client is not initialized.")

        try:
            # file_path has the form "s3://<bucket>/<key>"; split only on the
            # first "/" so keys that contain slashes are preserved.
            bucket_name, key = file_path.split("//")[1].split("/", 1)
            local_file_path = f"{UPLOAD_DIR}/{key}"
            self.s3_client.download_file(bucket_name, key, local_file_path)
            return local_file_path
        except ClientError as e:
            raise RuntimeError(f"Error downloading file from S3: {e}")
    def _get_file_from_local(self, file_path: str) -> str:
        """Handles downloading of the file from local storage."""
        return file_path

    def _delete_from_s3(self, filename: str) -> None:
        """Handles deletion of the file from S3 storage."""
        if not self.s3_client:
            raise RuntimeError("S3 Client is not initialized.")

        try:
            self.s3_client.delete_object(Bucket=self.bucket_name, Key=filename)
        except ClientError as e:
            raise RuntimeError(f"Error deleting file from S3: {e}")

    def _delete_from_local(self, filename: str) -> None:
        """Handles deletion of the file from local storage."""
        file_path = f"{UPLOAD_DIR}/{filename}"
        if os.path.isfile(file_path):
            os.remove(file_path)
        else:
            raise FileNotFoundError(f"File {filename} not found in local storage.")
    def _delete_all_from_s3(self) -> None:
        """Handles deletion of all files from S3 storage."""
        if not self.s3_client:
            raise RuntimeError("S3 Client is not initialized.")

        try:
            response = self.s3_client.list_objects_v2(Bucket=self.bucket_name)
            if "Contents" in response:
                for content in response["Contents"]:
                    self.s3_client.delete_object(
                        Bucket=self.bucket_name, Key=content["Key"]
                    )
        except ClientError as e:
            raise RuntimeError(f"Error deleting all files from S3: {e}")

    def _delete_all_from_local(self) -> None:
        """Handles deletion of all files from local storage."""
        if os.path.exists(UPLOAD_DIR):
            for filename in os.listdir(UPLOAD_DIR):
                file_path = os.path.join(UPLOAD_DIR, filename)
                try:
                    if os.path.isfile(file_path) or os.path.islink(file_path):
                        os.unlink(file_path)  # Remove the file or link
                    elif os.path.isdir(file_path):
                        shutil.rmtree(file_path)  # Remove the directory
                except Exception as e:
                    print(f"Failed to delete {file_path}. Reason: {e}")
        else:
            raise FileNotFoundError(
                f"Directory {UPLOAD_DIR} not found in local storage."
            )
    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
        """Uploads a file either to S3 or the local file system."""
        contents = file.read()
        if not contents:
            raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)

        if self.storage_provider == "s3":
            return self._upload_to_s3(file, filename)
        return self._upload_to_local(contents, filename)

    def get_file(self, file_path: str) -> str:
        """Downloads a file either from S3 or the local file system and returns the file path."""
        if self.storage_provider == "s3":
            return self._get_file_from_s3(file_path)
        return self._get_file_from_local(file_path)

    def delete_file(self, filename: str) -> None:
        """Deletes a file either from S3 or the local file system."""
        if self.storage_provider == "s3":
            self._delete_from_s3(filename)
        else:
            self._delete_from_local(filename)

    def delete_all_files(self) -> None:
        """Deletes all files from the storage."""
        if self.storage_provider == "s3":
            self._delete_all_from_s3()
        else:
            self._delete_all_from_local()


Storage = StorageProvider(provider=STORAGE_PROVIDER)
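
# A minimal usage sketch, assuming UPLOAD_DIR exists and (for the S3 branch) the
# S3_* settings point at a reachable bucket; "example.txt" is a hypothetical
# file name used only for illustration:
#
#   with open("example.txt", "rb") as f:
#       contents, path = Storage.upload_file(f, "example.txt")
#   local_path = Storage.get_file(path)   # local path the file can be read from
#   Storage.delete_file("example.txt")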