utils.py

from fastapi import APIRouter, UploadFile, File, BackgroundTasks
from fastapi import Depends, HTTPException, status
from starlette.responses import StreamingResponse
from pydantic import BaseModel
from urllib.parse import urlparse

import requests
import os
import aiohttp
import json

from utils.misc import calculate_sha256, get_gravatar_url

from config import OLLAMA_API_BASE_URL, DATA_DIR, UPLOAD_DIR
from constants import ERROR_MESSAGES

router = APIRouter()


class UploadBlobForm(BaseModel):
    filename: str


def parse_huggingface_url(hf_url):
    try:
        # Parse the URL
        parsed_url = urlparse(hf_url)

        # Get the path and split it into components
        path_components = parsed_url.path.split("/")

        # Extract the desired output
        user_repo = "/".join(path_components[1:3])
        model_file = path_components[-1]

        return model_file
    except ValueError:
        return None
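
# Quick sketch of the expected behaviour (for reference only), using the sample
# Hugging Face URL noted in the /download route below:
#   parse_huggingface_url(
#       "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
#   )
# returns "stablelm-zephyr-3b.Q2_K.gguf". The intermediate user_repo value
# ("TheBloke/stablelm-zephyr-3b-GGUF") is computed but not used here.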


async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
    done = False

    # Resume a partial download if the file already exists on disk
    if os.path.exists(file_path):
        current_size = os.path.getsize(file_path)
    else:
        current_size = 0

    headers = {"Range": f"bytes={current_size}-"} if current_size > 0 else {}

    timeout = aiohttp.ClientTimeout(total=600)  # Set the timeout

    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url, headers=headers) as response:
            total_size = int(response.headers.get("content-length", 0)) + current_size

            with open(file_path, "ab+") as file:
                async for data in response.content.iter_chunked(chunk_size):
                    current_size += len(data)
                    file.write(data)

                    done = current_size == total_size
                    progress = round((current_size / total_size) * 100, 2)
                    yield f'data: {{"progress": {progress}, "completed": {current_size}, "total": {total_size}}}\n\n'

                if done:
                    file.seek(0)
                    hashed = calculate_sha256(file)
                    file.seek(0)

                    url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=file)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file_name,
                        }
                        os.remove(file_path)

                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        raise Exception("Ollama: Could not create blob, Please try again.")
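
# Each downloaded chunk is reported to the client as a server-sent event of the form
#   data: {"progress": ..., "completed": ..., "total": ...}
# Once the download completes, the file is SHA-256 hashed, posted to Ollama's /blobs
# endpoint, and removed from the local upload directory.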


@router.get("/download")
async def download(
    url: str,
):
    # url = "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
    file_name = parse_huggingface_url(url)

    if file_name:
        file_path = f"{UPLOAD_DIR}/{file_name}"

        return StreamingResponse(
            download_file_stream(url, file_path, file_name),
            media_type="text/event-stream",
        )
    else:
        return None
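
# Rough usage sketch (hypothetical host, port, and mount prefix; these depend on how
# this router is included in the application):
#   curl -N "http://localhost:8080/utils/download?url=<huggingface .gguf URL>"
# streams the progress events emitted by download_file_stream above. If the URL cannot
# be parsed, the route simply returns None (a null JSON body) rather than an HTTP error.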


@router.post("/upload")
def upload(file: UploadFile = File(...)):
    file_path = f"{UPLOAD_DIR}/{file.filename}"

    # Save file in chunks
    with open(file_path, "wb+") as f:
        for chunk in file.file:
            f.write(chunk)

    def file_process_stream():
        total_size = os.path.getsize(file_path)
        chunk_size = 1024 * 1024
        try:
            with open(file_path, "rb") as f:
                total = 0
                done = False

                while not done:
                    chunk = f.read(chunk_size)
                    if not chunk:
                        done = True
                        continue

                    total += len(chunk)
                    progress = round((total / total_size) * 100, 2)

                    res = {
                        "progress": progress,
                        "total": total_size,
                        "completed": total,
                    }
                    yield f"data: {json.dumps(res)}\n\n"

                if done:
                    f.seek(0)
                    hashed = calculate_sha256(f)
                    f.seek(0)

                    url = f"{OLLAMA_API_BASE_URL}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=f)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file.filename,
                        }
                        os.remove(file_path)
                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        raise Exception(
                            "Ollama: Could not create blob, Please try again."
                        )
        except Exception as e:
            res = {"error": str(e)}
            yield f"data: {json.dumps(res)}\n\n"

    return StreamingResponse(file_process_stream(), media_type="text/event-stream")
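
# Rough usage sketch (again with a hypothetical host, port, and mount prefix):
#   curl -N -F "file=@./stablelm-zephyr-3b.Q2_K.gguf" "http://localhost:8080/utils/upload"
# streams progress events while the saved file is re-read, followed by a final event
# containing the blob digest and file name once the blob has been created on Ollama.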


@router.get("/gravatar")
async def get_gravatar(
    email: str,
):
    return get_gravatar_url(email)