123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187 |
- from fastapi import APIRouter, UploadFile, File, BackgroundTasks
- from fastapi import Depends, HTTPException, status
- from starlette.responses import StreamingResponse, FileResponse
- from pydantic import BaseModel
- import requests
- import os
- import aiohttp
- import json
- from utils.utils import get_admin_user
- from utils.misc import calculate_sha256, get_gravatar_url
- from config import OLLAMA_BASE_URLS, DATA_DIR, UPLOAD_DIR
- from constants import ERROR_MESSAGES
# Router for miscellaneous utility endpoints: model download/upload,
# gravatar lookup, and admin DB export.
router = APIRouter()
class UploadBlobForm(BaseModel):
    """Request body naming the file a blob upload belongs to."""

    # Target filename for the uploaded blob.
    filename: str
- from urllib.parse import urlparse
def parse_huggingface_url(hf_url):
    """Extract the model filename from a Hugging Face download URL.

    Example:
        ".../TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/model.Q2_K.gguf"
        yields "model.Q2_K.gguf".

    Args:
        hf_url: Full Hugging Face "resolve" URL.

    Returns:
        The final path segment (the filename), or None when the URL is
        malformed or has no usable path component.
    """
    try:
        # urlparse raises ValueError only for malformed URLs (e.g. a bad
        # IPv6 netloc) — keep the try-scope to just that call.
        parsed_url = urlparse(hf_url)
    except ValueError:
        return None

    # The filename is the last path segment; it is empty when the path is
    # missing or ends in "/" — report that as None rather than "".
    model_file = parsed_url.path.split("/")[-1]
    return model_file if model_file else None
async def download_file_stream(url, file_path, file_name, chunk_size=1024 * 1024):
    """Stream a remote file to disk, yielding SSE progress events.

    Resumes a partial download via an HTTP Range header when `file_path`
    already exists. Once complete, the file is pushed to Ollama's blob
    store (sha256-addressed) and removed locally; the final event carries
    the blob digest and file name.

    Args:
        url: Remote file URL (e.g. a Hugging Face "resolve" URL).
        file_path: Local path the download is written to.
        file_name: Name reported back to the client in the final event.
        chunk_size: Read size per iteration, in bytes.

    Yields:
        Server-sent-event strings: progress updates, then a completion record.

    Raises:
        Exception: if the Ollama blob upload fails.
    """
    done = False

    # Resume from any partially-downloaded file.
    current_size = os.path.getsize(file_path) if os.path.exists(file_path) else 0
    headers = {"Range": f"bytes={current_size}-"} if current_size > 0 else {}

    timeout = aiohttp.ClientTimeout(total=600)  # generous cap for large models

    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(url, headers=headers) as response:
            # content-length covers only the remaining bytes when resuming,
            # so add what is already on disk.
            total_size = int(response.headers.get("content-length", 0)) + current_size

            with open(file_path, "ab+") as file:
                async for data in response.content.iter_chunked(chunk_size):
                    current_size += len(data)
                    file.write(data)

                    done = current_size == total_size
                    # Guard against a missing/zero content-length header
                    # (would otherwise raise ZeroDivisionError).
                    progress = (
                        round((current_size / total_size) * 100, 2)
                        if total_size > 0
                        else 0
                    )

                    yield f'data: {{"progress": {progress}, "completed": {current_size}, "total": {total_size}}}\n\n'

                if done:
                    file.seek(0)
                    hashed = calculate_sha256(file)
                    file.seek(0)

                    url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=file)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file_name,
                        }
                        os.remove(file_path)

                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        # BUG FIX: the original raised a plain string, which
                        # is itself a TypeError in Python 3 — raise a real
                        # exception with the same message instead.
                        raise Exception(
                            "Ollama: Could not create blob, Please try again."
                        )
@router.get("/download")
async def download(
    url: str,
):
    """Stream-download a model file from a Hugging Face URL as SSE.

    Example url:
    "https://huggingface.co/TheBloke/stablelm-zephyr-3b-GGUF/resolve/main/stablelm-zephyr-3b.Q2_K.gguf"
    """
    file_name = parse_huggingface_url(url)

    if not file_name:
        # BUG FIX: previously returned None, which FastAPI serialized as an
        # empty 200 response; reject unparseable URLs explicitly instead.
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid Hugging Face download URL.",
        )

    file_path = f"{UPLOAD_DIR}/{file_name}"
    return StreamingResponse(
        download_file_stream(url, file_path, file_name),
        media_type="text/event-stream",
    )
@router.post("/upload")
def upload(file: UploadFile = File(...)):
    """Accept an uploaded model file and register it with Ollama as a blob.

    Saves the upload to UPLOAD_DIR, then streams SSE progress events while
    hashing and pushing the file to Ollama's blob store. The final event
    carries the blob digest; the local copy is removed on success. Errors
    are reported to the SSE client as a {"error": ...} event.
    """
    # SECURITY FIX: use only the base name — a client-supplied filename such
    # as "../../x" must not escape UPLOAD_DIR.
    file_path = f"{UPLOAD_DIR}/{os.path.basename(file.filename)}"

    # BUG FIX: iterating `file.file` directly yields *lines*, which is the
    # wrong chunking for binary model files — read fixed-size chunks instead.
    chunk_size = 1024 * 1024
    with open(file_path, "wb+") as f:
        while chunk := file.file.read(chunk_size):
            f.write(chunk)

    def file_process_stream():
        # Re-read the saved file, emitting SSE progress, then push it to
        # Ollama's blob store once fully hashed.
        total_size = os.path.getsize(file_path)
        try:
            with open(file_path, "rb") as f:
                total = 0
                done = False

                while not done:
                    chunk = f.read(chunk_size)
                    if not chunk:
                        done = True
                        continue

                    total += len(chunk)
                    progress = round((total / total_size) * 100, 2)

                    res = {
                        "progress": progress,
                        "total": total_size,
                        "completed": total,
                    }
                    yield f"data: {json.dumps(res)}\n\n"

                if done:
                    f.seek(0)
                    hashed = calculate_sha256(f)
                    f.seek(0)

                    url = f"{OLLAMA_BASE_URLS[0]}/blobs/sha256:{hashed}"
                    response = requests.post(url, data=f)

                    if response.ok:
                        res = {
                            "done": done,
                            "blob": f"sha256:{hashed}",
                            "name": file.filename,
                        }
                        os.remove(file_path)

                        yield f"data: {json.dumps(res)}\n\n"
                    else:
                        raise Exception(
                            "Ollama: Could not create blob, Please try again."
                        )
        except Exception as e:
            # Surface errors to the SSE client rather than crashing the stream.
            res = {"error": str(e)}
            yield f"data: {json.dumps(res)}\n\n"

    return StreamingResponse(file_process_stream(), media_type="text/event-stream")
@router.get("/gravatar")
async def get_gravatar(email: str):
    """Return the Gravatar avatar URL for the given email address."""
    return get_gravatar_url(email)
@router.get("/db/download")
async def download_db(user=Depends(get_admin_user)):
    """Serve the raw SQLite database file. Restricted to admin users."""
    db_path = f"{DATA_DIR}/webui.db"
    return FileResponse(
        path=db_path,
        media_type="application/octet-stream",
        filename="webui.db",
    )
|