main.py 6.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200
  1. from fastapi import FastAPI, Request, Response, HTTPException, Depends
  2. from fastapi.middleware.cors import CORSMiddleware
  3. from fastapi.responses import StreamingResponse, JSONResponse, FileResponse
  4. import requests
  5. import json
  6. from pydantic import BaseModel
  7. from apps.web.models.users import Users
  8. from constants import ERROR_MESSAGES
  9. from utils.utils import decode_token, get_current_user, get_verified_user, get_admin_user
  10. from config import OPENAI_API_BASE_URL, OPENAI_API_KEY, CACHE_DIR
  11. import hashlib
  12. from pathlib import Path
# FastAPI sub-application that proxies requests to an OpenAI-compatible API.
app = FastAPI()

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is a
# maximally permissive CORS policy — confirm this is intentional for deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mutable runtime configuration, seeded from config.py and changeable at
# runtime via the /url/update and /key/update admin endpoints below.
app.state.OPENAI_API_BASE_URL = OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = OPENAI_API_KEY
class UrlUpdateForm(BaseModel):
    """Request body for POST /url/update."""

    # New OpenAI-compatible API base URL to proxy against.
    url: str
class KeyUpdateForm(BaseModel):
    """Request body for POST /key/update."""

    # New API key used for the upstream Authorization header.
    key: str
  27. @app.get("/url")
  28. async def get_openai_url(user=Depends(get_admin_user)):
  29. return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
  30. @app.post("/url/update")
  31. async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
  32. app.state.OPENAI_API_BASE_URL = form_data.url
  33. return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
  34. @app.get("/key")
  35. async def get_openai_key(user=Depends(get_admin_user)):
  36. return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
  37. @app.post("/key/update")
  38. async def update_openai_key(form_data: KeyUpdateForm, user=Depends(get_admin_user)):
  39. app.state.OPENAI_API_KEY = form_data.key
  40. return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
  41. @app.post("/audio/speech")
  42. async def speech(request: Request, user=Depends(get_verified_user)):
  43. target_url = f"{app.state.OPENAI_API_BASE_URL}/audio/speech"
  44. if app.state.OPENAI_API_KEY == "":
  45. raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
  46. body = await request.body()
  47. name = hashlib.sha256(body).hexdigest()
  48. SPEECH_CACHE_DIR = Path(CACHE_DIR).joinpath("./audio/speech/")
  49. SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True)
  50. file_path = SPEECH_CACHE_DIR.joinpath(f"{name}.mp3")
  51. file_body_path = SPEECH_CACHE_DIR.joinpath(f"{name}.json")
  52. # Check if the file already exists in the cache
  53. if file_path.is_file():
  54. return FileResponse(file_path)
  55. headers = {}
  56. headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
  57. headers["Content-Type"] = "application/json"
  58. try:
  59. print("openai")
  60. r = requests.post(
  61. url=target_url,
  62. data=body,
  63. headers=headers,
  64. stream=True,
  65. )
  66. r.raise_for_status()
  67. # Save the streaming content to a file
  68. with open(file_path, "wb") as f:
  69. for chunk in r.iter_content(chunk_size=8192):
  70. f.write(chunk)
  71. with open(file_body_path, "w") as f:
  72. json.dump(json.loads(body.decode("utf-8")), f)
  73. # Return the saved file
  74. return FileResponse(file_path)
  75. except Exception as e:
  76. print(e)
  77. error_detail = "Ollama WebUI: Server Connection Error"
  78. if r is not None:
  79. try:
  80. res = r.json()
  81. if "error" in res:
  82. error_detail = f"External: {res['error']}"
  83. except:
  84. error_detail = f"External: {e}"
  85. raise HTTPException(status_code=r.status_code, detail=error_detail)
  86. @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
  87. async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
  88. target_url = f"{app.state.OPENAI_API_BASE_URL}/{path}"
  89. print(target_url, app.state.OPENAI_API_KEY)
  90. if app.state.OPENAI_API_KEY == "":
  91. raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)
  92. body = await request.body()
  93. # TODO: Remove below after gpt-4-vision fix from Open AI
  94. # Try to decode the body of the request from bytes to a UTF-8 string (Require add max_token to fix gpt-4-vision)
  95. try:
  96. body = body.decode("utf-8")
  97. body = json.loads(body)
  98. # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
  99. # This is a workaround until OpenAI fixes the issue with this model
  100. if body.get("model") == "gpt-4-vision-preview":
  101. if "max_tokens" not in body:
  102. body["max_tokens"] = 4000
  103. print("Modified body_dict:", body)
  104. # Convert the modified body back to JSON
  105. body = json.dumps(body)
  106. except json.JSONDecodeError as e:
  107. print("Error loading request body into a dictionary:", e)
  108. headers = {}
  109. headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
  110. headers["Content-Type"] = "application/json"
  111. try:
  112. r = requests.request(
  113. method=request.method,
  114. url=target_url,
  115. data=body,
  116. headers=headers,
  117. stream=True,
  118. )
  119. r.raise_for_status()
  120. # Check if response is SSE
  121. if "text/event-stream" in r.headers.get("Content-Type", ""):
  122. return StreamingResponse(
  123. r.iter_content(chunk_size=8192),
  124. status_code=r.status_code,
  125. headers=dict(r.headers),
  126. )
  127. else:
  128. # For non-SSE, read the response and return it
  129. # response_data = (
  130. # r.json()
  131. # if r.headers.get("Content-Type", "")
  132. # == "application/json"
  133. # else r.text
  134. # )
  135. response_data = r.json()
  136. if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
  137. response_data["data"] = list(
  138. filter(lambda model: "gpt" in model["id"], response_data["data"])
  139. )
  140. return response_data
  141. except Exception as e:
  142. print(e)
  143. error_detail = "Ollama WebUI: Server Connection Error"
  144. if r is not None:
  145. try:
  146. res = r.json()
  147. if "error" in res:
  148. error_detail = f"External: {res['error']}"
  149. except:
  150. error_detail = f"External: {e}"
  151. raise HTTPException(status_code=r.status_code, detail=error_detail)