main.py

from fastapi import FastAPI, Request, Response, HTTPException, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import StreamingResponse, JSONResponse

import requests
import json

from pydantic import BaseModel

from apps.web.models.users import Users
from constants import ERROR_MESSAGES
from utils.utils import decode_token, get_current_user
from config import OPENAI_API_BASE_URL, OPENAI_API_KEY

app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.state.OPENAI_API_BASE_URL = OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = OPENAI_API_KEY


class UrlUpdateForm(BaseModel):
    url: str


class KeyUpdateForm(BaseModel):
    key: str


# Admin-only endpoints for reading and updating the upstream base URL and API key.
@app.get("/url")
async def get_openai_url(user=Depends(get_current_user)):
    if user and user.role == "admin":
        return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


@app.post("/url/update")
async def update_openai_url(form_data: UrlUpdateForm, user=Depends(get_current_user)):
    if user and user.role == "admin":
        app.state.OPENAI_API_BASE_URL = form_data.url
        return {"OPENAI_API_BASE_URL": app.state.OPENAI_API_BASE_URL}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


@app.get("/key")
async def get_openai_key(user=Depends(get_current_user)):
    if user and user.role == "admin":
        return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)


@app.post("/key/update")
async def update_openai_key(form_data: KeyUpdateForm, user=Depends(get_current_user)):
    if user and user.role == "admin":
        app.state.OPENAI_API_KEY = form_data.key
        return {"OPENAI_API_KEY": app.state.OPENAI_API_KEY}
    else:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
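

# Usage sketch (illustrative, not part of the original file). Assuming
# get_current_user reads a bearer token from the Authorization header and the
# parent application mounts this sub-app under some prefix <base>, an admin
# could rotate the upstream key with:
#
#   curl -X POST <base>/key/update \
#       -H "Authorization: Bearer <admin-token>" \
#       -H "Content-Type: application/json" \
#       -d '{"key": "sk-..."}'
#
# Non-admin callers receive a 401 ACCESS_PROHIBITED from the endpoints above.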


@app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
async def proxy(path: str, request: Request, user=Depends(get_current_user)):
    """Forward any other request to the configured OpenAI-compatible API."""
    target_url = f"{app.state.OPENAI_API_BASE_URL}/{path}"
    print(target_url)

    if user.role not in ["user", "admin"]:
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.ACCESS_PROHIBITED)
    if app.state.OPENAI_API_KEY == "":
        raise HTTPException(status_code=401, detail=ERROR_MESSAGES.API_KEY_NOT_FOUND)

    body = await request.body()

    # TODO: Remove below after gpt-4-vision fix from OpenAI
    # Try to decode the request body from bytes to a UTF-8 string
    # (gpt-4-vision-preview currently requires an explicit max_tokens value)
    try:
        body = body.decode("utf-8")
        body = json.loads(body)

        # Workaround until OpenAI fixes the issue with this model:
        # default "max_tokens" to 4000 for "gpt-4-vision-preview"
        if body.get("model") == "gpt-4-vision-preview":
            if "max_tokens" not in body:
                body["max_tokens"] = 4000
            print("Modified body_dict:", body)

        # Convert the modified body back to JSON
        body = json.dumps(body)
    except json.JSONDecodeError as e:
        print("Error loading request body into a dictionary:", e)

    headers = {}
    headers["Authorization"] = f"Bearer {app.state.OPENAI_API_KEY}"
    headers["Content-Type"] = "application/json"

    r = None
    try:
        r = requests.request(
            method=request.method,
            url=target_url,
            data=body,
            headers=headers,
            stream=True,
        )

        r.raise_for_status()

        # Check if the response is server-sent events (streaming)
        if "text/event-stream" in r.headers.get("Content-Type", ""):
            return StreamingResponse(
                r.iter_content(chunk_size=8192),
                status_code=r.status_code,
                headers=dict(r.headers),
            )
        else:
            # For non-SSE, read the response and return it
            # response_data = (
            #     r.json()
            #     if r.headers.get("Content-Type", "") == "application/json"
            #     else r.text
            # )
            response_data = r.json()
            print(type(response_data))

            # When talking to the official OpenAI API, only expose GPT models
            if "openai" in app.state.OPENAI_API_BASE_URL and path == "models":
                response_data["data"] = list(
                    filter(lambda model: "gpt" in model["id"], response_data["data"])
                )

            return response_data
    except Exception as e:
        print(e)
        error_detail = "Ollama WebUI: Server Connection Error"
        if r is not None:
            try:
                res = r.json()
                if "error" in res:
                    error_detail = f"External: {res['error']}"
            except Exception:
                error_detail = f"External: {e}"

        raise HTTPException(
            status_code=r.status_code if r is not None else 500,
            detail=error_detail,
        )
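

# Mounting sketch (assumption; the exact import path and prefix depend on the
# parent application, which is not shown in this file):
#
#   from fastapi import FastAPI
#   from apps.openai.main import app as openai_app
#
#   root = FastAPI()
#   root.mount("/openai/api", openai_app)
#
# A request to /openai/api/chat/completions would then hit the proxy route above.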