# response.py

import json

from open_webui.utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)


def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
    """Map a non-streaming Ollama chat response onto the OpenAI chat-completion shape."""
    model = ollama_response.get("model", "ollama")
    message_content = ollama_response.get("message", {}).get("content", "")

    response = openai_chat_completion_message_template(model, message_content)
    return response
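
# Example (illustrative only): an Ollama chat response such as
#   {"model": "llama3", "message": {"content": "Hello!"}, "done": True}
# is reduced above to (model, content) and handed to
# openai_chat_completion_message_template, which builds the OpenAI-style
# chat.completion payload; the exact output fields are defined by that helper
# in open_webui.utils.misc, not in this file.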


async def convert_streaming_response_ollama_to_openai(ollama_streaming_response):
    """Re-emit an Ollama streaming response as OpenAI-style server-sent events."""
    async for data in ollama_streaming_response.body_iterator:
        data = json.loads(data)

        model = data.get("model", "ollama")
        message_content = data.get("message", {}).get("content", "")
        done = data.get("done", False)

        # On the final chunk (done=True), pass None so the chunk template can
        # emit a closing chunk instead of more content.
        data = openai_chat_chunk_message_template(
            model, message_content if not done else None
        )

        line = f"data: {json.dumps(data)}\n\n"
        yield line

    yield "data: [DONE]\n\n"
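

# --- Illustrative usage (not part of the original module) -------------------
# A minimal, self-contained sketch of how the streaming converter can be
# exercised: a fake Ollama NDJSON stream is wrapped in a StreamingResponse
# (which exposes the `body_iterator` attribute the converter reads) and
# re-emitted as OpenAI-style SSE lines. The model name and chunk contents
# below are made up for demonstration; real chunks come from the Ollama API.
if __name__ == "__main__":
    import asyncio

    from fastapi.responses import StreamingResponse

    async def fake_ollama_chunks():
        # Two content chunks followed by the terminating done=True chunk.
        yield json.dumps({"model": "llama3", "message": {"content": "Hel"}, "done": False})
        yield json.dumps({"model": "llama3", "message": {"content": "lo"}, "done": False})
        yield json.dumps({"model": "llama3", "message": {"content": ""}, "done": True})

    async def demo():
        response = StreamingResponse(fake_ollama_chunks())
        async for sse_line in convert_streaming_response_ollama_to_openai(response):
            print(sse_line, end="")

    asyncio.run(demo())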