12345678910111213141516171819202122232425262728293031 |
- import json
- from open_webui.utils.misc import (
- openai_chat_chunk_message_template,
- openai_chat_completion_message_template,
- )
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
    """Convert a non-streaming Ollama chat response into the OpenAI
    chat-completion shape via the shared message template.

    Missing fields fall back to the "ollama" model name and an empty
    content string.
    """
    message = ollama_response.get("message", {})
    return openai_chat_completion_message_template(
        ollama_response.get("model", "ollama"),
        message.get("content", ""),
    )
async def convert_streaming_response_ollama_to_openai(ollama_streaming_response):
    """Re-emit an Ollama streaming chat response as OpenAI-style SSE lines.

    Each NDJSON chunk from the upstream response is parsed, converted with
    the shared chunk template (content is suppressed on the final chunk,
    where ``done`` is true), and yielded as a ``data: ...`` SSE event.
    A terminal ``data: [DONE]`` event is yielded after the stream ends.
    """
    # NOTE(review): assumes ollama_streaming_response exposes an async
    # body_iterator yielding one JSON document per item — confirm at caller.
    async for raw_chunk in ollama_streaming_response.body_iterator:
        chunk = json.loads(raw_chunk)
        is_done = chunk.get("done", False)
        content = chunk.get("message", {}).get("content", "")
        openai_chunk = openai_chat_chunk_message_template(
            chunk.get("model", "ollama"),
            None if is_done else content,
        )
        yield f"data: {json.dumps(openai_chunk)}\n\n"
    yield "data: [DONE]\n\n"
|