From a8859a81454a55f80c15a72a3c87507ef23459a0 Mon Sep 17 00:00:00 2001
From: Simone
Date: Thu, 20 Feb 2025 21:25:32 +0100
Subject: [PATCH] Fix ollama-to-openai conversion: a stream can return a
 single message with content

---
 backend/open_webui/utils/response.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/open_webui/utils/response.py b/backend/open_webui/utils/response.py
index bc47e1e13..8c3f1a58e 100644
--- a/backend/open_webui/utils/response.py
+++ b/backend/open_webui/utils/response.py
@@ -104,7 +104,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
         data = json.loads(data)
 
         model = data.get("model", "ollama")
-        message_content = data.get("message", {}).get("content", "")
+        message_content = data.get("message", {}).get("content", None)
         tool_calls = data.get("message", {}).get("tool_calls", None)
         openai_tool_calls = None
 
@@ -118,7 +118,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
             usage = convert_ollama_usage_to_openai(data)
 
         data = openai_chat_chunk_message_template(
-            model, message_content if not done else None, openai_tool_calls, usage
+            model, message_content, openai_tool_calls, usage
         )
 
         line = f"data: {json.dumps(data)}\n\n"
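
For context, a minimal sketch of the failure mode this patch addresses (the model name and chunk payload below are illustrative, not taken from a real Ollama response): when a stream consists of a single chunk that carries both "done": true and a non-empty message.content, the old expression `message_content if not done else None` threw the content away.

import json

# Illustrative single-chunk stream: the only chunk carries both the
# full content and the done flag.
chunk = json.loads(
    '{"model": "llama3", "done": true,'
    ' "message": {"content": "Hello!"}}'
)

done = chunk.get("done", False)

# Old behavior: default to "" and suppress content on the final chunk,
# so a single-message stream yields an empty OpenAI delta.
old_content = chunk.get("message", {}).get("content", "")
old_delta = old_content if not done else None
assert old_delta is None  # "Hello!" is lost

# New behavior: default to None and pass the content through unchanged.
new_content = chunk.get("message", {}).get("content", None)
assert new_content == "Hello!"

Switching the default from "" to None also lets the chunk template distinguish "this chunk carries no content" from "this chunk carries an empty string", so a final done chunk no longer clobbers content delivered in the same message.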