From 7766a08b701af31494f7c8797ea67d2766062e3a Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Tue, 4 Feb 2025 21:42:49 -0800
Subject: [PATCH] refac: ollama tool calls

---
 backend/open_webui/utils/misc.py     | 19 ++++++++++++++-----
 backend/open_webui/utils/response.py | 20 +++++++++++++++++++-
 2 files changed, 33 insertions(+), 6 deletions(-)

diff --git a/backend/open_webui/utils/misc.py b/backend/open_webui/utils/misc.py
index 0487595a1..c3655bf07 100644
--- a/backend/open_webui/utils/misc.py
+++ b/backend/open_webui/utils/misc.py
@@ -179,15 +179,24 @@ def openai_chat_message_template(model: str):
 
 
 def openai_chat_chunk_message_template(
-    model: str, message: Optional[str] = None, usage: Optional[dict] = None
+    model: str,
+    content: Optional[str] = None,
+    tool_calls: Optional[list[dict]] = None,
+    usage: Optional[dict] = None,
 ) -> dict:
     template = openai_chat_message_template(model)
     template["object"] = "chat.completion.chunk"
-    if message:
-        template["choices"][0]["delta"] = {"content": message}
-    else:
+
+    template["choices"][0]["delta"] = {}
+
+    if content:
+        template["choices"][0]["delta"]["content"] = content
+
+    if tool_calls:
+        template["choices"][0]["delta"]["tool_calls"] = tool_calls
+
+    if not content and not tool_calls:
         template["choices"][0]["finish_reason"] = "stop"
-        template["choices"][0]["delta"] = {}
 
     if usage:
         template["usage"] = usage
diff --git a/backend/open_webui/utils/response.py b/backend/open_webui/utils/response.py
index f461f7cc2..b16805bf3 100644
--- a/backend/open_webui/utils/response.py
+++ b/backend/open_webui/utils/response.py
@@ -1,4 +1,5 @@
 import json
+from uuid import uuid4
 from open_webui.utils.misc import (
     openai_chat_chunk_message_template,
     openai_chat_completion_message_template,
@@ -60,6 +61,23 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
 
         model = data.get("model", "ollama")
         message_content = data.get("message", {}).get("content", "")
+        tool_calls = data.get("message", {}).get("tool_calls", None)
+        openai_tool_calls = None
+
+        if tool_calls:
+            openai_tool_calls = []
+            for tool_call in tool_calls:
+                openai_tool_call = {
+                    "index": tool_call.get("index", 0),
+                    "id": tool_call.get("id", f"call_{str(uuid4())}"),
+                    "type": "function",
+                    "function": {
+                        "name": tool_call.get("function", {}).get("name", ""),
+                        "arguments": f"{tool_call.get('function', {}).get('arguments', {})}",
+                    },
+                }
+                openai_tool_calls.append(openai_tool_call)
+
         done = data.get("done", False)
 
         usage = None
@@ -105,7 +123,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
             }
 
         data = openai_chat_chunk_message_template(
-            model, message_content if not done else None, usage
+            model, message_content if not done else None, openai_tool_calls, usage
         )
 
         line = f"data: {json.dumps(data)}\n\n"
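
A standalone sketch of the reshaping this patch performs, for reference (not part of the patch itself; the sample payload and the get_weather function name are illustrative only). One caveat worth flagging: the patch serializes "arguments" with an f-string, which renders a Python dict repr (single quotes) rather than strict JSON, so this sketch swaps in json.dumps on the assumption that OpenAI-style clients expect a JSON string.

import json
from uuid import uuid4

# Hypothetical chunk as Ollama streams it when the model requests a tool call.
ollama_chunk = {
    "model": "llama3.1",
    "message": {
        "content": "",
        "tool_calls": [
            {"function": {"name": "get_weather", "arguments": {"city": "Paris"}}}
        ],
    },
    "done": False,
}

# Mirror of the patch's per-tool-call loop, with json.dumps substituted for
# the f-string so "arguments" stays valid JSON.
openai_tool_calls = []
for i, tool_call in enumerate(ollama_chunk["message"]["tool_calls"]):
    openai_tool_calls.append(
        {
            "index": tool_call.get("index", i),
            "id": tool_call.get("id", f"call_{uuid4()}"),
            "type": "function",
            "function": {
                "name": tool_call.get("function", {}).get("name", ""),
                "arguments": json.dumps(
                    tool_call.get("function", {}).get("arguments", {})
                ),
            },
        }
    )

print(openai_tool_calls[0]["function"]["arguments"])  # -> {"city": "Paris"}

The converted list rides in choices[0].delta.tool_calls of the chunk built by openai_chat_chunk_message_template, and since that template now emits finish_reason "stop" only when a chunk carries neither content nor tool calls, a tool-call-only chunk no longer terminates the stream prematurely.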