diff --git a/backend/open_webui/utils/misc.py b/backend/open_webui/utils/misc.py
index 99e6d9c39..42559a431 100644
--- a/backend/open_webui/utils/misc.py
+++ b/backend/open_webui/utils/misc.py
@@ -217,12 +217,19 @@ def openai_chat_chunk_message_template(
 
 
 def openai_chat_completion_message_template(
-    model: str, message: Optional[str] = None, usage: Optional[dict] = None
+    model: str,
+    message: Optional[str] = None,
+    tool_calls: Optional[list[dict]] = None,
+    usage: Optional[dict] = None,
 ) -> dict:
     template = openai_chat_message_template(model)
     template["object"] = "chat.completion"
     if message is not None:
         template["choices"][0]["message"] = {"content": message, "role": "assistant"}
+
+    if tool_calls:
+        template["choices"][0]["message"]["tool_calls"] = tool_calls
+
     template["choices"][0]["finish_reason"] = "stop"
 
     if usage:
diff --git a/backend/open_webui/utils/response.py b/backend/open_webui/utils/response.py
index 4917d3852..9e9e93232 100644
--- a/backend/open_webui/utils/response.py
+++ b/backend/open_webui/utils/response.py
@@ -6,9 +6,31 @@ from open_webui.utils.misc import (
 )
 
 
+def convert_ollama_tool_call_to_openai(tool_calls: list[dict]) -> list[dict]:
+    openai_tool_calls = []
+    for tool_call in tool_calls:
+        openai_tool_call = {
+            "index": tool_call.get("index", 0),
+            "id": tool_call.get("id", f"call_{str(uuid4())}"),
+            "type": "function",
+            "function": {
+                "name": tool_call.get("function", {}).get("name", ""),
+                "arguments": json.dumps(
+                    tool_call.get("function", {}).get("arguments", {})
+                ),
+            },
+        }
+        openai_tool_calls.append(openai_tool_call)
+    return openai_tool_calls
+
 def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
     model = ollama_response.get("model", "ollama")
     message_content = ollama_response.get("message", {}).get("content", "")
+    tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
+    openai_tool_calls = None
+
+    if tool_calls:
+        openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
 
     data = ollama_response
     usage = {
@@ -51,7 +73,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
         ),
     }
 
-    response = openai_chat_completion_message_template(model, message_content, usage)
+    response = openai_chat_completion_message_template(model, message_content, openai_tool_calls, usage)
 
     return response
 
@@ -65,20 +87,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
         openai_tool_calls = None
 
         if tool_calls:
-            openai_tool_calls = []
-            for tool_call in tool_calls:
-                openai_tool_call = {
-                    "index": tool_call.get("index", 0),
-                    "id": tool_call.get("id", f"call_{str(uuid4())}"),
-                    "type": "function",
-                    "function": {
-                        "name": tool_call.get("function", {}).get("name", ""),
-                        "arguments": json.dumps(
-                            tool_call.get("function", {}).get("arguments", {})
-                        ),
-                    },
-                }
-                openai_tool_calls.append(openai_tool_call)
+            openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
 
         done = data.get("done", False)
 
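
As a quick sanity check, here is a self-contained sketch of what the shared helper does, runnable outside Open WebUI. The `get_current_weather` payload is invented for illustration; only the conversion logic mirrors the helper added in `response.py`. The key detail is that Ollama delivers `function.arguments` as a dict, while the OpenAI tool-call schema expects a JSON-encoded string, hence the `json.dumps`.

```python
import json
from uuid import uuid4

# Illustrative Ollama-style tool call: the function name and arguments are
# made up for this example, but the shape follows Ollama's /api/chat
# response, where `arguments` arrives as a dict rather than a string.
ollama_tool_calls = [
    {
        "function": {
            "name": "get_current_weather",
            "arguments": {"city": "Berlin", "unit": "celsius"},
        }
    }
]


def convert_ollama_tool_call_to_openai(tool_calls: list[dict]) -> list[dict]:
    """Map Ollama tool calls onto the OpenAI tool_calls schema."""
    openai_tool_calls = []
    for tool_call in tool_calls:
        openai_tool_calls.append(
            {
                "index": tool_call.get("index", 0),
                # Ollama does not assign call IDs, so synthesize one in the
                # OpenAI "call_..." style when none is present.
                "id": tool_call.get("id", f"call_{uuid4()}"),
                "type": "function",
                "function": {
                    "name": tool_call.get("function", {}).get("name", ""),
                    # OpenAI clients expect `arguments` as a JSON-encoded
                    # string, so serialize the dict that Ollama provides.
                    "arguments": json.dumps(
                        tool_call.get("function", {}).get("arguments", {})
                    ),
                },
            }
        )
    return openai_tool_calls


print(json.dumps(convert_ollama_tool_call_to_openai(ollama_tool_calls), indent=2))
```

Factoring the loop into one helper keeps the synthesized `call_` IDs and the dict-to-string `arguments` handling identical across the streaming and non-streaming code paths, which previously duplicated this logic.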