Added support for API tool_calls when stream is false

This commit is contained in:
Simone 2025-02-12 09:11:26 +01:00
parent cd2f4142d5
commit 7dc000a6b9
2 changed files with 32 additions and 16 deletions

View File

@ -217,12 +217,19 @@ def openai_chat_chunk_message_template(
def openai_chat_completion_message_template(
model: str, message: Optional[str] = None, usage: Optional[dict] = None
model: str,
message: Optional[str] = None,
tool_calls: Optional[list[dict]] = None,
usage: Optional[dict] = None
) -> dict:
template = openai_chat_message_template(model)
template["object"] = "chat.completion"
if message is not None:
template["choices"][0]["message"] = {"content": message, "role": "assistant"}
if tool_calls:
template["choices"][0]["tool_calls"] = tool_calls
template["choices"][0]["finish_reason"] = "stop"
if usage:

View File

@ -6,9 +6,31 @@ from open_webui.utils.misc import (
)
def conver_ollama_tool_call_to_openai(tool_calls: list[dict]) -> list[dict]:
    """Convert Ollama-style tool calls into the OpenAI tool_calls format.

    Args:
        tool_calls: List of Ollama tool-call dicts; each may carry "index",
            "id", and a "function" dict with "name" and "arguments".

    Returns:
        List of OpenAI-format tool-call dicts with "index", "id", "type",
        and a "function" entry whose "arguments" is a JSON-encoded string
        (OpenAI expects a string, Ollama provides a dict).
    """
    openai_tool_calls = []
    for tool_call in tool_calls:
        openai_tool_call = {
            "index": tool_call.get("index", 0),
            # Generate an OpenAI-style id when Ollama did not supply one.
            "id": tool_call.get("id", f"call_{str(uuid4())}"),
            "type": "function",
            "function": {
                "name": tool_call.get("function", {}).get("name", ""),
                # Serialize the arguments dict to a JSON string as required
                # by the OpenAI API contract.
                "arguments": json.dumps(
                    tool_call.get("function", {}).get("arguments", {})
                ),
            },
        }
        openai_tool_calls.append(openai_tool_call)
    return openai_tool_calls
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
model = ollama_response.get("model", "ollama")
message_content = ollama_response.get("message", {}).get("content", "")
tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
openai_tool_calls = None
if tool_calls:
openai_tool_calls = conver_ollama_tool_call_to_openai(tool_calls)
data = ollama_response
usage = {
@ -51,7 +73,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
),
}
response = openai_chat_completion_message_template(model, message_content, usage)
response = openai_chat_completion_message_template(model, message_content, openai_tool_calls, usage)
return response
@ -65,20 +87,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
openai_tool_calls = None
if tool_calls:
openai_tool_calls = []
for tool_call in tool_calls:
openai_tool_call = {
"index": tool_call.get("index", 0),
"id": tool_call.get("id", f"call_{str(uuid4())}"),
"type": "function",
"function": {
"name": tool_call.get("function", {}).get("name", ""),
"arguments": json.dumps(
tool_call.get("function", {}).get("arguments", {})
),
},
}
openai_tool_calls.append(openai_tool_call)
openai_tool_calls = conver_ollama_tool_call_to_openai(tool_calls)
done = data.get("done", False)