mirror of
https://github.com/open-webui/open-webui
synced 2025-04-21 06:50:44 +00:00
Merge branch 'dev' of https://github.com/open-webui/open-webui into dev
This commit is contained in:
commit
c28d82b10d
@ -217,12 +217,19 @@ def openai_chat_chunk_message_template(
|
|||||||
|
|
||||||
|
|
||||||
def openai_chat_completion_message_template(
|
def openai_chat_completion_message_template(
|
||||||
model: str, message: Optional[str] = None, usage: Optional[dict] = None
|
model: str,
|
||||||
|
message: Optional[str] = None,
|
||||||
|
tool_calls: Optional[list[dict]] = None,
|
||||||
|
usage: Optional[dict] = None
|
||||||
) -> dict:
|
) -> dict:
|
||||||
template = openai_chat_message_template(model)
|
template = openai_chat_message_template(model)
|
||||||
template["object"] = "chat.completion"
|
template["object"] = "chat.completion"
|
||||||
if message is not None:
|
if message is not None:
|
||||||
template["choices"][0]["message"] = {"content": message, "role": "assistant"}
|
template["choices"][0]["message"] = {"content": message, "role": "assistant"}
|
||||||
|
|
||||||
|
if tool_calls:
|
||||||
|
template["choices"][0]["tool_calls"] = tool_calls
|
||||||
|
|
||||||
template["choices"][0]["finish_reason"] = "stop"
|
template["choices"][0]["finish_reason"] = "stop"
|
||||||
|
|
||||||
if usage:
|
if usage:
|
||||||
|
@ -6,9 +6,31 @@ from open_webui.utils.misc import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def convert_ollama_tool_call_to_openai(tool_calls: list[dict]) -> list[dict]:
    """Convert Ollama-style tool calls to the OpenAI tool_calls format.

    Args:
        tool_calls: List of Ollama tool-call dicts, each optionally carrying
            ``index``, ``id`` and a ``function`` dict with ``name`` and
            ``arguments``.

    Returns:
        A list of OpenAI-shaped tool-call dicts. OpenAI expects
        ``function.arguments`` to be a JSON *string*, so the (possibly dict)
        Ollama arguments are serialized with ``json.dumps``.
    """
    openai_tool_calls = []
    for tool_call in tool_calls:
        openai_tool_call = {
            "index": tool_call.get("index", 0),
            # Ollama may omit the id; synthesize one in OpenAI's "call_..." style.
            "id": tool_call.get("id", f"call_{str(uuid4())}"),
            "type": "function",
            "function": {
                "name": tool_call.get("function", {}).get("name", ""),
                # OpenAI requires arguments as a JSON-encoded string.
                "arguments": json.dumps(
                    tool_call.get("function", {}).get("arguments", {})
                ),
            },
        }
        openai_tool_calls.append(openai_tool_call)
    return openai_tool_calls
|
||||||
|
|
||||||
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
|
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
|
||||||
model = ollama_response.get("model", "ollama")
|
model = ollama_response.get("model", "ollama")
|
||||||
message_content = ollama_response.get("message", {}).get("content", "")
|
message_content = ollama_response.get("message", {}).get("content", "")
|
||||||
|
tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
|
||||||
|
openai_tool_calls = None
|
||||||
|
|
||||||
|
if tool_calls:
|
||||||
|
openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
|
||||||
|
|
||||||
data = ollama_response
|
data = ollama_response
|
||||||
usage = {
|
usage = {
|
||||||
@ -51,7 +73,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
|
|||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
response = openai_chat_completion_message_template(model, message_content, usage)
|
response = openai_chat_completion_message_template(model, message_content, openai_tool_calls, usage)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
@ -65,20 +87,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
|
|||||||
openai_tool_calls = None
|
openai_tool_calls = None
|
||||||
|
|
||||||
if tool_calls:
|
if tool_calls:
|
||||||
openai_tool_calls = []
|
openai_tool_calls = convert_ollama_tool_call_to_openai(tool_calls)
|
||||||
for tool_call in tool_calls:
|
|
||||||
openai_tool_call = {
|
|
||||||
"index": tool_call.get("index", 0),
|
|
||||||
"id": tool_call.get("id", f"call_{str(uuid4())}"),
|
|
||||||
"type": "function",
|
|
||||||
"function": {
|
|
||||||
"name": tool_call.get("function", {}).get("name", ""),
|
|
||||||
"arguments": json.dumps(
|
|
||||||
tool_call.get("function", {}).get("arguments", {})
|
|
||||||
),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
openai_tool_calls.append(openai_tool_call)
|
|
||||||
|
|
||||||
done = data.get("done", False)
|
done = data.get("done", False)
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user