refac: ollama response

Timothy Jaeryang Baek 2025-06-10 13:10:31 +04:00
parent 2ccc441b41
commit b7a91b1963
3 changed files with 14 additions and 5 deletions


@@ -1866,9 +1866,11 @@ async def process_chat_response(
         value = delta.get("content")

-        reasoning_content = delta.get(
-            "reasoning_content"
-        ) or delta.get("reasoning")
+        reasoning_content = (
+            delta.get("reasoning_content")
+            or delta.get("reasoning")
+            or delta.get("thinking")
+        )

         if reasoning_content:
             if (
                 not content_blocks

@@ -208,6 +208,7 @@ def openai_chat_message_template(model: str):
 def openai_chat_chunk_message_template(
     model: str,
     content: Optional[str] = None,
+    reasoning_content: Optional[str] = None,
     tool_calls: Optional[list[dict]] = None,
     usage: Optional[dict] = None,
 ) -> dict:
@@ -220,6 +221,9 @@ def openai_chat_chunk_message_template(
     if content:
         template["choices"][0]["delta"]["content"] = content
+    if reasoning_content:
+        template["choices"][0]["delta"]["reasoning_content"] = reasoning_content
+
     if tool_calls:
         template["choices"][0]["delta"]["tool_calls"] = tool_calls
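The chunk template now accepts and forwards `reasoning_content`. A stand-in showing the behavior after the change; the base template fields here are assumptions, since the diff only shows the delta handling:

from typing import Optional

def chunk_template_sketch(
    model: str,
    content: Optional[str] = None,
    reasoning_content: Optional[str] = None,
) -> dict:
    # Hypothetical base template; the real openai_chat_chunk_message_template
    # builds its base via openai_chat_message_template.
    template = {
        "model": model,
        "object": "chat.completion.chunk",
        "choices": [{"index": 0, "delta": {}, "finish_reason": None}],
    }
    if content:
        template["choices"][0]["delta"]["content"] = content
    if reasoning_content:
        template["choices"][0]["delta"]["reasoning_content"] = reasoning_content
    return template

chunk = chunk_template_sketch("llama3", reasoning_content="thinking...")
assert chunk["choices"][0]["delta"] == {"reasoning_content": "thinking..."}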
@@ -234,6 +238,7 @@ def openai_chat_chunk_message_template(
 def openai_chat_completion_message_template(
     model: str,
     message: Optional[str] = None,
+    reasoning_content: Optional[str] = None,
     tool_calls: Optional[list[dict]] = None,
     usage: Optional[dict] = None,
 ) -> dict:
@@ -241,8 +246,9 @@ def openai_chat_completion_message_template(
     template["object"] = "chat.completion"

     if message is not None:
         template["choices"][0]["message"] = {
-            "content": message,
             "role": "assistant",
+            "content": message,
+            **({"reasoning_content": reasoning_content} if reasoning_content else {}),
             **({"tool_calls": tool_calls} if tool_calls else {}),
         }
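The non-streaming template splices its optional keys in with conditional dict unpacking, so absent fields never appear in the response message. The idiom in isolation, with made-up values:

reasoning_content = "chain of thought"
tool_calls = None

message = {
    "role": "assistant",
    "content": "final answer",
    # Each optional key is included only when its value is truthy.
    **({"reasoning_content": reasoning_content} if reasoning_content else {}),
    **({"tool_calls": tool_calls} if tool_calls else {}),
}
assert "reasoning_content" in message and "tool_calls" not in message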


@@ -105,6 +105,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)

     model = data.get("model", "ollama")
     message_content = data.get("message", {}).get("content", None)
+    reasoning_content = data.get("message", {}).get("thinking", None)
     tool_calls = data.get("message", {}).get("tool_calls", None)

     openai_tool_calls = None
@@ -118,7 +119,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
     usage = convert_ollama_usage_to_openai(data)

     data = openai_chat_chunk_message_template(
-        model, message_content, openai_tool_calls, usage
+        model, message_content, reasoning_content, openai_tool_calls, usage
     )

     line = f"data: {json.dumps(data)}\n\n"
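This last file threads the new field through the Ollama-to-OpenAI stream conversion: each Ollama chunk's `message.thinking` value now rides along as `reasoning_content` in the emitted OpenAI-style chunk. A self-contained sketch of one chunk's round trip; the Ollama payload is invented and the hand-built chunk stands in for `openai_chat_chunk_message_template`:

import json

# Invented Ollama /api/chat streaming chunk carrying a "thinking" field.
data = {
    "model": "deepseek-r1",
    "message": {"role": "assistant", "content": "", "thinking": "Let me see..."},
    "done": False,
}

message_content = data.get("message", {}).get("content", None)
reasoning_content = data.get("message", {}).get("thinking", None)

# Hand-built OpenAI-style chunk standing in for the template helper.
delta = {}
if message_content:
    delta["content"] = message_content
if reasoning_content:
    delta["reasoning_content"] = reasoning_content

chunk = {
    "model": data.get("model", "ollama"),
    "object": "chat.completion.chunk",
    "choices": [{"index": 0, "delta": delta, "finish_reason": None}],
}
line = f"data: {json.dumps(chunk)}\n\n"
print(line, end="")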