Mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)
Merge pull request #10462 from Seniorsimo/fix-ollama-stream-conversion

**fix**: Ollama-to-OpenAI conversion of a one-shot streamed response

Commit 1bf16ec23d
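
For context, Ollama's chat endpoint streams newline-delimited JSON chunks; a "one-shot" streamed response is one where the whole answer arrives in a single chunk that already carries `done: true`. A rough illustration of such a chunk (field values are invented and only a subset of Ollama's response fields is shown):

```python
# Illustrative only: a single streamed line that contains both the full
# assistant message and the terminal "done" flag.
one_shot_chunk = (
    '{"model": "llama3", '
    '"message": {"role": "assistant", "content": "Hello there!"}, '
    '"done": true, "done_reason": "stop"}'
)
```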
@@ -104,7 +104,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
         data = json.loads(data)

         model = data.get("model", "ollama")
-        message_content = data.get("message", {}).get("content", "")
+        message_content = data.get("message", {}).get("content", None)
         tool_calls = data.get("message", {}).get("tool_calls", None)
         openai_tool_calls = None

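The first change swaps the default content from an empty string to None, presumably so that a chunk with no text (for example a pure tool-call or final usage chunk) can be told apart from a chunk whose text happens to be empty. A hypothetical sketch of that distinction; `build_delta` stands in for, and is not, open-webui's `openai_chat_chunk_message_template`:

```python
def build_delta(message_content, openai_tool_calls=None):
    # Hypothetical helper: only emit a "content" key when there is content.
    # With the old default of "", every chunk would carry content="" even
    # when the Ollama chunk had no message text at all.
    delta = {}
    if message_content is not None:
        delta["content"] = message_content
    if openai_tool_calls is not None:
        delta["tool_calls"] = openai_tool_calls
    return delta

print(build_delta(None))     # {} -> no content delta for this chunk
print(build_delta(""))       # {'content': ''} -> explicit empty content
print(build_delta("Hello"))  # {'content': 'Hello'}
```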
@@ -118,7 +118,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
             usage = convert_ollama_usage_to_openai(data)

         data = openai_chat_chunk_message_template(
-            model, message_content if not done else None, openai_tool_calls, usage
+            model, message_content, openai_tool_calls, usage
         )

         line = f"data: {json.dumps(data)}\n\n"
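
The second change is the core of the fix: the old expression `message_content if not done else None` nulled out the content whenever `done` was true, so a one-shot response, whose only chunk is also the final one, reached the client with no text. A self-contained sketch of the before/after behaviour (`openai_chunk` is a stand-in for open-webui's `openai_chat_chunk_message_template`, and the sample chunk is invented):

```python
import json

def openai_chunk(model, content, tool_calls=None, usage=None):
    # Stand-in for openai_chat_chunk_message_template: build an OpenAI-style
    # streaming chunk whose delta carries the content, if any.
    delta = {}
    if content is not None:
        delta["content"] = content
    if tool_calls is not None:
        delta["tool_calls"] = tool_calls
    return {"object": "chat.completion.chunk", "model": model,
            "choices": [{"index": 0, "delta": delta}], "usage": usage}

# A one-shot Ollama chunk: the full answer and done=True arrive together.
data = {"model": "llama3", "message": {"content": "Hello!"}, "done": True}

model = data.get("model", "ollama")
message_content = data.get("message", {}).get("content", None)
done = data.get("done", False)

old = openai_chunk(model, message_content if not done else None)  # text dropped
new = openai_chunk(model, message_content)                        # text kept

print(json.dumps(old["choices"][0]["delta"]))  # {}
print(json.dumps(new["choices"][0]["delta"]))  # {"content": "Hello!"}
```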