import json

from open_webui.utils.misc import (
    openai_chat_chunk_message_template,
    openai_chat_completion_message_template,
)


def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
    # Fall back to "ollama" when the response omits a model name
    model = ollama_response.get("model", "ollama")
    message_content = ollama_response.get("message", {}).get("content", "")

    # Repackage the extracted fields as an OpenAI chat completion payload
    response = openai_chat_completion_message_template(model, message_content)
    return response
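
# Illustrative sketch (values assumed, not from Open WebUI): an Ollama
# /api/chat response such as
#     {"model": "llama3", "message": {"role": "assistant", "content": "Hi"}, "done": True}
# comes back as the OpenAI-style completion dict that
# openai_chat_completion_message_template builds from the model and content.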


async def convert_streaming_response_ollama_to_openai(ollama_streaming_response):
    # Ollama streams newline-delimited JSON chunks; re-emit each one as an
    # OpenAI-style server-sent event
    async for data in ollama_streaming_response.body_iterator:
        data = json.loads(data)

        model = data.get("model", "ollama")
        message_content = data.get("message", {}).get("content", "")
        done = data.get("done", False)

        # On the final chunk (done=True), pass None instead of content
        data = openai_chat_chunk_message_template(
            model, message_content if not done else None
        )

        line = f"data: {json.dumps(data)}\n\n"
        if done:
            # Follow the OpenAI streaming convention of a [DONE] sentinel
            line += "data: [DONE]\n\n"

        yield line
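

if __name__ == "__main__":
    # Hedged smoke test, not part of Open WebUI: _FakeStreamingResponse below
    # is an assumption that only mimics the body_iterator attribute the
    # converter reads from the real streaming response object.
    import asyncio

    class _FakeStreamingResponse:
        async def _chunks(self):
            yield json.dumps(
                {"model": "llama3", "message": {"content": "Hello"}, "done": False}
            )
            yield json.dumps(
                {"model": "llama3", "message": {"content": ""}, "done": True}
            )

        @property
        def body_iterator(self):
            return self._chunks()

    async def _demo():
        # Expect two "data: {...}" events followed by "data: [DONE]"
        async for sse_line in convert_streaming_response_ollama_to_openai(
            _FakeStreamingResponse()
        ):
            print(sse_line, end="")

    asyncio.run(_demo())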