Mirror of https://github.com/open-webui/open-webui (synced 2025-06-11 08:56:21 +00:00)
refac: rename stream_message_template

commit 2e0fa1c6a0
parent f8726719ef
@@ -20,7 +20,7 @@ from apps.webui.models.models import Models
 from apps.webui.utils import load_function_module_by_id

 from utils.misc import (
-    stream_message_template,
+    openai_chat_chunk_message_template,
     openai_chat_completion_message_template,
     add_or_update_system_message,
 )
@@ -227,7 +227,7 @@ def process_line(form_data: dict, line):
     if line.startswith("data:"):
         return f"{line}\n\n"
     else:
-        line = stream_message_template(form_data["model"], line)
+        line = openai_chat_chunk_message_template(form_data["model"], line)
         return f"data: {json.dumps(line)}\n\n"
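After this change, any line a pipe yields that does not already start with "data:" is wrapped by the renamed helper and serialized into a server-sent-events frame. A rough usage sketch, with an illustrative model name and content (the helper itself lives in utils.misc, per the import hunk above):

import json

from utils.misc import openai_chat_chunk_message_template  # renamed in this commit

chunk = openai_chat_chunk_message_template("illustrative-model", "Hello")
# Per the utils.misc hunk at the end of this diff, the helper sets at least:
#   chunk["object"] == "chat.completion.chunk"
#   chunk["choices"][0]["delta"] == {"content": "Hello"}
sse_frame = f"data: {json.dumps(chunk)}\n\n"  # what process_line returns for such a line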
@@ -371,7 +371,9 @@ async def generate_function_chat_completion(form_data, user):
                 return

             if isinstance(res, str):
-                message = stream_message_template(form_data["model"], res)
+                message = openai_chat_chunk_message_template(
+                    form_data["model"], res
+                )
                 yield f"data: {json.dumps(message)}\n\n"

             if isinstance(res, Iterator):
@@ -383,7 +385,9 @@ async def generate_function_chat_completion(form_data, user):
                     yield process_line(form_data, line)

             if isinstance(res, str) or isinstance(res, Generator):
-                finish_message = stream_message_template(form_data["model"], "")
+                finish_message = openai_chat_chunk_message_template(
+                    form_data["model"], ""
+                )
                 finish_message["choices"][0]["finish_reason"] = "stop"
                 yield f"data: {json.dumps(finish_message)}\n\n"
                 yield "data: [DONE]"
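The stream still ends the way it did before the rename: one empty-delta chunk whose finish_reason is set to "stop", followed by the literal [DONE] marker. A minimal sketch of those closing frames, assuming only what the hunks above show (the generator name is illustrative):

import json

from utils.misc import openai_chat_chunk_message_template


def closing_frames(model: str):
    # Illustrative sketch of the two frames that terminate the SSE stream above.
    finish_message = openai_chat_chunk_message_template(model, "")
    finish_message["choices"][0]["finish_reason"] = "stop"
    yield f"data: {json.dumps(finish_message)}\n\n"
    yield "data: [DONE]"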
@@ -97,7 +97,7 @@ def message_template(model: str):
     }


-def stream_message_template(model: str, message: str):
+def openai_chat_chunk_message_template(model: str, message: str):
     template = message_template(model)
     template["object"] = "chat.completion.chunk"
     template["choices"][0]["delta"] = {"content": message}
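For reference, a self-contained sketch of what the renamed helper ends up returning. Only the object and delta assignments are visible in this hunk; the base fields produced by message_template (id, created, and the choices scaffold) are assumptions modeled on the OpenAI chat.completion.chunk shape the helper mirrors:

import time
import uuid


def message_template_sketch(model: str) -> dict:
    # Assumed base shape; the real message_template is defined just above this
    # hunk and is not fully visible in the diff.
    return {
        "id": f"{model}-{uuid.uuid4()}",
        "created": int(time.time()),
        "model": model,
        "choices": [{"index": 0, "logprobs": None, "finish_reason": None}],
    }


def openai_chat_chunk_message_template_sketch(model: str, message: str) -> dict:
    # Mirrors the renamed helper shown in the hunk above.
    template = message_template_sketch(model)
    template["object"] = "chat.completion.chunk"
    template["choices"][0]["delta"] = {"content": message}
    return template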