refactor: system prompt template

This commit is contained in:
Timothy Jaeryang Baek 2025-02-05 14:35:18 -08:00
parent fb92694d61
commit 7ba717d171
4 changed files with 20 additions and 6 deletions

View File

@ -250,7 +250,7 @@ async def generate_function_chat_completion(
params = model_info.params.model_dump() params = model_info.params.model_dump()
form_data = apply_model_params_to_body_openai(params, form_data) form_data = apply_model_params_to_body_openai(params, form_data)
form_data = apply_model_system_prompt_to_body(params, form_data, user) form_data = apply_model_system_prompt_to_body(params, form_data, metadata, user)
pipe_id = get_pipe_id(form_data) pipe_id = get_pipe_id(form_data)
function_module = get_function_module_by_id(request, pipe_id) function_module = get_function_module_by_id(request, pipe_id)

View File

@ -1007,7 +1007,7 @@ async def generate_chat_completion(
payload["options"] = apply_model_params_to_body_ollama( payload["options"] = apply_model_params_to_body_ollama(
params, payload["options"] params, payload["options"]
) )
payload = apply_model_system_prompt_to_body(params, payload, metadata) payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
# Check if user has access to the model # Check if user has access to the model
if not bypass_filter and user.role == "user": if not bypass_filter and user.role == "user":
@ -1160,6 +1160,8 @@ async def generate_openai_chat_completion(
url_idx: Optional[int] = None, url_idx: Optional[int] = None,
user=Depends(get_verified_user), user=Depends(get_verified_user),
): ):
metadata = form_data.pop("metadata", None)
try: try:
completion_form = OpenAIChatCompletionForm(**form_data) completion_form = OpenAIChatCompletionForm(**form_data)
except Exception as e: except Exception as e:
@ -1186,7 +1188,7 @@ async def generate_openai_chat_completion(
if params: if params:
payload = apply_model_params_to_body_openai(params, payload) payload = apply_model_params_to_body_openai(params, payload)
payload = apply_model_system_prompt_to_body(params, payload, user) payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
# Check if user has access to the model # Check if user has access to the model
if user.role == "user": if user.role == "user":

View File

@ -566,7 +566,7 @@ async def generate_chat_completion(
params = model_info.params.model_dump() params = model_info.params.model_dump()
payload = apply_model_params_to_body_openai(params, payload) payload = apply_model_params_to_body_openai(params, payload)
payload = apply_model_system_prompt_to_body(params, payload, metadata) payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
# Check if user has access to the model # Check if user has access to the model
if not bypass_filter and user.role == "user": if not bypass_filter and user.role == "user":

View File

@ -1,4 +1,4 @@
from open_webui.utils.task import prompt_variables_template from open_webui.utils.task import prompt_template, prompt_variables_template
from open_webui.utils.misc import ( from open_webui.utils.misc import (
add_or_update_system_message, add_or_update_system_message,
) )
@ -8,12 +8,24 @@ from typing import Callable, Optional
# inplace function: form_data is modified # inplace function: form_data is modified
def apply_model_system_prompt_to_body( def apply_model_system_prompt_to_body(
params: dict, form_data: dict, metadata: Optional[dict] = None params: dict, form_data: dict, metadata: Optional[dict] = None, user=None
) -> dict: ) -> dict:
system = params.get("system", None) system = params.get("system", None)
if not system: if not system:
return form_data return form_data
# Legacy (API Usage)
if user:
template_params = {
"user_name": user.name,
"user_location": user.info.get("location") if user.info else None,
}
else:
template_params = {}
system = prompt_template(system, **template_params)
# Metadata (WebUI Usage)
if metadata: if metadata:
variables = metadata.get("variables", {}) variables = metadata.get("variables", {})
if variables: if variables: