From 7ba717d171522db6c654c9525078c461a5c50ae7 Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Wed, 5 Feb 2025 14:35:18 -0800
Subject: [PATCH] refac: system prompt template

---
 backend/open_webui/functions.py      |  2 +-
 backend/open_webui/routers/ollama.py |  6 ++++--
 backend/open_webui/routers/openai.py |  2 +-
 backend/open_webui/utils/payload.py  | 16 ++++++++++++++--
 4 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/backend/open_webui/functions.py b/backend/open_webui/functions.py
index 16536a612..274be56ec 100644
--- a/backend/open_webui/functions.py
+++ b/backend/open_webui/functions.py
@@ -250,7 +250,7 @@ async def generate_function_chat_completion(
 
         params = model_info.params.model_dump()
         form_data = apply_model_params_to_body_openai(params, form_data)
-        form_data = apply_model_system_prompt_to_body(params, form_data, user)
+        form_data = apply_model_system_prompt_to_body(params, form_data, metadata, user)
 
     pipe_id = get_pipe_id(form_data)
     function_module = get_function_module_by_id(request, pipe_id)
diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py
index 10367b020..2ab06eb95 100644
--- a/backend/open_webui/routers/ollama.py
+++ b/backend/open_webui/routers/ollama.py
@@ -1007,7 +1007,7 @@ async def generate_chat_completion(
             payload["options"] = apply_model_params_to_body_ollama(
                 params, payload["options"]
             )
-            payload = apply_model_system_prompt_to_body(params, payload, metadata)
+            payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if not bypass_filter and user.role == "user":
@@ -1160,6 +1160,8 @@ async def generate_openai_chat_completion(
     url_idx: Optional[int] = None,
     user=Depends(get_verified_user),
 ):
+    metadata = form_data.pop("metadata", None)
+
     try:
         completion_form = OpenAIChatCompletionForm(**form_data)
     except Exception as e:
@@ -1186,7 +1188,7 @@ async def generate_openai_chat_completion(
 
         if params:
             payload = apply_model_params_to_body_openai(params, payload)
-            payload = apply_model_system_prompt_to_body(params, payload, user)
+            payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if user.role == "user":
diff --git a/backend/open_webui/routers/openai.py b/backend/open_webui/routers/openai.py
index c27f35e7e..d18f2a8ff 100644
--- a/backend/open_webui/routers/openai.py
+++ b/backend/open_webui/routers/openai.py
@@ -566,7 +566,7 @@ async def generate_chat_completion(
 
         params = model_info.params.model_dump()
         payload = apply_model_params_to_body_openai(params, payload)
-        payload = apply_model_system_prompt_to_body(params, payload, metadata)
+        payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if not bypass_filter and user.role == "user":
diff --git a/backend/open_webui/utils/payload.py b/backend/open_webui/utils/payload.py
index 2e94575b4..b68b313de 100644
--- a/backend/open_webui/utils/payload.py
+++ b/backend/open_webui/utils/payload.py
@@ -1,4 +1,4 @@
-from open_webui.utils.task import prompt_variables_template
+from open_webui.utils.task import prompt_template, prompt_variables_template
 from open_webui.utils.misc import (
     add_or_update_system_message,
 )
@@ -8,12 +8,24 @@ from typing import Callable, Optional
 
 # inplace function: form_data is modified
 def apply_model_system_prompt_to_body(
-    params: dict, form_data: dict, metadata: Optional[dict] = None
+    params: dict, form_data: dict, metadata: Optional[dict] = None, user=None
 ) -> dict:
     system = params.get("system", None)
     if not system:
         return form_data
 
+    # Legacy (API Usage)
+    if user:
+        template_params = {
+            "user_name": user.name,
+            "user_location": user.info.get("location") if user.info else None,
+        }
+    else:
+        template_params = {}
+
+    system = prompt_template(system, **template_params)
+
+    # Metadata (WebUI Usage)
     if metadata:
         variables = metadata.get("variables", {})
         if variables:
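
Note on behavior: the hunks above pass the verified user into apply_model_system_prompt_to_body so that legacy user-based placeholders can be filled via prompt_template before any WebUI metadata variables are substituted. The sketch below illustrates that substitution order only; the stand-in implementations of prompt_template and prompt_variables_template, the dict-shaped demo user, and the {{USER_NAME}}/{{USER_LOCATION}}/{{CURRENT_DATE}} placeholder names are assumptions for illustration, not the actual open_webui.utils.task code touched by this patch.

    # Sketch only: assumed stand-ins, not the open_webui.utils.task helpers.
    from typing import Optional


    def prompt_template(
        template: str,
        user_name: Optional[str] = None,
        user_location: Optional[str] = None,
    ) -> str:
        # Legacy (API Usage): fill user-derived placeholders when values are known.
        if user_name is not None:
            template = template.replace("{{USER_NAME}}", user_name)
        if user_location is not None:
            template = template.replace("{{USER_LOCATION}}", user_location)
        return template


    def prompt_variables_template(template: str, variables: dict) -> str:
        # Metadata (WebUI Usage): substitute variables supplied by the front end.
        for placeholder, value in variables.items():
            template = template.replace(placeholder, str(value))
        return template


    def render_system_prompt(system: str, metadata: Optional[dict] = None, user=None) -> str:
        # Mirrors the order introduced by the patch: user fields first, then metadata.
        # The real helper receives a UserModel; a plain dict is used here for the demo.
        if user:
            system = prompt_template(
                system,
                user_name=user.get("name"),
                user_location=user.get("location"),
            )
        if metadata:
            variables = metadata.get("variables", {})
            if variables:
                system = prompt_variables_template(system, variables)
        return system


    if __name__ == "__main__":
        prompt = "Assist {{USER_NAME}} ({{USER_LOCATION}}). Today is {{CURRENT_DATE}}."
        print(
            render_system_prompt(
                prompt,
                metadata={"variables": {"{{CURRENT_DATE}}": "2025-02-05"}},
                user={"name": "Ada", "location": "Zurich"},
            )
        )
        # -> Assist Ada (Zurich). Today is 2025-02-05.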