refac: prompt variables

This commit is contained in:
Timothy Jaeryang Baek
2025-01-29 21:56:51 -08:00
parent d6c82939e6
commit cc99673906
8 changed files with 49 additions and 18 deletions

View File

@@ -749,6 +749,8 @@ async def process_chat_payload(request, form_data, metadata, user, model):
files.extend(knowledge_files)
form_data["files"] = files
variables = form_data.pop("variables", None)
features = form_data.pop("features", None)
if features:
if "web_search" in features and features["web_search"]:

View File

@@ -1,4 +1,4 @@
from open_webui.utils.task import prompt_template
from open_webui.utils.task import prompt_variables_template
from open_webui.utils.misc import (
add_or_update_system_message,
)
@@ -7,19 +7,18 @@ from typing import Callable, Optional
# inplace function: form_data is modified
def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> dict:
def apply_model_system_prompt_to_body(
params: dict, form_data: dict, metadata: Optional[dict] = None
) -> dict:
system = params.get("system", None)
if not system:
return form_data
if user:
template_params = {
"user_name": user.name,
"user_location": user.info.get("location") if user.info else None,
}
else:
template_params = {}
system = prompt_template(system, **template_params)
if metadata:
print("apply_model_system_prompt_to_body: metadata", metadata)
variables = metadata.get("variables", {})
system = prompt_variables_template(system, variables)
form_data["messages"] = add_or_update_system_message(
system, form_data.get("messages", [])
)
@@ -188,4 +187,7 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
if ollama_options:
ollama_payload["options"] = ollama_options
if "metadata" in openai_payload:
ollama_payload["metadata"] = openai_payload["metadata"]
return ollama_payload

View File

@@ -32,6 +32,12 @@ def get_task_model_id(
return task_model_id
def prompt_variables_template(template: str, variables: dict[str, str]) -> str:
    """Return *template* with every occurrence of each variable key replaced by its value.

    Keys are matched as literal substrings (callers pass keys already wrapped,
    e.g. "{{USER_NAME}}"), not as regex patterns. Replacements are applied
    sequentially in dict iteration order, so a later key can in principle match
    text produced by an earlier replacement.

    :param template: the prompt text to substitute into.
    :param variables: mapping of literal placeholder -> replacement value.
    :return: the substituted template string.
    """
    for variable, value in variables.items():
        # Values originate from request metadata (arbitrary client JSON), so
        # coerce to str to avoid a TypeError on non-string values (numbers, None).
        template = template.replace(variable, str(value))
    return template
def prompt_template(
template: str, user_name: Optional[str] = None, user_location: Optional[str] = None
) -> str: