mirror of
https://github.com/open-webui/open-webui
synced 2025-06-26 18:26:48 +00:00
refac: prompt variables
This commit is contained in:
@@ -875,6 +875,7 @@ async def chat_completion(
|
||||
"tool_ids": form_data.get("tool_ids", None),
|
||||
"files": form_data.get("files", None),
|
||||
"features": form_data.get("features", None),
|
||||
"variables": form_data.get("variables", None),
|
||||
}
|
||||
form_data["metadata"] = metadata
|
||||
|
||||
|
||||
@@ -977,6 +977,7 @@ async def generate_chat_completion(
|
||||
if BYPASS_MODEL_ACCESS_CONTROL:
|
||||
bypass_filter = True
|
||||
|
||||
metadata = form_data.pop("metadata", None)
|
||||
try:
|
||||
form_data = GenerateChatCompletionForm(**form_data)
|
||||
except Exception as e:
|
||||
@@ -987,8 +988,6 @@ async def generate_chat_completion(
|
||||
)
|
||||
|
||||
payload = {**form_data.model_dump(exclude_none=True)}
|
||||
if "metadata" in payload:
|
||||
del payload["metadata"]
|
||||
|
||||
model_id = payload["model"]
|
||||
model_info = Models.get_model_by_id(model_id)
|
||||
@@ -1006,7 +1005,7 @@ async def generate_chat_completion(
|
||||
payload["options"] = apply_model_params_to_body_ollama(
|
||||
params, payload["options"]
|
||||
)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, user)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, metadata)
|
||||
|
||||
# Check if user has access to the model
|
||||
if not bypass_filter and user.role == "user":
|
||||
|
||||
@@ -551,9 +551,9 @@ async def generate_chat_completion(
|
||||
bypass_filter = True
|
||||
|
||||
idx = 0
|
||||
|
||||
payload = {**form_data}
|
||||
if "metadata" in payload:
|
||||
del payload["metadata"]
|
||||
metadata = payload.pop("metadata", None)
|
||||
|
||||
model_id = form_data.get("model")
|
||||
model_info = Models.get_model_by_id(model_id)
|
||||
@@ -566,7 +566,7 @@ async def generate_chat_completion(
|
||||
|
||||
params = model_info.params.model_dump()
|
||||
payload = apply_model_params_to_body_openai(params, payload)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, user)
|
||||
payload = apply_model_system_prompt_to_body(params, payload, metadata)
|
||||
|
||||
# Check if user has access to the model
|
||||
if not bypass_filter and user.role == "user":
|
||||
|
||||
@@ -749,6 +749,8 @@ async def process_chat_payload(request, form_data, metadata, user, model):
|
||||
files.extend(knowledge_files)
|
||||
form_data["files"] = files
|
||||
|
||||
variables = form_data.pop("variables", None)
|
||||
|
||||
features = form_data.pop("features", None)
|
||||
if features:
|
||||
if "web_search" in features and features["web_search"]:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from open_webui.utils.task import prompt_template
|
||||
from open_webui.utils.task import prompt_variables_template
|
||||
from open_webui.utils.misc import (
|
||||
add_or_update_system_message,
|
||||
)
|
||||
@@ -7,19 +7,18 @@ from typing import Callable, Optional
|
||||
|
||||
|
||||
# inplace function: form_data is modified
|
||||
def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> dict:
|
||||
def apply_model_system_prompt_to_body(
|
||||
params: dict, form_data: dict, metadata: Optional[dict] = None
|
||||
) -> dict:
|
||||
system = params.get("system", None)
|
||||
if not system:
|
||||
return form_data
|
||||
|
||||
if user:
|
||||
template_params = {
|
||||
"user_name": user.name,
|
||||
"user_location": user.info.get("location") if user.info else None,
|
||||
}
|
||||
else:
|
||||
template_params = {}
|
||||
system = prompt_template(system, **template_params)
|
||||
if metadata:
|
||||
print("apply_model_system_prompt_to_body: metadata", metadata)
|
||||
variables = metadata.get("variables", {})
|
||||
system = prompt_variables_template(system, variables)
|
||||
|
||||
form_data["messages"] = add_or_update_system_message(
|
||||
system, form_data.get("messages", [])
|
||||
)
|
||||
@@ -188,4 +187,7 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
|
||||
if ollama_options:
|
||||
ollama_payload["options"] = ollama_options
|
||||
|
||||
if "metadata" in openai_payload:
|
||||
ollama_payload["metadata"] = openai_payload["metadata"]
|
||||
|
||||
return ollama_payload
|
||||
|
||||
@@ -32,6 +32,12 @@ def get_task_model_id(
|
||||
return task_model_id
|
||||
|
||||
|
||||
def prompt_variables_template(template: str, variables: dict[str, str]) -> str:
    """Return *template* with every variable occurrence replaced by its value.

    Each key in *variables* is matched literally via ``str.replace`` (callers
    are expected to pass fully-delimited keys such as ``{{name}}``), so an
    empty mapping leaves the template untouched.
    """
    result = template
    for placeholder, replacement in variables.items():
        result = result.replace(placeholder, replacement)
    return result
|
||||
|
||||
|
||||
def prompt_template(
|
||||
template: str, user_name: Optional[str] = None, user_location: Optional[str] = None
|
||||
) -> str:
|
||||
|
||||
Reference in New Issue
Block a user