From 57b01cf8fbbdd4a86f62ea909ca3cfe74f9c3714 Mon Sep 17 00:00:00 2001 From: ferret99gt Date: Wed, 19 Feb 2025 08:55:11 -0500 Subject: [PATCH] Fix for system prompt setting 1) Ollama supports sending the system prompt as a parameter, not as an option. (See https://github.com/ollama/ollama/blob/main/docs/api.md#request-8) However, it is in the options dictionary and needs to be moved to the payload dictionary. 2) After moving the system parameter from ollama_options to ollama_payload, delete it from ollama_options. This is to prevent Ollama throwing a warning about invalid options. --- backend/open_webui/utils/payload.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/backend/open_webui/utils/payload.py b/backend/open_webui/utils/payload.py index 8d0a27f2b..e1f62a93b 100644 --- a/backend/open_webui/utils/payload.py +++ b/backend/open_webui/utils/payload.py @@ -182,7 +182,12 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict: # Re-Mapping OpenAI's `max_tokens` -> Ollama's `num_predict` if "max_tokens" in ollama_options: ollama_options["num_predict"] = ollama_options["max_tokens"] - del ollama_options["max_tokens"] # To prevent Ollama warning of invalid option provided + del ollama_options["max_tokens"] # To prevent Ollama warning of invalid option provided + + # Ollama lacks a "system" prompt option. It has to be provided as a direct parameter, so we copy it down. + if "system" in ollama_options: + ollama_payload["system"] = ollama_options["system"] + del ollama_options["system"] # To prevent Ollama warning of invalid option provided # Add options to payload if any have been set if ollama_options: