From 53448b3f0addff177f9bf704dd5b1d5443ea3d74 Mon Sep 17 00:00:00 2001
From: foraxe <1055696449@qq.com>
Date: Sat, 15 Mar 2025 00:08:05 +0800
Subject: [PATCH] Fix: Promote keep_alive to top-level in payload for Ollama
 API on dev branch

---
 backend/open_webui/utils/payload.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/backend/open_webui/utils/payload.py b/backend/open_webui/utils/payload.py
index 46656cc82..14fae4589 100644
--- a/backend/open_webui/utils/payload.py
+++ b/backend/open_webui/utils/payload.py
@@ -110,6 +110,11 @@ def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
         "num_thread": int,
     }
 
+    # Extract keep_alive from options if it exists
+    if "options" in form_data and "keep_alive" in form_data["options"]:
+        form_data["keep_alive"] = form_data["options"]["keep_alive"]
+        del form_data["options"]["keep_alive"]
+
     return apply_model_params_to_body(params, form_data, mappings)
 
 
@@ -231,6 +236,11 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
             "system"
         ]  # To prevent Ollama warning of invalid option provided
 
+        # Extract keep_alive from options if it exists
+        if "keep_alive" in ollama_options:
+            ollama_payload["keep_alive"] = ollama_options["keep_alive"]
+            del ollama_options["keep_alive"]
+
     # If there is the "stop" parameter in the openai_payload, remap it to the ollama_payload.options
     if "stop" in openai_payload:
         ollama_options = ollama_payload.get("options", {})
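
Illustrative note (not part of the patch): a minimal standalone sketch of the transformation this change performs, using an assumed payload shape. Ollama expects "keep_alive" as a top-level request field rather than inside "options", so the patch hoists it before the payload is sent.

    # Standalone sketch; the dict contents below are assumed example values.
    form_data = {"options": {"keep_alive": "5m", "num_ctx": 4096}}

    # Promote keep_alive from options to the top level, mirroring the patch.
    if "options" in form_data and "keep_alive" in form_data["options"]:
        form_data["keep_alive"] = form_data["options"].pop("keep_alive")

    print(form_data)
    # -> {'options': {'num_ctx': 4096}, 'keep_alive': '5m'}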