diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py
index 490e8bfbb..ea46a1cca 100644
--- a/backend/open_webui/routers/ollama.py
+++ b/backend/open_webui/routers/ollama.py
@@ -1290,11 +1290,7 @@ async def generate_chat_completion(
         if params:
             system = params.pop("system", None)
 
-            # Unlike OpenAI, Ollama does not support params directly in the body
-            payload["options"] = apply_model_params_to_body_ollama(
-                params, (payload.get("options", {}) or {})
-            )
-
+            payload = apply_model_params_to_body_ollama(params, payload)
             payload = apply_model_system_prompt_to_body(system, payload, metadata, user)
 
     # Check if user has access to the model
diff --git a/backend/open_webui/utils/payload.py b/backend/open_webui/utils/payload.py
index af4995d1e..9b7f74835 100644
--- a/backend/open_webui/utils/payload.py
+++ b/backend/open_webui/utils/payload.py
@@ -196,7 +196,11 @@ def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
             form_data[key] = value(param)
             del params[key]
 
-    return apply_model_params_to_body(params, form_data, mappings)
+    # Unlike OpenAI, Ollama does not support params directly in the body
+    form_data["options"] = apply_model_params_to_body(
+        params, (form_data.get("options", {}) or {}), mappings
+    )
+    return form_data
 
 
 def convert_messages_openai_to_ollama(messages: list[dict]) -> list[dict]: