diff --git a/backend/open_webui/main.py b/backend/open_webui/main.py
index 885a9828d..b20bc5bce 100644
--- a/backend/open_webui/main.py
+++ b/backend/open_webui/main.py
@@ -578,11 +578,11 @@ async def handle_nonstreaming_response(request: Request, response: Response,
                     tool_function_name = tool_call["function"]["name"]
                     if not tool_call["function"]["arguments"]:
                         tool_function_params = {}
-                    else: 
+                    else:
                         if is_openai:
-                            tool_function_params = json.loads(tool_call["function"]["arguments"]) 
+                            tool_function_params = json.loads(tool_call["function"]["arguments"])
                         if is_ollama:
-                            tool_function_params = tool_call["function"]["arguments"] 
+                            tool_function_params = tool_call["function"]["arguments"]
 
                     try:
                         tool_output = await tools[tool_function_name]["callable"](**tool_function_params)
diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
index 002d70329..c3ef3df8c 100644
--- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
+++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
@@ -12,7 +12,7 @@
 	export let params = {
 		// Advanced
 		stream_response: null, // Set stream responses for this model individually
-		native_tool_call: null, 
+		native_tool_call: null,
 		seed: null,
 		stop: null,
 		temperature: null,