diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py
index 82cd8d383..d406f0670 100644
--- a/backend/apps/ollama/main.py
+++ b/backend/apps/ollama/main.py
@@ -764,7 +764,7 @@ async def generate_chat_completion(
                     "frequency_penalty", None
                 )
 
-            if model_info.params.get("temperature", None):
+            if model_info.params.get("temperature", None) is not None:
                 payload["options"]["temperature"] = model_info.params.get(
                     "temperature", None
                 )
diff --git a/backend/apps/openai/main.py b/backend/apps/openai/main.py
index 472699f1d..ab24b4113 100644
--- a/backend/apps/openai/main.py
+++ b/backend/apps/openai/main.py
@@ -373,8 +373,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
 
             model_info.params = model_info.params.model_dump()
 
             if model_info.params:
-                if model_info.params.get("temperature", None):
-                    payload["temperature"] = int(
+                if model_info.params.get("temperature", None) is not None:
+                    payload["temperature"] = float(
                         model_info.params.get("temperature")
                     )