diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py
index 0a36d4c2b..0e4d54e46 100644
--- a/backend/apps/ollama/main.py
+++ b/backend/apps/ollama/main.py
@@ -805,7 +805,7 @@ async def generate_chat_completion(
         )
 
         if (
-            model_info.params.get("temperature", None)
+            model_info.params.get("temperature", None) is not None
             and payload["options"].get("temperature") is None
         ):
             payload["options"]["temperature"] = model_info.params.get(
@@ -813,7 +813,7 @@ async def generate_chat_completion(
             )
 
         if (
-            model_info.params.get("seed", None)
+            model_info.params.get("seed", None) is not None
             and payload["options"].get("seed") is None
         ):
             payload["options"]["seed"] = model_info.params.get("seed", None)
diff --git a/backend/apps/openai/main.py b/backend/apps/openai/main.py
index 6c2906095..cfdb55eee 100644
--- a/backend/apps/openai/main.py
+++ b/backend/apps/openai/main.py
@@ -372,7 +372,7 @@ async def generate_chat_completion(
 
     if model_info.params:
         if (
-            model_info.params.get("temperature", None)
+            model_info.params.get("temperature", None) is not None
             and payload.get("temperature") is None
         ):
             payload["temperature"] = float(model_info.params.get("temperature"))
@@ -394,7 +394,10 @@ async def generate_chat_completion(
                 model_info.params.get("frequency_penalty", None)
             )
 
-        if model_info.params.get("seed", None) and payload.get("seed") is None:
+        if (
+            model_info.params.get("seed", None) is not None
+            and payload.get("seed") is None
+        ):
             payload["seed"] = model_info.params.get("seed", None)
 
         if model_info.params.get("stop", None) and payload.get("stop") is None: