Merge pull request #2943 from choltha/fix/temperature-params

fix: temperature from model settings not passed correctly to ollama/openai api
This commit is contained in:
Timothy Jaeryang Baek 2024-06-09 01:56:18 -07:00 committed by GitHub
commit 8c95a8be3a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 3 additions and 3 deletions

View File

@@ -764,7 +764,7 @@ async def generate_chat_completion(
"frequency_penalty", None "frequency_penalty", None
) )
if model_info.params.get("temperature", None): if model_info.params.get("temperature", None) is not None:
payload["options"]["temperature"] = model_info.params.get( payload["options"]["temperature"] = model_info.params.get(
"temperature", None "temperature", None
) )

View File

@@ -373,8 +373,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
model_info.params = model_info.params.model_dump() model_info.params = model_info.params.model_dump()
if model_info.params: if model_info.params:
if model_info.params.get("temperature", None): if model_info.params.get("temperature", None) is not None:
payload["temperature"] = int( payload["temperature"] = float(
model_info.params.get("temperature") model_info.params.get("temperature")
) )