Merge pull request #5710 from kivvi3412/fix_o1_max_tokens

Fix: o1 models require the max_completion_tokens parameter instead of max_tokens
Timothy Jaeryang Baek 2024-09-26 03:03:15 +02:00 committed by GitHub
commit cc19b8049a


@@ -415,6 +415,9 @@ async def generate_chat_completion(
payload["max_tokens"] = payload["max_completion_tokens"]
del payload["max_completion_tokens"]
else:
if payload["model"].lower().startswith("o1-") and "max_tokens" in payload:
payload["max_completion_tokens"] = payload["max_tokens"]
del payload["max_tokens"]
if "max_tokens" in payload and "max_completion_tokens" in payload:
del payload["max_tokens"]