Merge pull request #9471 from hurxxxx/fix/o3_not_support_max_tokens

fix: o3 does not support max_tokens parameter
commit 10e2b3c055
Timothy Jaeryang Baek, 2025-02-06 00:31:48 -08:00 (committed by GitHub)


@@ -75,9 +75,9 @@ async def cleanup_response(
         await session.close()
 
 
-def openai_o1_handler(payload):
+def openai_o1_o3_handler(payload):
     """
-    Handle O1 specific parameters
+    Handle o1, o3 specific parameters
     """
     if "max_tokens" in payload:
         # Remove "max_tokens" from the payload
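The hunk above cuts off before the end of the handler body. As a minimal sketch, assuming the renamed openai_o1_o3_handler only performs the rename described in the comment in the next hunk (any further adjustments are outside this diff), it would look like:

def openai_o1_o3_handler(payload):
    """
    Handle o1, o3 specific parameters
    """
    if "max_tokens" in payload:
        # Remove "max_tokens" from the payload and carry its value over to
        # "max_completion_tokens", which the o-series reasoning models accept.
        # Assumption: the real handler may do more; only the rename is
        # confirmed by the comment shown in the second hunk.
        payload["max_completion_tokens"] = payload.pop("max_tokens")
    return payload

Returning the payload matches the call site in the next hunk, where the result is assigned back to payload.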
@@ -621,10 +621,10 @@ async def generate_chat_completion(
     url = request.app.state.config.OPENAI_API_BASE_URLS[idx]
     key = request.app.state.config.OPENAI_API_KEYS[idx]
 
-    # Fix: O1 does not support the "max_tokens" parameter, Modify "max_tokens" to "max_completion_tokens"
-    is_o1 = payload["model"].lower().startswith("o1-")
-    if is_o1:
-        payload = openai_o1_handler(payload)
+    # Fix: o1,o3 does not support the "max_tokens" parameter, Modify "max_tokens" to "max_completion_tokens"
+    is_o1_o3 = payload["model"].lower().startswith(("o1", "o3-"))
+    if is_o1_o3:
+        payload = openai_o1_o3_handler(payload)
     elif "api.openai.com" not in url:
         # Remove "max_completion_tokens" from the payload for backward compatibility
         if "max_completion_tokens" in payload: