From 60afd6ecddf4eae0808f193a6c146eb378ba076b Mon Sep 17 00:00:00 2001
From: Aditya Pratap Singh
Date: Sat, 20 Jan 2024 04:34:47 +0530
Subject: [PATCH] Add workaround for gpt-4-vision-preview model that supports
 4k tokens

---
 backend/apps/openai/main.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/apps/openai/main.py b/backend/apps/openai/main.py
index cbf3043a6..1544949e5 100644
--- a/backend/apps/openai/main.py
+++ b/backend/apps/openai/main.py
@@ -99,10 +99,10 @@ async def proxy(path: str, request: Request, user=Depends(get_current_user)):
         print("Error loading request body into a dictionary:", e)
         raise HTTPException(status_code=400, detail="Invalid JSON in request body")
 
-    # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 10000
+    # Check if the model is "gpt-4-vision-preview" and set "max_tokens" to 4000
     # This is a workaround until OpenAI fixes the issue with this model
     if body_dict.get("model") == "gpt-4-vision-preview":
-        body_dict["max_tokens"] = 10000
+        body_dict["max_tokens"] = 4000
         print("Modified body_dict:", body_dict)
 
     # Try to convert the modified body back to JSON
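
For reference only (not part of the patch): a minimal, self-contained sketch of the body rewrite this change performs in the proxy. The helper name cap_vision_max_tokens and the example payload are illustrative assumptions, not code from the repository.

import json

# Illustrative helper (hypothetical name): gpt-4-vision-preview accepts at most
# ~4k completion tokens, so the proxied request body is rewritten to cap
# "max_tokens" at 4000 before it is forwarded to OpenAI.
def cap_vision_max_tokens(raw_body: bytes) -> bytes:
    body_dict = json.loads(raw_body.decode("utf-8"))
    if body_dict.get("model") == "gpt-4-vision-preview":
        body_dict["max_tokens"] = 4000
    return json.dumps(body_dict).encode("utf-8")

# Example: a request asking for 10000 tokens is forwarded with 4000 instead.
print(cap_vision_max_tokens(b'{"model": "gpt-4-vision-preview", "max_tokens": 10000}'))
# b'{"model": "gpt-4-vision-preview", "max_tokens": 4000}'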