remove max_tokens from the official version of gpt4-turbo

l.tingting 2024-04-24 22:59:14 +08:00
parent 9b2cb1e1c3
commit dd4648ed9a
1 changed file with 1 addition and 1 deletion


@@ -129,7 +129,7 @@ export class ChatGPTApi implements LLMApi {
       };

       // add max_tokens to vision model
-      if (visionModel) {
+      if (visionModel && modelConfig.model.includes("preview")) {
         requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
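
The net effect of the one-line change: max_tokens is now only forced for the preview vision model (e.g. gpt-4-vision-preview), so requests to the official gpt-4-turbo keep the API's default. A minimal TypeScript sketch of that behavior, assuming a simplified ModelConfig shape and a hypothetical buildPayload helper; only the condition and the max_tokens assignment come from the diff above:

// Sketch only: ModelConfig and buildPayload are illustrative assumptions,
// not the project's actual types or API.
interface ModelConfig {
  model: string;
  max_tokens: number;
}

function buildPayload(visionModel: boolean, modelConfig: ModelConfig) {
  const requestPayload: Record<string, unknown> = {
    model: modelConfig.model,
  };

  // After this commit, max_tokens is only forced for the preview vision
  // model; the official gpt-4-turbo request is left untouched.
  if (visionModel && modelConfig.model.includes("preview")) {
    requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
  }

  return requestPayload;
}

// The preview vision model gets max_tokens >= 4000; gpt-4-turbo does not.
console.log(buildPayload(true, { model: "gpt-4-vision-preview", max_tokens: 1024 }));
console.log(buildPayload(true, { model: "gpt-4-turbo", max_tokens: 1024 }));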