fix: #3192 use smaller max_tokens as default

Yifei Zhang 2023-11-09 11:00:09 +08:00 committed by GitHub
parent d0a1d910d4
commit 87e3d663a2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 1 addition and 1 deletion


@@ -49,7 +49,7 @@ export const DEFAULT_CONFIG = {
   model: "gpt-3.5-turbo" as ModelType,
   temperature: 0.5,
   top_p: 1,
-  max_tokens: 8192,
+  max_tokens: 4000,
   presence_penalty: 0,
   frequency_penalty: 0,
   sendMemory: true,
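
The diff only records the value change. As a rough illustration of why a smaller default is safer (this is a hypothetical sketch, not code from this repository, and the clamping helper and context-window table are assumptions): gpt-3.5-turbo's base context window is 4096 tokens, so a default of 8192 can yield requests the API rejects, while 4000 leaves headroom.

// Hypothetical TypeScript sketch; names and values are illustrative only.
type ModelType = "gpt-3.5-turbo" | "gpt-4";

// Assumed context-window sizes for illustration.
const CONTEXT_WINDOW: Record<ModelType, number> = {
  "gpt-3.5-turbo": 4096,
  "gpt-4": 8192,
};

const DEFAULT_CONFIG = {
  model: "gpt-3.5-turbo" as ModelType,
  temperature: 0.5,
  top_p: 1,
  max_tokens: 4000, // smaller default introduced by this commit
  presence_penalty: 0,
  frequency_penalty: 0,
  sendMemory: true,
};

// Clamp the configured max_tokens so a request never exceeds the
// model's context window (hypothetical helper).
function effectiveMaxTokens(config: typeof DEFAULT_CONFIG): number {
  return Math.min(config.max_tokens, CONTEXT_WINDOW[config.model]);
}

console.log(effectiveMaxTokens(DEFAULT_CONFIG)); // 4000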