diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index c698698ce..2ed489e45 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -952,7 +952,7 @@
 
 			await tick();
 
-			const stream = $settings?.streamResponse ?? true;
+			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
 			const [res, controller] = await generateChatCompletion(localStorage.token, {
 				stream: stream,
 				model: model.id,
@@ -1248,7 +1248,7 @@
 			await tick();
 
 			try {
-				const stream = $settings?.streamResponse ?? true;
+				const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
 				const [res, controller] = await generateOpenAIChatCompletion(
 					localStorage.token,
 					{
diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
index 198b000b6..9766d3458 100644
--- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
+++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
@@ -10,6 +10,7 @@
 
 	export let params = {
 		// Advanced
+		stream_response: null, // Set stream responses for this model individually
 		seed: null,
 		stop: null,
 		temperature: null,
@@ -42,6 +43,30 @@
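
For reference, a minimal TypeScript sketch of the precedence this change introduces in both chat completion paths: the per-model `stream_response` param, when set, wins over the user-level `streamResponse` setting, which in turn defaults to `true`. The type and function names here are hypothetical, standing in for the Svelte component state.

```ts
// Hypothetical types approximating the shapes used in Chat.svelte.
type ModelParams = { stream_response?: boolean | null };
type Model = { info?: { params?: ModelParams } };
type Settings = { streamResponse?: boolean };

// Resolves whether to stream the response for a given model and user settings.
function resolveStream(model?: Model, settings?: Settings): boolean {
	// `??` only falls through on null/undefined, so an explicit `false`
	// at either level is respected rather than overridden.
	return model?.info?.params?.stream_response ?? settings?.streamResponse ?? true;
}

// resolveStream({ info: { params: { stream_response: false } } }, { streamResponse: true }) === false
// resolveStream({}, { streamResponse: false }) === false
// resolveStream() === true
```

Because the new `stream_response` param defaults to `null` in `AdvancedParams.svelte`, models that never touch the setting fall through to the existing user preference, keeping prior behavior unchanged.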