diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index b6a90a953..45577520a 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -912,7 +912,7 @@
 			await tick();
 
-			const stream = $settings?.streamResponse ?? true;
+			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
 			const [res, controller] = await generateChatCompletion(localStorage.token, {
 				stream: stream,
 				model: model.id,
@@ -1206,7 +1206,7 @@
 		await tick();
 
 		try {
-			const stream = $settings?.streamResponse ?? true;
+			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
index d1b566b63..8f3a3973b 100644
--- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
+++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
@@ -10,6 +10,7 @@
 	export let params = {
 		// Advanced
+		stream_response: null, // Set stream responses for this model individually
 		seed: null,
 		stop: null,
 		temperature: null,
@@ -42,6 +43,30 @@
+
+
+
+				{$i18n.t('Stream Chat Response')}
+
+
+
+
{$i18n.t('Seed')}
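
The two Chat.svelte hunks change how the effective streaming flag is resolved: a per-model `stream_response` param now takes precedence over the user-level `streamResponse` setting, which in turn falls back to `true`. A minimal sketch of that precedence, using hypothetical helper names (`resolveStream`, `ModelInfo`, `Settings`) that are not part of the diff, where the diff simply inlines the expression:

```ts
// Sketch of the precedence introduced above: model param -> user setting -> default true.
type ModelInfo = { info?: { params?: { stream_response?: boolean | null } } };
type Settings = { streamResponse?: boolean };

function resolveStream(model: ModelInfo, settings: Settings): boolean {
	// `??` only skips null/undefined, so an explicit model-level `false` still
	// overrides a user setting of `true`; the model param wins in both directions.
	return model?.info?.params?.stream_response ?? settings?.streamResponse ?? true;
}

// Example: the model forces non-streaming even though the user enabled streaming globally.
console.log(resolveStream({ info: { params: { stream_response: false } } }, { streamResponse: true })); // false
console.log(resolveStream({}, {})); // true (default)
```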
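The second AdvancedParams.svelte hunk adds the matching UI control, but most of its markup did not survive above; only the `{$i18n.t('Stream Chat Response')}` label remains. Below is a minimal, self-contained sketch of a three-state toggle (Default / On / Off) in the style this file uses for its other parameters; the class names, labels, and cycling logic are assumptions, not the literal diff content.

```svelte
<script lang="ts">
	// Sketch only: in the real component `params` is the exported advanced-params object
	// and the labels go through $i18n.t(); plain strings keep this snippet self-contained.
	export let params: { stream_response: boolean | null } = { stream_response: null };
</script>

<div class="py-0.5 flex w-full justify-between">
	<div class="self-center text-xs font-medium">Stream Chat Response</div>

	<button
		class="p-1 px-3 text-xs flex rounded transition"
		type="button"
		on:click={() => {
			// Cycle: null (follow the user-level setting) -> true (always stream) -> false (never stream) -> null
			params.stream_response =
				(params.stream_response ?? null) === null ? true : params.stream_response ? false : null;
		}}
	>
		{#if params.stream_response === true}
			On
		{:else if params.stream_response === false}
			Off
		{:else}
			Default
		{/if}
	</button>
</div>
```

Leaving the per-model value at `null` keeps today's behaviour (the user setting decides), so existing models are unaffected unless the toggle is explicitly set.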