From 0d01ea5f2fa3f5692ed94a88a7ab9b67a61bf62c Mon Sep 17 00:00:00 2001
From: kivvi
Date: Tue, 24 Sep 2024 11:51:47 +0800
Subject: [PATCH] Set whether to stream responses for this model individually,
 overriding the global setting

---
 src/lib/components/chat/Chat.svelte              |  2 +-
 .../Settings/Advanced/AdvancedParams.svelte      | 25 +++++++++++++++++++
 2 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index b6a90a953..54b24dfec 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -1206,7 +1206,7 @@
 		await tick();
 
 		try {
-			const stream = $settings?.streamResponse ?? true;
+			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
index d1b566b63..8f3a3973b 100644
--- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
+++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
@@ -10,6 +10,7 @@
 	export let params = {
 		// Advanced
+		stream_response: null, // Set stream responses for this model individually
 		seed: null,
 		stop: null,
 		temperature: null,
@@ -42,6 +43,30 @@
[24 added lines: a per-model 'Stream Chat Response' toggle control; the element markup was stripped during extraction, and only the label {$i18n.t('Stream Chat Response')} survives. A reconstruction sketch follows the patch.]
 	{$i18n.t('Seed')}
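
The second AdvancedParams.svelte hunk adds a per-model 'Stream Chat Response' control next to the existing advanced parameters. Below is a minimal sketch, assuming a three-state toggle (Default / On / Off) and relying on the component's existing `params` prop and `$i18n` store; the element structure, class names, and button labels here are assumptions for illustration, not the patch's actual markup. A value of null leaves `params.stream_response` unset, true forces streaming on, and false forces it off for this model.

<!-- Sketch only: assumed markup for a three-state stream_response toggle -->
<div class="py-0.5 flex w-full justify-between">
	<div class="self-center text-xs font-medium">
		{$i18n.t('Stream Chat Response')}
	</div>

	<button
		class="p-1 px-3 text-xs flex rounded transition"
		type="button"
		on:click={() => {
			// Cycle: null (follow global setting) -> true (always stream) -> false (never stream) -> null
			params.stream_response =
				(params?.stream_response ?? null) === null
					? true
					: params.stream_response
						? false
						: null;
		}}
	>
		{#if params.stream_response === true}
			<span class="ml-2 self-center">{$i18n.t('On')}</span>
		{:else if params.stream_response === false}
			<span class="ml-2 self-center">{$i18n.t('Off')}</span>
		{:else}
			<span class="ml-2 self-center">{$i18n.t('Default')}</span>
		{/if}
	</button>
</div>

With a control like this, leaving it on 'Default' keeps stream_response at null, so the new fallback chain in Chat.svelte (model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true) resolves to the user's global setting, and ultimately to streaming enabled.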