From 6739983cf1f39ede84b38cce78e90653ea0e9b73 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Tue, 24 Sep 2024 12:49:35 +0200
Subject: [PATCH] refac: deprecate interface "stream response" settings for advanced params

---
 src/lib/components/chat/Chat.svelte            | 13 ++++++--
 .../Settings/Advanced/AdvancedParams.svelte    |  2 +-
 .../components/chat/Settings/General.svelte    |  2 ++
 .../components/chat/Settings/Interface.svelte  | 30 -------------------
 4 files changed, 14 insertions(+), 33 deletions(-)

diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte
index 2ed489e45..4e521f789 100644
--- a/src/lib/components/chat/Chat.svelte
+++ b/src/lib/components/chat/Chat.svelte
@@ -952,7 +952,11 @@
 		await tick();
 
-		const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
+		const stream =
+			model?.info?.params?.stream_response ??
+			$settings?.params?.stream_response ??
+			params?.stream_response ??
+			true;
 		const [res, controller] = await generateChatCompletion(localStorage.token, {
 			stream: stream,
 			model: model.id,
@@ -1248,7 +1252,12 @@
 		await tick();
 
 		try {
-			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
+			const stream =
+				model?.info?.params?.stream_response ??
+				$settings?.params?.stream_response ??
+				params?.stream_response ??
+				true;
+
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
index df897ec0e..95b27b889 100644
--- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
+++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte
@@ -45,7 +45,7 @@
-
+
{$i18n.t('Stream Chat Response')}
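For reference, the two Chat.svelte hunks above share the same fallback chain for deciding whether a completion should stream, and the "Stream Chat Response" control in AdvancedParams.svelte above presumably drives the per-chat params.stream_response value. A minimal TypeScript sketch of that precedence; resolveStream and StreamSources are illustrative names that do not exist in the codebase, which reads the three values directly from model, $settings, and the local params object:

// Sketch of the resolution order for the streaming flag (names of the sources
// are taken from the diff; the wrapper function is an assumption).
type StreamPreference = boolean | null | undefined;

interface StreamSources {
	modelParam: StreamPreference;    // model?.info?.params?.stream_response
	settingsParam: StreamPreference; // $settings?.params?.stream_response
	chatParam: StreamPreference;     // params?.stream_response (per-chat advanced params)
}

// Per-model value wins, then the user's saved advanced params, then the
// per-chat override; streaming defaults to true when nothing is set.
const resolveStream = ({ modelParam, settingsParam, chatParam }: StreamSources): boolean =>
	modelParam ?? settingsParam ?? chatParam ?? true;

// Example: nothing set on the model or in settings, chat turns streaming off.
console.log(resolveStream({ modelParam: null, settingsParam: null, chatParam: false })); // false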
diff --git a/src/lib/components/chat/Settings/General.svelte b/src/lib/components/chat/Settings/General.svelte
index c308b9005..ee04deeb1 100644
--- a/src/lib/components/chat/Settings/General.svelte
+++ b/src/lib/components/chat/Settings/General.svelte
@@ -45,6 +45,7 @@
 	let params = {
 		// Advanced
+		stream_response: null,
 		seed: null,
 		temperature: null,
 		frequency_penalty: null,
@@ -327,6 +328,7 @@
 		saveSettings({
 			system: system !== '' ? system : undefined,
 			params: {
+				stream_response: params.stream_response !== null ? params.stream_response : undefined,
 				seed: (params.seed !== null ? params.seed : undefined) ?? undefined,
 				stop: params.stop ? params.stop.split(',').filter((e) => e) : undefined,
 				temperature: params.temperature !== null ? params.temperature : undefined,
diff --git a/src/lib/components/chat/Settings/Interface.svelte b/src/lib/components/chat/Settings/Interface.svelte
index 139474c8a..58de9be4c 100644
--- a/src/lib/components/chat/Settings/Interface.svelte
+++ b/src/lib/components/chat/Settings/Interface.svelte
@@ -36,18 +36,11 @@
 	let voiceInterruption = false;
 	let hapticFeedback = false;
-	let streamResponse = true;
-
 	const toggleSplitLargeChunks = async () => {
 		splitLargeChunks = !splitLargeChunks;
 		saveSettings({ splitLargeChunks: splitLargeChunks });
 	};
-	const toggleStreamResponse = async () => {
-		streamResponse = !streamResponse;
-		saveSettings({ streamResponse: streamResponse });
-	};
-
 	const togglesScrollOnBranchChange = async () => {
 		scrollOnBranchChange = !scrollOnBranchChange;
 		saveSettings({ scrollOnBranchChange: scrollOnBranchChange });
 	};
@@ -165,7 +158,6 @@
 		userLocation = $settings.userLocation ?? false;
 		hapticFeedback = $settings.hapticFeedback ?? false;
-		streamResponse = $settings?.streamResponse ?? true;
 		defaultModelId = $settings?.models?.at(0) ?? '';
 		if ($config?.default_models) {
@@ -319,28 +311,6 @@
-
-
-
- {$i18n.t('Stream Chat Response')} -
- - -
-
-
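With the Interface.svelte toggle removed, the legacy top-level $settings.streamResponse flag is no longer read anywhere; the preference is now persisted under $settings.params.stream_response by the General settings form and can still take a per-model value via model.info.params.stream_response. Below is a hedged sketch of how a client could carry an already-saved legacy flag over to the new location; the Settings shape and migrateStreamSetting are assumptions for illustration and are not part of this patch:

// The old Interface toggle stored a top-level `streamResponse` boolean; after this
// patch only `params.stream_response` is consulted. This migration helper is
// hypothetical and only illustrates the mapping between the two locations.
interface Settings {
	streamResponse?: boolean;                      // legacy flag written by the removed toggle
	params?: { stream_response?: boolean | null }; // new location read by Chat.svelte
	[key: string]: unknown;
}

function migrateStreamSetting(settings: Settings): Settings {
	// Only migrate when the legacy flag was set and no new value exists yet.
	if (settings.streamResponse !== undefined && settings.params?.stream_response == null) {
		const { streamResponse, ...rest } = settings;
		return {
			...rest,
			params: { ...settings.params, stream_response: streamResponse }
		};
	}
	return settings;
}

// Example: a user who had turned streaming off via the old Interface setting.
console.log(migrateStreamSetting({ streamResponse: false }));
// -> { params: { stream_response: false } }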