From bd81fc8bffc27739920986f5bab1dd218e639d85 Mon Sep 17 00:00:00 2001 From: itaybar <46649219+itaybar@users.noreply.github.com> Date: Fri, 20 Sep 2024 15:13:03 +0300 Subject: [PATCH 1/4] downgrade authlib to 1.3.1 --- backend/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/requirements.txt b/backend/requirements.txt index 2554bb5f8..cfe79f3ef 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -69,7 +69,7 @@ rank-bm25==0.2.2 faster-whisper==1.0.3 PyJWT[crypto]==2.9.0 -authlib==1.3.2 +authlib==1.3.1 black==24.8.0 langfuse==2.44.0 From 9b05fe3c54b1f0388b640b356ea49bcc30536694 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Fri, 20 Sep 2024 17:40:30 +0200 Subject: [PATCH 2/4] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b2558e4d1..06aca67cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ dependencies = [ "faster-whisper==1.0.3", "PyJWT[crypto]==2.9.0", - "authlib==1.3.2", + "authlib==1.3.1", "black==24.8.0", "langfuse==2.44.0", From 0d01ea5f2fa3f5692ed94a88a7ab9b67a61bf62c Mon Sep 17 00:00:00 2001 From: kivvi Date: Tue, 24 Sep 2024 11:51:47 +0800 Subject: [PATCH 3/4] Set whether to stream responses for this model individually, overriding the global setting --- src/lib/components/chat/Chat.svelte | 2 +- .../Settings/Advanced/AdvancedParams.svelte | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte index b6a90a953..54b24dfec 100644 --- a/src/lib/components/chat/Chat.svelte +++ b/src/lib/components/chat/Chat.svelte @@ -1206,7 +1206,7 @@ await tick(); try { - const stream = $settings?.streamResponse ?? true; + const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true; const [res, controller] = await generateOpenAIChatCompletion( localStorage.token, { diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte index d1b566b63..8f3a3973b 100644 --- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte +++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte @@ -10,6 +10,7 @@ export let params = { // Advanced + stream_response: null, // Set stream responses for this model individually seed: null, stop: null, temperature: null, @@ -42,6 +43,30 @@
	[hunk body garbled in extraction: per the hunk header, the patch adds ~24 lines of Svelte markup
	above the existing {$i18n.t('Seed')} field in the advanced parameters panel, rendering a
	{$i18n.t('Stream Chat Response')} control; the surrounding markup tags were stripped and are not
	recoverable here]
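The new parameter defaults to stream_response: null, so a model only overrides streaming behaviour when the control is explicitly set. Because the original markup above was lost, the following is only an assumed sketch, modelled on the panel's other tri-state toggles, of how such a control could cycle between Default, On and Off; all names here are hypothetical and not taken from the patch:

// Assumed tri-state behaviour for params.stream_response (hypothetical helpers, not the patch's code):
// null = follow the global setting, true = always stream, false = never stream.
type StreamResponse = boolean | null;

function cycleStreamResponse(current: StreamResponse): StreamResponse {
	if (current === null) return true; // Default -> On
	if (current === true) return false; // On -> Off
	return null; // Off -> Default
}

function labelFor(value: StreamResponse): string {
	return value === null ? 'Default' : value ? 'On' : 'Off';
}

// Clicking the toggle repeatedly: Default -> On -> Off -> Default
console.log(labelFor(cycleStreamResponse(null))); // On
console.log(labelFor(cycleStreamResponse(true))); // Off
console.log(labelFor(cycleStreamResponse(false))); // Default
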
From 40d7d7d6dd7049e900d9ff301276db95eacac76c Mon Sep 17 00:00:00 2001 From: kivvi Date: Tue, 24 Sep 2024 11:51:47 +0800 Subject: [PATCH 4/4] Set whether to stream responses for this model individually, overriding the global setting --- src/lib/components/chat/Chat.svelte | 4 +-- .../Settings/Advanced/AdvancedParams.svelte | 25 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/src/lib/components/chat/Chat.svelte b/src/lib/components/chat/Chat.svelte index b6a90a953..45577520a 100644 --- a/src/lib/components/chat/Chat.svelte +++ b/src/lib/components/chat/Chat.svelte @@ -912,7 +912,7 @@ await tick(); - const stream = $settings?.streamResponse ?? true; + const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true; const [res, controller] = await generateChatCompletion(localStorage.token, { stream: stream, model: model.id, @@ -1206,7 +1206,7 @@ await tick(); try { - const stream = $settings?.streamResponse ?? true; + const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true; const [res, controller] = await generateOpenAIChatCompletion( localStorage.token, { diff --git a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte index d1b566b63..8f3a3973b 100644 --- a/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte +++ b/src/lib/components/chat/Settings/Advanced/AdvancedParams.svelte @@ -10,6 +10,7 @@ export let params = { // Advanced + stream_response: null, // Set stream responses for this model individually seed: null, stop: null, temperature: null, @@ -42,6 +43,30 @@
	[hunk body garbled in extraction: per the hunk header, the patch adds ~24 lines of Svelte markup
	above the existing {$i18n.t('Seed')} field in the advanced parameters panel, rendering a
	{$i18n.t('Stream Chat Response')} control; the surrounding markup tags were stripped and are not
	recoverable here]
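Taken together, the two Chat.svelte hunks make streaming a three-level decision: the per-model stream_response parameter wins when it is set, otherwise the user's global streamResponse setting applies, and streaming stays on by default. A minimal sketch of that precedence, using simplified interfaces that are assumptions rather than the app's actual types:

// Per-model value (if present and non-null) overrides the global setting;
// streaming defaults to true when neither is set.
interface ModelInfo {
	info?: { params?: { stream_response?: boolean | null } };
}

interface Settings {
	streamResponse?: boolean;
}

function resolveStream(model: ModelInfo, settings: Settings): boolean {
	return model?.info?.params?.stream_response ?? settings?.streamResponse ?? true;
}

// A model pinned to stream_response: false stays non-streaming even when the
// global setting enables streaming.
console.log(resolveStream({ info: { params: { stream_response: false } } }, { streamResponse: true })); // false
console.log(resolveStream({}, { streamResponse: true })); // true
console.log(resolveStream({}, {})); // true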