Mirror of https://github.com/open-webui/open-webui (synced 2025-03-25 15:08:18 +00:00)
refac: deprecate interface "stream response" settings for advanced params
parent ff00815b61
commit 6739983cf1
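The change replaces the single interface-level `$settings?.streamResponse` flag with a fallback chain over the advanced params. A minimal standalone sketch of that resolution order (the `resolveStream` helper and the example values are illustrative, not part of the repo):

// Sketch of the fallback order introduced by this commit:
// per-model advanced params -> user settings params -> current chat params -> default true.
type StreamParams = { stream_response?: boolean | null };

function resolveStream(
	modelParams?: StreamParams,    // model?.info?.params in the diff
	settingsParams?: StreamParams, // $settings?.params in the diff
	chatParams?: StreamParams      // the local `params` object for the current chat
): boolean {
	return (
		modelParams?.stream_response ??
		settingsParams?.stream_response ??
		chatParams?.stream_response ??
		true // streaming stays on when nothing is set anywhere
	);
}

// Example: only the user-level advanced param is set, so it overrides the default.
console.log(resolveStream(undefined, { stream_response: false }, undefined)); // false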
@@ -952,7 +952,11 @@

 			await tick();

-			const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
+			const stream =
+				model?.info?.params?.stream_response ??
+				$settings?.params?.stream_response ??
+				params?.stream_response ??
+				true;
 			const [res, controller] = await generateChatCompletion(localStorage.token, {
 				stream: stream,
 				model: model.id,
@@ -1248,7 +1252,12 @@
 			await tick();

 			try {
-				const stream = model?.info?.params?.stream_response ?? $settings?.streamResponse ?? true;
+				const stream =
+					model?.info?.params?.stream_response ??
+					$settings?.params?.stream_response ??
+					params?.stream_response ??
+					true;
+
 				const [res, controller] = await generateOpenAIChatCompletion(
 					localStorage.token,
 					{
@@ -45,7 +45,7 @@
 <div class=" space-y-1 text-xs pb-safe-bottom">
 	<div>
 		<div class=" py-0.5 flex w-full justify-between">
-			<div class=" self-center text-xs">
+			<div class=" self-center text-xs font-medium">
 				{$i18n.t('Stream Chat Response')}
 			</div>

@@ -45,6 +45,7 @@

 	let params = {
 		// Advanced
+		stream_response: null,
 		seed: null,
 		temperature: null,
 		frequency_penalty: null,
@@ -327,6 +328,7 @@
 		saveSettings({
 			system: system !== '' ? system : undefined,
 			params: {
+				stream_response: params.stream_response !== null ? params.stream_response : undefined,
 				seed: (params.seed !== null ? params.seed : undefined) ?? undefined,
 				stop: params.stop ? params.stop.split(',').filter((e) => e) : undefined,
 				temperature: params.temperature !== null ? params.temperature : undefined,
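As with the other advanced params, `stream_response` starts as `null` and is mapped to `undefined` on save, so an untouched value is simply omitted from the persisted settings and the fallback chain above still decides. A small standalone sketch of that null-to-undefined pattern (names are illustrative):

// Untouched params stay null locally and are mapped to undefined when saving,
// mirroring the `x !== null ? x : undefined` expressions in the diff.
const localParams: { stream_response: boolean | null; seed: number | null } = {
	stream_response: null,
	seed: null
};

const toSave = {
	stream_response: localParams.stream_response !== null ? localParams.stream_response : undefined,
	seed: localParams.seed !== null ? localParams.seed : undefined
};

// JSON serialization drops undefined properties, so unset params are never persisted.
console.log(JSON.stringify(toSave)); // "{}"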
@@ -36,18 +36,11 @@
 	let voiceInterruption = false;
 	let hapticFeedback = false;

-	let streamResponse = true;
-
 	const toggleSplitLargeChunks = async () => {
 		splitLargeChunks = !splitLargeChunks;
 		saveSettings({ splitLargeChunks: splitLargeChunks });
 	};

-	const toggleStreamResponse = async () => {
-		streamResponse = !streamResponse;
-		saveSettings({ streamResponse: streamResponse });
-	};
-
 	const togglesScrollOnBranchChange = async () => {
 		scrollOnBranchChange = !scrollOnBranchChange;
 		saveSettings({ scrollOnBranchChange: scrollOnBranchChange });
@@ -165,7 +158,6 @@
 		userLocation = $settings.userLocation ?? false;

 		hapticFeedback = $settings.hapticFeedback ?? false;
-		streamResponse = $settings?.streamResponse ?? true;

 		defaultModelId = $settings?.models?.at(0) ?? '';
 		if ($config?.default_models) {
@@ -319,28 +311,6 @@
 			</div>
 		</div>

-		<div>
-			<div class=" py-0.5 flex w-full justify-between">
-				<div class=" self-center text-xs">
-					{$i18n.t('Stream Chat Response')}
-				</div>
-
-				<button
-					class="p-1 px-3 text-xs flex rounded transition"
-					on:click={() => {
-						toggleStreamResponse();
-					}}
-					type="button"
-				>
-					{#if streamResponse === true}
-						<span class="ml-2 self-center">{$i18n.t('On')}</span>
-					{:else}
-						<span class="ml-2 self-center">{$i18n.t('Off')}</span>
-					{/if}
-				</button>
-			</div>
-		</div>
-
 		<div>
 			<div class=" py-0.5 flex w-full justify-between">
 				<div class=" self-center text-xs">