mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)

enh: stream=false support

Adds a `streamResponse` user setting (default: on) and threads it through both the Ollama and OpenAI chat-completion paths. When streaming is disabled, the request is sent with `stream: false`, the full JSON response is read in one shot, and the response message (content, usage/timing info, done flag) is populated from it.

parent 628d7ae72d
commit e99cba53fe
@@ -115,8 +115,6 @@
 
 	$: if (history.currentId !== null) {
 		let _messages = [];
-		console.log(history.currentId);
-
 		let currentMessage = history.messages[history.currentId];
 		while (currentMessage) {
 			_messages.unshift({ ...currentMessage });
@@ -885,8 +883,9 @@
 
 		await tick();
 
+		const stream = $settings?.streamResponse ?? true;
 		const [res, controller] = await generateChatCompletion(localStorage.token, {
-			stream: true,
+			stream: stream,
 			model: model.id,
 			messages: messagesBody,
 			options: {
@@ -911,6 +910,21 @@
 		});
 
 		if (res && res.ok) {
+			if (!stream) {
+				const response = await res.json();
+				console.log(response);
+
+				responseMessage.content = response.message.content;
+				responseMessage.info = {
+					eval_count: response.eval_count,
+					eval_duration: response.eval_duration,
+					load_duration: response.load_duration,
+					prompt_eval_count: response.prompt_eval_count,
+					prompt_eval_duration: response.prompt_eval_duration,
+					total_duration: response.total_duration
+				};
+				responseMessage.done = true;
+			} else {
 			console.log('controller', controller);
 
 			const reader = res.body
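For context, a minimal sketch of what the new non-streaming branch expects: with `stream: false`, an Ollama-compatible `/api/chat` endpoint returns a single JSON document (message plus eval/timing counters) instead of NDJSON chunks. The endpoint URL, type, and function below are illustrative assumptions, not code from this commit.

```ts
// Hypothetical one-shot completion against an Ollama-compatible endpoint.
type OllamaChatResponse = {
	message: { role: string; content: string };
	eval_count?: number;
	eval_duration?: number;
	load_duration?: number;
	prompt_eval_count?: number;
	prompt_eval_duration?: number;
	total_duration?: number;
};

async function completeOnce(
	model: string,
	messages: { role: string; content: string }[]
): Promise<OllamaChatResponse> {
	const res = await fetch('/api/chat', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		// stream: false asks the server to buffer the whole completion
		body: JSON.stringify({ model, messages, stream: false })
	});
	if (!res.ok) throw new Error(`request failed: ${res.status}`);
	// The body is one JSON document, so a single res.json() suffices
	return (await res.json()) as OllamaChatResponse;
}
```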
@@ -926,9 +940,6 @@
 
 				if (stopResponseFlag) {
 					controller.abort('User: Stop Response');
-				} else {
-					const messages = createMessagesList(responseMessageId);
-					await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
 				}
 
 				_response = responseMessage.content;
@@ -1047,6 +1058,14 @@
 					scrollToBottom();
 				}
 			}
+			}
+
+			await chatCompletedHandler(
+				_chatId,
+				model.id,
+				responseMessageId,
+				createMessagesList(responseMessageId)
+			);
 		} else {
 			if (res !== null) {
 				const error = await res.json();
@@ -1158,17 +1177,19 @@
 		await tick();
 
 		try {
+			const stream = $settings?.streamResponse ?? true;
 			const [res, controller] = await generateOpenAIChatCompletion(
 				localStorage.token,
 				{
-					stream: true,
+					stream: stream,
 					model: model.id,
-					stream_options:
-						(model.info?.meta?.capabilities?.usage ?? false)
+					...(stream && (model.info?.meta?.capabilities?.usage ?? false)
 						? {
+								stream_options: {
 									include_usage: true
+								}
 							}
-						: undefined,
+						: {}),
 					messages: [
 						params?.system || $settings.system || (responseMessage?.userContext ?? null)
 							? {
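The `...(cond ? { … } : {})` construction above is a conditional object spread: `stream_options` is attached only when streaming is enabled and the model advertises usage support, since `stream_options` is only meaningful on streamed OpenAI-style requests. A standalone sketch of the pattern (names are illustrative):

```ts
// Hypothetical request-body builder using the conditional-spread pattern.
function buildBody(model: string, stream: boolean, supportsUsage: boolean) {
	return {
		stream,
		model,
		// Spreading {} adds no keys, so the field simply vanishes when unneeded
		...(stream && supportsUsage ? { stream_options: { include_usage: true } } : {})
	};
}

console.log(buildBody('some-model', true, true)); // includes stream_options
console.log(buildBody('some-model', false, true)); // omits stream_options
```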
@@ -1246,6 +1267,14 @@
 				scrollToBottom();
 
 			if (res && res.ok && res.body) {
+				if (!stream) {
+					const response = await res.json();
+					console.log(response);
+
+					responseMessage.content = response.choices[0].message.content;
+					responseMessage.info = { ...response.usage, openai: true };
+					responseMessage.done = true;
+				} else {
 				const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
 
 				for await (const update of textStream) {
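Both branches above consume the same `Response`, just differently: buffered JSON when `stream` is off, incremental decoding when it is on. A self-contained sketch of that split (the SSE framing that `createOpenAITextStream` handles is simplified away here; all names are illustrative):

```ts
// Hypothetical reader that branches on the stream flag.
async function readCompletion(
	res: Response,
	stream: boolean,
	onDelta: (text: string) => void
): Promise<void> {
	if (!stream) {
		// One JSON document: the whole completion arrives at once
		const data = await res.json();
		onDelta(data.choices[0].message.content);
		return;
	}
	// Streaming: decode body chunks as they arrive (real code would also
	// parse the "data: " SSE framing and the [DONE] sentinel)
	const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader();
	for (;;) {
		const { done, value } = await reader.read();
		if (done) break;
		onDelta(value);
	}
}
```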
@@ -1260,14 +1289,8 @@
 
 					if (stopResponseFlag) {
 						controller.abort('User: Stop Response');
-					} else {
-						const messages = createMessagesList(responseMessageId);
-
-						await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
 					}
 
 					_response = responseMessage.content;
 
 					break;
 				}
 
@@ -1324,6 +1347,14 @@
 					scrollToBottom();
 				}
 			}
+			}
+
+			await chatCompletedHandler(
+				_chatId,
+				model.id,
+				responseMessageId,
+				createMessagesList(responseMessageId)
+			);
 
 			if ($settings.notificationEnabled && !document.hasFocus()) {
 				const notification = new Notification(`${model.id}`, {
(the hunks below are in a second file: the settings UI component)

@@ -36,11 +36,18 @@
 	let voiceInterruption = false;
 	let hapticFeedback = false;
 
+	let streamResponse = true;
+
 	const toggleSplitLargeChunks = async () => {
 		splitLargeChunks = !splitLargeChunks;
 		saveSettings({ splitLargeChunks: splitLargeChunks });
 	};
 
+	const toggleStreamResponse = async () => {
+		streamResponse = !streamResponse;
+		saveSettings({ streamResponse: streamResponse });
+	};
+
 	const togglesScrollOnBranchChange = async () => {
 		scrollOnBranchChange = !scrollOnBranchChange;
 		saveSettings({ scrollOnBranchChange: scrollOnBranchChange });
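The toggle handlers all follow the same flip-then-persist pattern. A minimal self-contained sketch (this `saveSettings` stand-in merges into `localStorage`; the app's real `saveSettings` is not shown in the diff and is only assumed to behave similarly):

```ts
// Hypothetical toggle-and-persist pattern, mirroring toggleStreamResponse.
let streamResponse = true;

function saveSettings(patch: Record<string, unknown>): void {
	const current = JSON.parse(localStorage.getItem('settings') ?? '{}');
	localStorage.setItem('settings', JSON.stringify({ ...current, ...patch }));
}

function toggleStreamResponse(): void {
	streamResponse = !streamResponse;
	saveSettings({ streamResponse });
}
```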
@@ -158,6 +165,7 @@
 		userLocation = $settings.userLocation ?? false;
 
 		hapticFeedback = $settings.hapticFeedback ?? false;
+		streamResponse = $settings?.streamResponse ?? true;
 
 		defaultModelId = $settings?.models?.at(0) ?? '';
 		if ($config?.default_models) {
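The `?? true` default matters here: nullish coalescing falls back only when the setting is missing, so a user's explicit `false` is preserved while everyone without the setting keeps streaming on. A tiny sketch of the distinction (the settings shape is assumed):

```ts
// `??` keeps an explicit false; `||` would coerce it back to true.
type Settings = { streamResponse?: boolean };

const userOff: Settings = { streamResponse: false };
const unset: Settings = {};

console.log(userOff.streamResponse ?? true); // false: explicit choice kept
console.log(unset.streamResponse ?? true); // true: default for missing setting
console.log(userOff.streamResponse || true); // true: why || would be a bug here
```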
@@ -311,6 +319,28 @@
 					</div>
 				</div>
 
+				<div>
+					<div class=" py-0.5 flex w-full justify-between">
+						<div class=" self-center text-xs">
+							{$i18n.t('Stream Chat Response')}
+						</div>
+
+						<button
+							class="p-1 px-3 text-xs flex rounded transition"
+							on:click={() => {
+								toggleStreamResponse();
+							}}
+							type="button"
+						>
+							{#if streamResponse === true}
+								<span class="ml-2 self-center">{$i18n.t('On')}</span>
+							{:else}
+								<span class="ml-2 self-center">{$i18n.t('Off')}</span>
+							{/if}
+						</button>
+					</div>
+				</div>
+
 				<div>
 					<div class=" py-0.5 flex w-full justify-between">
 						<div class=" self-center text-xs">