Mirror of https://github.com/open-webui/open-webui

enh: stream=false support

commit e99cba53fe
parent 628d7ae72d
@@ -115,8 +115,6 @@
 $: if (history.currentId !== null) {
   let _messages = [];
-  console.log(history.currentId);
-
   let currentMessage = history.messages[history.currentId];
   while (currentMessage) {
     _messages.unshift({ ...currentMessage });
@@ -885,8 +883,9 @@
 await tick();

+const stream = $settings?.streamResponse ?? true;
 const [res, controller] = await generateChatCompletion(localStorage.token, {
-  stream: true,
+  stream: stream,
   model: model.id,
   messages: messagesBody,
   options: {
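Note: the new `stream` flag falls back to streaming when the setting is absent, so existing installs keep their current behaviour. A minimal sketch of that fallback (the `settings` object below is only a stand-in for the component's `$settings` store):

    // Sketch only: illustrates the `?? true` fallback used for the new flag.
    const settings: { streamResponse?: boolean } = {};

    const stream = settings?.streamResponse ?? true;
    // streamResponse undefined    -> stream === true  (streaming, the previous default)
    // streamResponse set to false -> stream === false (new non-streaming request path)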
@@ -911,142 +910,162 @@
 });

 if (res && res.ok) {
+  if (!stream) {
+    const response = await res.json();
+    console.log(response);
+
+    responseMessage.content = response.message.content;
+    responseMessage.info = {
+      eval_count: response.eval_count,
+      eval_duration: response.eval_duration,
+      load_duration: response.load_duration,
+      prompt_eval_count: response.prompt_eval_count,
+      prompt_eval_duration: response.prompt_eval_duration,
+      total_duration: response.total_duration
+    };
+    responseMessage.done = true;
+  } else {
   console.log('controller', controller);

   const reader = res.body
     .pipeThrough(new TextDecoderStream())
     .pipeThrough(splitStream('\n'))
     .getReader();

   while (true) {
     const { value, done } = await reader.read();
     if (done || stopResponseFlag || _chatId !== $chatId) {
       responseMessage.done = true;
       messages = messages;

       if (stopResponseFlag) {
         controller.abort('User: Stop Response');
-      } else {
-        const messages = createMessagesList(responseMessageId);
-        await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
       }

       _response = responseMessage.content;
       break;
     }

     try {
       let lines = value.split('\n');

       for (const line of lines) {
         if (line !== '') {
           console.log(line);
           let data = JSON.parse(line);

           if ('citations' in data) {
             responseMessage.citations = data.citations;
             // Only remove status if it was initially set
             if (model?.info?.meta?.knowledge ?? false) {
               responseMessage.statusHistory = responseMessage.statusHistory.filter(
                 (status) => status.action !== 'knowledge_search'
               );
             }
             continue;
           }

           if ('detail' in data) {
             throw data;
           }

           if (data.done == false) {
             if (responseMessage.content == '' && data.message.content == '\n') {
               continue;
             } else {
               responseMessage.content += data.message.content;

               if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
                 navigator.vibrate(5);
               }

               const messageContentParts = getMessageContentParts(
                 responseMessage.content,
                 $config?.audio?.tts?.split_on ?? 'punctuation'
               );
               messageContentParts.pop();

               // dispatch only last sentence and make sure it hasn't been dispatched before
               if (
                 messageContentParts.length > 0 &&
                 messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
               ) {
                 responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
                 eventTarget.dispatchEvent(
                   new CustomEvent('chat', {
                     detail: {
                       id: responseMessageId,
                       content: messageContentParts[messageContentParts.length - 1]
                     }
                   })
                 );
               }

               messages = messages;
             }
           } else {
             responseMessage.done = true;

             if (responseMessage.content == '') {
               responseMessage.error = {
                 code: 400,
                 content: `Oops! No text generated from Ollama, Please try again.`
               };
             }

             responseMessage.context = data.context ?? null;
             responseMessage.info = {
               total_duration: data.total_duration,
               load_duration: data.load_duration,
               sample_count: data.sample_count,
               sample_duration: data.sample_duration,
               prompt_eval_count: data.prompt_eval_count,
               prompt_eval_duration: data.prompt_eval_duration,
               eval_count: data.eval_count,
               eval_duration: data.eval_duration
             };
             messages = messages;

             if ($settings.notificationEnabled && !document.hasFocus()) {
               const notification = new Notification(`${model.id}`, {
                 body: responseMessage.content,
                 icon: `${WEBUI_BASE_URL}/static/favicon.png`
               });
             }

             if ($settings?.responseAutoCopy ?? false) {
               copyToClipboard(responseMessage.content);
             }

             if ($settings.responseAutoPlayback && !$showCallOverlay) {
               await tick();
               document.getElementById(`speak-button-${responseMessage.id}`)?.click();
             }
           }
         }
       }
     } catch (error) {
       console.log(error);
       if ('detail' in error) {
         toast.error(error.detail);
       }
       break;
     }

     if (autoScroll) {
       scrollToBottom();
     }
   }
+  }
+
+  await chatCompletedHandler(
+    _chatId,
+    model.id,
+    responseMessageId,
+    createMessagesList(responseMessageId)
+  );
 } else {
   if (res !== null) {
     const error = await res.json();
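For reference, a rough TypeScript sketch of the non-streaming Ollama chat response that the new `if (!stream)` branch consumes; the shape is inferred from the fields read above and is not exhaustive:

    // Assumed response shape for a non-streaming /api/chat call (inferred, not authoritative).
    interface OllamaChatResponse {
      message: { role: string; content: string };
      done: boolean;
      total_duration?: number;
      load_duration?: number;
      prompt_eval_count?: number;
      prompt_eval_duration?: number;
      eval_count?: number;
      eval_duration?: number;
    }

    // Non-streaming consumption mirrors the new branch: one JSON body, no reader loop.
    const readNonStreamingOllama = async (res: Response): Promise<string> => {
      const response: OllamaChatResponse = await res.json();
      return response.message.content;
    };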
@@ -1158,17 +1177,19 @@
 await tick();

 try {
+  const stream = $settings?.streamResponse ?? true;
   const [res, controller] = await generateOpenAIChatCompletion(
     localStorage.token,
     {
-      stream: true,
+      stream: stream,
       model: model.id,
-      stream_options:
-        (model.info?.meta?.capabilities?.usage ?? false)
-          ? {
-              include_usage: true
-            }
-          : undefined,
+      ...(stream && (model.info?.meta?.capabilities?.usage ?? false)
+        ? {
+            stream_options: {
+              include_usage: true
+            }
+          }
+        : {}),
       messages: [
         params?.system || $settings.system || (responseMessage?.userContext ?? null)
           ? {
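Replacing the `stream_options:` ternary with a conditional spread means the key is omitted entirely, rather than sent as undefined, whenever the request is non-streaming or the model does not advertise usage reporting. A small sketch of the idiom:

    // Conditional spread: stream_options only exists when both conditions hold.
    const buildBody = (stream: boolean, supportsUsage: boolean) => ({
      stream,
      ...(stream && supportsUsage ? { stream_options: { include_usage: true } } : {})
    });

    console.log(buildBody(true, true));  // { stream: true, stream_options: { include_usage: true } }
    console.log(buildBody(false, true)); // { stream: false } (no stream_options key at all)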
@@ -1246,85 +1267,95 @@
 scrollToBottom();

 if (res && res.ok && res.body) {
+  if (!stream) {
+    const response = await res.json();
+    console.log(response);
+
+    responseMessage.content = response.choices[0].message.content;
+    responseMessage.info = { ...response.usage, openai: true };
+    responseMessage.done = true;
+  } else {
   const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);

   for await (const update of textStream) {
     const { value, done, citations, error, usage } = update;
     if (error) {
       await handleOpenAIError(error, null, model, responseMessage);
       break;
     }
     if (done || stopResponseFlag || _chatId !== $chatId) {
       responseMessage.done = true;
       messages = messages;

       if (stopResponseFlag) {
         controller.abort('User: Stop Response');
-      } else {
-        const messages = createMessagesList(responseMessageId);
-        await chatCompletedHandler(_chatId, model.id, responseMessageId, messages);
       }

       _response = responseMessage.content;
       break;
     }

     if (usage) {
       responseMessage.info = { ...usage, openai: true };
     }

     if (citations) {
       responseMessage.citations = citations;
       // Only remove status if it was initially set
       if (model?.info?.meta?.knowledge ?? false) {
         responseMessage.statusHistory = responseMessage.statusHistory.filter(
           (status) => status.action !== 'knowledge_search'
         );
       }
       continue;
     }

     if (responseMessage.content == '' && value == '\n') {
       continue;
     } else {
       responseMessage.content += value;

       if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
         navigator.vibrate(5);
       }

       const messageContentParts = getMessageContentParts(
         responseMessage.content,
         $config?.audio?.tts?.split_on ?? 'punctuation'
       );
       messageContentParts.pop();

       // dispatch only last sentence and make sure it hasn't been dispatched before
       if (
         messageContentParts.length > 0 &&
         messageContentParts[messageContentParts.length - 1] !== responseMessage.lastSentence
       ) {
         responseMessage.lastSentence = messageContentParts[messageContentParts.length - 1];
         eventTarget.dispatchEvent(
           new CustomEvent('chat', {
             detail: {
               id: responseMessageId,
               content: messageContentParts[messageContentParts.length - 1]
             }
           })
         );
       }

       messages = messages;
     }

     if (autoScroll) {
       scrollToBottom();
     }
   }
+  }
+
+  await chatCompletedHandler(
+    _chatId,
+    model.id,
+    responseMessageId,
+    createMessagesList(responseMessageId)
+  );

   if ($settings.notificationEnabled && !document.hasFocus()) {
     const notification = new Notification(`${model.id}`, {
       body: responseMessage.content,
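On the OpenAI-compatible path, `stream: false` returns the whole completion as one JSON body; the new branch reads the first choice and tags the usage block. A rough sketch of the assumed shape:

    // Assumed (simplified) non-streaming chat.completions response shape.
    interface OpenAIChatCompletion {
      choices: { message: { role: string; content: string } }[];
      usage?: { prompt_tokens: number; completion_tokens: number; total_tokens: number };
    }

    const readNonStreamingOpenAI = async (res: Response) => {
      const response: OpenAIChatCompletion = await res.json();
      // Mirrors the new branch: content from the first choice, usage stored as info.
      return { content: response.choices[0].message.content, info: { ...response.usage, openai: true } };
    };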
@@ -36,11 +36,18 @@
 let voiceInterruption = false;
 let hapticFeedback = false;

+let streamResponse = true;
+
 const toggleSplitLargeChunks = async () => {
   splitLargeChunks = !splitLargeChunks;
   saveSettings({ splitLargeChunks: splitLargeChunks });
 };

+const toggleStreamResponse = async () => {
+  streamResponse = !streamResponse;
+  saveSettings({ streamResponse: streamResponse });
+};
+
 const togglesScrollOnBranchChange = async () => {
   scrollOnBranchChange = !scrollOnBranchChange;
   saveSettings({ scrollOnBranchChange: scrollOnBranchChange });
@@ -158,6 +165,7 @@
 userLocation = $settings.userLocation ?? false;

 hapticFeedback = $settings.hapticFeedback ?? false;
+streamResponse = $settings?.streamResponse ?? true;

 defaultModelId = $settings?.models?.at(0) ?? '';
 if ($config?.default_models) {
@@ -311,6 +319,28 @@
     </div>
   </div>

+  <div>
+    <div class=" py-0.5 flex w-full justify-between">
+      <div class=" self-center text-xs">
+        {$i18n.t('Stream Chat Response')}
+      </div>
+
+      <button
+        class="p-1 px-3 text-xs flex rounded transition"
+        on:click={() => {
+          toggleStreamResponse();
+        }}
+        type="button"
+      >
+        {#if streamResponse === true}
+          <span class="ml-2 self-center">{$i18n.t('On')}</span>
+        {:else}
+          <span class="ml-2 self-center">{$i18n.t('Off')}</span>
+        {/if}
+      </button>
+    </div>
+  </div>
+
   <div>
     <div class=" py-0.5 flex w-full justify-between">
       <div class=" self-center text-xs">