open-webui/src/routes/(app)/c/[id]/+page.svelte

993 lines
26 KiB
Svelte
Raw Normal View History

2023-11-20 01:47:07 +00:00
<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
2024-03-01 09:18:07 +00:00
import { toast } from 'svelte-sonner';
2023-11-20 01:47:07 +00:00
2024-03-03 10:01:34 +00:00
import { onMount, tick, getContext } from 'svelte';
2023-11-20 01:47:07 +00:00
import { goto } from '$app/navigation';
2023-12-27 00:35:01 +00:00
import { page } from '$app/stores';
2024-01-18 10:55:25 +00:00
import {
models,
modelfiles,
user,
settings,
chats,
chatId,
config,
WEBUI_NAME,
2024-05-02 06:11:16 +00:00
tags as _tags,
showSidebar
2024-01-18 10:55:25 +00:00
} from '$lib/stores';
2024-01-16 00:58:51 +00:00
import { copyToClipboard, splitStream, convertMessagesToHistory } from '$lib/utils';
2023-12-27 00:35:01 +00:00
2024-03-26 08:47:33 +00:00
import { generateChatCompletion, cancelOllamaRequest } from '$lib/apis/ollama';
2024-01-18 10:10:07 +00:00
import {
addTagById,
createNewChat,
deleteTagById,
2024-01-18 10:55:25 +00:00
getAllChatTags,
2024-01-18 10:10:07 +00:00
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
2024-03-26 08:47:33 +00:00
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
2024-01-05 02:54:00 +00:00
2023-11-20 01:47:07 +00:00
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
2024-04-16 22:08:50 +00:00
2024-03-26 08:47:33 +00:00
import {
LITELLM_API_BASE_URL,
OPENAI_API_BASE_URL,
OLLAMA_API_BASE_URL,
WEBUI_BASE_URL
} from '$lib/constants';
import { createOpenAITextStream } from '$lib/apis/streaming';
2024-05-19 15:40:46 +00:00
import { queryMemory } from '$lib/apis/memories';
2023-11-20 01:47:07 +00:00
2024-03-03 10:01:34 +00:00
const i18n = getContext('i18n');
2023-11-20 01:47:07 +00:00
// True once the chat referenced by the route has been fetched and rendered.
let loaded = false;
2024-01-07 11:19:34 +00:00
2023-11-20 01:47:07 +00:00
// Set by stopResponse(); the streaming loops poll this flag and abort.
let stopResponseFlag = false;
// When true, keep the messages container pinned to the bottom while streaming.
let autoScroll = true;
2024-01-07 11:19:34 +00:00
let processing = '';
2024-03-03 22:42:01 +00:00
let messagesContainerElement: HTMLDivElement;
2024-01-18 03:31:55 +00:00
// Server-side id of the in-flight Ollama request (used for cancellation).
let currentRequestId = null;
2023-11-20 01:47:07 +00:00
// let chatId = $page.params.id;
let showModelSelector = true;
2023-11-20 01:47:07 +00:00
// Models the user has picked; '' means "no model selected yet".
let selectedModels = [''];
2024-05-02 09:20:57 +00:00
// Model picked inline via "@model" in the prompt box ('' when unused).
let atSelectedModel = '';
let selectedModelfile = null;
2024-03-30 00:20:07 +00:00
// Resolve the modelfile when exactly one model is selected and it has one.
$: selectedModelfile =
selectedModels.length === 1 &&
$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
: null;
2023-11-20 01:47:07 +00:00
2023-12-30 07:25:40 +00:00
let selectedModelfiles = {};
// Map of tagName -> modelfile for every selected model that has a modelfile.
$: selectedModelfiles = selectedModels.reduce((a, tagName, i, arr) => {
const modelfile =
$modelfiles.filter((modelfile) => modelfile.tagName === tagName)?.at(0) ?? undefined;
return {
...a,
...(modelfile && { [tagName]: modelfile })
};
}, {});
2023-12-26 11:28:30 +00:00
let chat = null;
2024-01-18 10:10:07 +00:00
let tags = [];
2023-12-26 11:28:30 +00:00
2023-11-20 01:47:07 +00:00
let title = '';
let prompt = '';
2023-12-07 19:52:55 +00:00
let files = [];
2023-11-20 01:47:07 +00:00
// Flat view of the active conversation branch; derived from `history` below.
let messages = [];
// Message tree: id -> message plus the id of the currently displayed leaf.
let history = {
messages: {},
currentId: null
};
// Rebuild `messages` by walking parent links from the current leaf to the root.
$: if (history.currentId !== null) {
let _messages = [];
let currentMessage = history.messages[history.currentId];
while (currentMessage !== null) {
_messages.unshift({ ...currentMessage });
currentMessage =
currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
}
messages = _messages;
2023-12-06 16:14:20 +00:00
} else {
messages = [];
2023-11-20 01:47:07 +00:00
}
// Load the chat whenever the route's chat id changes; bounce home on failure.
$: if ($page.params.id) {
(async () => {
2023-12-26 11:28:30 +00:00
if (await loadChat()) {
await tick();
loaded = true;
2024-02-28 03:56:52 +00:00
window.setTimeout(() => scrollToBottom(), 0);
2024-02-28 03:56:52 +00:00
const chatInput = document.getElementById('chat-textarea');
chatInput?.focus();
} else {
await goto('/');
}
2023-11-20 01:47:07 +00:00
})();
}
//////////////////////////
// Web functions
//////////////////////////
// Fetch the chat for the current route, normalize legacy formats, and hydrate
// component state. Returns true on success, null/undefined on failure.
const loadChat = async () => {
// Sync the chatId store with the route parameter before fetching.
await chatId.set($page.params.id);
2023-12-26 21:21:47 +00:00
chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
await goto('/');
2023-11-20 01:47:07 +00:00
return null;
2023-12-26 21:21:47 +00:00
});
if (chat) {
2024-01-18 10:10:07 +00:00
tags = await getTags();
2023-12-26 21:21:47 +00:00
const chatContent = chat.chat;
if (chatContent) {
console.log(chatContent);
// Older chats stored a single model value; normalize to an array.
selectedModels =
(chatContent?.models ?? undefined) !== undefined
? chatContent.models
2024-01-16 00:58:51 +00:00
: [chatContent.models ?? ''];
2023-12-26 21:21:47 +00:00
// Older chats stored a flat message list; convert it to the tree form.
history =
(chatContent?.history ?? undefined) !== undefined
? chatContent.history
: convertMessagesToHistory(chatContent.messages);
title = chatContent.title;
// Per-chat system prompt/options override the locally saved settings.
let _settings = JSON.parse(localStorage.getItem('settings') ?? '{}');
await settings.set({
..._settings,
system: chatContent.system ?? _settings.system,
options: chatContent.options ?? _settings.options
});
autoScroll = true;
await tick();
// Ensure the latest message is not left in a mid-stream ("not done") state.
if (messages.length > 0) {
history.messages[messages.at(-1).id].done = true;
}
await tick();
return true;
} else {
return null;
}
2023-11-20 01:47:07 +00:00
}
};
2024-05-02 09:20:57 +00:00
const scrollToBottom = async () => {
await tick();
2024-03-12 20:47:49 +00:00
if (messagesContainerElement) {
messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
}
2024-02-16 00:20:46 +00:00
};
2023-11-20 01:47:07 +00:00
//////////////////////////
// Ollama functions
//////////////////////////
2024-01-17 23:09:38 +00:00
// Validate and enqueue a user turn: build the user message node, create the
// chat record on first message, then kick off generation via sendPrompt().
const submitPrompt = async (userPrompt, _user = null) => {
2024-01-07 10:54:31 +00:00
console.log('submitPrompt', $chatId);
// Refuse to submit when no model is chosen, a response is still streaming,
// or attached files are still uploading.
if (selectedModels.includes('')) {
2024-03-03 10:01:34 +00:00
toast.error($i18n.t('Model not selected'));
2024-01-07 10:54:31 +00:00
} else if (messages.length != 0 && messages.at(-1).done != true) {
// Response not done
console.log('wait');
2024-01-10 06:47:31 +00:00
} else if (
files.length > 0 &&
files.filter((file) => file.upload_status === false).length > 0
) {
// Upload not done
toast.error(
`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
);
2024-01-07 10:54:31 +00:00
} else {
// Reset chat message textarea height
document.getElementById('chat-textarea').style.height = '';
// Create user message
let userMessageId = uuidv4();
let userMessage = {
id: userMessageId,
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
2024-01-17 23:09:38 +00:00
user: _user ?? undefined,
2024-01-07 10:54:31 +00:00
content: userPrompt,
2024-01-17 23:09:38 +00:00
files: files.length > 0 ? files : undefined,
2024-05-19 11:46:35 +00:00
timestamp: Math.floor(Date.now() / 1000), // Unix epoch
models: selectedModels
2024-01-07 10:54:31 +00:00
};
// Add message to history and Set currentId to messageId
history.messages[userMessageId] = userMessage;
history.currentId = userMessageId;
// Append messageId to childrenIds of parent message
if (messages.length !== 0) {
history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
}
// Wait until history/message have been updated
await tick();
// Create new chat if only one message in messages
if (messages.length == 1) {
if ($settings.saveChatHistory ?? true) {
chat = await createNewChat(localStorage.token, {
id: $chatId,
title: $i18n.t('New Chat'),
2024-01-07 10:54:31 +00:00
models: selectedModels,
system: $settings.system ?? undefined,
options: {
...($settings.options ?? {})
},
messages: messages,
history: history,
timestamp: Date.now()
});
await chats.set(await getChatList(localStorage.token));
await chatId.set(chat.id);
} else {
// History persistence is off: keep this chat client-side only.
await chatId.set('local');
}
await tick();
}
2024-01-07 10:54:31 +00:00
// Reset chat input textarea
prompt = '';
files = [];
// Send prompt
await sendPrompt(userPrompt, userMessageId);
}
};
2024-05-19 11:46:35 +00:00
// Fan out generation of one assistant response per target model under the
// user message `parentId`. NOTE: the `prompt` parameter shadows the
// component-level `prompt` binding.
const sendPrompt = async (prompt, parentId, modelId = null) => {
2023-12-26 11:28:30 +00:00
// Snapshot the chat id so navigation mid-stream can be detected downstream.
const _chatId = JSON.parse(JSON.stringify($chatId));
2024-01-07 10:54:31 +00:00
2024-05-19 15:40:46 +00:00
// When the memory feature is on, fetch stored memories relevant to the
// prompt; they are attached to each response message as `userContext`.
let userContext = null;
if ($settings?.memory ?? false) {
const res = await queryMemory(localStorage.token, prompt).catch((error) => {
toast.error(error);
return null;
});
if (res) {
2024-05-19 15:53:28 +00:00
if (res.documents[0].length > 0) {
userContext = res.documents.reduce((acc, doc, index) => {
const createdAtTimestamp = res.metadatas[index][0].created_at;
const createdAtDate = new Date(createdAtTimestamp * 1000).toISOString().split('T')[0];
acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
return acc;
}, []);
}
2024-05-19 15:40:46 +00:00
console.log(userContext);
}
}
2023-11-20 01:47:07 +00:00
// Target selection precedence: explicit modelId argument, then an inline
// "@model" pick, otherwise every currently selected model.
await Promise.all(
2024-05-19 11:46:35 +00:00
(modelId ? [modelId] : atSelectedModel !== '' ? [atSelectedModel.id] : selectedModels).map(
async (modelId) => {
console.log('modelId', modelId);
const model = $models.filter((m) => m.id === modelId).at(0);
if (model) {
// Create response message
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model.id,
2024-05-19 19:00:10 +00:00
userContext: userContext,
2024-05-19 11:46:35 +00:00
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// Add message to history and Set currentId to messageId
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
// Append messageId to childrenIds of parent message
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
2024-02-15 09:23:02 +00:00
2024-05-19 11:46:35 +00:00
if (model?.external) {
await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
await sendPromptOllama(model, prompt, responseMessageId, _chatId);
}
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
2024-02-25 23:46:12 +00:00
}
2023-11-20 01:47:07 +00:00
}
2024-05-19 11:46:35 +00:00
)
2023-11-20 01:47:07 +00:00
);
2023-12-26 20:50:52 +00:00
await chats.set(await getChatList(localStorage.token));
2023-11-20 01:47:07 +00:00
};
2024-01-07 10:54:31 +00:00
2024-02-15 09:23:02 +00:00
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
2024-02-25 23:46:12 +00:00
model = model.id;
2024-02-15 09:23:02 +00:00
const responseMessage = history.messages[responseMessageId];
2023-11-20 01:47:07 +00:00
2023-12-26 20:50:52 +00:00
// Wait until history/message have been updated
2023-12-11 06:47:16 +00:00
await tick();
2023-12-26 20:50:52 +00:00
// Scroll down
2024-02-16 00:20:46 +00:00
scrollToBottom();
2023-11-20 01:47:07 +00:00
2024-02-04 08:29:40 +00:00
const messagesBody = [
2024-05-19 18:54:55 +00:00
$settings.system || (responseMessage?.userContext ?? null)
2024-02-04 08:29:40 +00:00
? {
role: 'system',
2024-05-19 15:40:46 +00:00
content:
2024-05-19 18:54:55 +00:00
$settings.system + !!(responseMessage?.userContext ?? null)
2024-05-19 15:40:46 +00:00
? `\n\nUser Context:\n${responseMessage.userContext.join('\n')}`
: ''
2024-02-04 08:29:40 +00:00
}
: undefined,
2024-03-03 07:23:49 +00:00
...messages
2024-02-04 08:29:40 +00:00
]
.filter((message) => message)
2024-03-09 07:19:20 +00:00
.map((message, idx, arr) => {
// Prepare the base message object
const baseMessage = {
role: message.role,
content: arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content
};
// Extract and format image URLs if any exist
const imageUrls = message.files
?.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1));
// Add images array only if it contains elements
2024-03-12 20:43:49 +00:00
if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
2024-03-09 07:19:20 +00:00
baseMessage.images = imageUrls;
}
return baseMessage;
});
2024-02-04 08:29:40 +00:00
let lastImageIndex = -1;
// Find the index of the last object with images
messagesBody.forEach((item, index) => {
if (item.images) {
lastImageIndex = index;
}
});
// Remove images from all but the last one
messagesBody.forEach((item, index) => {
if (index !== lastImageIndex) {
delete item.images;
}
});
2024-03-09 06:43:06 +00:00
const docs = messages
.filter((message) => message?.files ?? null)
.map((message) =>
message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
)
.flat(1);
2024-01-18 03:19:44 +00:00
const [res, controller] = await generateChatCompletion(localStorage.token, {
2024-01-04 21:06:31 +00:00
model: model,
2024-02-04 08:29:40 +00:00
messages: messagesBody,
2024-01-04 21:06:31 +00:00
options: {
2024-04-28 00:46:34 +00:00
...($settings.options ?? {}),
stop:
$settings?.options?.stop ?? undefined
? $settings.options.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined
2024-01-04 21:06:31 +00:00
},
2024-02-13 07:35:31 +00:00
format: $settings.requestFormat ?? undefined,
2024-03-09 06:43:06 +00:00
keep_alive: $settings.keepAlive ?? undefined,
docs: docs.length > 0 ? docs : undefined,
citations: docs.length > 0
2024-01-04 21:06:31 +00:00
});
2023-12-11 06:47:16 +00:00
if (res && res.ok) {
2024-02-04 08:29:40 +00:00
console.log('controller', controller);
2023-12-11 10:28:49 +00:00
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
2024-01-18 03:19:44 +00:00
if (stopResponseFlag) {
controller.abort('User: Stop Response');
2024-03-23 20:12:23 +00:00
await cancelOllamaRequest(localStorage.token, currentRequestId);
2024-01-18 03:19:44 +00:00
}
2024-01-18 03:31:55 +00:00
currentRequestId = null;
2024-02-04 08:29:40 +00:00
2023-12-11 10:28:49 +00:00
break;
}
2023-11-20 01:47:07 +00:00
2023-12-11 10:28:49 +00:00
try {
let lines = value.split('\n');
2023-11-20 01:47:07 +00:00
2023-12-11 10:28:49 +00:00
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
2023-12-11 06:47:16 +00:00
if ('citations' in data) {
responseMessage.citations = data.citations;
continue;
}
2023-12-11 10:28:49 +00:00
if ('detail' in data) {
throw data;
}
2023-12-11 06:47:16 +00:00
2024-01-18 03:19:44 +00:00
if ('id' in data) {
console.log(data);
2024-01-18 03:31:55 +00:00
currentRequestId = data.id;
2024-01-18 03:19:44 +00:00
} else {
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
2023-12-11 10:28:49 +00:00
} else {
2024-01-18 03:19:44 +00:00
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = true;
responseMessage.content =
'Oops! No text generated from Ollama, Please try again.';
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
2023-12-11 10:28:49 +00:00
messages = messages;
2023-12-19 02:48:51 +00:00
2024-01-18 03:19:44 +00:00
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
2024-02-15 22:43:10 +00:00
: `${model}`,
2024-01-18 03:19:44 +00:00
{
body: responseMessage.content,
2024-02-24 01:12:19 +00:00
icon: selectedModelfile?.imageUrl ?? `${WEBUI_BASE_URL}/static/favicon.png`
2024-01-18 03:19:44 +00:00
}
);
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
2024-02-10 00:00:39 +00:00
if ($settings.responseAutoPlayback) {
2024-02-10 00:37:21 +00:00
await tick();
2024-02-10 00:00:39 +00:00
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
2023-12-19 02:48:51 +00:00
}
2023-11-20 01:47:07 +00:00
}
}
}
2023-12-11 10:28:49 +00:00
} catch (error) {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
}
break;
2023-11-20 01:47:07 +00:00
}
2023-12-11 10:28:49 +00:00
if (autoScroll) {
2024-02-16 00:20:46 +00:00
scrollToBottom();
2023-11-20 01:47:07 +00:00
}
2023-12-26 11:28:30 +00:00
}
2023-11-20 01:47:07 +00:00
2023-12-26 11:28:30 +00:00
if ($chatId == _chatId) {
2024-01-07 10:54:31 +00:00
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
}
2023-11-20 01:47:07 +00:00
}
} else {
if (res !== null) {
const error = await res.json();
2023-11-20 01:47:07 +00:00
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
2023-12-13 23:31:07 +00:00
responseMessage.content = error.detail;
} else {
toast.error(error.error);
2023-12-13 23:31:07 +00:00
responseMessage.content = error.error;
2023-11-20 01:47:07 +00:00
}
} else {
2024-03-03 10:01:34 +00:00
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, { provider: 'Ollama' })
);
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: 'Ollama'
});
2023-11-20 01:47:07 +00:00
}
2023-12-13 23:31:07 +00:00
responseMessage.error = true;
2024-03-03 10:01:34 +00:00
responseMessage.content = $i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
provider: 'Ollama'
});
2023-12-13 23:31:07 +00:00
responseMessage.done = true;
messages = messages;
2023-11-20 01:47:07 +00:00
}
stopResponseFlag = false;
await tick();
2023-12-26 20:50:52 +00:00
2023-11-20 01:47:07 +00:00
if (autoScroll) {
2024-02-16 00:20:46 +00:00
scrollToBottom();
2023-11-20 01:47:07 +00:00
}
if (messages.length == 2 && messages.at(1).content !== '') {
2023-12-06 16:14:20 +00:00
window.history.replaceState(history.state, '', `/c/${_chatId}`);
2024-03-26 08:45:25 +00:00
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
2023-11-20 01:47:07 +00:00
}
};
2024-02-15 09:23:02 +00:00
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
const responseMessage = history.messages[responseMessageId];
2024-03-09 06:43:06 +00:00
const docs = messages
.filter((message) => message?.files ?? null)
.map((message) =>
message.files.filter((item) => item.type === 'doc' || item.type === 'collection')
)
.flat(1);
console.log(docs);
2024-05-02 09:20:57 +00:00
scrollToBottom();
try {
const [res, controller] = await generateOpenAIChatCompletion(
localStorage.token,
{
model: model.id,
stream: true,
messages: [
2024-05-19 18:54:55 +00:00
$settings.system || (responseMessage?.userContext ?? null)
2024-02-25 23:46:12 +00:00
? {
role: 'system',
2024-05-19 15:40:46 +00:00
content:
2024-05-19 18:54:55 +00:00
$settings.system + !!(responseMessage?.userContext ?? null)
2024-05-19 15:40:46 +00:00
? `\n\nUser Context:\n${responseMessage.userContext.join('\n')}`
: ''
2024-02-25 23:46:12 +00:00
}
: undefined,
...messages
]
.filter((message) => message)
.map((message, idx, arr) => ({
role: message.role,
...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
message.role === 'user'
? {
content: [
{
type: 'text',
text:
arr.length - 1 !== idx
? message.content
: message?.raContent ?? message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: {
content:
arr.length - 1 !== idx
? message.content
: message?.raContent ?? message.content
})
})),
seed: $settings?.options?.seed ?? undefined,
stop:
$settings?.options?.stop ?? undefined
? $settings.options.stop.map((str) =>
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined,
temperature: $settings?.options?.temperature ?? undefined,
top_p: $settings?.options?.top_p ?? undefined,
num_ctx: $settings?.options?.num_ctx ?? undefined,
frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
max_tokens: $settings?.options?.num_predict ?? undefined,
docs: docs.length > 0 ? docs : undefined,
citations: docs.length > 0
},
model?.source?.toLowerCase() === 'litellm'
? `${LITELLM_API_BASE_URL}/v1`
: `${OPENAI_API_BASE_URL}`
);
2024-04-01 04:02:29 +00:00
// Wait until history/message have been updated
await tick();
2024-04-01 04:02:29 +00:00
scrollToBottom();
if (res && res.ok && res.body) {
const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
for await (const update of textStream) {
const { value, done, citations, error } = update;
if (error) {
await handleOpenAIError(error, null, model, responseMessage);
break;
}
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
}
2023-11-20 01:47:07 +00:00
break;
}
if (citations) {
responseMessage.citations = citations;
continue;
}
2023-12-19 02:48:51 +00:00
if (responseMessage.content == '' && value == '\n') {
continue;
} else {
responseMessage.content += value;
messages = messages;
}
2023-12-19 02:48:51 +00:00
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(`OpenAI ${model}`, {
body: responseMessage.content,
icon: `${WEBUI_BASE_URL}/static/favicon.png`
});
}
2023-12-19 02:48:51 +00:00
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
2024-02-10 00:00:39 +00:00
if ($settings.responseAutoPlayback) {
await tick();
document.getElementById(`speak-button-${responseMessage.id}`)?.click();
}
2023-11-20 01:47:07 +00:00
if (autoScroll) {
scrollToBottom();
}
2024-01-07 10:54:31 +00:00
}
if ($chatId == _chatId) {
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
history: history
});
await chats.set(await getChatList(localStorage.token));
2024-01-05 03:20:49 +00:00
}
2023-11-20 01:47:07 +00:00
}
2024-01-05 03:20:49 +00:00
} else {
await handleOpenAIError(null, res, model, responseMessage);
2023-11-20 01:47:07 +00:00
}
} catch (error) {
await handleOpenAIError(error, null, model, responseMessage);
2024-01-05 03:20:49 +00:00
}
2024-05-19 15:40:46 +00:00
messages = messages;
2024-01-05 03:20:49 +00:00
stopResponseFlag = false;
await tick();
if (autoScroll) {
2024-02-16 00:20:46 +00:00
scrollToBottom();
2024-01-05 03:20:49 +00:00
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
2024-03-12 20:43:49 +00:00
2024-03-26 08:45:25 +00:00
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
2023-11-20 01:47:07 +00:00
}
};
2024-03-09 06:43:06 +00:00
// Surface an OpenAI-path failure to the user and mark `responseMessage` as an
// errored, finished message. Callers pass either a thrown value in `error` or
// a non-OK fetch Response in `res`.
const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
	let errorMessage = '';
	let innerError;

	if (error) {
		innerError = error;
	} else if (res !== null) {
		innerError = await res.json();
	}
	console.error(innerError);

	// Fixed: thrown values are not guaranteed to be objects, and `'detail' in
	// <primitive>` throws a TypeError; guard before using the `in` operator.
	if (innerError !== null && typeof innerError === 'object') {
		if ('detail' in innerError) {
			toast.error(innerError.detail);
			errorMessage = innerError.detail;
		} else if ('error' in innerError) {
			const inner = innerError.error;
			if (inner !== null && typeof inner === 'object' && 'message' in inner) {
				toast.error(inner.message);
				errorMessage = inner.message;
			} else {
				toast.error(inner);
				errorMessage = inner;
			}
		} else if ('message' in innerError) {
			toast.error(innerError.message);
			errorMessage = innerError.message;
		}
	} else if (innerError !== undefined && innerError !== null) {
		// Primitive throw (e.g. a string): report it verbatim.
		toast.error(`${innerError}`);
		errorMessage = `${innerError}`;
	}

	responseMessage.error = true;
	responseMessage.content =
		$i18n.t(`Uh-oh! There was an issue connecting to {{provider}}.`, {
			provider: model.name ?? model.id
		}) +
		'\n' +
		errorMessage;
	responseMessage.done = true;
	messages = messages;
};
2023-11-20 01:47:07 +00:00
// Signal every active streaming loop to abort on its next iteration.
const stopResponse = () => {
	console.log('stopResponse');
	stopResponseFlag = true;
};
2024-05-19 12:01:29 +00:00
const regenerateResponse = async (message) => {
2024-03-26 08:45:25 +00:00
console.log('regenerateResponse');
2024-05-19 12:01:29 +00:00
if (messages.length != 0) {
let userMessage = history.messages[message.parentId];
2024-03-26 08:45:25 +00:00
let userPrompt = userMessage.content;
2024-05-19 12:53:43 +00:00
if ((userMessage?.models ?? [...selectedModels]).length == 1) {
await sendPrompt(userPrompt, userMessage.id);
} else {
await sendPrompt(userPrompt, userMessage.id, message.model);
}
2024-03-26 08:45:25 +00:00
}
};
2024-02-15 09:23:02 +00:00
const continueGeneration = async () => {
console.log('continueGeneration');
const _chatId = JSON.parse(JSON.stringify($chatId));
if (messages.length != 0 && messages.at(-1).done == true) {
const responseMessage = history.messages[history.currentId];
2024-02-16 21:56:50 +00:00
responseMessage.done = false;
await tick();
2024-02-25 23:46:12 +00:00
const model = $models.filter((m) => m.id === responseMessage.model).at(0);
if (model) {
if (model?.external) {
await sendPromptOpenAI(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
} else
await sendPromptOllama(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
2024-02-15 09:23:02 +00:00
}
2024-02-25 23:46:12 +00:00
} else {
2024-03-03 10:01:34 +00:00
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
2024-02-15 09:23:02 +00:00
}
};
2024-03-26 08:45:25 +00:00
const generateChatTitle = async (userPrompt) => {
if ($settings?.title?.auto ?? true) {
const model = $models.find((model) => model.id === selectedModels[0]);
2023-11-20 01:47:07 +00:00
2024-03-26 08:45:25 +00:00
const titleModelId =
model?.external ?? false
? $settings?.title?.modelExternal ?? selectedModels[0]
: $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId);
2023-11-20 01:47:07 +00:00
2024-03-26 08:45:25 +00:00
console.log(titleModel);
const title = await generateTitle(
localStorage.token,
2024-03-26 08:45:25 +00:00
$settings?.title?.prompt ??
2024-03-07 18:51:00 +00:00
$i18n.t(
"Create a concise, 3-5 word phrase as a header for the following query, strictly adhering to the 3-5 word limit and avoiding the use of the word 'title':"
) + ' {{prompt}}',
2024-03-26 08:45:25 +00:00
titleModelId,
userPrompt,
titleModel?.external ?? false
2024-03-30 01:14:05 +00:00
? titleModel?.source?.toLowerCase() === 'litellm'
2024-03-26 08:45:25 +00:00
? `${LITELLM_API_BASE_URL}/v1`
: `${OPENAI_API_BASE_URL}`
: `${OLLAMA_API_BASE_URL}/v1`
);
2023-12-26 20:50:52 +00:00
2024-03-26 08:45:25 +00:00
return title;
} else {
2024-03-26 08:45:25 +00:00
return `${userPrompt}`;
2023-11-20 01:47:07 +00:00
}
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
2023-11-20 01:47:07 +00:00
title = _title;
}
2023-12-26 20:50:52 +00:00
2024-03-26 08:45:25 +00:00
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, { title: _title });
await chats.set(await getChatList(localStorage.token));
}
2023-11-20 01:47:07 +00:00
};
2024-01-06 10:33:00 +00:00
2024-01-18 10:10:07 +00:00
// Fetch the current chat's tags; treat any API failure as "no tags".
const getTags = async () => {
	try {
		return await getTagsById(localStorage.token, $chatId);
	} catch (error) {
		return [];
	}
};
const addTag = async (tagName) => {
const res = await addTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
2024-01-18 10:17:31 +00:00
chat = await updateChatById(localStorage.token, $chatId, {
2024-01-18 10:55:25 +00:00
tags: tags
2024-01-18 10:17:31 +00:00
});
2024-01-18 10:55:25 +00:00
_tags.set(await getAllChatTags(localStorage.token));
2024-01-18 10:10:07 +00:00
};
const deleteTag = async (tagName) => {
const res = await deleteTagById(localStorage.token, $chatId, tagName);
tags = await getTags();
2024-01-18 10:17:31 +00:00
chat = await updateChatById(localStorage.token, $chatId, {
2024-01-18 10:55:25 +00:00
tags: tags
2024-01-18 10:17:31 +00:00
});
2024-01-18 10:55:25 +00:00
_tags.set(await getAllChatTags(localStorage.token));
2024-01-18 10:10:07 +00:00
};
2024-01-06 10:33:00 +00:00
onMount(async () => {
	// Saved-chat routes are only meaningful when history persistence is on;
	// otherwise send the user back to the new-chat page.
	const saveChatHistory = $settings.saveChatHistory ?? true;
	if (!saveChatHistory) {
		await goto('/');
	}
});
2023-11-20 01:47:07 +00:00
</script>
<svelte:head>
2024-03-02 06:35:08 +00:00
<!-- Document title: truncated chat title plus the app name -->
<title>
{title
? `${title.length > 30 ? `${title.slice(0, 30)}...` : title} | ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
2023-11-20 04:39:13 +00:00
{#if loaded}
2024-05-02 06:11:16 +00:00
<!-- Shrink the layout when the sidebar is open (260px wide on md+) -->
<div
class="min-h-screen max-h-screen {$showSidebar
2024-05-15 18:20:36 +00:00
? 'md:max-w-[calc(100%-260px)]'
2024-05-02 07:07:04 +00:00
: ''} w-full max-w-full flex flex-col"
2024-05-02 06:11:16 +00:00
>
2024-02-15 22:43:10 +00:00
<Navbar
{title}
2024-04-04 02:30:25 +00:00
{chat}
2024-03-30 00:20:07 +00:00
bind:selectedModels
bind:showModelSelector
2024-02-15 22:43:10 +00:00
shareEnabled={messages.length > 0}
initNewChat={async () => {
// Cancel any in-flight Ollama request before starting a new chat.
if (currentRequestId !== null) {
2024-03-23 20:12:23 +00:00
await cancelOllamaRequest(localStorage.token, currentRequestId);
2024-02-15 22:43:10 +00:00
currentRequestId = null;
}
2024-02-15 22:43:10 +00:00
goto('/');
}}
/>
2024-02-16 00:20:46 +00:00
<div class="flex flex-col flex-auto">
<div
2024-05-02 06:11:16 +00:00
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
2024-02-16 00:20:46 +00:00
id="messages-container"
2024-03-03 22:42:01 +00:00
bind:this={messagesContainerElement}
2024-02-16 00:20:46 +00:00
on:scroll={(e) => {
2024-03-03 22:42:01 +00:00
// Keep auto-scroll on only while the user is (almost) at the bottom.
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
2024-02-16 00:20:46 +00:00
}}
>
2024-03-30 00:20:07 +00:00
<div class=" h-full w-full flex flex-col py-4">
2024-02-15 22:43:10 +00:00
<Messages
chatId={$chatId}
{selectedModels}
{selectedModelfiles}
{processing}
bind:history
bind:messages
bind:autoScroll
2024-05-17 06:25:55 +00:00
bind:prompt
2024-02-15 22:43:10 +00:00
bottomPadding={files.length > 0}
{sendPrompt}
{continueGeneration}
{regenerateResponse}
/>
</div>
2023-11-20 04:39:13 +00:00
</div>
2023-11-20 01:47:07 +00:00
</div>
</div>
2024-04-30 23:34:29 +00:00
<!-- Prompt input; "@model" selection binds back to atSelectedModel -->
<MessageInput
bind:files
bind:prompt
bind:autoScroll
2024-05-02 09:20:57 +00:00
bind:selectedModel={atSelectedModel}
2024-04-30 23:34:29 +00:00
{messages}
{submitPrompt}
{stopResponse}
/>
2023-11-20 04:39:13 +00:00
{/if}