2024-05-21 17:30:31 +00:00
< script lang = "ts" >
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
2024-06-03 01:03:30 +00:00
import mermaid from 'mermaid';
2024-05-21 17:30:31 +00:00
import { getContext , onMount , tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
2024-06-09 21:25:31 +00:00
import type { Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import { OLLAMA_API_BASE_URL , OPENAI_API_BASE_URL , WEBUI_BASE_URL } from '$lib/constants';
2024-05-21 17:30:31 +00:00
import {
chatId,
chats,
config,
2024-05-21 18:17:23 +00:00
type Model,
2024-05-21 17:30:31 +00:00
models,
settings,
showSidebar,
tags as _tags,
2024-05-26 19:18:43 +00:00
WEBUI_NAME,
2024-05-31 18:11:28 +00:00
banners,
2024-06-04 18:13:43 +00:00
user,
2024-06-07 06:29:08 +00:00
socket,
showCallOverlay
2024-05-21 17:30:31 +00:00
} from '$lib/stores';
2024-05-31 18:11:28 +00:00
import {
convertMessagesToHistory,
copyToClipboard,
promptTemplate,
splitStream
} from '$lib/utils';
2024-05-21 17:30:31 +00:00
2024-06-02 17:06:12 +00:00
import { generateChatCompletion } from '$lib/apis/ollama';
2024-05-21 17:30:31 +00:00
import {
addTagById,
createNewChat,
deleteTagById,
getAllChatTags,
getChatById,
getChatList,
getTagsById,
updateChatById
} from '$lib/apis/chats';
2024-06-09 21:53:10 +00:00
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
2024-06-09 21:25:31 +00:00
import { runWebSearch } from '$lib/apis/rag';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { queryMemory } from '$lib/apis/memories';
import { getUserSettings } from '$lib/apis/users';
2024-06-09 21:53:10 +00:00
import { chatCompleted , generateTitle , generateSearchQuery } from '$lib/apis';
2024-05-21 17:30:31 +00:00
2024-06-09 21:25:31 +00:00
import Banner from '../common/Banner.svelte';
2024-05-21 17:30:31 +00:00
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
2024-06-07 06:29:08 +00:00
import CallOverlay from './MessageInput/CallOverlay.svelte';
2024-06-09 22:19:36 +00:00
import { error } from '@sveltejs/kit';
2024-05-27 19:48:08 +00:00
2024-05-21 17:30:31 +00:00
// i18n instance provided via Svelte context by the root layout.
const i18n: Writable<i18nType> = getContext('i18n');

// Chat id passed in from the route (`/c/[id]`); empty string for a new chat.
export let chatIdProp = '';

let loaded = false;

let stopResponseFlag = false; // set by stopResponse() to cancel active streams
let autoScroll = true; // follow the stream; disabled when the user scrolls up
let processing = '';
let messagesContainerElement: HTMLDivElement;

let showModelSelector = true;

let selectedModels = [''];
let atSelectedModel: Model | undefined; // model picked via "@" mention, if any

let webSearchEnabled = false;

let chat = null;
let tags = [];

let title = '';
let prompt = '';
let files = [];
let messages = [];
// Branching message tree: `messages` maps id -> message (each message carries
// parentId/childrenIds); `currentId` points at the leaf of the active branch.
let history = {
	messages: {},
	currentId: null
};
// Rebuild the flat `messages` list by walking the history tree from the
// current leaf up to the root whenever the selected branch changes.
$: if (history.currentId !== null) {
	let _messages = [];

	let currentMessage = history.messages[history.currentId];
	while (currentMessage !== null) {
		_messages.unshift({ ...currentMessage });
		currentMessage =
			currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
	}
	messages = _messages;
} else {
	messages = [];
}

// When navigating to an existing chat, load it and focus the input;
// redirect home if loading fails.
$: if (chatIdProp) {
	(async () => {
		if (await loadChat()) {
			await tick();
			loaded = true;

			window.setTimeout(() => scrollToBottom(), 0);
			const chatInput = document.getElementById('chat-textarea');
			chatInput?.focus();
		} else {
			await goto('/');
		}
	})();
}
onMount(async () => {
	// No active chat id -> initialise a fresh chat. Otherwise, when chat
	// history saving is disabled, a temporary chat cannot be revisited, so
	// navigate back home instead of showing stale state.
	if (!$chatId) {
		await initNewChat();
	} else {
		if (!($settings.saveChatHistory ?? true)) {
			await goto('/');
		}
	}
});
//////////////////////////
// Web functions
//////////////////////////

/**
 * Reset all chat state for a brand-new chat: clears the URL back to `/`,
 * picks the model selection (URL `models` param > user settings > server
 * default), optionally auto-submits a `?q=` query-string prompt, refreshes
 * user settings from the backend, and focuses the chat input.
 */
const initNewChat = async () => {
	// NOTE(review): `history` here is the component's local message tree, which
	// shadows window.history, so `history.state` is undefined — confirm intended.
	window.history.replaceState(history.state, '', `/`);

	await chatId.set('');

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	// Model selection priority: URL param > saved user settings > server default.
	if ($page.url.searchParams.get('models')) {
		selectedModels = $page.url.searchParams.get('models')?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		console.log($config?.default_models.split(',') ?? '');
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Support pre-filled prompts via `?q=`; submit immediately when present.
	if ($page.url.searchParams.get('q')) {
		prompt = $page.url.searchParams.get('q') ?? '';

		if (prompt) {
			await tick();
			submitPrompt(prompt);
		}
	}

	// Drop any selected model ids that no longer exist on the server.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	// Prefer server-side user settings; fall back to localStorage.
	const userSettings = await getUserSettings(localStorage.token);

	if (userSettings) {
		settings.set(userSettings.ui);
	} else {
		settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
	}

	const chatInput = document.getElementById('chat-textarea');
	setTimeout(() => chatInput?.focus(), 0);
};
/**
 * Load the chat identified by `chatIdProp` from the backend and hydrate
 * component state (models, history tree, title, per-chat settings overrides).
 * Returns true on success; null/undefined when the chat or its content is
 * missing, which the caller treats as "redirect home".
 */
const loadChat = async () => {
	chatId.set(chatIdProp);
	chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
		await goto('/');
		return null;
	});

	if (chat) {
		tags = await getTags();
		const chatContent = chat.chat;

		if (chatContent) {
			console.log(chatContent);

			selectedModels =
				(chatContent?.models ?? undefined) !== undefined
					? chatContent.models
					: [chatContent.models ?? ''];
			// Older chats stored a flat `messages` array; convert it to the
			// tree-shaped history structure when `history` is absent.
			history =
				(chatContent?.history ?? undefined) !== undefined
					? chatContent.history
					: convertMessagesToHistory(chatContent.messages);
			title = chatContent.title;

			const userSettings = await getUserSettings(localStorage.token);

			if (userSettings) {
				await settings.set(userSettings.ui);
			} else {
				await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
			}

			// Per-chat system prompt / params override the global settings.
			await settings.set({
				...$settings,
				system: chatContent.system ?? $settings.system,
				params: chatContent.options ?? $settings.params
			});

			autoScroll = true;
			await tick();

			// Mark the last message done so a previously interrupted stream
			// doesn't leave the UI stuck in a "generating" state.
			if (messages.length > 0) {
				history.messages[messages.at(-1).id].done = true;
			}
			await tick();

			return true;
		} else {
			return null;
		}
	}
};
// Scroll the message container to its bottom once the DOM has settled.
const scrollToBottom = async () => {
	await tick();

	const container = messagesContainerElement;
	if (!container) {
		return;
	}
	container.scrollTop = container.scrollHeight;
};
2024-05-30 09:37:43 +00:00
// Build the linear conversation ending at `responseMessageId` by following
// parent links up to the root; the returned array is in root-first order.
const createMessagesList = (responseMessageId) => {
	const chain = [];

	let cursorId = responseMessageId;
	while (true) {
		const message = history.messages[cursorId];
		chain.unshift(message);
		if (!message.parentId) {
			break;
		}
		cursorId = message.parentId;
	}

	return chain;
};
2024-06-03 02:02:46 +00:00
/**
 * Post-completion hook: re-renders any mermaid diagrams in the finished
 * response and notifies the backend that the chat turn completed. The
 * backend may return modified messages (e.g. filtered content), which are
 * merged back into local history; when content was changed server-side the
 * original text is preserved under `originalContent`.
 */
const chatCompletedHandler = async (modelId, messages) => {
	await mermaid.run({
		querySelector: '.mermaid'
	});

	const res = await chatCompleted(localStorage.token, {
		model: modelId,
		messages: messages.map((m) => ({
			id: m.id,
			role: m.role,
			content: m.content,
			timestamp: m.timestamp
		})),
		chat_id: $chatId
	}).catch((error) => {
		console.error(error);
		return null;
	});

	if (res !== null) {
		// Update chat history with the new messages
		for (const message of res.messages) {
			history.messages[message.id] = {
				...history.messages[message.id],
				...(history.messages[message.id].content !== message.content
					? { originalContent: history.messages[message.id].content }
					: {}),
				...message
			};
		}
	}
};
2024-06-04 18:13:43 +00:00
// Start a 1-second heartbeat reporting model usage for this chat over the
// websocket; returns the interval handle so the caller can clearInterval it.
const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
	const emitUsage = () => {
		$socket?.emit('usage', {
			action: 'chat',
			model: modelId,
			chat_id: chatId
		});
	};

	return setInterval(emitUsage, 1000);
};
2024-05-21 17:30:31 +00:00
//////////////////////////
// Chat functions
//////////////////////////

/**
 * Validate and submit a user prompt: guards against a missing model
 * selection, an in-flight response, and still-uploading files; then clears
 * the input, appends the user message to the history tree and dispatches
 * generation. Returns the per-model responses from sendPrompt.
 */
const submitPrompt = async (userPrompt, _user = null) => {
	let _responses = [];
	console.log('submitPrompt', $chatId);

	// Re-validate the selection against the currently available models.
	selectedModels = selectedModels.map((modelId) =>
		$models.map((m) => m.id).includes(modelId) ? modelId : ''
	);

	if (selectedModels.includes('')) {
		toast.error($i18n.t('Model not selected'));
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			$i18n.t(
				`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
			)
		);
	} else {
		// Reset chat input textarea
		const chatTextAreaElement = document.getElementById('chat-textarea');

		if (chatTextAreaElement) {
			chatTextAreaElement.value = '';
			chatTextAreaElement.style.height = '';
		}

		// Snapshot the attachments before clearing the input state so the
		// message keeps its own copy.
		const _files = JSON.parse(JSON.stringify(files));
		files = [];

		prompt = '';

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: _files.length > 0 ? _files : undefined,
			timestamp: Math.floor(Date.now() / 1000), // Unix epoch
			// De-duplicated list of target models for this turn.
			models: selectedModels.filter((m, mIdx) => selectedModels.indexOf(m) === mIdx)
		};

		// Add message to history and Set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Send prompt
		_responses = await sendPrompt(userPrompt, userMessageId);
	}

	return _responses;
};
2024-06-11 00:58:07 +00:00
/**
 * Fan a prompt out to the target models (an explicit `modelId`, an
 * "@"-mentioned model, or every selected model): creates a placeholder
 * assistant message per model, persists a new chat on the first exchange,
 * optionally injects user memories and web-search results, then streams each
 * model's completion concurrently. Returns the response texts.
 */
const sendPrompt = async (prompt, parentId, modelId = null, newChat = true) => {
	let _responses = [];

	// If modelId is provided, use it, else use selected model
	let selectedModelIds = modelId
		? [modelId]
		: atSelectedModel !== undefined
			? [atSelectedModel.id]
			: selectedModels;

	// Create response messages for each selected model
	const responseMessageIds = {};
	for (const modelId of selectedModelIds) {
		const model = $models.filter((m) => m.id === modelId).at(0);

		if (model) {
			let responseMessageId = uuidv4();
			let responseMessage = {
				parentId: parentId,
				id: responseMessageId,
				childrenIds: [],
				role: 'assistant',
				content: '',
				model: model.id,
				modelName: model.name ?? model.id,
				userContext: null,
				timestamp: Math.floor(Date.now() / 1000) // Unix epoch
			};

			// Add message to history and Set currentId to messageId
			history.messages[responseMessageId] = responseMessage;
			history.currentId = responseMessageId;

			// Append messageId to childrenIds of parent message
			if (parentId !== null) {
				history.messages[parentId].childrenIds = [
					...history.messages[parentId].childrenIds,
					responseMessageId
				];
			}

			responseMessageIds[modelId] = responseMessageId;
		}
	}
	await tick();

	// Create new chat if only one message in messages
	if (newChat && messages.length == 2) {
		if ($settings.saveChatHistory ?? true) {
			chat = await createNewChat(localStorage.token, {
				id: $chatId,
				title: $i18n.t('New Chat'),
				models: selectedModels,
				system: $settings.system ?? undefined,
				options: {
					...($settings.params ?? {})
				},
				messages: messages,
				history: history,
				tags: [],
				timestamp: Date.now()
			});
			await chats.set(await getChatList(localStorage.token));
			await chatId.set(chat.id);
		} else {
			await chatId.set('local');
		}
		await tick();
	}

	// Snapshot the chat id so late stream chunks arriving after the user has
	// navigated to another chat can be detected and dropped.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	await Promise.all(
		selectedModelIds.map(async (modelId) => {
			console.log('modelId', modelId);
			const model = $models.filter((m) => m.id === modelId).at(0);

			if (model) {
				// If there are image files, check if model is vision capable
				const hasImages = messages.some((message) =>
					message.files?.some((file) => file.type === 'image')
				);

				if (hasImages && !(model.info?.meta?.capabilities?.vision ?? true)) {
					toast.error(
						$i18n.t('Model {{modelName}} is not vision capable', {
							modelName: model.name ?? model.id
						})
					);
				}

				let responseMessageId = responseMessageIds[modelId];
				let responseMessage = history.messages[responseMessageId];

				// Inject stored user memories as extra system context when enabled.
				let userContext = null;
				if ($settings?.memory ?? false) {
					if (userContext === null) {
						const res = await queryMemory(localStorage.token, prompt).catch((error) => {
							toast.error(error);
							return null;
						});

						if (res) {
							if (res.documents[0].length > 0) {
								userContext = res.documents.reduce((acc, doc, index) => {
									const createdAtTimestamp = res.metadatas[index][0].created_at;
									const createdAtDate = new Date(createdAtTimestamp * 1000)
										.toISOString()
										.split('T')[0];
									acc.push(`${index + 1}. [${createdAtDate}]. ${doc[0]}`);
									return acc;
								}, []);
							}

							console.log(userContext);
						}
					}
				}
				responseMessage.userContext = userContext;

				// Usage heartbeat for the duration of this generation.
				const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);

				if (webSearchEnabled) {
					await getWebSearchResults(model.id, parentId, responseMessageId);
				}

				let _response = null;
				if (model?.owned_by === 'openai') {
					_response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
				} else if (model) {
					_response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
				}
				_responses.push(_response);

				console.log('chatEventEmitter', chatEventEmitter);
				if (chatEventEmitter) clearInterval(chatEventEmitter);
			} else {
				toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));

	return _responses;
};
2024-05-27 21:25:36 +00:00
const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
2024-05-22 21:28:45 +00:00
const responseMessage = history.messages[responseId];
2024-05-27 21:25:36 +00:00
2024-06-10 18:40:58 +00:00
responseMessage.statusHistory = [
{
done: false,
action: 'web_search',
description: $i18n.t('Generating search query')
}
];
2024-05-22 21:28:45 +00:00
messages = messages;
2024-05-27 19:48:08 +00:00
2024-05-27 21:55:08 +00:00
const prompt = history.messages[parentId].content;
2024-06-09 22:19:36 +00:00
let searchQuery = await generateSearchQuery(localStorage.token, model, messages, prompt).catch(
(error) => {
console.log(error);
return prompt;
}
);
2024-06-09 22:08:23 +00:00
if (!searchQuery) {
toast.warning($i18n.t('No search query generated'));
2024-06-10 18:40:58 +00:00
responseMessage.statusHistory.push({
2024-06-09 22:08:23 +00:00
done: true,
error: true,
2024-06-10 18:40:58 +00:00
action: 'web_search',
2024-06-09 22:08:23 +00:00
description: 'No search query generated'
2024-06-10 18:40:58 +00:00
});
2024-06-09 22:08:23 +00:00
messages = messages;
2024-05-22 21:28:45 +00:00
}
2024-05-27 19:48:08 +00:00
2024-06-10 18:40:58 +00:00
responseMessage.statusHistory.push({
done: false,
action: 'web_search',
description: $i18n.t(`Searching "{{ searchQuery }} "`, { searchQuery } )
});
2024-05-22 21:28:45 +00:00
messages = messages;
2024-05-27 19:48:08 +00:00
2024-06-02 02:52:12 +00:00
const results = await runWebSearch(localStorage.token, searchQuery).catch((error) => {
console.log(error);
toast.error(error);
return null;
});
if (results) {
2024-06-10 18:40:58 +00:00
responseMessage.statusHistory.push({
2024-06-02 02:52:12 +00:00
done: true,
2024-06-10 18:40:58 +00:00
action: 'web_search',
2024-06-02 02:52:12 +00:00
description: $i18n.t('Searched {{ count }} sites', { count : results.filenames.length } ),
2024-06-10 18:40:58 +00:00
query: searchQuery,
2024-06-02 02:52:12 +00:00
urls: results.filenames
2024-06-10 18:40:58 +00:00
});
2024-06-02 02:52:12 +00:00
if (responseMessage?.files ?? undefined === undefined) {
responseMessage.files = [];
}
responseMessage.files.push({
collection_name: results.collection_name,
name: searchQuery,
type: 'web_search_results',
urls: results.filenames
});
messages = messages;
} else {
2024-06-10 18:40:58 +00:00
responseMessage.statusHistory.push({
2024-05-27 21:25:36 +00:00
done: true,
error: true,
2024-06-10 18:40:58 +00:00
action: 'web_search',
2024-05-27 21:25:36 +00:00
description: 'No search results found'
2024-06-10 18:40:58 +00:00
});
2024-05-22 21:28:45 +00:00
messages = messages;
}
};
2024-05-21 17:30:31 +00:00
const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
2024-06-07 07:04:47 +00:00
let _response = null;
2024-05-21 17:30:31 +00:00
const responseMessage = history.messages[responseMessageId];
// Wait until history/message have been updated
await tick();
// Scroll down
scrollToBottom();
const messagesBody = [
$settings.system || (responseMessage?.userContext ?? null)
? {
role: 'system',
2024-05-31 18:11:28 +00:00
content: `${ promptTemplate ( $settings ? . system ?? '' , $user . name )} ${
2024-05-21 17:30:31 +00:00
responseMessage?.userContext ?? null
? `\n\nUser Context:\n${( responseMessage ? . userContext ?? []). join ( '\n' )} `
: ''
}`
}
: undefined,
...messages
]
2024-05-22 01:35:59 +00:00
.filter((message) => message?.content?.trim())
2024-05-21 17:30:31 +00:00
.map((message, idx, arr) => {
// Prepare the base message object
const baseMessage = {
role: message.role,
2024-05-25 05:21:57 +00:00
content: message.content
2024-05-21 17:30:31 +00:00
};
// Extract and format image URLs if any exist
const imageUrls = message.files
?.filter((file) => file.type === 'image')
.map((file) => file.url.slice(file.url.indexOf(',') + 1));
// Add images array only if it contains elements
if (imageUrls && imageUrls.length > 0 && message.role === 'user') {
baseMessage.images = imageUrls;
}
return baseMessage;
});
let lastImageIndex = -1;
// Find the index of the last object with images
messagesBody.forEach((item, index) => {
if (item.images) {
lastImageIndex = index;
}
});
// Remove images from all but the last one
messagesBody.forEach((item, index) => {
if (index !== lastImageIndex) {
delete item.images;
}
});
2024-06-10 00:34:42 +00:00
let docs = [];
2024-06-10 01:59:45 +00:00
if (model?.info?.meta?.knowledge ?? false) {
2024-06-10 00:34:42 +00:00
docs = model.info.meta.knowledge;
}
docs = [
...docs,
...messages
.filter((message) => message?.files ?? null)
.map((message) =>
message.files.filter((item) =>
['doc', 'collection', 'web_search_results'].includes(item.type)
)
2024-05-27 21:25:36 +00:00
)
2024-06-10 00:34:42 +00:00
.flat(1)
].filter(
(item, index, array) =>
array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
);
2024-05-21 17:30:31 +00:00
const [res, controller] = await generateChatCompletion(localStorage.token, {
2024-06-10 00:34:42 +00:00
model: model.id,
2024-05-21 17:30:31 +00:00
messages: messagesBody,
options: {
2024-05-25 05:21:57 +00:00
...($settings.params ?? {} ),
2024-05-21 17:30:31 +00:00
stop:
2024-05-25 05:21:57 +00:00
$settings?.params?.stop ?? undefined
? $settings.params.stop.map((str) =>
2024-05-21 17:30:31 +00:00
decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
2024-05-25 07:58:20 +00:00
: undefined,
2024-05-25 09:04:47 +00:00
num_predict: $settings?.params?.max_tokens ?? undefined,
repeat_penalty: $settings?.params?.frequency_penalty ?? undefined
2024-05-21 17:30:31 +00:00
},
format: $settings.requestFormat ?? undefined,
keep_alive: $settings.keepAlive ?? undefined,
docs: docs.length > 0 ? docs : undefined,
2024-05-30 09:04:29 +00:00
citations: docs.length > 0,
chat_id: $chatId
2024-05-21 17:30:31 +00:00
});
if (res && res.ok) {
console.log('controller', controller);
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value , done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
if (stopResponseFlag) {
controller.abort('User: Stop Response');
2024-05-30 09:04:29 +00:00
} else {
2024-05-30 09:37:43 +00:00
const messages = createMessagesList(responseMessageId);
2024-06-10 00:34:42 +00:00
await chatCompletedHandler(model.id, messages);
2024-05-21 17:30:31 +00:00
}
2024-06-07 07:04:47 +00:00
_response = responseMessage.content;
2024-05-21 17:30:31 +00:00
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
if ('citations' in data) {
responseMessage.citations = data.citations;
continue;
}
if ('detail' in data) {
throw data;
}
2024-06-02 17:06:12 +00:00
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
2024-05-21 17:30:31 +00:00
} else {
2024-06-02 17:06:12 +00:00
responseMessage.content += data.message.content;
2024-05-21 17:30:31 +00:00
messages = messages;
2024-06-02 17:06:12 +00:00
}
} else {
responseMessage.done = true;
if (responseMessage.content == '') {
responseMessage.error = {
code: 400,
content: `Oops! No text generated from Ollama, Please try again.`
};
}
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
load_duration: data.load_duration,
sample_count: data.sample_count,
sample_duration: data.sample_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
if ($settings.notificationEnabled && !document.hasFocus()) {
const notification = new Notification(
selectedModelfile
? `${
selectedModelfile.title.charAt(0).toUpperCase() +
selectedModelfile.title.slice(1)
}`
2024-06-10 00:34:42 +00:00
: `${ model . id } `,
2024-06-02 17:06:12 +00:00
{
body: responseMessage.content,
icon: selectedModelfile?.imageUrl ?? `${ WEBUI_BASE_URL } /static/favicon.png`
}
);
}
if ($settings.responseAutoCopy) {
copyToClipboard(responseMessage.content);
}
2024-05-21 17:30:31 +00:00
2024-06-08 04:10:00 +00:00
if ($settings.responseAutoPlayback && !$showCallOverlay) {
2024-06-02 17:06:12 +00:00
await tick();
document.getElementById(`speak-button-${ responseMessage . id } `)?.click();
2024-05-21 17:30:31 +00:00
}
}
}
}
} catch (error) {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
}
break;
}
if (autoScroll) {
scrollToBottom();
}
}
if ($chatId == _chatId) {
if ($settings.saveChatHistory ?? true) {
chat = await updateChatById(localStorage.token, _chatId, {
messages: messages,
2024-05-23 05:05:13 +00:00
history: history,
models: selectedModels
2024-05-21 17:30:31 +00:00
});
await chats.set(await getChatList(localStorage.token));
}
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
2024-06-01 17:19:27 +00:00
responseMessage.error = { content : error.detail } ;
2024-05-21 17:30:31 +00:00
} else {
toast.error(error.error);
2024-06-01 17:19:27 +00:00
responseMessage.error = { content : error.error } ;
2024-05-21 17:30:31 +00:00
}
} else {
toast.error(
$i18n.t(`Uh-oh! There was an issue connecting to {{ provider }} .`, { provider : 'Ollama' } )
);
2024-06-01 17:19:27 +00:00
responseMessage.error = {
content: $i18n.t(`Uh-oh! There was an issue connecting to {{ provider }} .`, {
provider: 'Ollama'
})
};
2024-05-21 17:30:31 +00:00
}
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
scrollToBottom();
}
if (messages.length == 2 && messages.at(1).content !== '') {
window.history.replaceState(history.state, '', `/c/${ _chatId } `);
const _title = await generateChatTitle(userPrompt);
await setChatTitle(_chatId, _title);
}
2024-06-07 07:04:47 +00:00
return _response;
2024-05-21 17:30:31 +00:00
};
/**
 * Stream a completion from an OpenAI-compatible model into `responseMessage`.
 * Builds the OpenAI-style payload (system prompt + optional memory context,
 * multi-part content with image_url entries for vision input), consumes the
 * text stream, tracks token usage when reported, and persists the chat.
 * Returns the final response text (or null on failure).
 */
const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
	let _response = null;
	const responseMessage = history.messages[responseMessageId];

	// Model knowledge plus doc/collection/web-search attachments,
	// de-duplicated by deep (JSON) equality.
	let docs = [];

	if (model?.info?.meta?.knowledge ?? false) {
		docs = model.info.meta.knowledge;
	}

	docs = [
		...docs,
		...messages
			.filter((message) => message?.files ?? null)
			.map((message) =>
				message.files.filter((item) =>
					['doc', 'collection', 'web_search_results'].includes(item.type)
				)
			)
			.flat(1)
	].filter(
		(item, index, array) =>
			array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
	);

	scrollToBottom();

	try {
		const [res, controller] = await generateOpenAIChatCompletion(
			localStorage.token,
			{
				model: model.id,
				stream: true,
				// Only request usage reporting when the model declares support.
				stream_options:
					model.info?.meta?.capabilities?.usage ?? false
						? {
								include_usage: true
							}
						: undefined,
				messages: [
					$settings.system || (responseMessage?.userContext ?? null)
						? {
								role: 'system',
								content: `${promptTemplate($settings?.system ?? '', $user.name)}${
									responseMessage?.userContext ?? null
										? `\n\nUser Context:\n${(responseMessage?.userContext ?? []).join('\n')}`
										: ''
								}`
							}
						: undefined,
					...messages
				]
					.filter((message) => message?.content?.trim())
					.map((message, idx, arr) => ({
						role: message.role,
						// User messages with images become multi-part content; the
						// final message may carry RAG-augmented `raContent` instead.
						...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
						message.role === 'user'
							? {
									content: [
										{
											type: 'text',
											text:
												arr.length - 1 !== idx
													? message.content
													: message?.raContent ?? message.content
										},
										...message.files
											.filter((file) => file.type === 'image')
											.map((file) => ({
												type: 'image_url',
												image_url: {
													url: file.url
												}
											}))
									]
								}
							: {
									content:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
								})
					})),
				seed: $settings?.params?.seed ?? undefined,
				stop:
					$settings?.params?.stop ?? undefined
						? $settings.params.stop.map((str) =>
								decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
							)
						: undefined,
				temperature: $settings?.params?.temperature ?? undefined,
				top_p: $settings?.params?.top_p ?? undefined,
				frequency_penalty: $settings?.params?.frequency_penalty ?? undefined,
				max_tokens: $settings?.params?.max_tokens ?? undefined,
				docs: docs.length > 0 ? docs : undefined,
				citations: docs.length > 0,
				chat_id: $chatId
			},
			`${OPENAI_API_BASE_URL}`
		);

		// Wait until history/message have been updated
		await tick();

		scrollToBottom();

		if (res && res.ok && res.body) {
			const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
			let lastUsage = null;

			for await (const update of textStream) {
				const { value, done, citations, error, usage } = update;
				if (error) {
					await handleOpenAIError(error, null, model, responseMessage);
					break;
				}
				// Stop on stream end, user cancel, or chat navigation.
				if (done || stopResponseFlag || _chatId !== $chatId) {
					responseMessage.done = true;
					messages = messages;

					if (stopResponseFlag) {
						controller.abort('User: Stop Response');
					} else {
						const messages = createMessagesList(responseMessageId);
						await chatCompletedHandler(model.id, messages);
					}

					_response = responseMessage.content;
					break;
				}

				if (usage) {
					lastUsage = usage;
				}

				if (citations) {
					responseMessage.citations = citations;
					continue;
				}

				// Skip a leading bare newline; otherwise append the chunk.
				if (responseMessage.content == '' && value == '\n') {
					continue;
				} else {
					responseMessage.content += value;
					messages = messages;
				}

				if (autoScroll) {
					scrollToBottom();
				}
			}

			if ($settings.notificationEnabled && !document.hasFocus()) {
				const notification = new Notification(`${model.id}`, {
					body: responseMessage.content,
					icon: `${WEBUI_BASE_URL}/static/favicon.png`
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			if ($settings.responseAutoPlayback && !$showCallOverlay) {
				await tick();
				document.getElementById(`speak-button-${responseMessage.id}`)?.click();
			}

			if (lastUsage) {
				responseMessage.info = { ...lastUsage, openai: true };
			}

			if ($chatId == _chatId) {
				if ($settings.saveChatHistory ?? true) {
					chat = await updateChatById(localStorage.token, _chatId, {
						models: selectedModels,
						messages: messages,
						history: history
					});
					await chats.set(await getChatList(localStorage.token));
				}
			}
		} else {
			await handleOpenAIError(null, res, model, responseMessage);
		}
	} catch (error) {
		await handleOpenAIError(error, null, model, responseMessage);
	}
	messages = messages;

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		scrollToBottom();
	}

	// After the first exchange, move to the /c/<id> URL and auto-title.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);

		const _title = await generateChatTitle(userPrompt);
		await setChatTitle(_chatId, _title);
	}

	return _response;
};
const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
let errorMessage = '';
let innerError;
if (error) {
innerError = error;
} else if (res !== null) {
innerError = await res.json();
}
console.error(innerError);
if ('detail' in innerError) {
toast.error(innerError.detail);
errorMessage = innerError.detail;
} else if ('error' in innerError) {
if ('message' in innerError.error) {
toast.error(innerError.error.message);
errorMessage = innerError.error.message;
} else {
toast.error(innerError.error);
errorMessage = innerError.error;
}
} else if ('message' in innerError) {
toast.error(innerError.message);
errorMessage = innerError.message;
}
2024-06-01 17:19:27 +00:00
responseMessage.error = {
2024-06-01 17:33:49 +00:00
content:
$i18n.t(`Uh-oh! There was an issue connecting to {{ provider }} .`, {
provider: model.name ?? model.id
}) +
'\n' +
errorMessage
2024-06-01 17:19:27 +00:00
};
2024-05-21 17:30:31 +00:00
responseMessage.done = true;
messages = messages;
};
// Signal the in-flight generation loop to abort streaming on its next chunk.
const stopResponse = () => {
	console.log('stopResponse');
	stopResponseFlag = true;
};
const regenerateResponse = async (message) => {
console.log('regenerateResponse');
if (messages.length != 0) {
let userMessage = history.messages[message.parentId];
let userPrompt = userMessage.content;
if ((userMessage?.models ?? [...selectedModels]).length == 1) {
2024-06-11 00:58:07 +00:00
await sendPrompt(userPrompt, userMessage.id, undefined, false);
2024-05-21 17:30:31 +00:00
} else {
2024-06-11 00:58:07 +00:00
await sendPrompt(userPrompt, userMessage.id, message.model, false);
2024-05-21 17:30:31 +00:00
}
}
};
const continueGeneration = async () => {
console.log('continueGeneration');
const _chatId = JSON.parse(JSON.stringify($chatId));
if (messages.length != 0 && messages.at(-1).done == true) {
const responseMessage = history.messages[history.currentId];
responseMessage.done = false;
await tick();
const model = $models.filter((m) => m.id === responseMessage.model).at(0);
if (model) {
2024-05-27 21:25:36 +00:00
if (model?.owned_by === 'openai') {
2024-05-21 17:30:31 +00:00
await sendPromptOpenAI(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
} else
await sendPromptOllama(
model,
history.messages[responseMessage.parentId].content,
responseMessage.id,
_chatId
);
}
} else {
toast.error($i18n.t(`Model {{ modelId }} not found`, { modelId } ));
}
};
const generateChatTitle = async (userPrompt) => {
if ($settings?.title?.auto ?? true) {
const title = await generateTitle(
localStorage.token,
2024-06-09 21:25:31 +00:00
selectedModels[0],
2024-05-21 17:30:31 +00:00
userPrompt,
2024-06-09 21:25:31 +00:00
$chatId
).catch((error) => {
console.error(error);
return 'New Chat';
});
2024-05-21 17:30:31 +00:00
return title;
} else {
return `${ userPrompt } `;
}
};
// Apply a new title: update the visible header only when the renamed chat is
// the active one, and persist it unless chat-history saving is disabled.
const setChatTitle = async (_chatId, _title) => {
	const isActiveChat = _chatId === $chatId;
	if (isActiveChat) {
		title = _title;
	}

	const saveHistory = $settings.saveChatHistory ?? true;
	if (saveHistory) {
		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
		await chats.set(await getChatList(localStorage.token));
	}
};
// Best-effort fetch of the active chat's tags; an API failure degrades to an
// empty tag list rather than surfacing an error.
const getTags = async () => {
	try {
		return await getTagsById(localStorage.token, $chatId);
	} catch (error) {
		return [];
	}
};
< / script >
<!-- Document title: chat title truncated to 30 chars, suffixed with app name -->
< svelte:head >
< title >
{ title
? `${ title . length > 30 ? `$ { title . slice ( 0 , 30 )}... ` : title } | ${ $WEBUI_NAME } `
: `${ $WEBUI_NAME } `}
< / title >
< / svelte:head >
2024-06-07 21:08:04 +00:00
<!-- Voice-call overlay; shares the prompt submitter and attached files -->
< CallOverlay { submitPrompt } bind:files />
2024-06-07 06:29:08 +00:00
2024-05-21 17:30:31 +00:00
<!-- Render gate: new chats render immediately; existing chats wait for load -->
{ #if ! chatIdProp || ( loaded && chatIdProp )}
< div
2024-06-05 15:43:23 +00:00
class="h-screen max-h-[100dvh] { $showSidebar
2024-05-21 17:30:31 +00:00
? 'md:max-w-[calc(100%-260px)]'
: ''} w-full max-w-full flex flex-col"
>
<!-- Top bar: model selector, share (only once messages exist), new-chat -->
< Navbar
{ title }
bind:selectedModels
bind:showModelSelector
shareEnabled={ messages . length > 0 }
{ chat }
{ initNewChat }
/>
2024-05-26 19:18:43 +00:00
2024-06-02 03:21:26 +00:00
<!-- Admin banners: only on an empty, unsaved chat with at most one model -->
{ #if $banners . length > 0 && messages . length === 0 && ! $chatId && selectedModels . length <= 1 }
2024-05-26 19:41:47 +00:00
< div
class="absolute top-[4.25rem] w-full { $showSidebar ? 'md:max-w-[calc(100%-260px)]' : '' } "
>
2024-05-26 19:32:57 +00:00
< div class = " flex flex-col gap-1 w-full" >
<!-- Dismissible banners are hidden once their id is in localStorage -->
{ #each $banners . filter ( ( b ) => ( b . dismissible ? ! JSON . parse ( localStorage . getItem ( 'dismissedBannerIds' ) ?? '[]' ). includes ( b . id ) : true ) ) as banner }
< Banner
{ banner }
on:dismiss={( e ) => {
const bannerId = e.detail;
localStorage.setItem(
'dismissedBannerIds',
JSON.stringify(
[
bannerId,
...JSON.parse(localStorage.getItem('dismissedBannerIds') ?? '[]')
].filter((id) => $banners.find((b) => b.id === id))
)
);
}}
/>
{ /each }
< / div >
2024-05-26 19:18:43 +00:00
< / div >
{ /if }
2024-05-21 17:30:31 +00:00
< div class = "flex flex-col flex-auto" >
<!-- Scrollable message list; auto-scroll stays on while pinned to bottom -->
< div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full"
id="messages-container"
bind:this={ messagesContainerElement }
on:scroll={( e ) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop < =
messagesContainerElement.clientHeight + 5;
}}
>
< div class = " h-full w-full flex flex-col { chatIdProp ? 'py-4' : 'pt-2 pb-4' } " >
< Messages
chatId={ $chatId }
{ selectedModels }
{ processing }
bind:history
bind:messages
bind:autoScroll
bind:prompt
bottomPadding={ files . length > 0 }
{ sendPrompt }
{ continueGeneration }
{ regenerateResponse }
/>
< / div >
< / div >
2024-06-02 01:11:54 +00:00
<!-- Composer: prompt box, file attachments, web-search and @model toggles -->
< MessageInput
bind:files
bind:prompt
bind:autoScroll
bind:webSearchEnabled
bind:atSelectedModel
{ selectedModels }
{ messages }
{ submitPrompt }
{ stopResponse }
/>
2024-05-21 17:30:31 +00:00
< / div >
< / div >
{ /if }