open-webui/src/routes/(app)/c/[id]/+page.svelte

<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import toast from 'svelte-french-toast';
import { OLLAMA_API_BASE_URL } from '$lib/constants';
import { onMount, tick } from 'svelte';
import { convertMessagesToHistory, splitStream } from '$lib/utils';
import { goto } from '$app/navigation';
import { config, modelfiles, user, settings, db, chats, chatId } from '$lib/stores';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
import Navbar from '$lib/components/layout/Navbar.svelte';
import { page } from '$app/stores';
let loaded = false;
let stopResponseFlag = false;
let autoScroll = true;
// let chatId = $page.params.id;
let selectedModels = [''];
let selectedModelfile = null;
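// Resolve the modelfile for the single selected model (if any); used for suggestion prompts and display.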
$: selectedModelfile =
selectedModels.length === 1 &&
$modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0]).length > 0
? $modelfiles.filter((modelfile) => modelfile.tagName === selectedModels[0])[0]
: null;
let title = '';
let prompt = '';
let files = [];
let messages = [];
let history = {
messages: {},
currentId: null
};
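// Rebuild the flat `messages` array by walking parent links from the current message back to the root.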
$: if (history.currentId !== null) {
let _messages = [];
let currentMessage = history.messages[history.currentId];
while (currentMessage !== null) {
_messages.unshift({ ...currentMessage });
currentMessage =
currentMessage.parentId !== null ? history.messages[currentMessage.parentId] : null;
}
messages = _messages;
} else {
messages = [];
}
// onMount(async () => {
// let chat = await loadChat();
// await tick();
// if (chat) {
// loaded = true;
// } else {
// await goto('/');
// }
// });
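// Reload the chat whenever the route's chat id changes; redirect home if it cannot be loaded.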
$: if ($page.params.id) {
(async () => {
let chat = await loadChat();
await tick();
if (chat) {
loaded = true;
} else {
await goto('/');
}
})();
}
//////////////////////////
// Web functions
//////////////////////////
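// Load the chat record for the current route id from the local DB and hydrate models, history, title and settings.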
const loadChat = async () => {
await chatId.set($page.params.id);
const chat = await $db.getChatById($chatId);
if (chat) {
console.log(chat);
selectedModels = (chat?.models ?? undefined) !== undefined ? chat.models : [chat.model ?? ''];
history =
(chat?.history ?? undefined) !== undefined
? chat.history
: convertMessagesToHistory(chat.messages);
title = chat.title;
await settings.set({
...$settings,
system: chat.system ?? $settings.system,
options: chat.options ?? $settings.options
});
autoScroll = true;
await tick();
if (messages.length > 0) {
history.messages[messages.at(-1).id].done = true;
}
await tick();
return chat;
} else {
return null;
}
};
//////////////////////////
// Ollama functions
//////////////////////////
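// Fan the user prompt out to every selected model in parallel and refresh the chat list when all streams finish.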
const sendPrompt = async (userPrompt, parentId, _chatId) => {
await Promise.all(
selectedModels.map(async (model) => {
if (model.includes('gpt-')) {
await sendPromptOpenAI(model, userPrompt, parentId, _chatId);
} else {
await sendPromptOllama(model, userPrompt, parentId, _chatId);
}
})
);
await chats.set(await $db.getChats());
};
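// Stream a response from the Ollama /chat endpoint, appending tokens to a new assistant message in the history tree.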
const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
console.log('sendPromptOllama');
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
await tick();
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/chat`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
model: model,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({ role: message.role, content: message.content })),
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
format: $settings.requestFormat ?? undefined
})
}).catch((err) => {
console.log(err);
return null;
});
if (res && res.ok) {
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag || _chatId !== $chatId) {
responseMessage.done = true;
messages = messages;
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
let data = JSON.parse(line);
if ('detail' in data) {
throw data;
}
if (data.done == false) {
if (responseMessage.content == '' && data.message.content == '\n') {
continue;
} else {
responseMessage.content += data.message.content;
messages = messages;
}
} else {
responseMessage.done = true;
responseMessage.context = data.context ?? null;
responseMessage.info = {
total_duration: data.total_duration,
prompt_eval_count: data.prompt_eval_count,
prompt_eval_duration: data.prompt_eval_duration,
eval_count: data.eval_count,
eval_duration: data.eval_duration
};
messages = messages;
}
}
}
} catch (error) {
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
}
break;
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
}
} else {
if (res !== null) {
const error = await res.json();
console.log(error);
if ('detail' in error) {
toast.error(error.detail);
responseMessage.content = error.detail;
} else {
toast.error(error.error);
responseMessage.content = error.error;
}
} else {
toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
}
// Keep the specific error message set above; only flag the message as failed and finished.
responseMessage.error = true;
responseMessage.done = true;
messages = messages;
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
if (messages.length == 2 && messages.at(1).content !== '') {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await generateChatTitle(_chatId, userPrompt);
}
};
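// Stream a response from the OpenAI chat completions API for 'gpt-*' models, mirroring the Ollama flow.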
const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
if ($settings.OPENAI_API_KEY) {
if (model) {
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '',
model: model
};
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
await tick();
window.scrollTo({ top: document.body.scrollHeight });
const res = await fetch(`https://api.openai.com/v1/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${$settings.OPENAI_API_KEY}`
},
body: JSON.stringify({
model: model,
stream: true,
messages: [
$settings.system
? {
role: 'system',
content: $settings.system
}
: undefined,
...messages
]
.filter((message) => message)
.map((message) => ({ role: message.role, content: message.content })),
temperature: $settings.temperature ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
frequency_penalty: $settings.repeat_penalty ?? undefined
})
});
const reader = res.body
.pipeThrough(new TextDecoderStream())
.pipeThrough(splitStream('\n'))
.getReader();
while (true) {
const { value, done } = await reader.read();
if (done || stopResponseFlag) {
if (stopResponseFlag) {
responseMessage.done = true;
messages = messages;
}
break;
}
try {
let lines = value.split('\n');
for (const line of lines) {
if (line !== '') {
console.log(line);
if (line === 'data: [DONE]') {
responseMessage.done = true;
messages = messages;
} else {
let data = JSON.parse(line.replace(/^data: /, ''));
console.log(data);
if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
continue;
} else {
responseMessage.content += data.choices[0].delta.content ?? '';
messages = messages;
}
}
}
}
} catch (error) {
console.log(error);
}
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
await $db.updateChatById(_chatId, {
title: title === '' ? 'New Chat' : title,
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
}
stopResponseFlag = false;
await tick();
if (autoScroll) {
window.scrollTo({ top: document.body.scrollHeight });
}
if (messages.length == 2) {
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await setChatTitle(_chatId, userPrompt);
}
}
}
};
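// Validate input, append the user message to the history, create the chat record on the first message, then dispatch the prompt.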
const submitPrompt = async (userPrompt) => {
const _chatId = JSON.parse(JSON.stringify($chatId));
console.log('submitPrompt', _chatId);
if (selectedModels.includes('')) {
toast.error('Model not selected');
} else if (messages.length != 0 && messages.at(-1).done != true) {
console.log('wait');
} else {
document.getElementById('chat-textarea').style.height = '';
let userMessageId = uuidv4();
let userMessage = {
id: userMessageId,
parentId: messages.length !== 0 ? messages.at(-1).id : null,
childrenIds: [],
role: 'user',
content: userPrompt,
files: files.length > 0 ? files : undefined
};
if (messages.length !== 0) {
history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
}
history.messages[userMessageId] = userMessage;
history.currentId = userMessageId;
await tick();
if (messages.length == 1) {
await $db.createNewChat({
id: _chatId,
title: 'New Chat',
models: selectedModels,
system: $settings.system ?? undefined,
options: {
seed: $settings.seed ?? undefined,
temperature: $settings.temperature ?? undefined,
repeat_penalty: $settings.repeat_penalty ?? undefined,
top_k: $settings.top_k ?? undefined,
top_p: $settings.top_p ?? undefined,
num_ctx: $settings.num_ctx ?? undefined,
...($settings.options ?? {})
},
messages: messages,
history: history
});
}
prompt = '';
files = [];
setTimeout(() => {
window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' });
}, 50);
await sendPrompt(userPrompt, userMessageId, _chatId);
}
};
const stopResponse = () => {
stopResponseFlag = true;
console.log('stopResponse');
};
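// Drop the last (finished) assistant message and resend its parent user prompt.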
const regenerateResponse = async () => {
const _chatId = JSON.parse(JSON.stringify($chatId));
console.log('regenerateResponse', _chatId);
if (messages.length != 0 && messages.at(-1).done == true) {
messages.splice(messages.length - 1, 1);
messages = messages;
let userMessage = messages.at(-1);
let userPrompt = userMessage.content;
await sendPrompt(userPrompt, userMessage.id, _chatId);
}
};
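// Ask the first selected model for a short title after the first exchange; fall back to the prompt text if auto-titling is disabled.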
const generateChatTitle = async (_chatId, userPrompt) => {
if ($settings.titleAutoGenerate ?? true) {
console.log('generateChatTitle');
const res = await fetch(`${$settings?.API_BASE_URL ?? OLLAMA_API_BASE_URL}/generate`, {
method: 'POST',
headers: {
'Content-Type': 'text/event-stream',
...($settings.authHeader && { Authorization: $settings.authHeader }),
...($user && { Authorization: `Bearer ${localStorage.token}` })
},
body: JSON.stringify({
model: selectedModels[0],
prompt: `Generate a brief 3-5 word title for this question, excluding the term 'title.' Then, please reply with only the title: ${userPrompt}`,
stream: false
})
})
.then(async (res) => {
if (!res.ok) throw await res.json();
return res.json();
})
.catch((error) => {
if ('detail' in error) {
toast.error(error.detail);
}
console.log(error);
return null;
});
if (res) {
await setChatTitle(_chatId, res.response === '' ? 'New Chat' : res.response);
}
} else {
await setChatTitle(_chatId, `${userPrompt}`);
}
};
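// Persist the title and update the navbar only if this chat is still the active one.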
const setChatTitle = async (_chatId, _title) => {
await $db.updateChatById(_chatId, { title: _title });
if (_chatId === $chatId) {
title = _title;
}
};
</script>
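<!-- Keep auto-scroll enabled only while the viewport is near the bottom of the page -->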
<svelte:window
on:scroll={(e) => {
autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
}}
/>
{#if loaded}
<Navbar {title} />
<div class="min-h-screen w-full flex justify-center">
<div class=" py-2.5 flex flex-col justify-between w-full">
<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
<ModelSelector bind:selectedModels disabled={messages.length > 0} />
</div>
<div class=" h-full mt-10 mb-32 w-full flex flex-col">
<Messages
{selectedModels}
{selectedModelfile}
bind:history
bind:messages
bind:autoScroll
{sendPrompt}
{regenerateResponse}
/>
</div>
</div>
<MessageInput
bind:prompt
bind:autoScroll
suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
{
title: ['Help me study', 'vocabulary for a college entrance exam'],
content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
},
{
title: ['Give me ideas', `for what to do with my kids' art`],
content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
},
{
title: ['Tell me a fun fact', 'about the Roman Empire'],
content: 'Tell me a random fun fact about the Roman Empire'
},
{
title: ['Show me a code snippet', `of a website's sticky header`],
content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
}
]}
{messages}
{submitPrompt}
{stopResponse}
/>
</div>
{/if}