2023-10-08 22:38:42 +00:00
< script lang = "ts" >
2023-10-16 08:27:26 +00:00
import { v4 as uuidv4 } from 'uuid';
2023-10-28 22:16:04 +00:00
import toast from 'svelte-french-toast';
2023-12-26 20:50:52 +00:00
import { onMount , tick } from 'svelte';
2023-11-20 01:47:07 +00:00
import { goto } from '$app/navigation';
2023-12-26 11:28:30 +00:00
import { page } from '$app/stores';
2023-10-29 02:58:16 +00:00
2024-01-03 00:48:10 +00:00
import { models , modelfiles , user , settings , chats , chatId , config } from '$lib/stores';
2024-01-07 08:57:10 +00:00
import { copyToClipboard , splitStream } from '$lib/utils';
2023-12-26 20:50:52 +00:00
import { generateChatCompletion , generateTitle } from '$lib/apis/ollama';
2024-01-07 08:57:10 +00:00
import { createNewChat , getChatList , updateChatById } from '$lib/apis/chats';
import { queryVectorDB } from '$lib/apis/rag';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
2023-11-20 01:47:07 +00:00
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import ModelSelector from '$lib/components/chat/ModelSelector.svelte';
2023-10-20 20:21:40 +00:00
import Navbar from '$lib/components/layout/Navbar.svelte';
2024-01-07 08:57:10 +00:00
import { RAGTemplate } from '$lib/utils/rag';
2023-10-16 08:27:26 +00:00
2023-11-20 01:47:07 +00:00
// Streaming/UI state for the chat page.
let stopResponseFlag = false; // set by stopResponse() to abort active streams
let autoScroll = true;
let processing = ''; // non-empty while RAG document retrieval is running

let selectedModels = [''];

// The single selected modelfile, or null when zero or multiple models are selected.
let selectedModelfile = null;
$: selectedModelfile =
	selectedModels.length === 1
		? $modelfiles.find((modelfile) => modelfile.tagName === selectedModels[0]) ?? null
		: null;

// Map of tagName -> modelfile for every selected model that has one.
let selectedModelfiles = {};
$: selectedModelfiles = Object.fromEntries(
	selectedModels
		.map((tagName) => [tagName, $modelfiles.find((modelfile) => modelfile.tagName === tagName)])
		.filter(([, modelfile]) => modelfile)
);

let chat = null;
let title = '';
let prompt = '';
let files = [];

let messages = [];
// Message tree: id -> message, plus the id of the currently displayed leaf.
let history = {
	messages: {},
	currentId: null
};

// Derive the linear `messages` list by walking parent links up from currentId.
$: if (history.currentId !== null) {
	const chain = [];
	let node = history.messages[history.currentId];
	while (node !== null) {
		chain.unshift({ ...node });
		node = node.parentId !== null ? history.messages[node.parentId] : null;
	}
	messages = chain;
} else {
	messages = [];
}
// Start every visit to this route with a fresh, empty chat.
onMount(async () => {
	await initNewChat();
});

//////////////////////////
// Web functions
//////////////////////////
// Reset all chat state and the URL so the page shows a brand-new conversation.
const initNewChat = async () => {
	// Return the address bar to the root without pushing a history entry.
	window.history.replaceState(history.state, '', `/`);

	console.log('initNewChat');

	await chatId.set('');
	console.log($chatId);

	autoScroll = true;

	title = '';
	messages = [];
	history = {
		messages: {},
		currentId: null
	};

	console.log($config);

	// Model selection priority: URL query param > user settings > server default > none.
	const queryModels = $page.url.searchParams.get('models');
	if (queryModels) {
		selectedModels = queryModels?.split(',');
	} else if ($settings?.models) {
		selectedModels = $settings?.models;
	} else if ($config?.default_models) {
		selectedModels = $config?.default_models.split(',');
	} else {
		selectedModels = [''];
	}

	// Re-hydrate the settings store from localStorage.
	const storedSettings = JSON.parse(localStorage.getItem('settings') ?? '{}');
	settings.set({
		...storedSettings
	});
};
//////////////////////////
// Ollama functions
//////////////////////////

// Validate the current input state, record the user's message in the history
// tree, create the chat on the backend if this is the first message, then
// dispatch the prompt to every selected model via sendPrompt().
const submitPrompt = async (userPrompt, _user = null) => {
	console.log('submitPrompt', $chatId);

	if (selectedModels.includes('')) {
		toast.error('Model not selected');
	} else if (messages.length != 0 && messages.at(-1).done != true) {
		// Response not done
		console.log('wait');
	} else if (
		files.length > 0 &&
		files.filter((file) => file.upload_status === false).length > 0
	) {
		// Upload not done
		toast.error(
			`Oops! Hold tight! Your files are still in the processing oven. We're cooking them up to perfection. Please be patient and we'll let you know once they're ready.`
		);
	} else {
		// Reset chat message textarea height.
		// FIX: guard against the element being absent (would throw on .style).
		const textareaElement = document.getElementById('chat-textarea');
		if (textareaElement) {
			textareaElement.style.height = '';
		}

		// Create user message
		let userMessageId = uuidv4();
		let userMessage = {
			id: userMessageId,
			parentId: messages.length !== 0 ? messages.at(-1).id : null,
			childrenIds: [],
			role: 'user',
			user: _user ?? undefined,
			content: userPrompt,
			files: files.length > 0 ? files : undefined
		};

		// Add message to history and set currentId to messageId
		history.messages[userMessageId] = userMessage;
		history.currentId = userMessageId;

		// Append messageId to childrenIds of parent message
		if (messages.length !== 0) {
			history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
		}

		// Wait until history/message have been updated
		await tick();

		// Create new chat if only one message in messages
		if (messages.length == 1) {
			if ($settings.saveChatHistory ?? true) {
				chat = await createNewChat(localStorage.token, {
					id: $chatId,
					title: 'New Chat',
					models: selectedModels,
					system: $settings.system ?? undefined,
					options: {
						...($settings.options ?? {})
					},
					messages: messages,
					history: history,
					timestamp: Date.now()
				});
				await chats.set(await getChatList(localStorage.token));
				await chatId.set(chat.id);
			} else {
				// History saving disabled: use a sentinel id, nothing is persisted.
				await chatId.set('local');
			}
			await tick();
		}

		// Reset chat input textarea
		prompt = '';
		files = [];

		// Send prompt
		await sendPrompt(userPrompt, userMessageId);
	}
};
// Optionally augment the parent message with RAG context from uploaded
// documents, then fan the prompt out to every selected model in parallel.
const sendPrompt = async (prompt, parentId) => {
	// Snapshot the chat id so late stream chunks for a stale chat are ignored.
	const _chatId = JSON.parse(JSON.stringify($chatId));

	// Collect every uploaded document attached anywhere in the conversation.
	const docs = messages
		.filter((message) => message?.files ?? null)
		.map((message) => message.files.filter((item) => item.type === 'doc'))
		.flat(1);
	console.log(docs);

	if (docs.length > 0) {
		processing = 'Reading';
		const query = history.messages[parentId].content;

		// Query the vector DB per document; failures degrade to null.
		let relevantContexts = await Promise.all(
			docs.map((doc) =>
				queryVectorDB(localStorage.token, doc.collection_name, query, 4).catch((error) => {
					console.log(error);
					return null;
				})
			)
		);
		relevantContexts = relevantContexts.filter((context) => context);

		const contextString = relevantContexts.reduce(
			(acc, context) => `${acc}${context.documents.join(' ')}\n`,
			''
		);
		console.log(contextString);

		// Stash the RAG-augmented prompt on the parent message for the senders.
		history.messages[parentId].raContent = RAGTemplate(contextString, query);
		history.messages[parentId].contexts = relevantContexts;
		await tick();
		processing = '';
	}

	await Promise.all(
		selectedModels.map(async (model) => {
			console.log(model);
			const modelTag = $models.filter((m) => m.name === model).at(0);

			if (modelTag?.external) {
				await sendPromptOpenAI(model, prompt, parentId, _chatId);
			} else if (modelTag) {
				await sendPromptOllama(model, prompt, parentId, _chatId);
			} else {
				toast.error(`Model ${model} not found`);
			}
		})
	);

	await chats.set(await getChatList(localStorage.token));
};
// Stream a completion for `userPrompt` from an Ollama-backed model, appending
// chunks into a new assistant message under `parentId`. Persists the chat and
// auto-generates a title once the first exchange completes.
const sendPromptOllama = async (model, userPrompt, parentId, _chatId) => {
	// Create response message
	let responseMessageId = uuidv4();
	let responseMessage = {
		parentId: parentId,
		id: responseMessageId,
		childrenIds: [],
		role: 'assistant',
		content: '',
		model: model
	};

	// Add message to history and set currentId to messageId
	history.messages[responseMessageId] = responseMessage;
	history.currentId = responseMessageId;

	// Append messageId to childrenIds of parent message
	if (parentId !== null) {
		history.messages[parentId].childrenIds = [
			...history.messages[parentId].childrenIds,
			responseMessageId
		];
	}

	// Wait until history/message have been updated
	await tick();

	// Scroll down
	window.scrollTo({ top: document.body.scrollHeight });

	const res = await generateChatCompletion(localStorage.token, {
		model: model,
		messages: [
			$settings.system
				? {
						role: 'system',
						content: $settings.system
				  }
				: undefined,
			...messages
		]
			.filter((message) => message)
			.map((message, idx, arr) => ({
				role: message.role,
				// Substitute the RAG-augmented content for the latest user message
				// (arr ends with the empty assistant placeholder, hence length - 2).
				content:
					arr.length - 2 !== idx ? message.content : message?.raContent ?? message.content,
				...(message.files && {
					// Strip the data-URL prefix; Ollama expects raw base64.
					images: message.files
						.filter((file) => file.type === 'image')
						.map((file) => file.url.slice(file.url.indexOf(',') + 1))
				})
			})),
		options: {
			...($settings.options ?? {})
		},
		format: $settings.requestFormat ?? undefined
	});

	if (res && res.ok) {
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop on end-of-stream, user abort, or if the user switched chats.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;
				break;
			}

			try {
				let lines = value.split('\n');

				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						let data = JSON.parse(line);

						if ('detail' in data) {
							throw data;
						}

						if (data.done == false) {
							// Skip a leading bare newline; otherwise append the chunk.
							if (responseMessage.content == '' && data.message.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.message.content;
								messages = messages;
							}
						} else {
							responseMessage.done = true;

							if (responseMessage.content == '') {
								responseMessage.error = true;
								responseMessage.content =
									'Oops! No text generated from Ollama, Please try again.';
							}

							responseMessage.context = data.context ?? null;
							responseMessage.info = {
								total_duration: data.total_duration,
								load_duration: data.load_duration,
								sample_count: data.sample_count,
								sample_duration: data.sample_duration,
								prompt_eval_count: data.prompt_eval_count,
								prompt_eval_duration: data.prompt_eval_duration,
								eval_count: data.eval_count,
								eval_duration: data.eval_duration
							};
							messages = messages;

							if ($settings.notificationEnabled && !document.hasFocus()) {
								new Notification(
									selectedModelfile
										? `${
												selectedModelfile.title.charAt(0).toUpperCase() +
												selectedModelfile.title.slice(1)
										  }`
										: `Ollama - ${model}`,
									{
										body: responseMessage.content,
										icon: selectedModelfile?.imageUrl ?? '/favicon.png'
									}
								);
							}

							if ($settings.responseAutoCopy) {
								copyToClipboard(responseMessage.content);
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
				if ('detail' in error) {
					toast.error(error.detail);
				}
				break;
			}

			if (autoScroll) {
				window.scrollTo({ top: document.body.scrollHeight });
			}
		}

		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// FIX: the specific backend error message was previously clobbered by an
		// unconditional generic "issue connecting" assignment after this branch.
		if (res !== null) {
			const error = await res.json();
			console.log(error);

			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.content = error.detail;
			} else {
				toast.error(error.error);
				responseMessage.content = error.error;
			}
		} else {
			toast.error(`Uh-oh! There was an issue connecting to Ollama.`);
			responseMessage.content = `Uh-oh! There was an issue connecting to Ollama.`;
		}

		responseMessage.error = true;
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		window.scrollTo({ top: document.body.scrollHeight });
	}

	// First exchange finished: move the URL to the chat and title it.
	if (messages.length == 2 && messages.at(1).content !== '') {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		await generateChatTitle(_chatId, userPrompt);
	}
};
// Stream a completion from an OpenAI-compatible endpoint (SSE "data:" lines),
// appending chunks into a new assistant message under `parentId`.
const sendPromptOpenAI = async (model, userPrompt, parentId, _chatId) => {
	let responseMessageId = uuidv4();

	let responseMessage = {
		parentId: parentId,
		id: responseMessageId,
		childrenIds: [],
		role: 'assistant',
		content: '',
		model: model
	};

	// Add message to history and set currentId to messageId
	history.messages[responseMessageId] = responseMessage;
	history.currentId = responseMessageId;

	// Append messageId to childrenIds of parent message
	if (parentId !== null) {
		history.messages[parentId].childrenIds = [
			...history.messages[parentId].childrenIds,
			responseMessageId
		];
	}

	window.scrollTo({ top: document.body.scrollHeight });

	const res = await generateOpenAIChatCompletion(localStorage.token, {
		model: model,
		stream: true,
		messages: [
			$settings.system
				? {
						role: 'system',
						content: $settings.system
				  }
				: undefined,
			...messages
		]
			.filter((message) => message)
			.map((message, idx, arr) => ({
				role: message.role,
				// Messages with files use the multi-part content format; the last
				// message gets the RAG-augmented content when available.
				...(message.files
					? {
							content: [
								{
									type: 'text',
									text:
										arr.length - 1 !== idx
											? message.content
											: message?.raContent ?? message.content
								},
								...message.files
									.filter((file) => file.type === 'image')
									.map((file) => ({
										type: 'image_url',
										image_url: {
											url: file.url
										}
									}))
							]
					  }
					: {
							content:
								arr.length - 1 !== idx ? message.content : message?.raContent ?? message.content
					  })
			})),
		seed: $settings?.options?.seed ?? undefined,
		stop: $settings?.options?.stop ?? undefined,
		temperature: $settings?.options?.temperature ?? undefined,
		top_p: $settings?.options?.top_p ?? undefined,
		num_ctx: $settings?.options?.num_ctx ?? undefined,
		frequency_penalty: $settings?.options?.repeat_penalty ?? undefined,
		max_tokens: $settings?.options?.num_predict ?? undefined
	});

	if (res && res.ok) {
		const reader = res.body
			.pipeThrough(new TextDecoderStream())
			.pipeThrough(splitStream('\n'))
			.getReader();

		while (true) {
			const { value, done } = await reader.read();
			// Stop on end-of-stream, user abort, or if the user switched chats.
			if (done || stopResponseFlag || _chatId !== $chatId) {
				responseMessage.done = true;
				messages = messages;
				break;
			}

			try {
				let lines = value.split('\n');
				for (const line of lines) {
					if (line !== '') {
						console.log(line);
						if (line === 'data: [DONE]') {
							responseMessage.done = true;
							messages = messages;
						} else {
							let data = JSON.parse(line.replace(/^data: /, ''));
							console.log(data);

							// Skip a leading bare newline; otherwise append the delta.
							if (responseMessage.content == '' && data.choices[0].delta.content == '\n') {
								continue;
							} else {
								responseMessage.content += data.choices[0].delta.content ?? '';
								messages = messages;
							}
						}
					}
				}
			} catch (error) {
				console.log(error);
			}

			if ($settings.notificationEnabled && !document.hasFocus()) {
				new Notification(`OpenAI ${model}`, {
					body: responseMessage.content,
					icon: '/favicon.png'
				});
			}

			if ($settings.responseAutoCopy) {
				copyToClipboard(responseMessage.content);
			}

			if (autoScroll) {
				window.scrollTo({ top: document.body.scrollHeight });
			}
		}

		if ($chatId == _chatId) {
			if ($settings.saveChatHistory ?? true) {
				chat = await updateChatById(localStorage.token, _chatId, {
					messages: messages,
					history: history
				});
				await chats.set(await getChatList(localStorage.token));
			}
		}
	} else {
		// FIX: the specific backend error message was previously clobbered by an
		// unconditional generic "issue connecting" assignment after this branch.
		if (res !== null) {
			const error = await res.json();
			console.log(error);

			if ('detail' in error) {
				toast.error(error.detail);
				responseMessage.content = error.detail;
			} else if (
				// FIX: `'message' in error.error` throws a TypeError when
				// error.error is a string primitive; only use `in` on objects.
				typeof error.error === 'object' &&
				error.error !== null &&
				'message' in error.error
			) {
				toast.error(error.error.message);
				responseMessage.content = error.error.message;
			} else {
				toast.error(error.error);
				responseMessage.content = error.error;
			}
		} else {
			toast.error(`Uh-oh! There was an issue connecting to ${model}.`);
			responseMessage.content = `Uh-oh! There was an issue connecting to ${model}.`;
		}

		responseMessage.error = true;
		responseMessage.done = true;
		messages = messages;
	}

	stopResponseFlag = false;
	await tick();

	if (autoScroll) {
		window.scrollTo({ top: document.body.scrollHeight });
	}

	// First exchange finished: move the URL to the chat and title it.
	if (messages.length == 2) {
		window.history.replaceState(history.state, '', `/c/${_chatId}`);
		await setChatTitle(_chatId, userPrompt);
	}
};
// Signal every active streaming loop to terminate on its next chunk.
const stopResponse = () => {
	console.log('stopResponse');
	stopResponseFlag = true;
};
// Remove the last (finished) assistant message and resend the preceding
// user prompt so the model produces a fresh response.
const regenerateResponse = async () => {
	console.log('regenerateResponse');

	// Nothing to regenerate while empty or while a response is still streaming.
	if (messages.length == 0 || messages.at(-1).done != true) {
		return;
	}

	messages.splice(messages.length - 1, 1);
	messages = messages;

	const userMessage = messages.at(-1);
	await sendPrompt(userMessage.content, userMessage.id);
};
// Title the chat: either ask a model to generate one (default) or fall back
// to the raw user prompt when auto-generation is disabled in settings.
const generateChatTitle = async (_chatId, userPrompt) => {
	if (!($settings.titleAutoGenerate ?? true)) {
		await setChatTitle(_chatId, `${userPrompt}`);
		return;
	}

	const generatedTitle = await generateTitle(
		localStorage.token,
		$settings?.titleAutoGenerateModel ?? selectedModels[0],
		userPrompt
	);

	if (generatedTitle) {
		await setChatTitle(_chatId, generatedTitle);
	}
};
// Persist the chat title and, if the chat is still the active one, reflect
// it in the navbar immediately.
const setChatTitle = async (_chatId, _title) => {
	if (_chatId === $chatId) {
		title = _title;
	}

	if ($settings.saveChatHistory ?? true) {
		chat = await updateChatById(localStorage.token, _chatId, { title: _title });
		await chats.set(await getChatList(localStorage.token));
	}
};
</script>

<!-- Track whether the user is pinned near the bottom; streaming auto-scrolls only then. -->
<svelte:window
	on:scroll={(e) => {
		autoScroll = window.innerHeight + window.scrollY >= document.body.offsetHeight - 40;
	}}
/>

<Navbar {title} shareEnabled={messages.length > 0} {initNewChat} />
<div class="min-h-screen w-full flex justify-center">
	<div class=" py-2.5 flex flex-col justify-between w-full">
		<div class="max-w-2xl mx-auto w-full px-3 md:px-0 mt-10">
			<!-- Model choice is locked once the conversation has started. -->
			<ModelSelector bind:selectedModels disabled={messages.length > 0} />
		</div>

		<div class=" h-full mt-10 mb-32 w-full flex flex-col">
			<Messages
				chatId={$chatId}
				{selectedModels}
				{selectedModelfiles}
				{processing}
				bind:history
				bind:messages
				bind:autoScroll
				bottomPadding={files.length > 0}
				{sendPrompt}
				{regenerateResponse}
			/>
		</div>
	</div>

	<MessageInput
		bind:files
		bind:prompt
		bind:autoScroll
		suggestionPrompts={selectedModelfile?.suggestionPrompts ?? [
			{
				title: ['Help me study', 'vocabulary for a college entrance exam'],
				content: `Help me study vocabulary: write a sentence for me to fill in the blank, and I'll try to pick the correct option.`
			},
			{
				title: ['Give me ideas', `for what to do with my kids' art`],
				content: `What are 5 creative things I could do with my kids' art? I don't want to throw them away, but it's also so much clutter.`
			},
			{
				title: ['Tell me a fun fact', 'about the Roman Empire'],
				content: 'Tell me a random fun fact about the Roman Empire'
			},
			{
				title: ['Show me a code snippet', `of a website's sticky header`],
				content: `Show me a code snippet of a website's sticky header in CSS and JavaScript.`
			}
		]}
		{messages}
		{submitPrompt}
		{stopResponse}
	/>
</div>