feat: add support for using previous messages for query generation

Commit: 466b3e3637 (parent 654cc09128)
Author: Jun Siang Cheah
Date: 2024-05-12 20:45:44 +08:00
4 changed files with 27 additions and 14 deletions

View File

@ -1,5 +1,6 @@
import { OPENAI_API_BASE_URL } from '$lib/constants'; import { OPENAI_API_BASE_URL } from '$lib/constants';
import { promptTemplate } from '$lib/utils'; import { promptTemplate } from '$lib/utils';
import { type Model, models, settings } from '$lib/stores';
export const getOpenAIUrls = async (token: string = '') => { export const getOpenAIUrls = async (token: string = '') => {
let error = null; let error = null;
@ -322,15 +323,14 @@ export const generateTitle = async (
export const generateSearchQuery = async ( export const generateSearchQuery = async (
token: string = '', token: string = '',
// template: string,
model: string, model: string,
previousMessages: string[],
prompt: string, prompt: string,
url: string = OPENAI_API_BASE_URL url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => { ): Promise<string | undefined> => {
let error = null; let error = null;
// TODO: Allow users to specify the prompt // TODO: Allow users to specify the prompt
// template = promptTemplate(template, prompt);
// Get the current date in the format "January 20, 2024" // Get the current date in the format "January 20, 2024"
const currentDate = new Intl.DateTimeFormat('en-US', { const currentDate = new Intl.DateTimeFormat('en-US', {
@ -344,8 +344,6 @@ export const generateSearchQuery = async (
day: '2-digit' day: '2-digit'
}).format(new Date()); }).format(new Date());
// console.log(template);
const res = await fetch(`${url}/chat/completions`, { const res = await fetch(`${url}/chat/completions`, {
method: 'POST', method: 'POST',
headers: { headers: {
@ -409,7 +407,10 @@ Current Question: Where is it being hosted?`
}, },
{ {
role: 'user', role: 'user',
content: `Current Question: ${prompt}` content:
(previousMessages.length > 0
? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
: '') + `Current Question: ${prompt}`
} }
], ],
stream: false, stream: false,

View File

@ -39,7 +39,7 @@ export const showSidebar = writable(false);
export const showSettings = writable(false); export const showSettings = writable(false);
export const showChangelog = writable(false); export const showChangelog = writable(false);
type Model = OpenAIModel | OllamaModel; export type Model = OpenAIModel | OllamaModel;
type OpenAIModel = { type OpenAIModel = {
id: string; id: string;

View File

@ -290,7 +290,7 @@
} }
if (useWebSearch) { if (useWebSearch) {
await runWebSearchForPrompt(parentId, responseMessageId, prompt); await runWebSearchForPrompt(parentId, responseMessageId);
} }
if (model?.external) { if (model?.external) {
@ -307,11 +307,11 @@
await chats.set(await getChatList(localStorage.token)); await chats.set(await getChatList(localStorage.token));
}; };
const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => { const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
const responseMessage = history.messages[responseId]; const responseMessage = history.messages[responseId];
responseMessage.progress = $i18n.t('Generating search query'); responseMessage.progress = $i18n.t('Generating search query');
messages = messages; messages = messages;
const searchQuery = await generateChatSearchQuery(prompt); const searchQuery = await generateChatSearchQuery(parentId);
if (!searchQuery) { if (!searchQuery) {
toast.warning($i18n.t('No search query generated')); toast.warning($i18n.t('No search query generated'));
responseMessage.progress = undefined; responseMessage.progress = undefined;
@ -861,7 +861,7 @@
}; };
// TODO: Add support for adding all the user's messages as context, and not just the last message // TODO: Add support for adding all the user's messages as context, and not just the last message
const generateChatSearchQuery = async (userPrompt: string) => { const generateChatSearchQuery = async (messageId: string) => {
const model = $models.find((model) => model.id === selectedModels[0]); const model = $models.find((model) => model.id === selectedModels[0]);
// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation) // TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@ -871,10 +871,16 @@
: $settings?.title?.model ?? selectedModels[0]; : $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId); const titleModel = $models.find((model) => model.id === titleModelId);
const userMessage = history.messages[messageId];
const userPrompt = userMessage.content;
const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
console.log(titleModel); console.log(titleModel);
return await generateSearchQuery( return await generateSearchQuery(
localStorage.token, localStorage.token,
titleModelId, titleModelId,
previousMessages,
userPrompt, userPrompt,
titleModel?.external ?? false titleModel?.external ?? false
? titleModel?.source?.toLowerCase() === 'litellm' ? titleModel?.source?.toLowerCase() === 'litellm'

View File

@ -295,7 +295,7 @@
} }
if (useWebSearch) { if (useWebSearch) {
await runWebSearchForPrompt(parentId, responseMessageId, prompt); await runWebSearchForPrompt(parentId, responseMessageId);
} }
if (model?.external) { if (model?.external) {
@ -312,11 +312,11 @@
await chats.set(await getChatList(localStorage.token)); await chats.set(await getChatList(localStorage.token));
}; };
const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => { const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
const responseMessage = history.messages[responseId]; const responseMessage = history.messages[responseId];
responseMessage.progress = $i18n.t('Generating search query'); responseMessage.progress = $i18n.t('Generating search query');
messages = messages; messages = messages;
const searchQuery = await generateChatSearchQuery(prompt); const searchQuery = await generateChatSearchQuery(parentId);
if (!searchQuery) { if (!searchQuery) {
toast.warning($i18n.t('No search query generated')); toast.warning($i18n.t('No search query generated'));
responseMessage.progress = undefined; responseMessage.progress = undefined;
@ -866,7 +866,7 @@
}; };
// TODO: Add support for adding all the user's messages as context, and not just the last message // TODO: Add support for adding all the user's messages as context, and not just the last message
const generateChatSearchQuery = async (userPrompt: string) => { const generateChatSearchQuery = async (messageId: string) => {
const model = $models.find((model) => model.id === selectedModels[0]); const model = $models.find((model) => model.id === selectedModels[0]);
// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation) // TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@ -876,10 +876,16 @@
: $settings?.title?.model ?? selectedModels[0]; : $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId); const titleModel = $models.find((model) => model.id === titleModelId);
const userMessage = history.messages[messageId];
const userPrompt = userMessage.content;
const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
console.log(titleModel); console.log(titleModel);
return await generateSearchQuery( return await generateSearchQuery(
localStorage.token, localStorage.token,
titleModelId, titleModelId,
previousMessages,
userPrompt, userPrompt,
titleModel?.external ?? false titleModel?.external ?? false
? titleModel?.source?.toLowerCase() === 'litellm' ? titleModel?.source?.toLowerCase() === 'litellm'