From 466b3e3637c72279564b916a86043a5e44770737 Mon Sep 17 00:00:00 2001
From: Jun Siang Cheah
Date: Sun, 12 May 2024 20:45:44 +0800
Subject: [PATCH] feat: add support for using previous messages for query generation

---
 src/lib/apis/openai/index.ts         | 11 ++++++-----
 src/lib/stores/index.ts              |  2 +-
 src/routes/(app)/+page.svelte        | 14 ++++++++++----
 src/routes/(app)/c/[id]/+page.svelte | 14 ++++++++++----
 4 files changed, 27 insertions(+), 14 deletions(-)

diff --git a/src/lib/apis/openai/index.ts b/src/lib/apis/openai/index.ts
index 0145b1b27..c4d19d73f 100644
--- a/src/lib/apis/openai/index.ts
+++ b/src/lib/apis/openai/index.ts
@@ -1,5 +1,6 @@
 import { OPENAI_API_BASE_URL } from '$lib/constants';
 import { promptTemplate } from '$lib/utils';
+import { type Model, models, settings } from '$lib/stores';
 
 export const getOpenAIUrls = async (token: string = '') => {
 	let error = null;
@@ -322,15 +323,14 @@ export const generateTitle = async (
 
 export const generateSearchQuery = async (
 	token: string = '',
-	// template: string,
 	model: string,
+	previousMessages: string[],
 	prompt: string,
 	url: string = OPENAI_API_BASE_URL
 ): Promise<string | undefined> => {
 	let error = null;
 
 	// TODO: Allow users to specify the prompt
-	// template = promptTemplate(template, prompt);
 
 	// Get the current date in the format "January 20, 2024"
 	const currentDate = new Intl.DateTimeFormat('en-US', {
@@ -344,8 +344,6 @@
 		day: '2-digit'
 	}).format(new Date());
 
-	// console.log(template);
-
 	const res = await fetch(`${url}/chat/completions`, {
 		method: 'POST',
 		headers: {
@@ -409,7 +407,10 @@ Current Question: Where is it being hosted?`
 			},
 			{
 				role: 'user',
-				content: `Current Question: ${prompt}`
+				content:
+					(previousMessages.length > 0
+						? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
+						: '') + `Current Question: ${prompt}`
 			}
 		],
 		stream: false,
diff --git a/src/lib/stores/index.ts b/src/lib/stores/index.ts
index 7de2e8540..7ae4f66a6 100644
--- a/src/lib/stores/index.ts
+++ b/src/lib/stores/index.ts
@@ -39,7 +39,7 @@ export const showSidebar = writable(false);
 export const showSettings = writable(false);
 export const showChangelog = writable(false);
 
-type Model = OpenAIModel | OllamaModel;
+export type Model = OpenAIModel | OllamaModel;
 
 type OpenAIModel = {
 	id: string;
diff --git a/src/routes/(app)/+page.svelte b/src/routes/(app)/+page.svelte
index e85b6ee80..fcf3d580b 100644
--- a/src/routes/(app)/+page.svelte
+++ b/src/routes/(app)/+page.svelte
@@ -290,7 +290,7 @@
 		}
 
 		if (useWebSearch) {
-			await runWebSearchForPrompt(parentId, responseMessageId, prompt);
+			await runWebSearchForPrompt(parentId, responseMessageId);
 		}
 
 		if (model?.external) {
@@ -307,11 +307,11 @@
 		await chats.set(await getChatList(localStorage.token));
 	};
 
-	const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => {
+	const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
 		const responseMessage = history.messages[responseId];
 		responseMessage.progress = $i18n.t('Generating search query');
 		messages = messages;
-		const searchQuery = await generateChatSearchQuery(prompt);
+		const searchQuery = await generateChatSearchQuery(parentId);
 		if (!searchQuery) {
 			toast.warning($i18n.t('No search query generated'));
 			responseMessage.progress = undefined;
@@ -861,7 +861,7 @@
 	};
 
 	// TODO: Add support for adding all the user's messages as context, and not just the last message
-	const generateChatSearchQuery = async (userPrompt: string) => {
+	const generateChatSearchQuery = async (messageId: string) => {
 		const model = $models.find((model) => model.id === selectedModels[0]);
 
 		// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@@ -871,10 +871,16 @@
 				: $settings?.title?.model ?? selectedModels[0];
 		const titleModel = $models.find((model) => model.id === titleModelId);
 
+		const userMessage = history.messages[messageId];
+		const userPrompt = userMessage.content;
+
+		const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
+
 		console.log(titleModel);
 		return await generateSearchQuery(
 			localStorage.token,
 			titleModelId,
+			previousMessages,
 			userPrompt,
 			titleModel?.external ?? false
 				? titleModel?.source?.toLowerCase() === 'litellm'
diff --git a/src/routes/(app)/c/[id]/+page.svelte b/src/routes/(app)/c/[id]/+page.svelte
index c88b627a08..a89cda55c 100644
--- a/src/routes/(app)/c/[id]/+page.svelte
+++ b/src/routes/(app)/c/[id]/+page.svelte
@@ -295,7 +295,7 @@
 		}
 
 		if (useWebSearch) {
-			await runWebSearchForPrompt(parentId, responseMessageId, prompt);
+			await runWebSearchForPrompt(parentId, responseMessageId);
 		}
 
 		if (model?.external) {
@@ -312,11 +312,11 @@
 		await chats.set(await getChatList(localStorage.token));
 	};
 
-	const runWebSearchForPrompt = async (parentId: string, responseId: string, prompt: string) => {
+	const runWebSearchForPrompt = async (parentId: string, responseId: string) => {
 		const responseMessage = history.messages[responseId];
 		responseMessage.progress = $i18n.t('Generating search query');
 		messages = messages;
-		const searchQuery = await generateChatSearchQuery(prompt);
+		const searchQuery = await generateChatSearchQuery(parentId);
 		if (!searchQuery) {
 			toast.warning($i18n.t('No search query generated'));
 			responseMessage.progress = undefined;
@@ -866,7 +866,7 @@
 	};
 
 	// TODO: Add support for adding all the user's messages as context, and not just the last message
-	const generateChatSearchQuery = async (userPrompt: string) => {
+	const generateChatSearchQuery = async (messageId: string) => {
 		const model = $models.find((model) => model.id === selectedModels[0]);
 
 		// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
@@ -876,10 +876,16 @@
 				: $settings?.title?.model ?? selectedModels[0];
 		const titleModel = $models.find((model) => model.id === titleModelId);
 
+		const userMessage = history.messages[messageId];
+		const userPrompt = userMessage.content;
+
+		const previousMessages = messages.filter((message) => message.role === 'user').map((message) => message.content);
+
 		console.log(titleModel);
 		return await generateSearchQuery(
 			localStorage.token,
 			titleModelId,
+			previousMessages,
 			userPrompt,
 			titleModel?.external ?? false
 				? titleModel?.source?.toLowerCase() === 'litellm'
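
To illustrate the intended flow outside the patch itself, here is a minimal standalone TypeScript sketch of how previousMessages is gathered from the chat history and folded into the user message sent to the query-generation model. Only the content expression mirrors the code added above; the type and helper names (HistoryMessage, collectPreviousUserMessages, buildQueryGenerationUserMessage) and the sample chat are hypothetical and do not appear in the codebase.

// Minimal sketch, not part of the patch: mirrors the message construction added
// to generateSearchQuery and the way the updated generateChatSearchQuery collects
// prior user turns. All names below are illustrative assumptions.

type HistoryMessage = { role: 'user' | 'assistant'; content: string };

// Collect the content of earlier user turns, as the Svelte pages now do with
// messages.filter(...).map(...).
const collectPreviousUserMessages = (messages: HistoryMessage[]): string[] =>
	messages.filter((message) => message.role === 'user').map((message) => message.content);

// Build the user message the same way the patched generateSearchQuery does:
// prior questions first (if any), then the current question.
const buildQueryGenerationUserMessage = (previousMessages: string[], prompt: string) => ({
	role: 'user' as const,
	content:
		(previousMessages.length > 0
			? `Previous Questions:\n${previousMessages.join('\n')}\n\n`
			: '') + `Current Question: ${prompt}`
});

// Example: a follow-up question that only makes sense with the earlier turn as context.
const chat: HistoryMessage[] = [
	{ role: 'user', content: 'When is the next Olympics?' },
	{ role: 'assistant', content: 'The next Summer Olympics take place in 2024.' }
];

console.log(
	buildQueryGenerationUserMessage(collectPreviousUserMessages(chat), 'Where is it being hosted?')
		.content
);

With the previous questions prepended, the task model can resolve "it" to the Olympics and emit a self-contained search query, which is the behaviour this patch is after.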