feat: prototype frontend web search integration

This commit is contained in:
Jun Siang Cheah
2024-05-11 23:12:52 +08:00
parent 619c2f9c71
commit 2660a6e5b8
11 changed files with 305 additions and 18 deletions

View File

@@ -318,3 +318,119 @@ export const generateTitle = async (
return res?.choices[0]?.message?.content ?? 'New Chat';
};
/**
 * Ask a chat model to turn the user's latest question into a concise web
 * search query, using few-shot prompting against an OpenAI-compatible
 * `/chat/completions` endpoint.
 *
 * @param token  Bearer token for the API (empty string sends `Bearer `).
 * @param model  Model id to run the query-generation task on.
 * @param prompt The user's current question.
 * @param url    API base URL; defaults to the OpenAI-compatible proxy.
 * @returns The generated query with quote characters stripped, or
 *          `undefined` when the request failed without a structured error.
 * @throws The API's `detail` payload when the response carries one.
 */
export const generateSearchQuery = async (
	token: string = '',
	// template: string,
	model: string,
	prompt: string,
	url: string = OPENAI_API_BASE_URL
): Promise<string | undefined> => {
	let error = null;

	// TODO: Allow users to specify the prompt
	// template = promptTemplate(template, prompt);

	// Dates are injected so the model can resolve relative time references
	// ("yesterday", "current") into concrete search terms, e.g. "January 20, 2024".
	const dateFormatter = new Intl.DateTimeFormat('en-US', {
		year: 'numeric',
		month: 'long',
		day: '2-digit'
	});
	const currentDate = dateFormatter.format(new Date());
	// Bug fix: the original formatted "yesterday" from `new Date()`, which
	// yields today's date. Subtract 24h so the few-shot example is correct.
	const yesterdayDate = dateFormatter.format(new Date(Date.now() - 24 * 60 * 60 * 1000));

	// console.log(template);

	const res = await fetch(`${url}/chat/completions`, {
		method: 'POST',
		headers: {
			Accept: 'application/json',
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			model: model,
			// Few shot prompting
			// NOTE(review): the instruction message uses role 'assistant' rather
			// than 'system' — presumably intentional for few-shot style; confirm.
			messages: [
				{
					role: 'assistant',
					content: `You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is ${currentDate}.`
				},
				{
					role: 'user',
					content: `Previous Questions:
- Who is the president of France?

Current Question: What about Mexico?`
				},
				{
					role: 'assistant',
					content: 'President of Mexico'
				},
				{
					role: 'user',
					content: `Previous questions:
- When is the next formula 1 grand prix?

Current Question: Where is it being hosted?`
				},
				{
					role: 'assistant',
					content: 'location of next formula 1 grand prix'
				},
				{
					role: 'user',
					content: 'Current Question: What type of printhead does the Epson F2270 DTG printer use?'
				},
				{
					role: 'assistant',
					content: 'Epson F2270 DTG printer printhead'
				},
				{
					role: 'user',
					content: 'What were the news yesterday?'
				},
				{
					role: 'assistant',
					content: `news ${yesterdayDate}`
				},
				{
					role: 'user',
					content: 'What is the current weather in Paris?'
				},
				{
					role: 'assistant',
					content: `weather in Paris ${currentDate}`
				},
				{
					role: 'user',
					content: `Current Question: ${prompt}`
				}
			],
			stream: false,
			// Restricting the max tokens to 30 to avoid long search queries
			max_tokens: 30
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			if ('detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	// Strip quotes the model often wraps the query in. Bug fix: optional-chain
	// `content` too — the original `content.replace` threw when content was null.
	return res?.choices?.[0]?.message?.content?.replace(/["']/g, '') ?? undefined;
};

View File

@@ -507,3 +507,44 @@ export const updateRerankingConfig = async (token: string, payload: RerankingMod
return res;
};
/**
 * Trigger a backend web search for `query` and index the results into a RAG
 * collection via `POST {RAG_API_BASE_URL}/websearch`.
 *
 * @param token           Bearer token for the backend.
 * @param query           Search query to run.
 * @param collection_name Optional collection to store results in (callers
 *                        pass a fresh UUID); omitted lets the backend choose.
 * @returns The backend's `SearchDocument` payload, or `undefined` when the
 *          request failed without a structured error.
 * @throws The API's `detail` payload when the response carries one.
 */
export const runWebSearch = async (
	token: string,
	query: string,
	collection_name?: string
): Promise<SearchDocument | undefined> => {
	let error = null;

	const res = await fetch(`${RAG_API_BASE_URL}/websearch`, {
		method: 'POST',
		headers: {
			'Content-Type': 'application/json',
			Authorization: `Bearer ${token}`
		},
		body: JSON.stringify({
			query,
			collection_name
		})
	})
		.then(async (res) => {
			if (!res.ok) throw await res.json();
			return res.json();
		})
		.catch((err) => {
			console.log(err);
			// Fix: guard the property access (consistent with generateSearchQuery).
			// The original `error = err.detail` silently produced `undefined` for
			// network-level TypeErrors or primitive rejections.
			if (err !== null && typeof err === 'object' && 'detail' in err) {
				error = err.detail;
			}
			return undefined;
		});

	if (error) {
		throw error;
	}

	return res;
};
/**
 * Response payload of the backend `/websearch` endpoint (see `runWebSearch`).
 */
export interface SearchDocument {
	// Success flag returned by the backend — presumably true when the search
	// results were fetched and indexed; confirm against the RAG API.
	status: boolean;
	// Name of the RAG collection the search results were stored under.
	collection_name: string;
	// Names of the documents created from the search results — TODO confirm
	// whether these are stored filenames or source URLs.
	filenames: string[];
}

View File

@@ -30,8 +30,8 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { queryCollection, queryDoc } from '$lib/apis/rag';
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import { queryCollection, queryDoc, runWebSearch } from '$lib/apis/rag';
import { generateOpenAIChatCompletion, generateSearchQuery, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -55,6 +55,8 @@
let selectedModels = [''];
let atSelectedModel = '';
let useWebSearch = false;
let selectedModelfile = null;
$: selectedModelfile =
selectedModels.length === 1 &&
@@ -275,6 +277,39 @@
];
}
if (useWebSearch) {
// TODO: Toasts are temporary indicators for web search
toast.info($i18n.t('Generating search query'));
const searchQuery = await generateChatSearchQuery(prompt);
if (searchQuery) {
toast.info($i18n.t('Searching the web for \'{{searchQuery}}\'', { searchQuery }));
const searchDocUuid = uuidv4();
const searchDocument = await runWebSearch(localStorage.token, searchQuery, searchDocUuid);
if (searchDocument) {
const parentMessage = history.messages[parentId];
if (!parentMessage.files) {
parentMessage.files = [];
}
parentMessage.files.push({
collection_name: searchDocument.collection_name,
name: searchQuery,
type: 'doc',
upload_status: true,
error: ""
});
// Find message in messages and update it
const messageIndex = messages.findIndex((message) => message.id === parentId);
if (messageIndex !== -1) {
messages[messageIndex] = parentMessage;
}
} else {
toast.warning($i18n.t('No search results found'));
}
} else {
toast.warning($i18n.t('No search query generated'));
}
}
if (model?.external) {
await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
@@ -807,6 +842,30 @@
}
};
// TODO: Add support for adding all the user's messages as context, and not just the last message
const generateChatSearchQuery = async (userPrompt: string) => {
const model = $models.find((model) => model.id === selectedModels[0]);
// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
const titleModelId =
model?.external ?? false
? $settings?.title?.modelExternal ?? selectedModels[0]
: $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId);
console.log(titleModel);
return await generateSearchQuery(
localStorage.token,
titleModelId,
userPrompt,
titleModel?.external ?? false
? titleModel?.source?.toLowerCase() === 'litellm'
? `${LITELLM_API_BASE_URL}/v1`
: `${OPENAI_API_BASE_URL}`
: `${OLLAMA_API_BASE_URL}/v1`
);
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
title = _title;
@@ -906,6 +965,7 @@
bind:prompt
bind:autoScroll
bind:selectedModel={atSelectedModel}
bind:useWebSearch
{messages}
{submitPrompt}
{stopResponse}

View File

@@ -30,7 +30,7 @@
getTagsById,
updateChatById
} from '$lib/apis/chats';
import { generateOpenAIChatCompletion, generateTitle } from '$lib/apis/openai';
import { generateOpenAIChatCompletion, generateSearchQuery, generateTitle } from '$lib/apis/openai';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
@@ -43,6 +43,7 @@
WEBUI_BASE_URL
} from '$lib/constants';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { runWebSearch } from '$lib/apis/rag';
const i18n = getContext('i18n');
@@ -59,6 +60,8 @@
let selectedModels = [''];
let atSelectedModel = '';
let useWebSearch = false;
let selectedModelfile = null;
$: selectedModelfile =
@@ -287,6 +290,39 @@
];
}
if (useWebSearch) {
// TODO: Toasts are temporary indicators for web search
toast.info($i18n.t('Generating search query'));
const searchQuery = await generateChatSearchQuery(prompt);
if (searchQuery) {
toast.info($i18n.t('Searching the web for \'{{searchQuery}}\'', { searchQuery }));
const searchDocUuid = uuidv4();
const searchDocument = await runWebSearch(localStorage.token, searchQuery, searchDocUuid);
if (searchDocument) {
const parentMessage = history.messages[parentId];
if (!parentMessage.files) {
parentMessage.files = [];
}
parentMessage.files.push({
collection_name: searchDocument.collection_name,
name: searchQuery,
type: 'doc',
upload_status: true,
error: ""
});
// Find message in messages and update it
const messageIndex = messages.findIndex((message) => message.id === parentId);
if (messageIndex !== -1) {
messages[messageIndex] = parentMessage;
}
} else {
toast.warning($i18n.t('No search results found'));
}
} else {
toast.warning($i18n.t('No search query generated'));
}
}
if (model?.external) {
await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
} else if (model) {
@@ -819,6 +855,30 @@
}
};
// TODO: Add support for adding all the user's messages as context, and not just the last message
const generateChatSearchQuery = async (userPrompt: string) => {
const model = $models.find((model) => model.id === selectedModels[0]);
// TODO: rename titleModel to taskModel - this is the model used for non-chat tasks (e.g. title generation, search query generation)
const titleModelId =
model?.external ?? false
? $settings?.title?.modelExternal ?? selectedModels[0]
: $settings?.title?.model ?? selectedModels[0];
const titleModel = $models.find((model) => model.id === titleModelId);
console.log(titleModel);
return await generateSearchQuery(
localStorage.token,
titleModelId,
userPrompt,
titleModel?.external ?? false
? titleModel?.source?.toLowerCase() === 'litellm'
? `${LITELLM_API_BASE_URL}/v1`
: `${OPENAI_API_BASE_URL}`
: `${OLLAMA_API_BASE_URL}/v1`
);
};
const setChatTitle = async (_chatId, _title) => {
if (_chatId === $chatId) {
title = _title;
@@ -929,6 +989,7 @@
bind:prompt
bind:autoScroll
bind:selectedModel={atSelectedModel}
bind:useWebSearch
suggestionPrompts={selectedModelfile?.suggestionPrompts ?? $config.default_prompt_suggestions}
{messages}
{submitPrompt}