Mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)

feat: submit prompt integration

@@ -298,6 +298,7 @@
     //////////////////////////
 
     const submitPrompt = async (userPrompt, _user = null) => {
+        let _responses = [];
         console.log('submitPrompt', $chatId);
 
         selectedModels = selectedModels.map((modelId) =>
@@ -379,11 +380,14 @@
             files = [];
 
             // Send prompt
-            await sendPrompt(userPrompt, userMessageId);
+            _responses = await sendPrompt(userPrompt, userMessageId);
         }
+
+        return _responses;
     };
 
     const sendPrompt = async (prompt, parentId, modelId = null) => {
+        let _responses = [];
        const _chatId = JSON.parse(JSON.stringify($chatId));
 
         await Promise.all(
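
Not part of the commit, just a minimal self-contained sketch of the shape of this hunk: a submit handler that used to fire and forget now collects what the downstream call produced and returns it. fakeSendPrompt and submitPromptSketch are placeholder names, not open-webui code.

// Sketch only: collect the result and hand it back to the caller.
const fakeSendPrompt = async (prompt: string): Promise<(string | null)[]> => [`echo: ${prompt}`];

const submitPromptSketch = async (userPrompt: string): Promise<(string | null)[]> => {
    let _responses: (string | null)[] = [];
    // ...validation and message bookkeeping would happen here...
    _responses = await fakeSendPrompt(userPrompt);
    return _responses; // callers such as CallOverlay can now await the answers
};

// Usage: the caller receives the generated text instead of a resolved void.
submitPromptSketch('hello').then((responses) => console.log(responses));
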
@@ -470,11 +474,14 @@
                 await getWebSearchResults(model.id, parentId, responseMessageId);
             }
 
+            let _response = null;
+
             if (model?.owned_by === 'openai') {
-                await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
+                _response = await sendPromptOpenAI(model, prompt, responseMessageId, _chatId);
             } else if (model) {
-                await sendPromptOllama(model, prompt, responseMessageId, _chatId);
+                _response = await sendPromptOllama(model, prompt, responseMessageId, _chatId);
             }
+            _responses.push(_response);
 
             console.log('chatEventEmitter', chatEventEmitter);
 
@@ -486,6 +493,8 @@
         );
 
         await chats.set(await getChatList(localStorage.token));
+
+        return _responses;
     };
 
     const getWebSearchResults = async (model: string, parentId: string, responseId: string) => {
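
Taken together, the hunks above give sendPrompt a fan-out-and-collect shape: one request per selected model, each answer pushed into an array that is returned at the end. The stand-alone sketch below illustrates that shape under stated assumptions; the Model type, the model ids, and the fake senders only stand in for sendPromptOpenAI / sendPromptOllama.

// Sketch of the fan-out: ask every selected model in parallel, collect the answers.
type Model = { id: string; owned_by: 'openai' | 'ollama' };

const fakeOpenAISend = async (model: Model, prompt: string) => `openai/${model.id}: ${prompt}`;
const fakeOllamaSend = async (model: Model, prompt: string) => `ollama/${model.id}: ${prompt}`;

const sendToAll = async (models: Model[], prompt: string): Promise<(string | null)[]> => {
    const responses: (string | null)[] = [];
    await Promise.all(
        models.map(async (model) => {
            let response: string | null = null;
            if (model.owned_by === 'openai') {
                response = await fakeOpenAISend(model, prompt);
            } else {
                response = await fakeOllamaSend(model, prompt);
            }
            responses.push(response);
        })
    );
    return responses; // one entry per model, null where a backend produced nothing
};

// Usage with made-up model ids:
sendToAll(
    [
        { id: 'gpt-4o', owned_by: 'openai' },
        { id: 'llama3', owned_by: 'ollama' }
    ],
    'hello'
).then((responses) => console.log(responses));
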
@@ -560,6 +569,8 @@
     };
 
     const sendPromptOllama = async (model, userPrompt, responseMessageId, _chatId) => {
+        let _response = null;
+
         model = model.id;
 
         const responseMessage = history.messages[responseMessageId];
@@ -670,6 +681,7 @@
                 await chatCompletedHandler(model, messages);
             }
 
+            _response = responseMessage.content;
             break;
         }
 
@@ -806,9 +818,12 @@
             const _title = await generateChatTitle(userPrompt);
             await setChatTitle(_chatId, _title);
         }
+
+        return _response;
     };
 
     const sendPromptOpenAI = async (model, userPrompt, responseMessageId, _chatId) => {
+        let _response = null;
         const responseMessage = history.messages[responseMessageId];
 
         const docs = messages
@@ -925,6 +940,8 @@
                 await chatCompletedHandler(model.id, messages);
             }
 
+            _response = responseMessage.content;
+
             break;
         }
 
@@ -1000,6 +1017,8 @@
             const _title = await generateChatTitle(userPrompt);
             await setChatTitle(_chatId, _title);
         }
+
+        return _response;
     };
 
     const handleOpenAIError = async (error, res: Response | null, model, responseMessage) => {
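
The Ollama and OpenAI hunks above follow the same pattern: keep streaming into responseMessage.content exactly as before, then capture that finished text and return it. A stand-alone illustration of the idea, assuming a fake stream in place of the real completion endpoints:

// Only the "accumulate, then return the final content" shape mirrors the diff.
async function* fakeStream(): AsyncGenerator<string> {
    for (const chunk of ['Hel', 'lo ', 'world']) {
        yield chunk;
    }
}

const streamToText = async (): Promise<string | null> => {
    let _response: string | null = null;
    const responseMessage = { content: '' };

    for await (const chunk of fakeStream()) {
        responseMessage.content += chunk; // the UI renders this while streaming
    }

    _response = responseMessage.content; // capture the finished text
    return _response; // mirrors the new `return _response;` in both senders
};

streamToText().then((text) => console.log(text)); // "Hello world"
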
@@ -1195,7 +1214,7 @@
     </title>
 </svelte:head>
 
-<CallOverlay />
+<CallOverlay {submitPrompt} />
 
 {#if !chatIdProp || (loaded && chatIdProp)}
     <div
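
The last hunk hands submitPrompt down to CallOverlay as a prop (inside the component it would be declared with "export let submitPrompt;"). A hypothetical consumer of that prop could look like the sketch below; only the submitPrompt signature and its returned array come from this commit, the transcript handler and the stub are invented for illustration.

// Hypothetical voice-call handler that is given submitPrompt, as CallOverlay now is.
type SubmitPrompt = (prompt: string) => Promise<(string | null)[]>;

const handleTranscript = async (submitPrompt: SubmitPrompt, transcript: string) => {
    const responses = await submitPrompt(transcript);
    // e.g. feed the first model's answer to text-to-speech
    return responses.at(0) ?? '';
};

// Usage with a stub in place of the real prop:
const stubSubmitPrompt: SubmitPrompt = async (prompt) => [`echo: ${prompt}`];
handleTranscript(stubSubmitPrompt, 'hello').then((answer) => console.log(answer));
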