mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)

Merge pull request #12997 from Elmolesto/feat/knowledge-lazy-load-content
feat: Lazy load file content on click
Commit 794360addc
@@ -9,7 +9,7 @@ from open_webui.models.knowledge import (
     KnowledgeResponse,
     KnowledgeUserResponse,
 )
-from open_webui.models.files import Files, FileModel
+from open_webui.models.files import Files, FileModel, FileMetadataResponse
 from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.routers.retrieval import (
     process_file,
@@ -235,7 +235,7 @@ async def reindex_knowledge_files(request: Request, user=Depends(get_verified_us


 class KnowledgeFilesResponse(KnowledgeResponse):
-    files: list[FileModel]
+    files: list[FileMetadataResponse]


 @router.get("/{id}", response_model=Optional[KnowledgeFilesResponse])
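
The model change above is the heart of the backend side: KnowledgeFilesResponse now lists file metadata instead of full file records, so the knowledge detail endpoint no longer ships every file's extracted text. A minimal sketch of the distinction, with field names assumed for illustration rather than copied from open_webui.models.files:

from typing import Optional

from pydantic import BaseModel


# Full file record: carries the (potentially large) extracted text under `data`.
class FileModelSketch(BaseModel):
    id: str
    filename: str
    data: Optional[dict] = None  # e.g. {"content": "<entire extracted text>"}
    meta: Optional[dict] = None
    created_at: int
    updated_at: int


# Metadata-only view: enough to render the file list, with `data` left out.
# Returning this keeps knowledge responses small; content is fetched per file
# only when the user actually opens it.
class FileMetadataResponseSketch(BaseModel):
    id: str
    meta: Optional[dict] = None
    created_at: int
    updated_at: int
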
@@ -251,7 +251,7 @@ async def get_knowledge_by_id(id: str, user=Depends(get_verified_user)):
         ):

             file_ids = knowledge.data.get("file_ids", []) if knowledge.data else []
-            files = Files.get_files_by_ids(file_ids)
+            files = Files.get_file_metadatas_by_ids(file_ids)

             return KnowledgeFilesResponse(
                 **knowledge.model_dump(),
@@ -379,7 +379,7 @@ def add_file_to_knowledge_by_id(
     knowledge = Knowledges.update_knowledge_data_by_id(id=id, data=data)

     if knowledge:
-        files = Files.get_files_by_ids(file_ids)
+        files = Files.get_file_metadatas_by_ids(file_ids)

         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
@@ -456,7 +456,7 @@ def update_file_from_knowledge_by_id(
         data = knowledge.data or {}
         file_ids = data.get("file_ids", [])

-        files = Files.get_files_by_ids(file_ids)
+        files = Files.get_file_metadatas_by_ids(file_ids)

         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
@@ -538,7 +538,7 @@ def remove_file_from_knowledge_by_id(
     knowledge = Knowledges.update_knowledge_data_by_id(id=id, data=data)

     if knowledge:
-        files = Files.get_files_by_ids(file_ids)
+        files = Files.get_file_metadatas_by_ids(file_ids)

         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
@@ -734,7 +734,7 @@ def add_files_to_knowledge_batch(
         error_details = [f"{err.file_id}: {err.error}" for err in result.errors]
         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
-            files=Files.get_files_by_ids(existing_file_ids),
+            files=Files.get_file_metadatas_by_ids(existing_file_ids),
             warnings={
                 "message": "Some files failed to process",
                 "errors": error_details,
@@ -742,5 +742,5 @@ def add_files_to_knowledge_batch(
     )

     return KnowledgeFilesResponse(
-        **knowledge.model_dump(), files=Files.get_files_by_ids(existing_file_ids)
+        **knowledge.model_dump(), files=Files.get_file_metadatas_by_ids(existing_file_ids)
     )
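
All of the swapped calls go through Files.get_file_metadatas_by_ids, whose implementation is not part of this diff. As a rough, self-contained illustration of the intent (table layout, column names, and session handling are assumptions, not the real open_webui.models.files code), a helper like this can select only the metadata columns and never load the data blob:

from sqlalchemy import JSON, BigInteger, Column, String, Text, create_engine
from sqlalchemy.orm import Session, declarative_base, load_only

Base = declarative_base()


class File(Base):
    __tablename__ = "file"

    id = Column(String, primary_key=True)
    filename = Column(Text)
    data = Column(JSON, nullable=True)  # holds {"content": "..."} after processing
    meta = Column(JSON, nullable=True)
    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


def get_file_metadatas_by_ids(session: Session, ids: list[str]) -> list[dict]:
    # Only the listed columns are loaded; the `data` blob stays in the database.
    rows = (
        session.query(File)
        .options(load_only(File.id, File.meta, File.created_at, File.updated_at))
        .filter(File.id.in_(ids))
        .all()
    )
    return [
        {"id": f.id, "meta": f.meta, "created_at": f.created_at, "updated_at": f.updated_at}
        for f in rows
    ]


if __name__ == "__main__":
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(
            File(
                id="f1",
                filename="notes.txt",
                data={"content": "a very large extracted text ..."},
                meta={"name": "notes.txt", "size": 12345},
                created_at=0,
                updated_at=0,
            )
        )
        session.commit()
        print(get_file_metadatas_by_ids(session, ["f1"]))

Whether the real helper defers columns at the SQL level or simply maps rows into FileMetadataResponse is an open-webui implementation detail; either way, the knowledge endpoints stop serializing file content. The second half of the diff updates the Svelte knowledge page component to fetch that content on demand.
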
@@ -11,7 +11,7 @@
     import { page } from '$app/stores';
     import { mobile, showSidebar, knowledge as _knowledge, config, user } from '$lib/stores';

-    import { updateFileDataContentById, uploadFile, deleteFileById } from '$lib/apis/files';
+    import { updateFileDataContentById, uploadFile, deleteFileById, getFileById } from '$lib/apis/files';
     import {
         addFileToKnowledgeById,
         getKnowledgeById,
@@ -84,12 +84,15 @@

     let selectedFile = null;
     let selectedFileId = null;
+    let selectedFileContent = '';
+
+    // Add cache object
+    let fileContentCache = new Map();

     $: if (selectedFileId) {
         const file = (knowledge?.files ?? []).find((file) => file.id === selectedFileId);
         if (file) {
-            file.data = file.data ?? { content: '' };
-            selectedFile = file;
+            fileSelectHandler(file);
         } else {
             selectedFile = null;
         }
@@ -394,7 +397,10 @@

     const updateFileContentHandler = async () => {
         const fileId = selectedFile.id;
-        const content = selectedFile.data.content;
+        const content = selectedFileContent;
+
+        // Clear the cache for this file since we're updating it
+        fileContentCache.delete(fileId);

         const res = updateFileDataContentById(localStorage.token, fileId, content).catch((e) => {
             toast.error(`${e}`);
@@ -450,6 +456,29 @@
         }
     };

+    const fileSelectHandler = async (file) => {
+        try {
+            selectedFile = file;
+
+            // Check cache first
+            if (fileContentCache.has(file.id)) {
+                selectedFileContent = fileContentCache.get(file.id);
+                return;
+            }
+
+            const response = await getFileById(localStorage.token, file.id);
+            if (response) {
+                selectedFileContent = response.data.content;
+                // Cache the content
+                fileContentCache.set(file.id, response.data.content);
+            } else {
+                toast.error($i18n.t('No content found in file.'));
+            }
+        } catch (e) {
+            toast.error($i18n.t('Failed to load file content.'));
+        }
+    };
+
     const onDragOver = (e) => {
         e.preventDefault();

@@ -728,7 +757,7 @@
             {#key selectedFile.id}
                 <RichTextInput
                     className="input-prose-sm"
-                    bind:value={selectedFile.data.content}
+                    bind:value={selectedFileContent}
                     placeholder={$i18n.t('Add content here')}
                     preserveBreaks={true}
                 />
@@ -786,7 +815,7 @@
             {#key selectedFile.id}
                 <RichTextInput
                     className="input-prose-sm"
-                    bind:value={selectedFile.data.content}
+                    bind:value={selectedFileContent}
                     placeholder={$i18n.t('Add content here')}
                     preserveBreaks={true}
                 />
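
On the client side, fileSelectHandler fetches a single file's content on demand via getFileById, keeps it in a Map keyed by file id, and updateFileContentHandler drops the cached entry before saving so a re-opened file is refreshed after edits. That lazy fetch presupposes a per-file endpoint that still returns the full record including data.content; a hedged FastAPI-style sketch of such an endpoint follows (route path, auth dependency, and the in-memory store are assumptions for illustration, not the actual open-webui files router):

from typing import Optional

from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel

router = APIRouter()


class FileContentResponse(BaseModel):
    id: str
    filename: str
    data: Optional[dict] = None  # {"content": "..."} — what the editor binds to


def get_verified_user():
    # Placeholder for the real authentication dependency.
    return {"id": "user-1"}


# Minimal in-memory store so the sketch runs standalone.
FAKE_FILES = {
    "f1": FileContentResponse(
        id="f1", filename="notes.txt", data={"content": "lazy-loaded text"}
    )
}


@router.get("/files/{id}", response_model=FileContentResponse)
async def get_file_by_id(id: str, user=Depends(get_verified_user)):
    # Unlike the knowledge listing, this response intentionally includes `data`,
    # since it is requested only for the single file the user clicked.
    file = FAKE_FILES.get(id)
    if file is None:
        raise HTTPException(status_code=404, detail="File not found")
    return file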