Mirror of https://github.com/stackblitz-labs/bolt.diy (synced 2025-06-26 18:26:38 +00:00)
Refactor the file deletion logic in FilesStore to precompute prefixes and iterate through files only once. This reduces the complexity of nested loops and improves performance by applying all deletions in a single update to the store. Additionally, remove a redundant console.log statement in Chat.client.tsx and update the prompts documentation for clarity.
506 lines
14 KiB
TypeScript
import type { PathWatcherEvent, WebContainer } from '@webcontainer/api';
import { getEncoding } from 'istextorbinary';
import { map, type MapStore } from 'nanostores';
import { Buffer } from 'node:buffer';
import { path } from '~/utils/path';
import { bufferWatchEvents } from '~/utils/buffer';
import { WORK_DIR } from '~/utils/constants';
import { computeFileModifications } from '~/utils/diff';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';

const logger = createScopedLogger('FilesStore');

const utf8TextDecoder = new TextDecoder('utf8', { fatal: true });

export interface File {
  type: 'file';
  content: string;
  isBinary: boolean;
}

export interface Folder {
  type: 'folder';
}

type Dirent = File | Folder;

export type FileMap = Record<string, Dirent | undefined>;

export class FilesStore {
  #webcontainer: Promise<WebContainer>;

  /**
   * Tracks the number of files, excluding folders.
   */
  #size = 0;

  /**
   * @note Keeps track of all modified files with their original content since the last user message.
   * Needs to be reset when the user sends another message, at which point all changes have to be
   * submitted so the model is aware of them.
   */
  #modifiedFiles: Map<string, string> = import.meta.hot?.data.modifiedFiles ?? new Map();

  /**
   * Keeps track of deleted files and folders to prevent them from reappearing on reload
   */
  #deletedPaths: Set<string> = import.meta.hot?.data.deletedPaths ?? new Set();

  /**
   * Map of files that matches the state of WebContainer.
   */
  files: MapStore<FileMap> = import.meta.hot?.data.files ?? map({});

  get filesCount() {
    return this.#size;
  }

  constructor(webcontainerPromise: Promise<WebContainer>) {
    this.#webcontainer = webcontainerPromise;

    // Load deleted paths from localStorage if available
    try {
      if (typeof localStorage !== 'undefined') {
        const deletedPathsJson = localStorage.getItem('bolt-deleted-paths');

        if (deletedPathsJson) {
          const deletedPaths = JSON.parse(deletedPathsJson);

          if (Array.isArray(deletedPaths)) {
            deletedPaths.forEach((path) => this.#deletedPaths.add(path));
          }
        }
      }
    } catch (error) {
      logger.error('Failed to load deleted paths from localStorage', error);
    }

    if (import.meta.hot) {
      // Persist our state across hot reloads
      import.meta.hot.data.files = this.files;
      import.meta.hot.data.modifiedFiles = this.#modifiedFiles;
      import.meta.hot.data.deletedPaths = this.#deletedPaths;
    }

    this.#init();
  }

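  /**
   * Returns the file at the given path, or `undefined` if the path is not in the store
   * or points to a folder.
   */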
  getFile(filePath: string) {
    const dirent = this.files.get()[filePath];

    if (dirent?.type !== 'file') {
      return undefined;
    }

    return dirent;
  }

  getFileModifications() {
    return computeFileModifications(this.files.get(), this.#modifiedFiles);
  }
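
  /**
   * Returns a map of the files whose current content differs from the original content
   * recorded in `#modifiedFiles`, or `undefined` if nothing changed.
   */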
  getModifiedFiles() {
    let modifiedFiles: { [path: string]: File } | undefined = undefined;

    for (const [filePath, originalContent] of this.#modifiedFiles) {
      const file = this.files.get()[filePath];

      if (file?.type !== 'file') {
        continue;
      }

      if (file.content === originalContent) {
        continue;
      }

      if (!modifiedFiles) {
        modifiedFiles = {};
      }

      modifiedFiles[filePath] = file;
    }

    return modifiedFiles;
  }

  resetFileModifications() {
    this.#modifiedFiles.clear();
  }

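  /**
   * Writes `content` to the file in the WebContainer and updates the store immediately,
   * recording the previous content in `#modifiedFiles` if the file is not already tracked.
   */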
  async saveFile(filePath: string, content: string) {
    const webcontainer = await this.#webcontainer;

    try {
      const relativePath = path.relative(webcontainer.workdir, filePath);

      if (!relativePath) {
        throw new Error(`EINVAL: invalid file path, write '${relativePath}'`);
      }

      const oldContent = this.getFile(filePath)?.content;

      if (!oldContent && oldContent !== '') {
        unreachable('Expected content to be defined');
      }

      await webcontainer.fs.writeFile(relativePath, content);

      if (!this.#modifiedFiles.has(filePath)) {
        this.#modifiedFiles.set(filePath, oldContent);
      }

      // we immediately update the file and don't rely on the `change` event coming from the watcher
      this.files.setKey(filePath, { type: 'file', content, isBinary: false });

      logger.info('File updated');
    } catch (error) {
      logger.error('Failed to update file content\n\n', error);

      throw error;
    }
  }

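  /**
   * Removes previously deleted paths from the store and starts watching the work directory
   * for file system events.
   */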
  async #init() {
    const webcontainer = await this.#webcontainer;

    // Clean up any files that were previously deleted
    this.#cleanupDeletedFiles();

    webcontainer.internal.watchPaths(
      { include: [`${WORK_DIR}/**`], exclude: ['**/node_modules', '.git'], includeContent: true },
      bufferWatchEvents(100, this.#processEventBuffer.bind(this)),
    );
  }

  /**
   * Removes any deleted files/folders from the store
   */
  #cleanupDeletedFiles() {
    if (this.#deletedPaths.size === 0) {
      return;
    }

    const currentFiles = this.files.get();
    const pathsToDelete = new Set<string>();

    // Precompute prefixes for efficient checking
    const deletedPrefixes = [...this.#deletedPaths].map((p) => p + '/');

    // Iterate through all current files/folders once
    for (const [path, dirent] of Object.entries(currentFiles)) {
      // Skip if dirent is already undefined (shouldn't happen often but good practice)
      if (!dirent) {
        continue;
      }

      // Check for exact match in deleted paths
      if (this.#deletedPaths.has(path)) {
        pathsToDelete.add(path);
        continue; // No need to check prefixes if it's an exact match
      }

      // Check if the path starts with any of the deleted folder prefixes
      for (const prefix of deletedPrefixes) {
        if (path.startsWith(prefix)) {
          pathsToDelete.add(path);
          break; // Found a match, no need to check other prefixes for this path
        }
      }
    }

    // Perform the deletions and updates based on the collected paths
    if (pathsToDelete.size > 0) {
      const updates: FileMap = {};

      for (const pathToDelete of pathsToDelete) {
        const dirent = currentFiles[pathToDelete];
        updates[pathToDelete] = undefined; // Mark for deletion in the map update

        if (dirent?.type === 'file') {
          this.#size--;

          if (this.#modifiedFiles.has(pathToDelete)) {
            this.#modifiedFiles.delete(pathToDelete);
          }
        }
      }

      // Apply all deletions to the store at once for potential efficiency
      this.files.set({ ...currentFiles, ...updates });
    }
  }

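  /**
   * Applies buffered watcher events to the store, ignoring any paths recorded in `#deletedPaths`.
   */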
  #processEventBuffer(events: Array<[events: PathWatcherEvent[]]>) {
    const watchEvents = events.flat(2);

    for (const { type, path: eventPath, buffer } of watchEvents) {
      // remove any trailing slashes
      const sanitizedPath = eventPath.replace(/\/+$/g, '');

      // Skip processing if this file/folder was explicitly deleted
      if (this.#deletedPaths.has(sanitizedPath)) {
        continue;
      }

      let isInDeletedFolder = false;

      for (const deletedPath of this.#deletedPaths) {
        if (sanitizedPath.startsWith(deletedPath + '/')) {
          isInDeletedFolder = true;
          break;
        }
      }

      if (isInDeletedFolder) {
        continue;
      }

      switch (type) {
        case 'add_dir': {
          // we intentionally add a trailing slash so we can distinguish files from folders in the file tree
          this.files.setKey(sanitizedPath, { type: 'folder' });
          break;
        }
        case 'remove_dir': {
          this.files.setKey(sanitizedPath, undefined);

          for (const [direntPath] of Object.entries(this.files.get())) {
            if (direntPath.startsWith(sanitizedPath)) {
              this.files.setKey(direntPath, undefined);
            }
          }

          break;
        }
        case 'add_file':
        case 'change': {
          if (type === 'add_file') {
            this.#size++;
          }

          let content = '';
          const isBinary = isBinaryFile(buffer);

          if (isBinary && buffer) {
            // For binary files, we need to preserve the content as base64
            content = Buffer.from(buffer).toString('base64');
          } else if (!isBinary) {
            content = this.#decodeFileContent(buffer);

            /*
             * If the content is a single space and this is from our empty file workaround,
             * convert it back to an actual empty string
             */
            if (content === ' ' && type === 'add_file') {
              content = '';
            }
          }

          const existingFile = this.files.get()[sanitizedPath];

          if (existingFile?.type === 'file' && existingFile.isBinary && existingFile.content && !content) {
            content = existingFile.content;
          }

          this.files.setKey(sanitizedPath, { type: 'file', content, isBinary });
          break;
        }
        case 'remove_file': {
          this.#size--;
          this.files.setKey(sanitizedPath, undefined);
          break;
        }
        case 'update_directory': {
          // we don't care about these events
          break;
        }
      }
    }
  }

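  /**
   * Decodes a file buffer as UTF-8, returning an empty string for empty buffers or
   * when decoding fails.
   */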
  #decodeFileContent(buffer?: Uint8Array) {
    if (!buffer || buffer.byteLength === 0) {
      return '';
    }

    try {
      return utf8TextDecoder.decode(buffer);
    } catch (error) {
      console.log(error);
      return '';
    }
  }

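  /**
   * Creates a file (and any missing parent directories) in the WebContainer and mirrors it
   * in the store. Binary content is kept as base64; empty text files are written with a
   * single space (the empty file workaround referenced in #processEventBuffer).
   */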
  async createFile(filePath: string, content: string | Uint8Array = '') {
    const webcontainer = await this.#webcontainer;

    try {
      const relativePath = path.relative(webcontainer.workdir, filePath);

      if (!relativePath) {
        throw new Error(`EINVAL: invalid file path, create '${relativePath}'`);
      }

      const dirPath = path.dirname(relativePath);

      if (dirPath !== '.') {
        await webcontainer.fs.mkdir(dirPath, { recursive: true });
      }

      const isBinary = content instanceof Uint8Array;

      if (isBinary) {
        await webcontainer.fs.writeFile(relativePath, Buffer.from(content));

        const base64Content = Buffer.from(content).toString('base64');
        this.files.setKey(filePath, { type: 'file', content: base64Content, isBinary: true });

        this.#modifiedFiles.set(filePath, base64Content);
      } else {
        const contentToWrite = (content as string).length === 0 ? ' ' : content;
        await webcontainer.fs.writeFile(relativePath, contentToWrite);

        this.files.setKey(filePath, { type: 'file', content: content as string, isBinary: false });

        this.#modifiedFiles.set(filePath, content as string);
      }

      logger.info(`File created: ${filePath}`);

      return true;
    } catch (error) {
      logger.error('Failed to create file\n\n', error);
      throw error;
    }
  }

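  /**
   * Creates a folder (recursively) in the WebContainer and adds it to the store.
   */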
  async createFolder(folderPath: string) {
    const webcontainer = await this.#webcontainer;

    try {
      const relativePath = path.relative(webcontainer.workdir, folderPath);

      if (!relativePath) {
        throw new Error(`EINVAL: invalid folder path, create '${relativePath}'`);
      }

      await webcontainer.fs.mkdir(relativePath, { recursive: true });

      this.files.setKey(folderPath, { type: 'folder' });

      logger.info(`Folder created: ${folderPath}`);

      return true;
    } catch (error) {
      logger.error('Failed to create folder\n\n', error);
      throw error;
    }
  }

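  /**
   * Deletes a file from the WebContainer, removes it from the store, and records the path
   * in `#deletedPaths` so it does not reappear on reload.
   */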
  async deleteFile(filePath: string) {
    const webcontainer = await this.#webcontainer;

    try {
      const relativePath = path.relative(webcontainer.workdir, filePath);

      if (!relativePath) {
        throw new Error(`EINVAL: invalid file path, delete '${relativePath}'`);
      }

      await webcontainer.fs.rm(relativePath);

      this.#deletedPaths.add(filePath);

      this.files.setKey(filePath, undefined);
      this.#size--;

      if (this.#modifiedFiles.has(filePath)) {
        this.#modifiedFiles.delete(filePath);
      }

      this.#persistDeletedPaths();

      logger.info(`File deleted: ${filePath}`);

      return true;
    } catch (error) {
      logger.error('Failed to delete file\n\n', error);
      throw error;
    }
  }

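  /**
   * Deletes a folder recursively from the WebContainer, removes it and all nested entries
   * from the store, and records every removed path in `#deletedPaths`.
   */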
  async deleteFolder(folderPath: string) {
    const webcontainer = await this.#webcontainer;

    try {
      const relativePath = path.relative(webcontainer.workdir, folderPath);

      if (!relativePath) {
        throw new Error(`EINVAL: invalid folder path, delete '${relativePath}'`);
      }

      await webcontainer.fs.rm(relativePath, { recursive: true });

      this.#deletedPaths.add(folderPath);

      this.files.setKey(folderPath, undefined);

      const allFiles = this.files.get();

      for (const [path, dirent] of Object.entries(allFiles)) {
        if (path.startsWith(folderPath + '/')) {
          this.files.setKey(path, undefined);

          this.#deletedPaths.add(path);

          if (dirent?.type === 'file') {
            this.#size--;
          }

          if (dirent?.type === 'file' && this.#modifiedFiles.has(path)) {
            this.#modifiedFiles.delete(path);
          }
        }
      }

      this.#persistDeletedPaths();

      logger.info(`Folder deleted: ${folderPath}`);

      return true;
    } catch (error) {
      logger.error('Failed to delete folder\n\n', error);
      throw error;
    }
  }

  // method to persist deleted paths to localStorage
  #persistDeletedPaths() {
    try {
      if (typeof localStorage !== 'undefined') {
        localStorage.setItem('bolt-deleted-paths', JSON.stringify([...this.#deletedPaths]));
      }
    } catch (error) {
      logger.error('Failed to persist deleted paths to localStorage', error);
    }
  }
}

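/**
 * Returns `true` if the given buffer appears to contain binary content; `undefined` buffers
 * are treated as text.
 */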
function isBinaryFile(buffer: Uint8Array | undefined) {
  if (buffer === undefined) {
    return false;
  }

  return getEncoding(convertToBuffer(buffer), { chunkLength: 100 }) === 'binary';
}

/**
 * Converts a `Uint8Array` into a Node.js `Buffer` without an expensive copy.
 * It does create a new typed array, but that's generally cheap because it reuses
 * the same underlying array buffer.
 */
function convertToBuffer(view: Uint8Array): Buffer {
  return Buffer.from(view.buffer, view.byteOffset, view.byteLength);
}