diff --git a/app/components/chat/Artifact.tsx b/app/components/chat/Artifact.tsx index 5f0c9910..d164689a 100644 --- a/app/components/chat/Artifact.tsx +++ b/app/components/chat/Artifact.tsx @@ -35,7 +35,11 @@ export const Artifact = memo(({ messageId }: ArtifactProps) => { const actions = useStore( computed(artifact.runner.actions, (actions) => { - return Object.values(actions); + // Filter out Supabase actions except for migrations + return Object.values(actions).filter((action) => { + // Exclude actions with type 'supabase' or actions that contain 'supabase' in their content + return action.type !== 'supabase' && !(action.type === 'shell' && action.content?.includes('supabase')); + }); }), ); diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx index b8712202..168fbed9 100644 --- a/app/components/chat/BaseChat.tsx +++ b/app/components/chat/BaseChat.tsx @@ -29,13 +29,15 @@ import type { ProviderInfo } from '~/types/model'; import { ScreenshotStateManager } from './ScreenshotStateManager'; import { toast } from 'react-toastify'; import StarterTemplates from './StarterTemplates'; -import type { ActionAlert } from '~/types/actions'; +import type { ActionAlert, SupabaseAlert } from '~/types/actions'; import ChatAlert from './ChatAlert'; import type { ModelInfo } from '~/lib/modules/llm/types'; import ProgressCompilation from './ProgressCompilation'; import type { ProgressAnnotation } from '~/types/context'; import type { ActionRunner } from '~/lib/runtime/action-runner'; import { LOCAL_PROVIDERS } from '~/lib/stores/settings'; +import { SupabaseChatAlert } from '~/components/chat/SupabaseAlert'; +import { SupabaseConnection } from './SupabaseConnection'; const TEXTAREA_MIN_HEIGHT = 76; @@ -69,6 +71,8 @@ interface BaseChatProps { setImageDataList?: (dataList: string[]) => void; actionAlert?: ActionAlert; clearAlert?: () => void; + supabaseAlert?: SupabaseAlert; + clearSupabaseAlert?: () => void; data?: JSONValue[] | undefined; 
actionRunner?: ActionRunner; } @@ -105,6 +109,8 @@ export const BaseChat = React.forwardRef( messages, actionAlert, clearAlert, + supabaseAlert, + clearSupabaseAlert, data, actionRunner, }, @@ -343,6 +349,16 @@ export const BaseChat = React.forwardRef( ) : null; }} + {supabaseAlert && ( + clearSupabaseAlert?.()} + postMessage={(message) => { + sendMessage?.({} as any, message); + clearSupabaseAlert?.(); + }} + /> + )}
( a new line
) : null} + diff --git a/app/components/chat/Chat.client.tsx b/app/components/chat/Chat.client.tsx index 8ac5d286..4cf8c8ca 100644 --- a/app/components/chat/Chat.client.tsx +++ b/app/components/chat/Chat.client.tsx @@ -26,6 +26,7 @@ import { getTemplates, selectStarterTemplate } from '~/utils/selectStarterTempla import { logStore } from '~/lib/stores/logs'; import { streamingState } from '~/lib/stores/streaming'; import { filesToArtifacts } from '~/utils/fileUtils'; +import { supabaseConnection } from '~/lib/stores/supabase'; const toastAnimation = cssTransition({ enter: 'animated fadeInRight', @@ -123,6 +124,11 @@ export const ChatImpl = memo( const [fakeLoading, setFakeLoading] = useState(false); const files = useStore(workbenchStore.files); const actionAlert = useStore(workbenchStore.alert); + const supabaseConn = useStore(supabaseConnection); // Add this line to get Supabase connection + const selectedProject = supabaseConn.stats?.projects?.find( + (project) => project.id === supabaseConn.selectedProjectId, + ); + const supabaseAlert = useStore(workbenchStore.supabaseAlert); const { activeProviders, promptId, autoSelectTemplate, contextOptimizationEnabled } = useSettings(); const [model, setModel] = useState(() => { @@ -160,6 +166,14 @@ export const ChatImpl = memo( files, promptId, contextOptimization: contextOptimizationEnabled, + supabase: { + isConnected: supabaseConn.isConnected, + hasSelectedProject: !!selectedProject, + credentials: { + supabaseUrl: supabaseConn?.credentials?.supabaseUrl, + anonKey: supabaseConn?.credentials?.anonKey, + }, + }, }, sendExtraMessageFields: true, onError: (e) => { @@ -544,6 +558,8 @@ export const ChatImpl = memo( setImageDataList={setImageDataList} actionAlert={actionAlert} clearAlert={() => workbenchStore.clearAlert()} + supabaseAlert={supabaseAlert} + clearSupabaseAlert={() => workbenchStore.clearSupabaseAlert()} data={chatData} /> ); diff --git a/app/components/chat/SupabaseAlert.tsx 
b/app/components/chat/SupabaseAlert.tsx new file mode 100644 index 00000000..d86e5e53 --- /dev/null +++ b/app/components/chat/SupabaseAlert.tsx @@ -0,0 +1,199 @@ +import { AnimatePresence, motion } from 'framer-motion'; +import type { SupabaseAlert } from '~/types/actions'; +import { classNames } from '~/utils/classNames'; +import { supabaseConnection } from '~/lib/stores/supabase'; +import { useStore } from '@nanostores/react'; +import { useState } from 'react'; + +interface Props { + alert: SupabaseAlert; + clearAlert: () => void; + postMessage: (message: string) => void; +} + +export function SupabaseChatAlert({ alert, clearAlert, postMessage }: Props) { + const { content } = alert; + const connection = useStore(supabaseConnection); + const [isExecuting, setIsExecuting] = useState(false); + const [isCollapsed, setIsCollapsed] = useState(true); + + // Determine connection state + const isConnected = !!(connection.token && connection.selectedProjectId); + + // Set title and description based on connection state + const title = isConnected ? 'Supabase Query' : 'Supabase Connection Required'; + const description = isConnected ? 'Execute database query' : 'Supabase connection required'; + const message = isConnected + ? 'Please review the proposed changes and apply them to your database.' 
+ : 'Please connect to Supabase to continue with this operation.'; + + const handleConnectClick = () => { + // Dispatch an event to open the Supabase connection dialog + document.dispatchEvent(new CustomEvent('open-supabase-connection')); + }; + + // Determine if we should show the Connect button or Apply Changes button + const showConnectButton = !isConnected; + + const executeSupabaseAction = async (sql: string) => { + if (!connection.token || !connection.selectedProjectId) { + console.error('No Supabase token or project selected'); + return; + } + + setIsExecuting(true); + + try { + const response = await fetch('/api/supabase/query', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${connection.token}`, + }, + body: JSON.stringify({ + projectId: connection.selectedProjectId, + query: sql, + }), + }); + + if (!response.ok) { + const errorData = (await response.json()) as any; + throw new Error(`Supabase query failed: ${errorData.error?.message || response.statusText}`); + } + + const result = await response.json(); + console.log('Supabase query executed successfully:', result); + clearAlert(); + } catch (error) { + console.error('Failed to execute Supabase action:', error); + postMessage( + `*Error executing Supabase query please fix and return the query again*\n\`\`\`\n${error instanceof Error ? error.message : String(error)}\n\`\`\`\n`, + ); + } finally { + setIsExecuting(false); + } + }; + + const cleanSqlContent = (content: string) => { + if (!content) { + return ''; + } + + let cleaned = content.replace(/\/\*[\s\S]*?\*\//g, ''); + + cleaned = cleaned.replace(/(--).*$/gm, '').replace(/(#).*$/gm, ''); + + const statements = cleaned + .split(';') + .map((stmt) => stmt.trim()) + .filter((stmt) => stmt.length > 0) + .join(';\n\n'); + + return statements; + }; + + return ( + + + {/* Header */} +
+
+ +

{title}

+
+
+ + {/* SQL Content */} +
+ {!isConnected ? ( +
+ + You must first connect to Supabase and select a project. + +
+ ) : ( + <> +
setIsCollapsed(!isCollapsed)} + > +
+ + {description || 'Create table and setup auth'} + +
+
+ + {!isCollapsed && content && ( +
+
{cleanSqlContent(content)}
+
+ )} + + )} +
+ + {/* Message and Actions */} +
+

{message}

+ +
+ {showConnectButton ? ( + + ) : ( + + )} + +
+
+
+
+ ); +} diff --git a/app/components/chat/SupabaseConnection.tsx b/app/components/chat/SupabaseConnection.tsx new file mode 100644 index 00000000..dc73973a --- /dev/null +++ b/app/components/chat/SupabaseConnection.tsx @@ -0,0 +1,339 @@ +import { useEffect } from 'react'; +import { useSupabaseConnection } from '~/lib/hooks/useSupabaseConnection'; +import { classNames } from '~/utils/classNames'; +import { useStore } from '@nanostores/react'; +import { chatId } from '~/lib/persistence/useChatHistory'; +import { fetchSupabaseStats } from '~/lib/stores/supabase'; +import { Dialog, DialogRoot, DialogClose, DialogTitle, DialogButton } from '~/components/ui/Dialog'; + +export function SupabaseConnection() { + const { + connection: supabaseConn, + connecting, + fetchingStats, + isProjectsExpanded, + setIsProjectsExpanded, + isDropdownOpen: isDialogOpen, + setIsDropdownOpen: setIsDialogOpen, + handleConnect, + handleDisconnect, + selectProject, + handleCreateProject, + updateToken, + isConnected, + fetchProjectApiKeys, + } = useSupabaseConnection(); + + const currentChatId = useStore(chatId); + + useEffect(() => { + const handleOpenConnectionDialog = () => { + setIsDialogOpen(true); + }; + + document.addEventListener('open-supabase-connection', handleOpenConnectionDialog); + + return () => { + document.removeEventListener('open-supabase-connection', handleOpenConnectionDialog); + }; + }, [setIsDialogOpen]); + + useEffect(() => { + if (isConnected && currentChatId) { + const savedProjectId = localStorage.getItem(`supabase-project-${currentChatId}`); + + /* + * If there's no saved project for this chat but there is a global selected project, + * use the global one instead of clearing it + */ + if (!savedProjectId && supabaseConn.selectedProjectId) { + // Save the current global project to this chat + localStorage.setItem(`supabase-project-${currentChatId}`, supabaseConn.selectedProjectId); + } else if (savedProjectId && savedProjectId !== supabaseConn.selectedProjectId) { + 
selectProject(savedProjectId); + } + } + }, [isConnected, currentChatId]); + + useEffect(() => { + if (currentChatId && supabaseConn.selectedProjectId) { + localStorage.setItem(`supabase-project-${currentChatId}`, supabaseConn.selectedProjectId); + } else if (currentChatId && !supabaseConn.selectedProjectId) { + localStorage.removeItem(`supabase-project-${currentChatId}`); + } + }, [currentChatId, supabaseConn.selectedProjectId]); + + useEffect(() => { + if (isConnected && supabaseConn.token) { + fetchSupabaseStats(supabaseConn.token).catch(console.error); + } + }, [isConnected, supabaseConn.token]); + + useEffect(() => { + if (isConnected && supabaseConn.selectedProjectId && supabaseConn.token && !supabaseConn.credentials) { + fetchProjectApiKeys(supabaseConn.selectedProjectId).catch(console.error); + } + }, [isConnected, supabaseConn.selectedProjectId, supabaseConn.token, supabaseConn.credentials]); + + return ( +
+
+ +
+ + + {isDialogOpen && ( + + {!isConnected ? ( +
+ + + Connect to Supabase + + +
+ + updateToken(e.target.value)} + disabled={connecting} + placeholder="Enter your Supabase access token" + className={classNames( + 'w-full px-3 py-2 rounded-lg text-sm', + 'bg-[#F8F8F8] dark:bg-[#1A1A1A]', + 'border border-[#E5E5E5] dark:border-[#333333]', + 'text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary', + 'focus:outline-none focus:ring-1 focus:ring-[#3ECF8E]', + 'disabled:opacity-50', + )} + /> + + +
+ + Cancel + + +
+
+ ) : ( +
+
+ + + Supabase Connection + +
+ +
+
+

{supabaseConn.user?.email}

+

Role: {supabaseConn.user?.role}

+
+
+ + {fetchingStats ? ( +
+
+ Fetching projects... +
+ ) : ( +
+
+ +
+ + +
+
+ + {isProjectsExpanded && ( + <> + {!supabaseConn.selectedProjectId && ( +
+ Select a project or create a new one for this chat +
+ )} + + {supabaseConn.stats?.projects?.length ? ( +
+ {supabaseConn.stats.projects.map((project) => ( +
+
+
+
+
+ {project.name} +
+
+ {project.region} +
+
+ +
+
+ ))} +
+ ) : ( +
+
+ No projects found +
+ )} + + )} +
+ )} + +
+ + Close + + +
+ Disconnect + +
+
+ )} +
+ )} +
+
+ ); +} + +interface ButtonProps { + active?: boolean; + disabled?: boolean; + children?: any; + onClick?: VoidFunction; + className?: string; +} + +function Button({ active = false, disabled = false, children, onClick, className }: ButtonProps) { + return ( + + ); +} diff --git a/app/components/workbench/FileTree.tsx b/app/components/workbench/FileTree.tsx index eed791ef..197fa4f7 100644 --- a/app/components/workbench/FileTree.tsx +++ b/app/components/workbench/FileTree.tsx @@ -1,10 +1,13 @@ -import { memo, useEffect, useMemo, useState, type ReactNode } from 'react'; +import { memo, useCallback, useEffect, useMemo, useRef, useState, type ReactNode } from 'react'; import type { FileMap } from '~/lib/stores/files'; import { classNames } from '~/utils/classNames'; import { createScopedLogger, renderLogger } from '~/utils/logger'; import * as ContextMenu from '@radix-ui/react-context-menu'; import type { FileHistory } from '~/types/actions'; import { diffLines, type Change } from 'diff'; +import { workbenchStore } from '~/lib/stores/workbench'; +import { toast } from 'react-toastify'; +import { path } from '~/utils/path'; const logger = createScopedLogger('FileTree'); @@ -25,6 +28,14 @@ interface Props { className?: string; } +interface InlineInputProps { + depth: number; + placeholder: string; + initialValue?: string; + onSubmit: (value: string) => void; + onCancel: () => void; +} + export const FileTree = memo( ({ files = {}, @@ -213,28 +224,258 @@ function ContextMenuItem({ onSelect, children }: { onSelect?: () => void; childr ); } -function FileContextMenu({ onCopyPath, onCopyRelativePath, children }: FolderContextMenuProps) { +function InlineInput({ depth, placeholder, initialValue = '', onSubmit, onCancel }: InlineInputProps) { + const inputRef = useRef(null); + + useEffect(() => { + const timer = setTimeout(() => { + if (inputRef.current) { + inputRef.current.focus(); + + if (initialValue) { + inputRef.current.value = initialValue; + inputRef.current.select(); 
+ } + } + }, 50); + + return () => clearTimeout(timer); + }, [initialValue]); + + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + const value = inputRef.current?.value.trim(); + + if (value) { + onSubmit(value); + } + } else if (e.key === 'Escape') { + onCancel(); + } + }; + return ( - - {children} - - - - Copy path - Copy relative path - - - - +
+
+ { + setTimeout(() => { + if (document.activeElement !== inputRef.current) { + onCancel(); + } + }, 100); + }} + /> +
+ ); +} + +function FileContextMenu({ + onCopyPath, + onCopyRelativePath, + fullPath, + children, +}: FolderContextMenuProps & { fullPath: string }) { + const [isCreatingFile, setIsCreatingFile] = useState(false); + const [isCreatingFolder, setIsCreatingFolder] = useState(false); + const [isDragging, setIsDragging] = useState(false); + const depth = useMemo(() => fullPath.split('/').length, [fullPath]); + const fileName = useMemo(() => path.basename(fullPath), [fullPath]); + + const isFolder = useMemo(() => { + const files = workbenchStore.files.get(); + const fileEntry = files[fullPath]; + + return !fileEntry || fileEntry.type === 'folder'; + }, [fullPath]); + + const targetPath = useMemo(() => { + return isFolder ? fullPath : path.dirname(fullPath); + }, [fullPath, isFolder]); + + const handleDragOver = useCallback((e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + setIsDragging(true); + }, []); + + const handleDragLeave = useCallback((e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + setIsDragging(false); + }, []); + + const handleDrop = useCallback( + async (e: React.DragEvent) => { + e.preventDefault(); + e.stopPropagation(); + + const items = Array.from(e.dataTransfer.items); + const files = items.filter((item) => item.kind === 'file'); + + for (const item of files) { + const file = item.getAsFile(); + + if (file) { + try { + const filePath = path.join(fullPath, file.name); + + // Convert file to binary data (Uint8Array) + const arrayBuffer = await file.arrayBuffer(); + const binaryContent = new Uint8Array(arrayBuffer); + + const success = await workbenchStore.createFile(filePath, binaryContent); + + if (success) { + toast.success(`File ${file.name} uploaded successfully`); + } else { + toast.error(`Failed to upload file ${file.name}`); + } + } catch (error) { + toast.error(`Error uploading ${file.name}`); + logger.error(error); + } + } + } + + setIsDragging(false); + }, + [fullPath], + ); + + const handleCreateFile 
= async (fileName: string) => { + const newFilePath = path.join(targetPath, fileName); + const success = await workbenchStore.createFile(newFilePath, ''); + + if (success) { + toast.success('File created successfully'); + } else { + toast.error('Failed to create file'); + } + + setIsCreatingFile(false); + }; + + const handleCreateFolder = async (folderName: string) => { + const newFolderPath = path.join(targetPath, folderName); + const success = await workbenchStore.createFolder(newFolderPath); + + if (success) { + toast.success('Folder created successfully'); + } else { + toast.error('Failed to create folder'); + } + + setIsCreatingFolder(false); + }; + + const handleDelete = async () => { + try { + if (!confirm(`Are you sure you want to delete ${isFolder ? 'folder' : 'file'}: ${fileName}?`)) { + return; + } + + let success; + + if (isFolder) { + success = await workbenchStore.deleteFolder(fullPath); + } else { + success = await workbenchStore.deleteFile(fullPath); + } + + if (success) { + toast.success(`${isFolder ? 'Folder' : 'File'} deleted successfully`); + } else { + toast.error(`Failed to delete ${isFolder ? 'folder' : 'file'}`); + } + } catch (error) { + toast.error(`Error deleting ${isFolder ? 'folder' : 'file'}`); + logger.error(error); + } + }; + + return ( + <> + + +
+ {children} +
+
+ + + + setIsCreatingFile(true)}> +
+
+ New File +
+ + setIsCreatingFolder(true)}> +
+
+ New Folder +
+ + + + Copy path + Copy relative path + + {/* Add delete option in a new group */} + + +
+
+ Delete {isFolder ? 'Folder' : 'File'} +
+ + + + + + {isCreatingFile && ( + setIsCreatingFile(false)} + /> + )} + {isCreatingFolder && ( + setIsCreatingFolder(false)} + /> + )} + ); } function Folder({ folder, collapsed, selected = false, onCopyPath, onCopyRelativePath, onClick }: FolderProps) { return ( - + { if (!fileModifications?.originalContent) { return { additions: 0, deletions: 0 }; } - // Usar a mesma lógica do DiffView para processar as mudanças const normalizedOriginal = fileModifications.originalContent.replace(/\r\n/g, '\n'); const normalizedCurrent = fileModifications.versions[fileModifications.versions.length - 1]?.content.replace(/\r\n/g, '\n') || ''; @@ -317,7 +556,7 @@ function File({ const showStats = additions > 0 || deletions > 0; return ( - + [0], 'model'>; +export interface StreamingOptions extends Omit[0], 'model'> { + supabaseConnection?: { + isConnected: boolean; + hasSelectedProject: boolean; + credentials?: { + anonKey?: string; + supabaseUrl?: string; + }; + }; +} const logger = createScopedLogger('stream-text'); @@ -97,6 +106,11 @@ export async function streamText(props: { cwd: WORK_DIR, allowedHtmlElements: allowedHTMLElements, modificationTagName: MODIFICATIONS_TAG_NAME, + supabase: { + isConnected: options?.supabaseConnection?.isConnected || false, + hasSelectedProject: options?.supabaseConnection?.hasSelectedProject || false, + credentials: options?.supabaseConnection?.credentials || undefined, + }, }) ?? 
getSystemPrompt(); if (files && contextFiles && contextOptimization) { diff --git a/app/lib/common/prompt-library.ts b/app/lib/common/prompt-library.ts index 7513e811..f4747d71 100644 --- a/app/lib/common/prompt-library.ts +++ b/app/lib/common/prompt-library.ts @@ -5,6 +5,14 @@ export interface PromptOptions { cwd: string; allowedHtmlElements: string[]; modificationTagName: string; + supabase?: { + isConnected: boolean; + hasSelectedProject: boolean; + credentials?: { + anonKey?: string; + supabaseUrl?: string; + }; + }; } export class PromptLibrary { @@ -19,7 +27,7 @@ export class PromptLibrary { default: { label: 'Default Prompt', description: 'This is the battle tested default system Prompt', - get: (options) => getSystemPrompt(options.cwd), + get: (options) => getSystemPrompt(options.cwd, options.supabase), }, optimized: { label: 'Optimized Prompt (experimental)', diff --git a/app/lib/common/prompts/optimized.ts b/app/lib/common/prompts/optimized.ts index 33872088..4de3232b 100644 --- a/app/lib/common/prompts/optimized.ts +++ b/app/lib/common/prompts/optimized.ts @@ -1,7 +1,7 @@ import type { PromptOptions } from '~/lib/common/prompt-library'; export default (options: PromptOptions) => { - const { cwd, allowedHtmlElements } = options; + const { cwd, allowedHtmlElements, supabase } = options; return ` You are Bolt, an expert AI assistant and exceptional senior software developer with vast knowledge across multiple programming languages, frameworks, and best practices. @@ -18,6 +18,203 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w Available shell commands: cat, cp, ls, mkdir, mv, rm, rmdir, touch, hostname, ps, pwd, uptime, env, node, python3, code, jq, curl, head, sort, tail, clear, which, export, chmod, scho, kill, ln, xxd, alias, getconf, loadenv, wasm, xdg-open, command, exit, source + + The following instructions guide how you should handle database operations in projects. 
+ + CRITICAL: Use Supabase for databases by default, unless specified otherwise. + + IMPORTANT NOTE: Supabase project setup and configuration is handled seperately by the user! ${ + supabase + ? !supabase.isConnected + ? 'You are not connected to Supabase. Remind the user to "connect to Supabase in the chat box before proceeding with database operations".' + : !supabase.hasSelectedProject + ? 'Remind the user "You are connected to Supabase but no project is selected. Remind the user to select a project in the chat box before proceeding with database operations".' + : '' + : '' + } + IMPORTANT: Create a .env file if it doesnt exist and include the following variables: + ${ + supabase?.isConnected && + supabase?.hasSelectedProject && + supabase?.credentials?.supabaseUrl && + supabase?.credentials?.anonKey + ? `VITE_SUPABASE_URL=${supabase.credentials.supabaseUrl} + VITE_SUPABASE_ANON_KEY=${supabase.credentials.anonKey}` + : 'SUPABASE_URL=your_supabase_url\nSUPABASE_ANON_KEY=your_supabase_anon_key' + } + NEVER modify any Supabase configuration or \`.env\` files. + + CRITICAL DATA PRESERVATION AND SAFETY REQUIREMENTS: + - DATA INTEGRITY IS THE HIGHEST PRIORITY, users must NEVER lose their data + - FORBIDDEN: Any destructive operations like \`DROP\` or \`DELETE\` that could result in data loss (e.g., when dropping columns, changing column types, renaming tables, etc.) + - FORBIDDEN: Any transaction control statements (e.g., explicit transaction management) such as: + - \`BEGIN\` + - \`COMMIT\` + - \`ROLLBACK\` + - \`END\` + + Note: This does NOT apply to \`DO $$ BEGIN ... END $$\` blocks, which are PL/pgSQL anonymous blocks! + + Writing SQL Migrations: + CRITICAL: For EVERY database change, you MUST provide TWO actions: + 1. Migration File Creation: + + /* SQL migration content */ + + + 2. 
Immediate Query Execution: + + /* Same SQL content as migration */ + + + Example: + + + CREATE TABLE users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL + ); + + + + CREATE TABLE users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL + ); + + + + - IMPORTANT: The SQL content must be identical in both actions to ensure consistency between the migration file and the executed query. + - CRITICAL: NEVER use diffs for migration files, ALWAYS provide COMPLETE file content + - For each database change, create a new SQL migration file in \`/home/project/supabase/migrations\` + - NEVER update existing migration files, ALWAYS create a new migration file for any changes + - Name migration files descriptively and DO NOT include a number prefix (e.g., \`create_users.sql\`, \`add_posts_table.sql\`). + + - DO NOT worry about ordering as the files will be renamed correctly! + + - ALWAYS enable row level security (RLS) for new tables: + + + alter table users enable row level security; + + + - Add appropriate RLS policies for CRUD operations for each table + + - Use default values for columns: + - Set default values for columns where appropriate to ensure data consistency and reduce null handling + - Common default values include: + - Booleans: \`DEFAULT false\` or \`DEFAULT true\` + - Numbers: \`DEFAULT 0\` + - Strings: \`DEFAULT ''\` or meaningful defaults like \`'user'\` + - Dates/Timestamps: \`DEFAULT now()\` or \`DEFAULT CURRENT_TIMESTAMP\` + - Be cautious not to set default values that might mask problems; sometimes it's better to allow an error than to proceed with incorrect data + + - CRITICAL: Each migration file MUST follow these rules: + - ALWAYS Start with a markdown summary block (in a multi-line comment) that: + - Include a short, descriptive title (using a headline) that summarizes the changes (e.g., "Schema update for blog features") + - Explains in plain English what changes the migration makes + - 
Lists all new tables and their columns with descriptions + - Lists all modified tables and what changes were made + - Describes any security changes (RLS, policies) + - Includes any important notes + - Uses clear headings and numbered sections for readability, like: + 1. New Tables + 2. Security + 3. Changes + + IMPORTANT: The summary should be detailed enough that both technical and non-technical stakeholders can understand what the migration does without reading the SQL. + + - Include all necessary operations (e.g., table creation and updates, RLS, policies) + + Here is an example of a migration file: + + + /* + # Create users table + + 1. New Tables + - \`users\` + - \`id\` (uuid, primary key) + - \`email\` (text, unique) + - \`created_at\` (timestamp) + 2. Security + - Enable RLS on \`users\` table + - Add policy for authenticated users to read their own data + */ + + CREATE TABLE IF NOT EXISTS users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL, + created_at timestamptz DEFAULT now() + ); + + ALTER TABLE users ENABLE ROW LEVEL SECURITY; + + CREATE POLICY "Users can read own data" + ON users + FOR SELECT + TO authenticated + USING (auth.uid() = id); + + + - Ensure SQL statements are safe and robust: + - Use \`IF EXISTS\` or \`IF NOT EXISTS\` to prevent errors when creating or altering database objects. 
Here are examples: + + + CREATE TABLE IF NOT EXISTS users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL, + created_at timestamptz DEFAULT now() + ); + + + + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'last_login' + ) THEN + ALTER TABLE users ADD COLUMN last_login timestamptz; + END IF; + END $$; + + + Client Setup: + - Use \`@supabase/supabase-js\` + - Create a singleton client instance + - Use the environment variables from the project's \`.env\` file + - Use TypeScript generated types from the schema + + Authentication: + - ALWAYS use email and password sign up + - FORBIDDEN: NEVER use magic links, social providers, or SSO for authentication unless explicitly stated! + - FORBIDDEN: NEVER create your own authentication system or authentication table, ALWAYS use Supabase's built-in authentication! + - Email confirmation is ALWAYS disabled unless explicitly stated! + + Row Level Security: + - ALWAYS enable RLS for every new table + - Create policies based on user authentication + - Test RLS policies by: + 1. Verifying authenticated users can only access their allowed data + 2. Confirming unauthenticated users cannot access protected data + 3. Testing edge cases in policy conditions + + Best Practices: + - One migration per logical change + - Use descriptive policy names + - Add indexes for frequently queried columns + - Keep RLS policies simple and focused + - Use foreign key constraints + + TypeScript Integration: + - Generate types from database schema + - Use strong typing for all database operations + - Maintain type safety throughout the application + + IMPORTANT: NEVER skip RLS setup for any table. Security is non-negotiable! 
+ + Use 2 spaces for indentation diff --git a/app/lib/common/prompts/prompts.ts b/app/lib/common/prompts/prompts.ts index 1e9134ac..8fcacfd7 100644 --- a/app/lib/common/prompts/prompts.ts +++ b/app/lib/common/prompts/prompts.ts @@ -2,7 +2,14 @@ import { WORK_DIR } from '~/utils/constants'; import { allowedHTMLElements } from '~/utils/markdown'; import { stripIndents } from '~/utils/stripIndent'; -export const getSystemPrompt = (cwd: string = WORK_DIR) => ` +export const getSystemPrompt = ( + cwd: string = WORK_DIR, + supabase?: { + isConnected: boolean; + hasSelectedProject: boolean; + credentials?: { anonKey?: string; supabaseUrl?: string }; + }, +) => ` You are Bolt, an expert AI assistant and exceptional senior software developer with vast knowledge across multiple programming languages, frameworks, and best practices. @@ -59,6 +66,205 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w - curl, head, sort, tail, clear, which, export, chmod, scho, hostname, kill, ln, xxd, alias, false, getconf, true, loadenv, wasm, xdg-open, command, exit, source + + The following instructions guide how you should handle database operations in projects. + + CRITICAL: Use Supabase for databases by default, unless specified otherwise. + + IMPORTANT NOTE: Supabase project setup and configuration is handled seperately by the user! ${ + supabase + ? !supabase.isConnected + ? 'You are not connected to Supabase. Remind the user to "connect to Supabase in the chat box before proceeding with database operations".' + : !supabase.hasSelectedProject + ? 'Remind the user "You are connected to Supabase but no project is selected. Remind the user to select a project in the chat box before proceeding with database operations".' + : '' + : '' + } + IMPORTANT: Create a .env file if it doesnt exist${ + supabase?.isConnected && + supabase?.hasSelectedProject && + supabase?.credentials?.supabaseUrl && + supabase?.credentials?.anonKey + ? 
` and include the following variables: + VITE_SUPABASE_URL=${supabase.credentials.supabaseUrl} + VITE_SUPABASE_ANON_KEY=${supabase.credentials.anonKey}` + : '.' + } + NEVER modify any Supabase configuration or \`.env\` files apart from creating the \`.env\`. + + Do not try to generate types for supabase. + + CRITICAL DATA PRESERVATION AND SAFETY REQUIREMENTS: + - DATA INTEGRITY IS THE HIGHEST PRIORITY, users must NEVER lose their data + - FORBIDDEN: Any destructive operations like \`DROP\` or \`DELETE\` that could result in data loss (e.g., when dropping columns, changing column types, renaming tables, etc.) + - FORBIDDEN: Any transaction control statements (e.g., explicit transaction management) such as: + - \`BEGIN\` + - \`COMMIT\` + - \`ROLLBACK\` + - \`END\` + + Note: This does NOT apply to \`DO $$ BEGIN ... END $$\` blocks, which are PL/pgSQL anonymous blocks! + + Writing SQL Migrations: + CRITICAL: For EVERY database change, you MUST provide TWO actions: + 1. Migration File Creation: + + /* SQL migration content */ + + + 2. Immediate Query Execution: + + /* Same SQL content as migration */ + + + Example: + + + CREATE TABLE users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL + ); + + + + CREATE TABLE users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL + ); + + + + - IMPORTANT: The SQL content must be identical in both actions to ensure consistency between the migration file and the executed query. + - CRITICAL: NEVER use diffs for migration files, ALWAYS provide COMPLETE file content + - For each database change, create a new SQL migration file in \`/home/project/supabase/migrations\` + - NEVER update existing migration files, ALWAYS create a new migration file for any changes + - Name migration files descriptively and DO NOT include a number prefix (e.g., \`create_users.sql\`, \`add_posts_table.sql\`). + + - DO NOT worry about ordering as the files will be renamed correctly! 
+ + - ALWAYS enable row level security (RLS) for new tables: + + + alter table users enable row level security; + + + - Add appropriate RLS policies for CRUD operations for each table + + - Use default values for columns: + - Set default values for columns where appropriate to ensure data consistency and reduce null handling + - Common default values include: + - Booleans: \`DEFAULT false\` or \`DEFAULT true\` + - Numbers: \`DEFAULT 0\` + - Strings: \`DEFAULT ''\` or meaningful defaults like \`'user'\` + - Dates/Timestamps: \`DEFAULT now()\` or \`DEFAULT CURRENT_TIMESTAMP\` + - Be cautious not to set default values that might mask problems; sometimes it's better to allow an error than to proceed with incorrect data + + - CRITICAL: Each migration file MUST follow these rules: + - ALWAYS Start with a markdown summary block (in a multi-line comment) that: + - Include a short, descriptive title (using a headline) that summarizes the changes (e.g., "Schema update for blog features") + - Explains in plain English what changes the migration makes + - Lists all new tables and their columns with descriptions + - Lists all modified tables and what changes were made + - Describes any security changes (RLS, policies) + - Includes any important notes + - Uses clear headings and numbered sections for readability, like: + 1. New Tables + 2. Security + 3. Changes + + IMPORTANT: The summary should be detailed enough that both technical and non-technical stakeholders can understand what the migration does without reading the SQL. + + - Include all necessary operations (e.g., table creation and updates, RLS, policies) + + Here is an example of a migration file: + + + /* + # Create users table + + 1. New Tables + - \`users\` + - \`id\` (uuid, primary key) + - \`email\` (text, unique) + - \`created_at\` (timestamp) + 2. 
Security + - Enable RLS on \`users\` table + - Add policy for authenticated users to read their own data + */ + + CREATE TABLE IF NOT EXISTS users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL, + created_at timestamptz DEFAULT now() + ); + + ALTER TABLE users ENABLE ROW LEVEL SECURITY; + + CREATE POLICY "Users can read own data" + ON users + FOR SELECT + TO authenticated + USING (auth.uid() = id); + + + - Ensure SQL statements are safe and robust: + - Use \`IF EXISTS\` or \`IF NOT EXISTS\` to prevent errors when creating or altering database objects. Here are examples: + + + CREATE TABLE IF NOT EXISTS users ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + email text UNIQUE NOT NULL, + created_at timestamptz DEFAULT now() + ); + + + + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'users' AND column_name = 'last_login' + ) THEN + ALTER TABLE users ADD COLUMN last_login timestamptz; + END IF; + END $$; + + + Client Setup: + - Use \`@supabase/supabase-js\` + - Create a singleton client instance + - Use the environment variables from the project's \`.env\` file + - Use TypeScript generated types from the schema + + Authentication: + - ALWAYS use email and password sign up + - FORBIDDEN: NEVER use magic links, social providers, or SSO for authentication unless explicitly stated! + - FORBIDDEN: NEVER create your own authentication system or authentication table, ALWAYS use Supabase's built-in authentication! + - Email confirmation is ALWAYS disabled unless explicitly stated! + + Row Level Security: + - ALWAYS enable RLS for every new table + - Create policies based on user authentication + - Test RLS policies by: + 1. Verifying authenticated users can only access their allowed data + 2. Confirming unauthenticated users cannot access protected data + 3. 
Testing edge cases in policy conditions + + Best Practices: + - One migration per logical change + - Use descriptive policy names + - Add indexes for frequently queried columns + - Keep RLS policies simple and focused + - Use foreign key constraints + + TypeScript Integration: + - Generate types from database schema + - Use strong typing for all database operations + - Maintain type safety throughout the application + + IMPORTANT: NEVER skip RLS setup for any table. Security is non-negotiable! + + Use 2 spaces for code indentation diff --git a/app/lib/hooks/useSupabaseConnection.ts b/app/lib/hooks/useSupabaseConnection.ts new file mode 100644 index 00000000..8a2f8118 --- /dev/null +++ b/app/lib/hooks/useSupabaseConnection.ts @@ -0,0 +1,147 @@ +import { useEffect, useState } from 'react'; +import { toast } from 'react-toastify'; +import { useStore } from '@nanostores/react'; +import { logStore } from '~/lib/stores/logs'; +import { + supabaseConnection, + isConnecting, + isFetchingStats, + isFetchingApiKeys, + updateSupabaseConnection, + fetchProjectApiKeys, +} from '~/lib/stores/supabase'; + +export function useSupabaseConnection() { + const connection = useStore(supabaseConnection); + const connecting = useStore(isConnecting); + const fetchingStats = useStore(isFetchingStats); + const fetchingApiKeys = useStore(isFetchingApiKeys); + const [isProjectsExpanded, setIsProjectsExpanded] = useState(false); + const [isDropdownOpen, setIsDropdownOpen] = useState(false); + + useEffect(() => { + const savedConnection = localStorage.getItem('supabase_connection'); + const savedCredentials = localStorage.getItem('supabaseCredentials'); + + if (savedConnection) { + const parsed = JSON.parse(savedConnection); + + if (savedCredentials && !parsed.credentials) { + parsed.credentials = JSON.parse(savedCredentials); + } + + updateSupabaseConnection(parsed); + + if (parsed.token && parsed.selectedProjectId && !parsed.credentials) { + fetchProjectApiKeys(parsed.selectedProjectId, 
parsed.token).catch(console.error); + } + } + }, []); + + const handleConnect = async () => { + isConnecting.set(true); + + try { + const cleanToken = connection.token.trim(); + + const response = await fetch('/api/supabase', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + token: cleanToken, + }), + }); + + const data = (await response.json()) as any; + + if (!response.ok) { + throw new Error(data.error || 'Failed to connect'); + } + + updateSupabaseConnection({ + user: data.user, + token: connection.token, + stats: data.stats, + }); + + toast.success('Successfully connected to Supabase'); + + setIsProjectsExpanded(true); + + return true; + } catch (error) { + console.error('Connection error:', error); + logStore.logError('Failed to authenticate with Supabase', { error }); + toast.error(error instanceof Error ? error.message : 'Failed to connect to Supabase'); + updateSupabaseConnection({ user: null, token: '' }); + + return false; + } finally { + isConnecting.set(false); + } + }; + + const handleDisconnect = () => { + updateSupabaseConnection({ user: null, token: '' }); + toast.success('Disconnected from Supabase'); + setIsDropdownOpen(false); + }; + + const selectProject = async (projectId: string) => { + const currentState = supabaseConnection.get(); + let projectData = undefined; + + if (projectId && currentState.stats?.projects) { + projectData = currentState.stats.projects.find((project) => project.id === projectId); + } + + updateSupabaseConnection({ + selectedProjectId: projectId, + project: projectData, + }); + + if (projectId && currentState.token) { + try { + await fetchProjectApiKeys(projectId, currentState.token); + toast.success('Project selected successfully'); + } catch (error) { + console.error('Failed to fetch API keys:', error); + toast.error('Selected project but failed to fetch API keys'); + } + } else { + toast.success('Project selected successfully'); + } + + setIsDropdownOpen(false); + 
}; + + const handleCreateProject = async () => { + window.open('https://app.supabase.com/new/new-project', '_blank'); + }; + + return { + connection, + connecting, + fetchingStats, + fetchingApiKeys, + isProjectsExpanded, + setIsProjectsExpanded, + isDropdownOpen, + setIsDropdownOpen, + handleConnect, + handleDisconnect, + selectProject, + handleCreateProject, + updateToken: (token: string) => updateSupabaseConnection({ ...connection, token }), + isConnected: !!(connection.user && connection.token), + fetchProjectApiKeys: (projectId: string) => { + if (connection.token) { + return fetchProjectApiKeys(projectId, connection.token); + } + + return Promise.reject(new Error('No token available')); + }, + }; +} diff --git a/app/lib/runtime/action-runner.ts b/app/lib/runtime/action-runner.ts index 013a9a77..a04a3de4 100644 --- a/app/lib/runtime/action-runner.ts +++ b/app/lib/runtime/action-runner.ts @@ -1,7 +1,7 @@ import type { WebContainer } from '@webcontainer/api'; import { path as nodePath } from '~/utils/path'; import { atom, map, type MapStore } from 'nanostores'; -import type { ActionAlert, BoltAction, FileHistory } from '~/types/actions'; +import type { ActionAlert, BoltAction, FileHistory, SupabaseAction, SupabaseAlert } from '~/types/actions'; import { createScopedLogger } from '~/utils/logger'; import { unreachable } from '~/utils/unreachable'; import type { ActionCallbackData } from './message-parser'; @@ -70,16 +70,19 @@ export class ActionRunner { runnerId = atom(`${Date.now()}`); actions: ActionsMap = map({}); onAlert?: (alert: ActionAlert) => void; + onSupabaseAlert?: (alert: SupabaseAlert) => void; buildOutput?: { path: string; exitCode: number; output: string }; constructor( webcontainerPromise: Promise, getShellTerminal: () => BoltShell, onAlert?: (alert: ActionAlert) => void, + onSupabaseAlert?: (alert: SupabaseAlert) => void, ) { this.#webcontainer = webcontainerPromise; this.#shellTerminal = getShellTerminal; this.onAlert = onAlert; + 
this.onSupabaseAlert = onSupabaseAlert; } addAction(data: ActionCallbackData) { @@ -157,6 +160,21 @@ export class ActionRunner { await this.#runFileAction(action); break; } + case 'supabase': { + try { + await this.handleSupabaseAction(action as SupabaseAction); + } catch (error: any) { + // Update action status + this.#updateAction(actionId, { + status: 'failed', + error: error instanceof Error ? error.message : 'Supabase action failed', + }); + + // Return early without re-throwing + return; + } + break; + } case 'build': { const buildOutput = await this.#runBuildAction(action); @@ -377,4 +395,50 @@ export class ActionRunner { output, }; } + async handleSupabaseAction(action: SupabaseAction) { + const { operation, content, filePath } = action; + logger.debug('[Supabase Action]:', { operation, filePath, content }); + + switch (operation) { + case 'migration': + if (!filePath) { + throw new Error('Migration requires a filePath'); + } + + // Show alert for migration action + this.onSupabaseAlert?.({ + type: 'info', + title: 'Supabase Migration', + description: `Create migration file: ${filePath}`, + content, + source: 'supabase', + }); + + // Only create the migration file + await this.#runFileAction({ + type: 'file', + filePath, + content, + changeSource: 'supabase', + } as any); + return { success: true }; + + case 'query': { + // Always show the alert and let the SupabaseAlert component handle connection state + this.onSupabaseAlert?.({ + type: 'info', + title: 'Supabase Query', + description: 'Execute database query', + content, + source: 'supabase', + }); + + // The actual execution will be triggered from SupabaseChatAlert + return { pending: true }; + } + + default: + throw new Error(`Unknown operation: ${operation}`); + } + } } diff --git a/app/lib/runtime/message-parser.ts b/app/lib/runtime/message-parser.ts index 3b41b6d6..17375480 100644 --- a/app/lib/runtime/message-parser.ts +++ b/app/lib/runtime/message-parser.ts @@ -1,4 +1,4 @@ -import type { 
ActionType, BoltAction, BoltActionData, FileAction, ShellAction } from '~/types/actions'; +import type { ActionType, BoltAction, BoltActionData, FileAction, ShellAction, SupabaseAction } from '~/types/actions'; import type { BoltArtifactData } from '~/types/artifact'; import { createScopedLogger } from '~/utils/logger'; import { unreachable } from '~/utils/unreachable'; @@ -293,7 +293,27 @@ export class StreamingMessageParser { content: '', }; - if (actionType === 'file') { + if (actionType === 'supabase') { + const operation = this.#extractAttribute(actionTag, 'operation'); + + if (!operation || !['migration', 'query'].includes(operation)) { + logger.warn(`Invalid or missing operation for Supabase action: ${operation}`); + throw new Error(`Invalid Supabase operation: ${operation}`); + } + + (actionAttributes as SupabaseAction).operation = operation as 'migration' | 'query'; + + if (operation === 'migration') { + const filePath = this.#extractAttribute(actionTag, 'filePath'); + + if (!filePath) { + logger.warn('Migration requires a filePath'); + throw new Error('Migration requires a filePath'); + } + + (actionAttributes as SupabaseAction).filePath = filePath; + } + } else if (actionType === 'file') { const filePath = this.#extractAttribute(actionTag, 'filePath') as string; if (!filePath) { diff --git a/app/lib/stores/files.ts b/app/lib/stores/files.ts index 60904a65..2ec73a03 100644 --- a/app/lib/stores/files.ts +++ b/app/lib/stores/files.ts @@ -42,6 +42,11 @@ export class FilesStore { */ #modifiedFiles: Map = import.meta.hot?.data.modifiedFiles ?? new Map(); + /** + * Keeps track of deleted files and folders to prevent them from reappearing on reload + */ + #deletedPaths: Set = import.meta.hot?.data.deletedPaths ?? new Set(); + /** * Map of files that matches the state of WebContainer. 
*/ @@ -54,9 +59,28 @@ export class FilesStore { constructor(webcontainerPromise: Promise) { this.#webcontainer = webcontainerPromise; + // Load deleted paths from localStorage if available + try { + if (typeof localStorage !== 'undefined') { + const deletedPathsJson = localStorage.getItem('bolt-deleted-paths'); + + if (deletedPathsJson) { + const deletedPaths = JSON.parse(deletedPathsJson); + + if (Array.isArray(deletedPaths)) { + deletedPaths.forEach((path) => this.#deletedPaths.add(path)); + } + } + } + } catch (error) { + logger.error('Failed to load deleted paths from localStorage', error); + } + if (import.meta.hot) { + // Persist our state across hot reloads import.meta.hot.data.files = this.files; import.meta.hot.data.modifiedFiles = this.#modifiedFiles; + import.meta.hot.data.deletedPaths = this.#deletedPaths; } this.#init(); @@ -139,18 +163,74 @@ export class FilesStore { async #init() { const webcontainer = await this.#webcontainer; + // Clean up any files that were previously deleted + this.#cleanupDeletedFiles(); + webcontainer.internal.watchPaths( { include: [`${WORK_DIR}/**`], exclude: ['**/node_modules', '.git'], includeContent: true }, bufferWatchEvents(100, this.#processEventBuffer.bind(this)), ); } + /** + * Removes any deleted files/folders from the store + */ + #cleanupDeletedFiles() { + if (this.#deletedPaths.size === 0) { + return; + } + + const currentFiles = this.files.get(); + + for (const deletedPath of this.#deletedPaths) { + if (currentFiles[deletedPath]) { + this.files.setKey(deletedPath, undefined); + + if (currentFiles[deletedPath]?.type === 'file') { + this.#size--; + } + } + + for (const [path, dirent] of Object.entries(currentFiles)) { + if (path.startsWith(deletedPath + '/')) { + this.files.setKey(path, undefined); + + if (dirent?.type === 'file') { + this.#size--; + } + + if (dirent?.type === 'file' && this.#modifiedFiles.has(path)) { + this.#modifiedFiles.delete(path); + } + } + } + } + } + #processEventBuffer(events: 
Array<[events: PathWatcherEvent[]]>) { const watchEvents = events.flat(2); - for (const { type, path, buffer } of watchEvents) { + for (const { type, path: eventPath, buffer } of watchEvents) { // remove any trailing slashes - const sanitizedPath = path.replace(/\/+$/g, ''); + const sanitizedPath = eventPath.replace(/\/+$/g, ''); + + // Skip processing if this file/folder was explicitly deleted + if (this.#deletedPaths.has(sanitizedPath)) { + continue; + } + + let isInDeletedFolder = false; + + for (const deletedPath of this.#deletedPaths) { + if (sanitizedPath.startsWith(deletedPath + '/')) { + isInDeletedFolder = true; + break; + } + } + + if (isInDeletedFolder) { + continue; + } switch (type) { case 'add_dir': { @@ -176,21 +256,30 @@ export class FilesStore { } let content = ''; - - /** - * @note This check is purely for the editor. The way we detect this is not - * bullet-proof and it's a best guess so there might be false-positives. - * The reason we do this is because we don't want to display binary files - * in the editor nor allow to edit them. 
- */ const isBinary = isBinaryFile(buffer); - if (!isBinary) { + if (isBinary && buffer) { + // For binary files, we need to preserve the content as base64 + content = Buffer.from(buffer).toString('base64'); + } else if (!isBinary) { content = this.#decodeFileContent(buffer); + + /* + * If the content is a single space and this is from our empty file workaround, + * convert it back to an actual empty string + */ + if (content === ' ' && type === 'add_file') { + content = ''; + } + } + + const existingFile = this.files.get()[sanitizedPath]; + + if (existingFile?.type === 'file' && existingFile.isBinary && existingFile.content && !content) { + content = existingFile.content; } this.files.setKey(sanitizedPath, { type: 'file', content, isBinary }); - break; } case 'remove_file': { @@ -218,6 +307,160 @@ export class FilesStore { return ''; } } + + async createFile(filePath: string, content: string | Uint8Array = '') { + const webcontainer = await this.#webcontainer; + + try { + const relativePath = path.relative(webcontainer.workdir, filePath); + + if (!relativePath) { + throw new Error(`EINVAL: invalid file path, create '${relativePath}'`); + } + + const dirPath = path.dirname(relativePath); + + if (dirPath !== '.') { + await webcontainer.fs.mkdir(dirPath, { recursive: true }); + } + + const isBinary = content instanceof Uint8Array; + + if (isBinary) { + await webcontainer.fs.writeFile(relativePath, Buffer.from(content)); + + const base64Content = Buffer.from(content).toString('base64'); + this.files.setKey(filePath, { type: 'file', content: base64Content, isBinary: true }); + + this.#modifiedFiles.set(filePath, base64Content); + } else { + const contentToWrite = (content as string).length === 0 ? 
' ' : content; + await webcontainer.fs.writeFile(relativePath, contentToWrite); + + this.files.setKey(filePath, { type: 'file', content: content as string, isBinary: false }); + + this.#modifiedFiles.set(filePath, content as string); + } + + logger.info(`File created: ${filePath}`); + + return true; + } catch (error) { + logger.error('Failed to create file\n\n', error); + throw error; + } + } + + async createFolder(folderPath: string) { + const webcontainer = await this.#webcontainer; + + try { + const relativePath = path.relative(webcontainer.workdir, folderPath); + + if (!relativePath) { + throw new Error(`EINVAL: invalid folder path, create '${relativePath}'`); + } + + await webcontainer.fs.mkdir(relativePath, { recursive: true }); + + this.files.setKey(folderPath, { type: 'folder' }); + + logger.info(`Folder created: ${folderPath}`); + + return true; + } catch (error) { + logger.error('Failed to create folder\n\n', error); + throw error; + } + } + + async deleteFile(filePath: string) { + const webcontainer = await this.#webcontainer; + + try { + const relativePath = path.relative(webcontainer.workdir, filePath); + + if (!relativePath) { + throw new Error(`EINVAL: invalid file path, delete '${relativePath}'`); + } + + await webcontainer.fs.rm(relativePath); + + this.#deletedPaths.add(filePath); + + this.files.setKey(filePath, undefined); + this.#size--; + + if (this.#modifiedFiles.has(filePath)) { + this.#modifiedFiles.delete(filePath); + } + + this.#persistDeletedPaths(); + + logger.info(`File deleted: ${filePath}`); + + return true; + } catch (error) { + logger.error('Failed to delete file\n\n', error); + throw error; + } + } + + async deleteFolder(folderPath: string) { + const webcontainer = await this.#webcontainer; + + try { + const relativePath = path.relative(webcontainer.workdir, folderPath); + + if (!relativePath) { + throw new Error(`EINVAL: invalid folder path, delete '${relativePath}'`); + } + + await webcontainer.fs.rm(relativePath, { recursive: 
true }); + + this.#deletedPaths.add(folderPath); + + this.files.setKey(folderPath, undefined); + + const allFiles = this.files.get(); + + for (const [path, dirent] of Object.entries(allFiles)) { + if (path.startsWith(folderPath + '/')) { + this.files.setKey(path, undefined); + + this.#deletedPaths.add(path); + + if (dirent?.type === 'file') { + this.#size--; + } + + if (dirent?.type === 'file' && this.#modifiedFiles.has(path)) { + this.#modifiedFiles.delete(path); + } + } + } + + this.#persistDeletedPaths(); + + logger.info(`Folder deleted: ${folderPath}`); + + return true; + } catch (error) { + logger.error('Failed to delete folder\n\n', error); + throw error; + } + } + + // method to persist deleted paths to localStorage + #persistDeletedPaths() { + try { + if (typeof localStorage !== 'undefined') { + localStorage.setItem('bolt-deleted-paths', JSON.stringify([...this.#deletedPaths])); + } + } catch (error) { + logger.error('Failed to persist deleted paths to localStorage', error); + } + } } function isBinaryFile(buffer: Uint8Array | undefined) { diff --git a/app/lib/stores/supabase.ts b/app/lib/stores/supabase.ts new file mode 100644 index 00000000..72540914 --- /dev/null +++ b/app/lib/stores/supabase.ts @@ -0,0 +1,191 @@ +import { atom } from 'nanostores'; +import type { SupabaseUser, SupabaseStats, SupabaseApiKey, SupabaseCredentials } from '~/types/supabase'; + +export interface SupabaseProject { + id: string; + name: string; + region: string; + organization_id: string; + status: string; + database?: { + host: string; + version: string; + postgres_engine: string; + release_channel: string; + }; + created_at: string; +} + +export interface SupabaseConnectionState { + user: SupabaseUser | null; + token: string; + stats?: SupabaseStats; + selectedProjectId?: string; + isConnected?: boolean; + project?: SupabaseProject; + credentials?: SupabaseCredentials; +} + +const savedConnection = typeof localStorage !== 'undefined' ? 
localStorage.getItem('supabase_connection') : null; +const savedCredentials = typeof localStorage !== 'undefined' ? localStorage.getItem('supabaseCredentials') : null; + +const initialState: SupabaseConnectionState = savedConnection + ? JSON.parse(savedConnection) + : { + user: null, + token: '', + stats: undefined, + selectedProjectId: undefined, + isConnected: false, + project: undefined, + }; + +if (savedCredentials && !initialState.credentials) { + try { + initialState.credentials = JSON.parse(savedCredentials); + } catch (e) { + console.error('Failed to parse saved credentials:', e); + } +} + +export const supabaseConnection = atom(initialState); + +if (initialState.token && !initialState.stats) { + fetchSupabaseStats(initialState.token).catch(console.error); +} + +export const isConnecting = atom(false); +export const isFetchingStats = atom(false); +export const isFetchingApiKeys = atom(false); + +export function updateSupabaseConnection(connection: Partial) { + const currentState = supabaseConnection.get(); + + if (connection.user !== undefined || connection.token !== undefined) { + const newUser = connection.user !== undefined ? connection.user : currentState.user; + const newToken = connection.token !== undefined ? 
connection.token : currentState.token; + connection.isConnected = !!(newUser && newToken); + } + + if (connection.selectedProjectId !== undefined) { + if (connection.selectedProjectId && currentState.stats?.projects) { + const selectedProject = currentState.stats.projects.find( + (project) => project.id === connection.selectedProjectId, + ); + + if (selectedProject) { + connection.project = selectedProject; + } else { + connection.project = { + id: connection.selectedProjectId, + name: `Project ${connection.selectedProjectId.substring(0, 8)}...`, + region: 'unknown', + organization_id: '', + status: 'active', + created_at: new Date().toISOString(), + }; + } + } else if (connection.selectedProjectId === '') { + connection.project = undefined; + connection.credentials = undefined; + } + } + + const newState = { ...currentState, ...connection }; + supabaseConnection.set(newState); + + /* + * Always save the connection state to localStorage to persist across chats + */ + if (connection.user || connection.token || connection.selectedProjectId !== undefined || connection.credentials) { + localStorage.setItem('supabase_connection', JSON.stringify(newState)); + + if (newState.credentials) { + localStorage.setItem('supabaseCredentials', JSON.stringify(newState.credentials)); + } else { + localStorage.removeItem('supabaseCredentials'); + } + } else { + localStorage.removeItem('supabase_connection'); + localStorage.removeItem('supabaseCredentials'); + } +} + +export async function fetchSupabaseStats(token: string) { + isFetchingStats.set(true); + + try { + // Use the internal API route instead of direct Supabase API call + const response = await fetch('/api/supabase', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + token, + }), + }); + + if (!response.ok) { + throw new Error('Failed to fetch projects'); + } + + const data = (await response.json()) as any; + + updateSupabaseConnection({ + user: data.user, + stats: 
data.stats, + }); + } catch (error) { + console.error('Failed to fetch Supabase stats:', error); + throw error; + } finally { + isFetchingStats.set(false); + } +} + +export async function fetchProjectApiKeys(projectId: string, token: string) { + isFetchingApiKeys.set(true); + + try { + const response = await fetch('/api/supabase/variables', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + projectId, + token, + }), + }); + + if (!response.ok) { + throw new Error('Failed to fetch API keys'); + } + + const data = (await response.json()) as any; + const apiKeys = data.apiKeys; + + const anonKey = apiKeys.find((key: SupabaseApiKey) => key.name === 'anon' || key.name === 'public'); + + if (anonKey) { + const supabaseUrl = `https://${projectId}.supabase.co`; + + updateSupabaseConnection({ + credentials: { + anonKey: anonKey.api_key, + supabaseUrl, + }, + }); + + return { anonKey: anonKey.api_key, supabaseUrl }; + } + + return null; + } catch (error) { + console.error('Failed to fetch project API keys:', error); + throw error; + } finally { + isFetchingApiKeys.set(false); + } +} diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts index 4914ebc5..051d45aa 100644 --- a/app/lib/stores/workbench.ts +++ b/app/lib/stores/workbench.ts @@ -17,7 +17,7 @@ import { extractRelativePath } from '~/utils/diff'; import { description } from '~/lib/persistence'; import Cookies from 'js-cookie'; import { createSampler } from '~/utils/sampler'; -import type { ActionAlert } from '~/types/actions'; +import type { ActionAlert, SupabaseAlert } from '~/types/actions'; const { saveAs } = fileSaver; @@ -50,6 +50,8 @@ export class WorkbenchStore { unsavedFiles: WritableAtom> = import.meta.hot?.data.unsavedFiles ?? atom(new Set()); actionAlert: WritableAtom = import.meta.hot?.data.unsavedFiles ?? atom(undefined); + supabaseAlert: WritableAtom = + import.meta.hot?.data.unsavedFiles ?? 
atom(undefined); modifiedFiles = new Set(); artifactIdList: string[] = []; #globalExecutionQueue = Promise.resolve(); @@ -60,6 +62,17 @@ export class WorkbenchStore { import.meta.hot.data.showWorkbench = this.showWorkbench; import.meta.hot.data.currentView = this.currentView; import.meta.hot.data.actionAlert = this.actionAlert; + import.meta.hot.data.supabaseAlert = this.supabaseAlert; + + // Ensure binary files are properly preserved across hot reloads + const filesMap = this.files.get(); + + for (const [path, dirent] of Object.entries(filesMap)) { + if (dirent?.type === 'file' && dirent.isBinary && dirent.content) { + // Make sure binary content is preserved + this.files.setKey(path, { ...dirent }); + } + } } } @@ -104,6 +117,14 @@ export class WorkbenchStore { this.actionAlert.set(undefined); } + get SupabaseAlert() { + return this.supabaseAlert; + } + + clearSupabaseAlert() { + this.supabaseAlert.set(undefined); + } + toggleTerminal(value?: boolean) { this.#terminalStore.toggleTerminal(value); } @@ -238,6 +259,7 @@ export class WorkbenchStore { getFileModifcations() { return this.#filesStore.getFileModifications(); } + getModifiedFiles() { return this.#filesStore.getModifiedFiles(); } @@ -246,6 +268,120 @@ export class WorkbenchStore { this.#filesStore.resetFileModifications(); } + async createFile(filePath: string, content: string | Uint8Array = '') { + try { + const success = await this.#filesStore.createFile(filePath, content); + + if (success) { + this.setSelectedFile(filePath); + + /* + * For empty files, we need to ensure they're not marked as unsaved + * Only check for empty string, not empty Uint8Array + */ + if (typeof content === 'string' && content === '') { + const newUnsavedFiles = new Set(this.unsavedFiles.get()); + newUnsavedFiles.delete(filePath); + this.unsavedFiles.set(newUnsavedFiles); + } + } + + return success; + } catch (error) { + console.error('Failed to create file:', error); + throw error; + } + } + + async createFolder(folderPath: 
string) { + try { + return await this.#filesStore.createFolder(folderPath); + } catch (error) { + console.error('Failed to create folder:', error); + throw error; + } + } + + async deleteFile(filePath: string) { + try { + const currentDocument = this.currentDocument.get(); + const isCurrentFile = currentDocument?.filePath === filePath; + + const success = await this.#filesStore.deleteFile(filePath); + + if (success) { + const newUnsavedFiles = new Set(this.unsavedFiles.get()); + + if (newUnsavedFiles.has(filePath)) { + newUnsavedFiles.delete(filePath); + this.unsavedFiles.set(newUnsavedFiles); + } + + if (isCurrentFile) { + const files = this.files.get(); + let nextFile: string | undefined = undefined; + + for (const [path, dirent] of Object.entries(files)) { + if (dirent?.type === 'file') { + nextFile = path; + break; + } + } + + this.setSelectedFile(nextFile); + } + } + + return success; + } catch (error) { + console.error('Failed to delete file:', error); + throw error; + } + } + + async deleteFolder(folderPath: string) { + try { + const currentDocument = this.currentDocument.get(); + const isInCurrentFolder = currentDocument?.filePath?.startsWith(folderPath + '/'); + + const success = await this.#filesStore.deleteFolder(folderPath); + + if (success) { + const unsavedFiles = this.unsavedFiles.get(); + const newUnsavedFiles = new Set(); + + for (const file of unsavedFiles) { + if (!file.startsWith(folderPath + '/')) { + newUnsavedFiles.add(file); + } + } + + if (newUnsavedFiles.size !== unsavedFiles.size) { + this.unsavedFiles.set(newUnsavedFiles); + } + + if (isInCurrentFolder) { + const files = this.files.get(); + let nextFile: string | undefined = undefined; + + for (const [path, dirent] of Object.entries(files)) { + if (dirent?.type === 'file') { + nextFile = path; + break; + } + } + + this.setSelectedFile(nextFile); + } + } + + return success; + } catch (error) { + console.error('Failed to delete folder:', error); + throw error; + } + } + abortAllActions() { 
// TODO: what do we wanna do and how do we wanna recover from this? } @@ -280,6 +416,13 @@ export class WorkbenchStore { this.actionAlert.set(alert); }, + (alert) => { + if (this.#reloadedMessages.has(messageId)) { + return; + } + + this.supabaseAlert.set(alert); + }, ), }); } diff --git a/app/routes/api.chat.ts b/app/routes/api.chat.ts index bbecdae5..5917dfc4 100644 --- a/app/routes/api.chat.ts +++ b/app/routes/api.chat.ts @@ -37,11 +37,19 @@ function parseCookies(cookieHeader: string): Record { } async function chatAction({ context, request }: ActionFunctionArgs) { - const { messages, files, promptId, contextOptimization } = await request.json<{ + const { messages, files, promptId, contextOptimization, supabase } = await request.json<{ messages: Messages; files: any; promptId?: string; contextOptimization: boolean; + supabase?: { + isConnected: boolean; + hasSelectedProject: boolean; + credentials?: { + anonKey?: string; + supabaseUrl?: string; + }; + }; }>(); const cookieHeader = request.headers.get('Cookie'); @@ -179,8 +187,8 @@ async function chatAction({ context, request }: ActionFunctionArgs) { // logger.debug('Code Files Selected'); } - // Stream the text const options: StreamingOptions = { + supabaseConnection: supabase, toolChoice: 'none', onFinish: async ({ text: content, finishReason, usage }) => { logger.debug('usage', JSON.stringify(usage)); diff --git a/app/routes/api.supabase.query.ts b/app/routes/api.supabase.query.ts new file mode 100644 index 00000000..0b810754 --- /dev/null +++ b/app/routes/api.supabase.query.ts @@ -0,0 +1,92 @@ +import { type ActionFunctionArgs } from '@remix-run/cloudflare'; +import { createScopedLogger } from '~/utils/logger'; + +const logger = createScopedLogger('api.supabase.query'); + +export async function action({ request }: ActionFunctionArgs) { + if (request.method !== 'POST') { + return new Response('Method not allowed', { status: 405 }); + } + + const authHeader = request.headers.get('Authorization'); + + if 
(!authHeader) { + return new Response('No authorization token provided', { status: 401 }); + } + + try { + const { projectId, query } = (await request.json()) as any; + logger.debug('Executing query:', { projectId, query }); + + const response = await fetch(`https://api.supabase.com/v1/projects/${projectId}/database/query`, { + method: 'POST', + headers: { + Authorization: authHeader, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ query }), + }); + + if (!response.ok) { + const errorText = await response.text(); + let errorData; + + try { + errorData = JSON.parse(errorText); + } catch (e) { + console.log(e); + errorData = { message: errorText }; + } + + logger.error( + 'Supabase API error:', + JSON.stringify({ + status: response.status, + statusText: response.statusText, + error: errorData, + }), + ); + + return new Response( + JSON.stringify({ + error: { + status: response.status, + statusText: response.statusText, + message: errorData.message || errorData.error || errorText, + details: errorData, + }, + }), + { + status: response.status, + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + } + + const result = await response.json(); + + return new Response(JSON.stringify(result), { + headers: { + 'Content-Type': 'application/json', + }, + }); + } catch (error) { + logger.error('Query execution error:', error); + return new Response( + JSON.stringify({ + error: { + message: error instanceof Error ? error.message : 'Query execution failed', + stack: error instanceof Error ? 
error.stack : undefined, + }), + { + status: 500, + headers: { + 'Content-Type': 'application/json', + }, + }, + ); + } +} diff --git a/app/routes/api.supabase.ts b/app/routes/api.supabase.ts new file mode 100644 index 00000000..f21da60f --- /dev/null +++ b/app/routes/api.supabase.ts @@ -0,0 +1,57 @@ +import { json } from '@remix-run/cloudflare'; +import type { ActionFunction } from '@remix-run/cloudflare'; +import type { SupabaseProject } from '~/types/supabase'; + +export const action: ActionFunction = async ({ request }) => { + if (request.method !== 'POST') { + return json({ error: 'Method not allowed' }, { status: 405 }); + } + + try { + const { token } = (await request.json()) as any; + + const projectsResponse = await fetch('https://api.supabase.com/v1/projects', { + headers: { + Authorization: `Bearer ${token}`, + 'Content-Type': 'application/json', + }, + }); + + if (!projectsResponse.ok) { + const errorText = await projectsResponse.text(); + console.error('Projects fetch failed:', errorText); + + return json({ error: 'Failed to fetch projects' }, { status: 401 }); + } + + const projects = (await projectsResponse.json()) as SupabaseProject[]; + + const uniqueProjectsMap = new Map(); + + for (const project of projects) { + if (!uniqueProjectsMap.has(project.id)) { + uniqueProjectsMap.set(project.id, project); + } + } + + const uniqueProjects = Array.from(uniqueProjectsMap.values()); + + uniqueProjects.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()); + + return json({ + user: { email: 'Connected', role: 'Admin' }, + stats: { + projects: uniqueProjects, + totalProjects: uniqueProjects.length, + }, + }); + } catch (error) { + console.error('Supabase API error:', error); + return json( + { + error: error instanceof Error ?
error.message : 'Authentication failed', + }, + { status: 401 }, + ); + } +}; diff --git a/app/routes/api.supabase.variables.ts b/app/routes/api.supabase.variables.ts new file mode 100644 index 00000000..fd2d028f --- /dev/null +++ b/app/routes/api.supabase.variables.ts @@ -0,0 +1,33 @@ +import { json } from '@remix-run/cloudflare'; +import type { ActionFunctionArgs } from '@remix-run/cloudflare'; + +export async function action({ request }: ActionFunctionArgs) { + try { + // Add proper type assertion for the request body + const body = (await request.json()) as { projectId?: string; token?: string }; + const { projectId, token } = body; + + if (!projectId || !token) { + return json({ error: 'Project ID and token are required' }, { status: 400 }); + } + + const response = await fetch(`https://api.supabase.com/v1/projects/${projectId}/api-keys`, { + method: 'GET', + headers: { + Authorization: `Bearer ${token}`, + 'Content-Type': 'application/json', + }, + }); + + if (!response.ok) { + return json({ error: `Failed to fetch API keys: ${response.statusText}` }, { status: response.status }); + } + + const apiKeys = await response.json(); + + return json({ apiKeys }); + } catch (error) { + console.error('Error fetching project API keys:', error); + return json({ error: error instanceof Error ?
error.message : 'Unknown error occurred' }, { status: 500 }); + } +} diff --git a/app/types/actions.ts b/app/types/actions.ts index 623c4979..63b84269 100644 --- a/app/types/actions.ts +++ b/app/types/actions.ts @@ -1,6 +1,6 @@ import type { Change } from 'diff'; -export type ActionType = 'file' | 'shell'; +export type ActionType = 'file' | 'shell' | 'supabase'; export interface BaseAction { content: string; @@ -23,7 +23,14 @@ export interface BuildAction extends BaseAction { type: 'build'; } -export type BoltAction = FileAction | ShellAction | StartAction | BuildAction; +export interface SupabaseAction extends BaseAction { + type: 'supabase'; + operation: 'migration' | 'query'; + filePath?: string; + projectId?: string; +} + +export type BoltAction = FileAction | ShellAction | StartAction | BuildAction | SupabaseAction; export type BoltActionData = BoltAction | BaseAction; @@ -35,6 +42,14 @@ export interface ActionAlert { source?: 'terminal' | 'preview'; // Add source to differentiate between terminal and preview errors } +export interface SupabaseAlert { + type: string; + title: string; + description: string; + content: string; + source?: 'supabase'; +} + export interface FileHistory { originalContent: string; lastModified: number; diff --git a/app/types/supabase.ts b/app/types/supabase.ts new file mode 100644 index 00000000..f99bbaf2 --- /dev/null +++ b/app/types/supabase.ts @@ -0,0 +1,31 @@ +export interface SupabaseUser { + id: string; + email: string; + role: string; + created_at: string; + last_sign_in_at: string; +} + +export interface SupabaseProject { + id: string; + name: string; + organization_id: string; + region: string; + created_at: string; + status: string; +} + +export interface SupabaseStats { + projects: SupabaseProject[]; + totalProjects: number; +} + +export interface SupabaseApiKey { + name: string; + api_key: string; +} + +export interface SupabaseCredentials { + anonKey?: string; + supabaseUrl?: string; +}