fix: settings bugfix for "error building my application" issue #1414 (#1436)

* Fix: error building my application #1414

* fix for vite

* Update vite.config.ts

* Update root.tsx

* Fix root.tsx and the DebugTab

* LM Studio fix and API key fix

* Update api.enhancer for prompt enhancement

* bugfixes

* Revert api.enhancer.ts back to original code

* Update api.enhancer.ts

* Update api.git-proxy.$.ts

* Update api.git-proxy.$.ts

* Update api.enhancer.ts
Stijnus 2025-03-08 20:37:56 +01:00 committed by GitHub
parent 7ff48e1d45
commit 50dd74de07
18 changed files with 233 additions and 771 deletions

View File

@@ -1,6 +1,19 @@
import { motion } from 'framer-motion';
import { GithubConnection } from './GithubConnection';
import { NetlifyConnection } from './NetlifyConnection';
import React, { Suspense } from 'react';
// Use React.lazy for dynamic imports
const GithubConnection = React.lazy(() => import('./GithubConnection'));
const NetlifyConnection = React.lazy(() => import('./NetlifyConnection'));
// Loading fallback component
const LoadingFallback = () => (
<div className="p-4 bg-white dark:bg-[#0A0A0A] rounded-lg border border-[#E5E5E5] dark:border-[#1A1A1A]">
<div className="flex items-center gap-2 text-bolt-elements-textSecondary">
<div className="i-ph:spinner-gap w-5 h-5 animate-spin" />
<span>Loading connection...</span>
</div>
</div>
);
export default function ConnectionsTab() {
return (
@@ -20,8 +33,12 @@ export default function ConnectionsTab() {
</p>
<div className="grid grid-cols-1 gap-4">
<GithubConnection />
<NetlifyConnection />
<Suspense fallback={<LoadingFallback />}>
<GithubConnection />
</Suspense>
<Suspense fallback={<LoadingFallback />}>
<NetlifyConnection />
</Suspense>
</div>
</div>
);
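For context: React.lazy only accepts modules that expose a default export, which is why GithubConnection and NetlifyConnection switch to default exports in the next two files. A minimal sketch of the lazy-loading pattern used here (HeavyPanel is a hypothetical component):

import React, { Suspense } from 'react';

// The imported module must use `export default`.
const HeavyPanel = React.lazy(() => import('./HeavyPanel'));

export function Example() {
  return (
    <Suspense fallback={<div>Loading...</div>}>
      <HeavyPanel />
    </Suspense>
  );
}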

View File

@@ -66,7 +66,7 @@ interface GitHubConnection {
stats?: GitHubStats;
}
export function GithubConnection() {
export default function GithubConnection() {
const [connection, setConnection] = useState<GitHubConnection>({
user: null,
token: '',

View File

@@ -13,7 +13,7 @@ import {
} from '~/lib/stores/netlify';
import type { NetlifyUser } from '~/types/netlify';
export function NetlifyConnection() {
export default function NetlifyConnection() {
const connection = useStore(netlifyConnection);
const connecting = useStore(isConnecting);
const fetchingStats = useStore(isFetchingStats);

View File

@@ -41,7 +41,7 @@ export function useShortcuts(): void {
}
// Debug logging in development only
if (process.env.NODE_ENV === 'development') {
if (import.meta.env.DEV) {
console.log('Key pressed:', {
key: event.key,
code: event.code,
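For context: Vite replaces import.meta.env.DEV statically at build time, so it is reliable in client code, whereas process.env.NODE_ENV only exists in the client bundle if it is injected via `define` (which the vite.config.ts change later in this diff also does). A minimal, illustrative sketch:

// `import.meta.env.DEV` is true under `vite dev` and false in production builds.
if (import.meta.env.DEV) {
  console.debug('development-only diagnostics enabled');
}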

View File

@@ -75,7 +75,7 @@ export default class LMStudioProvider extends BaseProvider {
throw new Error('No baseUrl found for LMStudio provider');
}
const isDocker = process.env.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
if (typeof window === 'undefined') {
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;

View File

@@ -27,8 +27,6 @@ export interface OllamaApiResponse {
models: OllamaModel[];
}
export const DEFAULT_NUM_CTX = process?.env?.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
export default class OllamaProvider extends BaseProvider {
name = 'Ollama';
getApiKeyLink = 'https://ollama.com/download';
@@ -41,6 +39,26 @@ export default class OllamaProvider extends BaseProvider {
staticModels: ModelInfo[] = [];
private _convertEnvToRecord(env?: Env): Record<string, string> {
if (!env) {
return {};
}
// Convert Env to a plain object with string values
return Object.entries(env).reduce(
(acc, [key, value]) => {
acc[key] = String(value);
return acc;
},
{} as Record<string, string>,
);
}
getDefaultNumCtx(serverEnv?: Env): number {
const envRecord = this._convertEnvToRecord(serverEnv);
return envRecord.DEFAULT_NUM_CTX ? parseInt(envRecord.DEFAULT_NUM_CTX, 10) : 32768;
}
async getDynamicModels(
apiKeys?: Record<string, string>,
settings?: IProviderSetting,
@@ -81,6 +99,7 @@ export default class OllamaProvider extends BaseProvider {
maxTokenAllowed: 8000,
}));
}
getModelInstance: (options: {
model: string;
serverEnv?: Env;
@@ -88,10 +107,12 @@
providerSettings?: Record<string, IProviderSetting>;
}) => LanguageModelV1 = (options) => {
const { apiKeys, providerSettings, serverEnv, model } = options;
const envRecord = this._convertEnvToRecord(serverEnv);
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: providerSettings?.[this.name],
serverEnv: serverEnv as any,
serverEnv: envRecord,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultApiTokenKey: '',
});
@@ -101,14 +122,14 @@
throw new Error('No baseUrl found for OLLAMA provider');
}
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || envRecord.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
logger.debug('Ollama Base Url used: ', baseUrl);
const ollamaInstance = ollama(model, {
numCtx: DEFAULT_NUM_CTX,
numCtx: this.getDefaultNumCtx(serverEnv),
}) as LanguageModelV1 & { config: any };
ollamaInstance.config.baseURL = `${baseUrl}/api`;
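For context: the module-level DEFAULT_NUM_CTX constant (which read process.env at import time) is replaced by a per-request lookup against the server env record. A minimal sketch of that lookup, with illustrative names:

// Resolve DEFAULT_NUM_CTX from a server-provided env record, defaulting to 32768.
function resolveNumCtx(env?: Record<string, string>): number {
  const raw = env?.DEFAULT_NUM_CTX;
  const parsed = raw ? parseInt(raw, 10) : NaN;
  return Number.isFinite(parsed) ? parsed : 32768;
}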

View File

@@ -8,6 +8,7 @@ import { createHead } from 'remix-island';
import { useEffect } from 'react';
import { DndProvider } from 'react-dnd';
import { HTML5Backend } from 'react-dnd-html5-backend';
import { ClientOnly } from 'remix-utils/client-only';
import reactToastifyStyles from 'react-toastify/dist/ReactToastify.css?url';
import globalStyles from './styles/index.scss?url';
@@ -72,11 +73,11 @@ export function Layout({ children }: { children: React.ReactNode }) {
}, [theme]);
return (
<DndProvider backend={HTML5Backend}>
{children}
<>
<ClientOnly>{() => <DndProvider backend={HTML5Backend}>{children}</DndProvider>}</ClientOnly>
<ScrollRestoration />
<Scripts />
</DndProvider>
</>
);
}
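For context: ClientOnly from remix-utils takes a render function as its child and only calls it in the browser, so DndProvider and its DOM-dependent HTML5 backend never run during server rendering. A minimal sketch (Widget is a hypothetical client-only component):

import { ClientOnly } from 'remix-utils/client-only';

function Widget() {
  // Safe: this code never runs on the server.
  return <div>{window.innerWidth}px wide</div>;
}

export function Example() {
  return <ClientOnly fallback={<p>Loading...</p>}>{() => <Widget />}</ClientOnly>;
}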

View File

@@ -1,16 +1,41 @@
import type { LoaderFunction } from '@remix-run/cloudflare';
import { providerBaseUrlEnvKeys } from '~/utils/constants';
import { LLMManager } from '~/lib/modules/llm/manager';
import { getApiKeysFromCookie } from '~/lib/api/cookies';
export const loader: LoaderFunction = async ({ context, request }) => {
const url = new URL(request.url);
const provider = url.searchParams.get('provider');
if (!provider || !providerBaseUrlEnvKeys[provider].apiTokenKey) {
if (!provider) {
return Response.json({ isSet: false });
}
const envVarName = providerBaseUrlEnvKeys[provider].apiTokenKey;
const isSet = !!(process.env[envVarName] || (context?.cloudflare?.env as Record<string, any>)?.[envVarName]);
const llmManager = LLMManager.getInstance(context?.cloudflare?.env as any);
const providerInstance = llmManager.getProvider(provider);
if (!providerInstance || !providerInstance.config.apiTokenKey) {
return Response.json({ isSet: false });
}
const envVarName = providerInstance.config.apiTokenKey;
// Get API keys from cookie
const cookieHeader = request.headers.get('Cookie');
const apiKeys = getApiKeysFromCookie(cookieHeader);
/*
* Check API key in order of precedence:
* 1. Client-side API keys (from cookies)
* 2. Server environment variables (from Cloudflare env)
* 3. Process environment variables (from .env.local)
* 4. LLMManager environment variables
*/
const isSet = !!(
apiKeys?.[provider] ||
(context?.cloudflare?.env as Record<string, any>)?.[envVarName] ||
process.env[envVarName] ||
llmManager.env[envVarName]
);
return Response.json({ isSet });
};
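A hedged usage sketch of this loader from the client; the /api/check-env-key path is inferred from the route file name and 'OpenAI' is just an example provider:

const res = await fetch('/api/check-env-key?provider=OpenAI');
const { isSet } = (await res.json()) as { isSet: boolean };
// true when a key is found in cookies, the Cloudflare env, process.env, or the LLMManager env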

View File

@ -1,5 +1,4 @@
import { type ActionFunctionArgs, json } from '@remix-run/cloudflare';
import crypto from 'crypto';
import type { NetlifySiteInfo } from '~/types/netlify';
interface DeployRequestBody {
@@ -8,6 +7,15 @@ interface DeployRequestBody {
chatId: string;
}
async function sha1(message: string) {
const msgBuffer = new TextEncoder().encode(message);
const hashBuffer = await crypto.subtle.digest('SHA-1', msgBuffer);
const hashArray = Array.from(new Uint8Array(hashBuffer));
const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
return hashHex;
}
export async function action({ request }: ActionFunctionArgs) {
try {
const { siteId, files, token, chatId } = (await request.json()) as DeployRequestBody & { token: string };
@@ -104,7 +112,7 @@ export async function action({ request }: ActionFunctionArgs) {
for (const [filePath, content] of Object.entries(files)) {
// Ensure file path starts with a forward slash
const normalizedPath = filePath.startsWith('/') ? filePath : '/' + filePath;
const hash = crypto.createHash('sha1').update(content).digest('hex');
const hash = await sha1(content);
fileDigests[normalizedPath] = hash;
}
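For context: node:crypto's createHash is generally unavailable in the Cloudflare Workers runtime (unless Node compatibility is enabled), while crypto.subtle is part of the Web Crypto API that Workers ship. A quick usage sketch of the helper above:

const digest = await sha1('hello');
// "aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d" (the SHA-1 of "hello")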

View File

@@ -95,24 +95,28 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
},
});
// Handle streaming errors in a non-blocking way
(async () => {
for await (const part of result.fullStream) {
if (part.type === 'error') {
const error: any = part.error;
logger.error(error);
return;
try {
for await (const part of result.fullStream) {
if (part.type === 'error') {
const error: any = part.error;
logger.error('Streaming error:', error);
break;
}
}
} catch (error) {
logger.error('Error processing stream:', error);
}
})();
// Return the text stream directly since it's already text data
return new Response(result.textStream, {
status: 200,
headers: {
'Content-Type': 'text/event-stream',
Connection: 'keep-alive',
'Cache-Control': 'no-cache',
'Text-Encoding': 'chunked',
},
});
} catch (error: unknown) {

View File

@@ -114,10 +114,14 @@ async function handleProxyRequest(request: Request, path: string | undefined) {
redirect: 'follow',
};
// Add body and duplex option for non-GET/HEAD requests
// Add body for non-GET/HEAD requests
if (!['GET', 'HEAD'].includes(request.method)) {
fetchOptions.body = request.body;
fetchOptions.duplex = 'half'; // This fixes the "duplex option is required when sending a body" error
/*
* Note: duplex property is removed to ensure TypeScript compatibility
* across different environments and versions
*/
}
// Forward the request to the target URL
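Background, hedged: fetch in Node 18+/undici requires duplex: 'half' when the body is a stream, but that field is not part of TypeScript's standard RequestInit type, and the Workers runtime does not need it. If streaming the body ever becomes a problem, one alternative (illustrative only, not what this diff does) is to buffer it:

const body = ['GET', 'HEAD'].includes(request.method) ? undefined : await request.arrayBuffer();
const fetchOptions: RequestInit = { method: request.method, headers: request.headers, body, redirect: 'follow' };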

View File

@@ -1,18 +1,8 @@
import type { LoaderFunctionArgs } from '@remix-run/node';
import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';
export const loader = async ({ request: _request }: LoaderFunctionArgs) => {
// Return a simple 200 OK response with some basic health information
return new Response(
JSON.stringify({
status: 'healthy',
timestamp: new Date().toISOString(),
uptime: process.uptime(),
}),
{
status: 200,
headers: {
'Content-Type': 'application/json',
},
},
);
return json({
status: 'healthy',
timestamp: new Date().toISOString(),
});
};

View File

@@ -1,6 +1,5 @@
import type { ActionFunctionArgs, LoaderFunction } from '@remix-run/cloudflare';
import { json } from '@remix-run/cloudflare';
import { execSync } from 'child_process';
// These are injected by Vite at build time
declare const __APP_VERSION: string;
@@ -11,34 +10,24 @@ declare const __PKG_DEV_DEPENDENCIES: Record<string, string>;
declare const __PKG_DEV_DEPENDENCIES: Record<string, string>;
declare const __PKG_PEER_DEPENDENCIES: Record<string, string>;
declare const __PKG_OPTIONAL_DEPENDENCIES: Record<string, string>;
declare const __COMMIT_HASH: string;
declare const __GIT_BRANCH: string;
declare const __GIT_COMMIT_TIME: string;
declare const __GIT_AUTHOR: string;
declare const __GIT_EMAIL: string;
declare const __GIT_REMOTE_URL: string;
declare const __GIT_REPO_NAME: string;
const getGitInfo = () => {
try {
return {
commitHash: execSync('git rev-parse --short HEAD').toString().trim(),
branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
commitTime: execSync('git log -1 --format=%cd').toString().trim(),
author: execSync('git log -1 --format=%an').toString().trim(),
email: execSync('git log -1 --format=%ae').toString().trim(),
remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
repoName: execSync('git config --get remote.origin.url')
.toString()
.trim()
.replace(/^.*github.com[:/]/, '')
.replace(/\.git$/, ''),
};
} catch (error) {
console.error('Failed to get git info:', error);
return {
commitHash: 'unknown',
branch: 'unknown',
commitTime: 'unknown',
author: 'unknown',
email: 'unknown',
remoteUrl: 'unknown',
repoName: 'unknown',
};
}
return {
commitHash: __COMMIT_HASH || 'unknown',
branch: __GIT_BRANCH || 'unknown',
commitTime: __GIT_COMMIT_TIME || 'unknown',
author: __GIT_AUTHOR || 'unknown',
email: __GIT_EMAIL || 'unknown',
remoteUrl: __GIT_REMOTE_URL || 'unknown',
repoName: __GIT_REPO_NAME || 'unknown',
};
};
const formatDependencies = (
@@ -60,11 +49,11 @@ const getAppResponse = () => {
version: __APP_VERSION || '0.1.0',
description: __PKG_DESCRIPTION || 'A DIY LLM interface',
license: __PKG_LICENSE || 'MIT',
environment: process.env.NODE_ENV || 'development',
environment: 'cloudflare',
gitInfo,
timestamp: new Date().toISOString(),
runtimeInfo: {
nodeVersion: process.version || 'unknown',
nodeVersion: 'cloudflare',
},
dependencies: {
production: formatDependencies(__PKG_DEPENDENCIES, 'production'),
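For context: the __APP_VERSION and __PKG_* constants declared above are compile-time substitutions. A minimal sketch of how such constants can be injected through Vite's define option (placeholder values; the real configuration lives in vite.config.ts):

import { defineConfig } from 'vite';

export default defineConfig({
  define: {
    __APP_VERSION: JSON.stringify('0.0.0-placeholder'),
    __PKG_DESCRIPTION: JSON.stringify('placeholder description'),
  },
});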

View File

@@ -1,138 +1,48 @@
import type { LoaderFunction } from '@remix-run/cloudflare';
import { json } from '@remix-run/cloudflare';
import { execSync } from 'child_process';
import { json, type LoaderFunction } from '@remix-run/cloudflare';
interface GitHubRepoInfo {
name: string;
full_name: string;
default_branch: string;
stargazers_count: number;
forks_count: number;
open_issues_count: number;
parent?: {
full_name: string;
default_branch: string;
stargazers_count: number;
forks_count: number;
interface GitInfo {
local: {
commitHash: string;
branch: string;
commitTime: string;
author: string;
email: string;
remoteUrl: string;
repoName: string;
};
github?: {
currentRepo?: {
fullName: string;
defaultBranch: string;
stars: number;
forks: number;
openIssues?: number;
};
};
isForked?: boolean;
}
const getLocalGitInfo = () => {
try {
return {
commitHash: execSync('git rev-parse HEAD').toString().trim(),
branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
commitTime: execSync('git log -1 --format=%cd').toString().trim(),
author: execSync('git log -1 --format=%an').toString().trim(),
email: execSync('git log -1 --format=%ae').toString().trim(),
remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
repoName: execSync('git config --get remote.origin.url')
.toString()
.trim()
.replace(/^.*github.com[:/]/, '')
.replace(/\.git$/, ''),
};
} catch (error) {
console.error('Failed to get local git info:', error);
return null;
}
};
// These values will be replaced at build time
declare const __COMMIT_HASH: string;
declare const __GIT_BRANCH: string;
declare const __GIT_COMMIT_TIME: string;
declare const __GIT_AUTHOR: string;
declare const __GIT_EMAIL: string;
declare const __GIT_REMOTE_URL: string;
declare const __GIT_REPO_NAME: string;
const getGitHubInfo = async (repoFullName: string) => {
try {
// Add GitHub token if available
const headers: Record<string, string> = {
Accept: 'application/vnd.github.v3+json',
};
const githubToken = process.env.GITHUB_TOKEN;
if (githubToken) {
headers.Authorization = `token ${githubToken}`;
}
console.log('Fetching GitHub info for:', repoFullName); // Debug log
const response = await fetch(`https://api.github.com/repos/${repoFullName}`, {
headers,
});
if (!response.ok) {
console.error('GitHub API error:', {
status: response.status,
statusText: response.statusText,
repoFullName,
});
// If we get a 404, try the main repo as fallback
if (response.status === 404 && repoFullName !== 'stackblitz-labs/bolt.diy') {
return getGitHubInfo('stackblitz-labs/bolt.diy');
}
throw new Error(`GitHub API error: ${response.statusText}`);
}
const data = await response.json();
console.log('GitHub API response:', data); // Debug log
return data as GitHubRepoInfo;
} catch (error) {
console.error('Failed to get GitHub info:', error);
return null;
}
};
export const loader: LoaderFunction = async ({ request: _request }) => {
const localInfo = getLocalGitInfo();
console.log('Local git info:', localInfo); // Debug log
// If we have local info, try to get GitHub info for both our fork and upstream
let githubInfo = null;
if (localInfo?.repoName) {
githubInfo = await getGitHubInfo(localInfo.repoName);
}
// If no local info or GitHub info, try the main repo
if (!githubInfo) {
githubInfo = await getGitHubInfo('stackblitz-labs/bolt.diy');
}
const response = {
local: localInfo || {
commitHash: 'unknown',
branch: 'unknown',
commitTime: 'unknown',
author: 'unknown',
email: 'unknown',
remoteUrl: 'unknown',
repoName: 'unknown',
export const loader: LoaderFunction = async () => {
const gitInfo: GitInfo = {
local: {
commitHash: typeof __COMMIT_HASH !== 'undefined' ? __COMMIT_HASH : 'development',
branch: typeof __GIT_BRANCH !== 'undefined' ? __GIT_BRANCH : 'main',
commitTime: typeof __GIT_COMMIT_TIME !== 'undefined' ? __GIT_COMMIT_TIME : new Date().toISOString(),
author: typeof __GIT_AUTHOR !== 'undefined' ? __GIT_AUTHOR : 'development',
email: typeof __GIT_EMAIL !== 'undefined' ? __GIT_EMAIL : 'development@local',
remoteUrl: typeof __GIT_REMOTE_URL !== 'undefined' ? __GIT_REMOTE_URL : 'local',
repoName: typeof __GIT_REPO_NAME !== 'undefined' ? __GIT_REPO_NAME : 'bolt.diy',
},
github: githubInfo
? {
currentRepo: {
fullName: githubInfo.full_name,
defaultBranch: githubInfo.default_branch,
stars: githubInfo.stargazers_count,
forks: githubInfo.forks_count,
openIssues: githubInfo.open_issues_count,
},
upstream: githubInfo.parent
? {
fullName: githubInfo.parent.full_name,
defaultBranch: githubInfo.parent.default_branch,
stars: githubInfo.parent.stargazers_count,
forks: githubInfo.parent.forks_count,
}
: null,
}
: null,
isForked: Boolean(githubInfo?.parent),
timestamp: new Date().toISOString(),
};
console.log('Final response:', response);
// Debug log
return json(response);
return json(gitInfo);
};
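For context: the __GIT_* constants above now have to be produced at build time, since execSync is gone from the route. A hedged sketch of how vite.config.ts (which runs in Node during the build) can capture them; the command set is illustrative:

import { execSync } from 'node:child_process';
import { defineConfig } from 'vite';

const git = (cmd: string): string => {
  try {
    return execSync(cmd).toString().trim();
  } catch {
    return 'unknown';
  }
};

export default defineConfig({
  define: {
    __COMMIT_HASH: JSON.stringify(git('git rev-parse --short HEAD')),
    __GIT_BRANCH: JSON.stringify(git('git rev-parse --abbrev-ref HEAD')),
  },
});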

View File

@@ -1,573 +1,21 @@
import { json } from '@remix-run/node';
import type { ActionFunction } from '@remix-run/node';
import { exec } from 'child_process';
import { promisify } from 'util';
const execAsync = promisify(exec);
interface UpdateRequestBody {
branch: string;
autoUpdate?: boolean;
}
interface UpdateProgress {
stage: 'fetch' | 'pull' | 'install' | 'build' | 'complete';
message: string;
progress?: number;
error?: string;
details?: {
changedFiles?: string[];
additions?: number;
deletions?: number;
commitMessages?: string[];
totalSize?: string;
currentCommit?: string;
remoteCommit?: string;
updateReady?: boolean;
changelog?: string;
compareUrl?: string;
};
}
import { json, type ActionFunction } from '@remix-run/cloudflare';
export const action: ActionFunction = async ({ request }) => {
if (request.method !== 'POST') {
return json({ error: 'Method not allowed' }, { status: 405 });
}
try {
const body = await request.json();
if (!body || typeof body !== 'object' || !('branch' in body) || typeof body.branch !== 'string') {
return json({ error: 'Invalid request body: branch is required and must be a string' }, { status: 400 });
}
const { branch, autoUpdate = false } = body as UpdateRequestBody;
// Create a ReadableStream to send progress updates
const stream = new ReadableStream({
async start(controller) {
const encoder = new TextEncoder();
const sendProgress = (update: UpdateProgress) => {
controller.enqueue(encoder.encode(JSON.stringify(update) + '\n'));
};
try {
// Initial check stage
sendProgress({
stage: 'fetch',
message: 'Checking repository status...',
progress: 0,
});
// Check if remote exists
let defaultBranch = branch || 'main'; // Make branch mutable
try {
await execAsync('git remote get-url upstream');
sendProgress({
stage: 'fetch',
message: 'Repository remote verified',
progress: 10,
});
} catch {
throw new Error(
'No upstream repository found. Please set up the upstream repository first by running:\ngit remote add upstream https://github.com/stackblitz-labs/bolt.diy.git',
);
}
// Get default branch if not specified
if (!branch) {
sendProgress({
stage: 'fetch',
message: 'Detecting default branch...',
progress: 20,
});
try {
const { stdout } = await execAsync('git remote show upstream | grep "HEAD branch" | cut -d" " -f5');
defaultBranch = stdout.trim() || 'main';
sendProgress({
stage: 'fetch',
message: `Using branch: ${defaultBranch}`,
progress: 30,
});
} catch {
defaultBranch = 'main'; // Fallback to main if we can't detect
sendProgress({
stage: 'fetch',
message: 'Using default branch: main',
progress: 30,
});
}
}
// Fetch stage
sendProgress({
stage: 'fetch',
message: 'Fetching latest changes...',
progress: 40,
});
// Fetch all remotes
await execAsync('git fetch --all');
sendProgress({
stage: 'fetch',
message: 'Remote changes fetched',
progress: 50,
});
// Check if remote branch exists
try {
await execAsync(`git rev-parse --verify upstream/${defaultBranch}`);
sendProgress({
stage: 'fetch',
message: 'Remote branch verified',
progress: 60,
});
} catch {
throw new Error(
`Remote branch 'upstream/${defaultBranch}' not found. Please ensure the upstream repository is properly configured.`,
);
}
// Get current commit hash and remote commit hash
sendProgress({
stage: 'fetch',
message: 'Comparing versions...',
progress: 70,
});
const { stdout: currentCommit } = await execAsync('git rev-parse HEAD');
const { stdout: remoteCommit } = await execAsync(`git rev-parse upstream/${defaultBranch}`);
// If we're on the same commit, no update is available
if (currentCommit.trim() === remoteCommit.trim()) {
sendProgress({
stage: 'complete',
message: 'No updates available. You are on the latest version.',
progress: 100,
details: {
currentCommit: currentCommit.trim().substring(0, 7),
remoteCommit: remoteCommit.trim().substring(0, 7),
},
});
return;
}
sendProgress({
stage: 'fetch',
message: 'Analyzing changes...',
progress: 80,
});
// Initialize variables
let changedFiles: string[] = [];
let commitMessages: string[] = [];
let stats: RegExpMatchArray | null = null;
let totalSizeInBytes = 0;
// Format size for display
const formatSize = (bytes: number) => {
if (bytes === 0) {
return '0 B';
}
const k = 1024;
const sizes = ['B', 'KB', 'MB', 'GB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
};
// Get list of changed files and their sizes
try {
const { stdout: diffOutput } = await execAsync(
`git diff --name-status ${currentCommit.trim()}..${remoteCommit.trim()}`,
);
const files = diffOutput.split('\n').filter(Boolean);
if (files.length === 0) {
sendProgress({
stage: 'complete',
message: `No file changes detected between your version and upstream/${defaultBranch}. You might be on a different branch.`,
progress: 100,
details: {
currentCommit: currentCommit.trim().substring(0, 7),
remoteCommit: remoteCommit.trim().substring(0, 7),
},
});
return;
}
sendProgress({
stage: 'fetch',
message: `Found ${files.length} changed files, calculating sizes...`,
progress: 90,
});
// Get size information for each changed file
for (const line of files) {
const [status, file] = line.split('\t');
if (status !== 'D') {
// Skip deleted files
try {
const { stdout: sizeOutput } = await execAsync(`git cat-file -s ${remoteCommit.trim()}:${file}`);
const size = parseInt(sizeOutput) || 0;
totalSizeInBytes += size;
} catch {
console.debug(`Could not get size for file: ${file}`);
}
}
}
changedFiles = files.map((line) => {
const [status, file] = line.split('\t');
return `${status === 'M' ? 'Modified' : status === 'A' ? 'Added' : 'Deleted'}: ${file}`;
});
} catch (err) {
console.debug('Failed to get changed files:', err);
throw new Error(`Failed to compare changes with upstream/${defaultBranch}. Are you on the correct branch?`);
}
// Get commit messages between current and remote
try {
const { stdout: logOutput } = await execAsync(
`git log --pretty=format:"%h|%s|%aI" ${currentCommit.trim()}..${remoteCommit.trim()}`,
);
// Parse and group commits by type
const commits = logOutput
.split('\n')
.filter(Boolean)
.map((line) => {
const [hash, subject, timestamp] = line.split('|');
let type = 'other';
let message = subject;
if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
type = 'feature';
message = subject.replace(/^feat(?:ure)?:/, '').trim();
} else if (subject.startsWith('fix:')) {
type = 'fix';
message = subject.replace(/^fix:/, '').trim();
} else if (subject.startsWith('docs:')) {
type = 'docs';
message = subject.replace(/^docs:/, '').trim();
} else if (subject.startsWith('style:')) {
type = 'style';
message = subject.replace(/^style:/, '').trim();
} else if (subject.startsWith('refactor:')) {
type = 'refactor';
message = subject.replace(/^refactor:/, '').trim();
} else if (subject.startsWith('perf:')) {
type = 'perf';
message = subject.replace(/^perf:/, '').trim();
} else if (subject.startsWith('test:')) {
type = 'test';
message = subject.replace(/^test:/, '').trim();
} else if (subject.startsWith('build:')) {
type = 'build';
message = subject.replace(/^build:/, '').trim();
} else if (subject.startsWith('ci:')) {
type = 'ci';
message = subject.replace(/^ci:/, '').trim();
}
return {
hash,
type,
message,
timestamp: new Date(timestamp),
};
});
// Group commits by type
const groupedCommits = commits.reduce(
(acc, commit) => {
if (!acc[commit.type]) {
acc[commit.type] = [];
}
acc[commit.type].push(commit);
return acc;
},
{} as Record<string, typeof commits>,
);
// Format commit messages with emojis and timestamps
const formattedMessages = Object.entries(groupedCommits).map(([type, commits]) => {
const emoji = {
feature: '✨',
fix: '🐛',
docs: '📚',
style: '💎',
refactor: '♻️',
perf: '⚡',
test: '🧪',
build: '🛠️',
ci: '⚙️',
other: '🔍',
}[type];
const title = {
feature: 'Features',
fix: 'Bug Fixes',
docs: 'Documentation',
style: 'Styles',
refactor: 'Code Refactoring',
perf: 'Performance',
test: 'Tests',
build: 'Build',
ci: 'CI',
other: 'Other Changes',
}[type];
return `### ${emoji} ${title}\n\n${commits
.map((c) => `* ${c.message} (${c.hash.substring(0, 7)}) - ${c.timestamp.toLocaleString()}`)
.join('\n')}`;
});
commitMessages = formattedMessages;
} catch {
// Handle silently - empty commitMessages array will be used
}
// Get diff stats using the specific commits
try {
const { stdout: diffStats } = await execAsync(
`git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`,
);
stats = diffStats.match(
/(\d+) files? changed(?:, (\d+) insertions?\(\+\))?(?:, (\d+) deletions?\(-\))?/,
);
} catch {
// Handle silently - null stats will be used
}
// If we somehow still have no changes detected
if (!stats && changedFiles.length === 0) {
sendProgress({
stage: 'complete',
message: `No changes detected between your version and upstream/${defaultBranch}. This might be unexpected - please check your git status.`,
progress: 100,
});
return;
}
// Fetch changelog
sendProgress({
stage: 'fetch',
message: 'Fetching changelog...',
progress: 95,
});
const changelog = await fetchChangelog(currentCommit.trim(), remoteCommit.trim());
// We have changes, send the details
sendProgress({
stage: 'fetch',
message: `Changes detected on upstream/${defaultBranch}`,
progress: 100,
details: {
changedFiles,
additions: stats?.[2] ? parseInt(stats[2]) : 0,
deletions: stats?.[3] ? parseInt(stats[3]) : 0,
commitMessages,
totalSize: formatSize(totalSizeInBytes),
currentCommit: currentCommit.trim().substring(0, 7),
remoteCommit: remoteCommit.trim().substring(0, 7),
updateReady: true,
changelog,
compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
},
});
// Only proceed with update if autoUpdate is true
if (!autoUpdate) {
sendProgress({
stage: 'complete',
message: 'Update is ready to be applied. Click "Update Now" to proceed.',
progress: 100,
details: {
changedFiles,
additions: stats?.[2] ? parseInt(stats[2]) : 0,
deletions: stats?.[3] ? parseInt(stats[3]) : 0,
commitMessages,
totalSize: formatSize(totalSizeInBytes),
currentCommit: currentCommit.trim().substring(0, 7),
remoteCommit: remoteCommit.trim().substring(0, 7),
updateReady: true,
changelog,
compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
},
});
return;
}
// Pull stage
sendProgress({
stage: 'pull',
message: `Pulling changes from upstream/${defaultBranch}...`,
progress: 0,
});
await execAsync(`git pull upstream ${defaultBranch}`);
sendProgress({
stage: 'pull',
message: 'Changes pulled successfully',
progress: 100,
});
// Install stage
sendProgress({
stage: 'install',
message: 'Installing dependencies...',
progress: 0,
});
await execAsync('pnpm install');
sendProgress({
stage: 'install',
message: 'Dependencies installed successfully',
progress: 100,
});
// Build stage
sendProgress({
stage: 'build',
message: 'Building application...',
progress: 0,
});
await execAsync('pnpm build');
sendProgress({
stage: 'build',
message: 'Build completed successfully',
progress: 100,
});
// Complete
sendProgress({
stage: 'complete',
message: 'Update completed successfully! Click Restart to apply changes.',
progress: 100,
});
} catch (err) {
sendProgress({
stage: 'complete',
message: 'Update failed',
error: err instanceof Error ? err.message : 'Unknown error occurred',
});
} finally {
controller.close();
}
},
});
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
},
});
} catch (err) {
console.error('Update preparation failed:', err);
return json(
{
success: false,
error: err instanceof Error ? err.message : 'Unknown error occurred while preparing update',
},
{ status: 500 },
);
}
return json(
{
error: 'Updates must be performed manually in a server environment',
instructions: [
'1. Navigate to the project directory',
'2. Run: git fetch upstream',
'3. Run: git pull upstream main',
'4. Run: pnpm install',
'5. Run: pnpm run build',
],
},
{ status: 400 },
);
};
// Add this function to fetch the changelog
async function fetchChangelog(currentCommit: string, remoteCommit: string): Promise<string> {
try {
// First try to get the changelog.md content
const { stdout: changelogContent } = await execAsync('git show upstream/main:changelog.md');
// If we have a changelog, return it
if (changelogContent) {
return changelogContent;
}
// If no changelog.md, generate one in a similar format
let changelog = '# Changes in this Update\n\n';
// Get commit messages grouped by type
const { stdout: commitLog } = await execAsync(
`git log --pretty=format:"%h|%s|%b" ${currentCommit.trim()}..${remoteCommit.trim()}`,
);
const commits = commitLog.split('\n').filter(Boolean);
const categorizedCommits: Record<string, string[]> = {
'✨ Features': [],
'🐛 Bug Fixes': [],
'📚 Documentation': [],
'💎 Styles': [],
'♻️ Code Refactoring': [],
'⚡ Performance': [],
'🧪 Tests': [],
'🛠️ Build': [],
'⚙️ CI': [],
'🔍 Other Changes': [],
};
// Categorize commits
for (const commit of commits) {
const [hash, subject] = commit.split('|');
let category = '🔍 Other Changes';
if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
category = '✨ Features';
} else if (subject.startsWith('fix:')) {
category = '🐛 Bug Fixes';
} else if (subject.startsWith('docs:')) {
category = '📚 Documentation';
} else if (subject.startsWith('style:')) {
category = '💎 Styles';
} else if (subject.startsWith('refactor:')) {
category = '♻️ Code Refactoring';
} else if (subject.startsWith('perf:')) {
category = '⚡ Performance';
} else if (subject.startsWith('test:')) {
category = '🧪 Tests';
} else if (subject.startsWith('build:')) {
category = '🛠️ Build';
} else if (subject.startsWith('ci:')) {
category = '⚙️ CI';
}
const message = subject.includes(':') ? subject.split(':')[1].trim() : subject.trim();
categorizedCommits[category].push(`* ${message} (${hash.substring(0, 7)})`);
}
// Build changelog content
for (const [category, commits] of Object.entries(categorizedCommits)) {
if (commits.length > 0) {
changelog += `\n## ${category}\n\n${commits.join('\n')}\n`;
}
}
// Add stats
const { stdout: stats } = await execAsync(`git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`);
if (stats) {
changelog += '\n## 📊 Stats\n\n';
changelog += `${stats.trim()}\n`;
}
return changelog;
} catch (error) {
console.error('Error fetching changelog:', error);
return 'Unable to fetch changelog';
}
}
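Hedged usage sketch: after this rewrite the route no longer shells out to git or streams progress; a POST simply gets HTTP 400 with manual update instructions (the /api/update path is assumed from the route file):

const res = await fetch('/api/update', { method: 'POST' });
console.log(res.status); // 400
const { error, instructions } = (await res.json()) as { error: string; instructions: string[] };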

View File

@@ -123,6 +123,7 @@
"remark-gfm": "^4.0.0",
"remix-island": "^0.2.0",
"remix-utils": "^7.7.0",
"rollup-plugin-node-polyfills": "^0.2.1",
"shiki": "^1.24.0",
"tailwind-merge": "^2.2.1",
"unist-util-visit": "^5.0.0",

View File

@@ -287,6 +287,9 @@ importers:
remix-utils:
specifier: ^7.7.0
version: 7.7.0(@remix-run/cloudflare@2.15.3(@cloudflare/workers-types@4.20250204.0)(typescript@5.7.3))(@remix-run/node@2.15.3(typescript@5.7.3))(@remix-run/react@2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.7.3))(@remix-run/router@1.22.0)(react@18.3.1)(zod@3.24.1)
rollup-plugin-node-polyfills:
specifier: ^0.2.1
version: 0.2.1
shiki:
specifier: ^1.24.0
version: 1.29.2

View File

@@ -89,14 +89,55 @@ export default defineConfig((config) => {
__PKG_DEV_DEPENDENCIES: JSON.stringify(pkg.devDependencies),
__PKG_PEER_DEPENDENCIES: JSON.stringify(pkg.peerDependencies),
__PKG_OPTIONAL_DEPENDENCIES: JSON.stringify(pkg.optionalDependencies),
// Define global values
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
},
build: {
target: 'esnext',
rollupOptions: {
output: {
format: 'esm',
},
},
commonjsOptions: {
transformMixedEsModules: true,
},
},
optimizeDeps: {
esbuildOptions: {
define: {
global: 'globalThis',
},
},
},
resolve: {
alias: {
buffer: 'vite-plugin-node-polyfills/polyfills/buffer',
},
},
plugins: [
nodePolyfills({
include: ['path', 'buffer', 'process'],
include: ['buffer', 'process', 'util', 'stream'],
globals: {
Buffer: true,
process: true,
global: true,
},
protocolImports: true,
// Exclude Node.js modules that shouldn't be polyfilled in Cloudflare
exclude: ['child_process', 'fs', 'path'],
}),
{
name: 'buffer-polyfill',
transform(code, id) {
if (id.includes('env.mjs')) {
return {
code: `import { Buffer } from 'buffer';\n${code}`,
map: null,
};
}
},
},
config.mode !== 'test' && remixCloudflareDevProxy(),
remixVitePlugin({
future: {