From a87eae211904f13e695d208d5b691f8c5bd6b97b Mon Sep 17 00:00:00 2001
From: Leex
Date: Sun, 23 Feb 2025 22:37:38 +0100
Subject: [PATCH 1/7] Update Dockerfile (fix docker deployment)
---
Dockerfile | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index d287d407..1cd3f0bf 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,7 +6,10 @@ WORKDIR /app
# Install dependencies (this step is cached as long as the dependencies don't change)
COPY package.json pnpm-lock.yaml ./
-RUN corepack enable pnpm && pnpm install
+#RUN npm install -g corepack@latest
+
+#RUN corepack enable pnpm && pnpm install
+RUN npm install -g pnpm && pnpm install
# Copy the rest of your app's source code
COPY . .
From 73710da5018df2e3b30cf59c34d18f5ba92acd10 Mon Sep 17 00:00:00 2001
From: Leex
Date: Sun, 23 Feb 2025 22:52:09 +0100
Subject: [PATCH 2/7] Update docker.yaml
---
.github/workflows/docker.yaml | 66 +++++++++--------------------------
1 file changed, 17 insertions(+), 49 deletions(-)
diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
index d3bd2f1b..fece8ba0 100644
--- a/.github/workflows/docker.yaml
+++ b/.github/workflows/docker.yaml
@@ -1,14 +1,11 @@
----
name: Docker Publish
on:
- workflow_dispatch:
push:
branches:
- main
- tags:
- - v*
- - "*"
+ - stable
+ workflow_dispatch:
permissions:
packages: write
@@ -16,66 +13,37 @@ permissions:
env:
REGISTRY: ghcr.io
- DOCKER_IMAGE: ghcr.io/${{ github.repository }}
- BUILD_TARGET: bolt-ai-production # bolt-ai-development
+ IMAGE_NAME: ${{ github.repository }}
jobs:
docker-build-publish:
runs-on: ubuntu-latest
steps:
- - name: Checkout
+ - name: Checkout code
uses: actions/checkout@v4
- - id: string
- uses: ASzc/change-string-case-action@v6
- with:
- string: ${{ env.DOCKER_IMAGE }}
-
- - name: Docker meta
- id: meta
- uses: crazy-max/ghaction-docker-meta@v5
- with:
- images: ${{ steps.string.outputs.lowercase }}
- flavor: |
- latest=true
- prefix=
- suffix=
- tags: |
- type=semver,pattern={{version}}
- type=pep440,pattern={{version}}
- type=ref,event=tag
- type=raw,value={{sha}}
-
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
-
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- - name: Login to Container Registry
+ - name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
- username: ${{ github.actor }} # ${{ secrets.DOCKER_USERNAME }}
- password: ${{ secrets.GITHUB_TOKEN }} # ${{ secrets.DOCKER_PASSWORD }}
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
- - name: Build and push
+ - name: Extract metadata for Docker image
+ id: meta
+ uses: docker/metadata-action@v4
+ with:
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+
+ - name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
- file: ./Dockerfile
- target: ${{ env.BUILD_TARGET }}
- platforms: linux/amd64,linux/arm64
push: true
- tags: ${{ steps.meta.outputs.tags }}
+ tags: |
+ ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref == 'refs/heads/stable' && 'stable' || 'latest' }}
+ ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
labels: ${{ steps.meta.outputs.labels }}
- cache-from: type=registry,ref=${{ steps.string.outputs.lowercase }}:latest
- cache-to: type=inline
-
- - name: Check manifest
- run: |
- docker buildx imagetools inspect ${{ steps.string.outputs.lowercase }}:${{ steps.meta.outputs.version }}
-
- - name: Dump context
- if: always()
- uses: crazy-max/ghaction-dump-context@v2
From eb4aa68581e25edc0cb7168eed7fb914cb8cd514 Mon Sep 17 00:00:00 2001
From: Leex
Date: Mon, 24 Feb 2025 23:27:33 +0100
Subject: [PATCH 3/7] Update docker.yaml
---
.github/workflows/docker.yaml | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
index fece8ba0..42070f9f 100644
--- a/.github/workflows/docker.yaml
+++ b/.github/workflows/docker.yaml
@@ -38,12 +38,24 @@ jobs:
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- - name: Build and push Docker image
+ - name: Build and push Docker image for main
+ if: github.ref == 'refs/heads/main'
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: |
- ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.ref == 'refs/heads/stable' && 'stable' || 'latest' }}
+ ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
+ ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
+ labels: ${{ steps.meta.outputs.labels }}
+
+ - name: Build and push Docker image for stable
+ if: github.ref == 'refs/heads/stable'
+ uses: docker/build-push-action@v6
+ with:
+ context: .
+ push: true
+ tags: |
+ ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:stable
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
labels: ${{ steps.meta.outputs.labels }}
From 332edd3d982aa1c05933537ef766695069da28bb Mon Sep 17 00:00:00 2001
From: Stijnus <72551117+Stijnus@users.noreply.github.com>
Date: Sat, 8 Mar 2025 20:37:56 +0100
Subject: [PATCH 4/7] fix: settings bugfix error building my application issue
#1414 (#1436)
* Fix: error building my application #1414
* fix for vite
* Update vite.config.ts
* Update root.tsx
* fix the root.tsx and the debugtab
* lm studio fix and fix for the api key
* Update api.enhancer for prompt enhancement
* bugfixes
* Revert api.enhancer.ts back to original code
* Update api.enhancer.ts
* Update api.git-proxy.$.ts
* Update api.git-proxy.$.ts
* Update api.enhancer.ts
---
.../tabs/connections/ConnectionsTab.tsx | 25 +-
.../tabs/connections/GithubConnection.tsx | 2 +-
.../tabs/connections/NetlifyConnection.tsx | 2 +-
app/lib/hooks/useShortcuts.ts | 2 +-
app/lib/modules/llm/providers/lmstudio.ts | 2 +-
app/lib/modules/llm/providers/ollama.ts | 31 +-
app/root.tsx | 7 +-
app/routes/api.check-env-key.ts | 33 +-
app/routes/api.deploy.ts | 12 +-
app/routes/api.enhancer.ts | 18 +-
app/routes/api.git-proxy.$.ts | 180 +++++-
app/routes/api.health.ts | 20 +-
app/routes/api.system.app-info.ts | 47 +-
app/routes/api.system.git-info.ts | 168 ++---
app/routes/api.update.ts | 580 +-----------------
package.json | 1 +
pnpm-lock.yaml | 3 +
vite.config.ts | 43 +-
18 files changed, 373 insertions(+), 803 deletions(-)
diff --git a/app/components/@settings/tabs/connections/ConnectionsTab.tsx b/app/components/@settings/tabs/connections/ConnectionsTab.tsx
index 450d241a..72ff6434 100644
--- a/app/components/@settings/tabs/connections/ConnectionsTab.tsx
+++ b/app/components/@settings/tabs/connections/ConnectionsTab.tsx
@@ -1,6 +1,19 @@
import { motion } from 'framer-motion';
-import { GithubConnection } from './GithubConnection';
-import { NetlifyConnection } from './NetlifyConnection';
+import React, { Suspense } from 'react';
+
+// Use React.lazy for dynamic imports
+const GithubConnection = React.lazy(() => import('./GithubConnection'));
+const NetlifyConnection = React.lazy(() => import('./NetlifyConnection'));
+
+// Loading fallback component
+const LoadingFallback = () => (
+
+
+
+
Loading connection...
+
+
+);
export default function ConnectionsTab() {
return (
@@ -20,8 +33,12 @@ export default function ConnectionsTab() {
-
-
+ }>
+
+
+ }>
+
+
);
diff --git a/app/components/@settings/tabs/connections/GithubConnection.tsx b/app/components/@settings/tabs/connections/GithubConnection.tsx
index e2d8924f..77bcd1a6 100644
--- a/app/components/@settings/tabs/connections/GithubConnection.tsx
+++ b/app/components/@settings/tabs/connections/GithubConnection.tsx
@@ -65,7 +65,7 @@ interface GitHubConnection {
stats?: GitHubStats;
}
-export function GithubConnection() {
+export default function GithubConnection() {
const [connection, setConnection] = useState<GitHubConnection>({
user: null,
token: '',
diff --git a/app/components/@settings/tabs/connections/NetlifyConnection.tsx b/app/components/@settings/tabs/connections/NetlifyConnection.tsx
index 5881b761..d811602e 100644
--- a/app/components/@settings/tabs/connections/NetlifyConnection.tsx
+++ b/app/components/@settings/tabs/connections/NetlifyConnection.tsx
@@ -13,7 +13,7 @@ import {
} from '~/lib/stores/netlify';
import type { NetlifyUser } from '~/types/netlify';
-export function NetlifyConnection() {
+export default function NetlifyConnection() {
const connection = useStore(netlifyConnection);
const connecting = useStore(isConnecting);
const fetchingStats = useStore(isFetchingStats);
diff --git a/app/lib/hooks/useShortcuts.ts b/app/lib/hooks/useShortcuts.ts
index 39308fcd..447a97e3 100644
--- a/app/lib/hooks/useShortcuts.ts
+++ b/app/lib/hooks/useShortcuts.ts
@@ -41,7 +41,7 @@ export function useShortcuts(): void {
}
// Debug logging in development only
- if (process.env.NODE_ENV === 'development') {
+ if (import.meta.env.DEV) {
console.log('Key pressed:', {
key: event.key,
code: event.code,
diff --git a/app/lib/modules/llm/providers/lmstudio.ts b/app/lib/modules/llm/providers/lmstudio.ts
index 9dabc3eb..fe5b27cd 100644
--- a/app/lib/modules/llm/providers/lmstudio.ts
+++ b/app/lib/modules/llm/providers/lmstudio.ts
@@ -75,7 +75,7 @@ export default class LMStudioProvider extends BaseProvider {
throw new Error('No baseUrl found for LMStudio provider');
}
- const isDocker = process.env.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
+ const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
if (typeof window === 'undefined') {
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
diff --git a/app/lib/modules/llm/providers/ollama.ts b/app/lib/modules/llm/providers/ollama.ts
index a3974ab3..e50ecae5 100644
--- a/app/lib/modules/llm/providers/ollama.ts
+++ b/app/lib/modules/llm/providers/ollama.ts
@@ -27,8 +27,6 @@ export interface OllamaApiResponse {
models: OllamaModel[];
}
-export const DEFAULT_NUM_CTX = process?.env?.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
-
export default class OllamaProvider extends BaseProvider {
name = 'Ollama';
getApiKeyLink = 'https://ollama.com/download';
@@ -41,6 +39,26 @@ export default class OllamaProvider extends BaseProvider {
staticModels: ModelInfo[] = [];
+ private _convertEnvToRecord(env?: Env): Record<string, string> {
+ if (!env) {
+ return {};
+ }
+
+ // Convert Env to a plain object with string values
+ return Object.entries(env).reduce(
+ (acc, [key, value]) => {
+ acc[key] = String(value);
+ return acc;
+ },
+ {} as Record<string, string>,
+ );
+ }
+
+ getDefaultNumCtx(serverEnv?: Env): number {
+ const envRecord = this._convertEnvToRecord(serverEnv);
+ return envRecord.DEFAULT_NUM_CTX ? parseInt(envRecord.DEFAULT_NUM_CTX, 10) : 32768;
+ }
+
async getDynamicModels(
apiKeys?: Record<string, string>,
settings?: IProviderSetting,
@@ -81,6 +99,7 @@ export default class OllamaProvider extends BaseProvider {
maxTokenAllowed: 8000,
}));
}
+
getModelInstance: (options: {
model: string;
serverEnv?: Env;
@@ -88,10 +107,12 @@ export default class OllamaProvider extends BaseProvider {
providerSettings?: Record<string, IProviderSetting>;
}) => LanguageModelV1 = (options) => {
const { apiKeys, providerSettings, serverEnv, model } = options;
+ const envRecord = this._convertEnvToRecord(serverEnv);
+
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: providerSettings?.[this.name],
- serverEnv: serverEnv as any,
+ serverEnv: envRecord,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultApiTokenKey: '',
});
@@ -101,14 +122,14 @@ export default class OllamaProvider extends BaseProvider {
throw new Error('No baseUrl found for OLLAMA provider');
}
- const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
+ const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || envRecord.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
logger.debug('Ollama Base Url used: ', baseUrl);
const ollamaInstance = ollama(model, {
- numCtx: DEFAULT_NUM_CTX,
+ numCtx: this.getDefaultNumCtx(serverEnv),
}) as LanguageModelV1 & { config: any };
ollamaInstance.config.baseURL = `${baseUrl}/api`;
diff --git a/app/root.tsx b/app/root.tsx
index b49d7355..a7ccb285 100644
--- a/app/root.tsx
+++ b/app/root.tsx
@@ -8,6 +8,7 @@ import { createHead } from 'remix-island';
import { useEffect } from 'react';
import { DndProvider } from 'react-dnd';
import { HTML5Backend } from 'react-dnd-html5-backend';
+import { ClientOnly } from 'remix-utils/client-only';
import reactToastifyStyles from 'react-toastify/dist/ReactToastify.css?url';
import globalStyles from './styles/index.scss?url';
@@ -72,11 +73,11 @@ export function Layout({ children }: { children: React.ReactNode }) {
}, [theme]);
return (
-
- {children}
+ <>
+ {() => {children}}
-
+ >
);
}
diff --git a/app/routes/api.check-env-key.ts b/app/routes/api.check-env-key.ts
index 14d21236..70f65bf0 100644
--- a/app/routes/api.check-env-key.ts
+++ b/app/routes/api.check-env-key.ts
@@ -1,16 +1,41 @@
import type { LoaderFunction } from '@remix-run/cloudflare';
-import { providerBaseUrlEnvKeys } from '~/utils/constants';
+import { LLMManager } from '~/lib/modules/llm/manager';
+import { getApiKeysFromCookie } from '~/lib/api/cookies';
export const loader: LoaderFunction = async ({ context, request }) => {
const url = new URL(request.url);
const provider = url.searchParams.get('provider');
- if (!provider || !providerBaseUrlEnvKeys[provider].apiTokenKey) {
+ if (!provider) {
return Response.json({ isSet: false });
}
- const envVarName = providerBaseUrlEnvKeys[provider].apiTokenKey;
- const isSet = !!(process.env[envVarName] || (context?.cloudflare?.env as Record<string, any>)?.[envVarName]);
+ const llmManager = LLMManager.getInstance(context?.cloudflare?.env as any);
+ const providerInstance = llmManager.getProvider(provider);
+
+ if (!providerInstance || !providerInstance.config.apiTokenKey) {
+ return Response.json({ isSet: false });
+ }
+
+ const envVarName = providerInstance.config.apiTokenKey;
+
+ // Get API keys from cookie
+ const cookieHeader = request.headers.get('Cookie');
+ const apiKeys = getApiKeysFromCookie(cookieHeader);
+
+ /*
+ * Check API key in order of precedence:
+ * 1. Client-side API keys (from cookies)
+ * 2. Server environment variables (from Cloudflare env)
+ * 3. Process environment variables (from .env.local)
+ * 4. LLMManager environment variables
+ */
+ const isSet = !!(
+ apiKeys?.[provider] ||
+ (context?.cloudflare?.env as Record<string, any>)?.[envVarName] ||
+ process.env[envVarName] ||
+ llmManager.env[envVarName]
+ );
return Response.json({ isSet });
};
diff --git a/app/routes/api.deploy.ts b/app/routes/api.deploy.ts
index 48543e97..0bc1c5e4 100644
--- a/app/routes/api.deploy.ts
+++ b/app/routes/api.deploy.ts
@@ -1,5 +1,4 @@
import { type ActionFunctionArgs, json } from '@remix-run/cloudflare';
-import crypto from 'crypto';
import type { NetlifySiteInfo } from '~/types/netlify';
interface DeployRequestBody {
@@ -8,6 +7,15 @@ interface DeployRequestBody {
chatId: string;
}
+async function sha1(message: string) {
+ const msgBuffer = new TextEncoder().encode(message);
+ const hashBuffer = await crypto.subtle.digest('SHA-1', msgBuffer);
+ const hashArray = Array.from(new Uint8Array(hashBuffer));
+ const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
+
+ return hashHex;
+}
+
export async function action({ request }: ActionFunctionArgs) {
try {
const { siteId, files, token, chatId } = (await request.json()) as DeployRequestBody & { token: string };
@@ -104,7 +112,7 @@ export async function action({ request }: ActionFunctionArgs) {
for (const [filePath, content] of Object.entries(files)) {
// Ensure file path starts with a forward slash
const normalizedPath = filePath.startsWith('/') ? filePath : '/' + filePath;
- const hash = crypto.createHash('sha1').update(content).digest('hex');
+ const hash = await sha1(content);
fileDigests[normalizedPath] = hash;
}
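Note: the new sha1() helper relies on the Web Crypto API (crypto.subtle), which is available in Cloudflare Workers where Node's crypto module is not. A minimal standalone sketch — hypothetical file name, assuming Node 18+ where globalThis.crypto is also available — showing that it produces the same hex digest as the removed crypto.createHash('sha1') call:

// sha1-check.ts (illustrative only, not part of the patch)
import { createHash } from 'node:crypto';

async function sha1(message: string): Promise<string> {
  // Same logic as the helper added to api.deploy.ts above
  const msgBuffer = new TextEncoder().encode(message);
  const hashBuffer = await crypto.subtle.digest('SHA-1', msgBuffer);
  return Array.from(new Uint8Array(hashBuffer))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('');
}

const sample = 'console.log("hello");';
const nodeDigest = createHash('sha1').update(sample).digest('hex');
sha1(sample).then((webDigest) => {
  // Both digests match, so the swap does not change how deployed files are hashed
  console.log(nodeDigest, webDigest, nodeDigest === webDigest);
});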
diff --git a/app/routes/api.enhancer.ts b/app/routes/api.enhancer.ts
index 4ab54f31..1e7bad5f 100644
--- a/app/routes/api.enhancer.ts
+++ b/app/routes/api.enhancer.ts
@@ -95,24 +95,28 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
},
});
+ // Handle streaming errors in a non-blocking way
(async () => {
- for await (const part of result.fullStream) {
- if (part.type === 'error') {
- const error: any = part.error;
- logger.error(error);
-
- return;
+ try {
+ for await (const part of result.fullStream) {
+ if (part.type === 'error') {
+ const error: any = part.error;
+ logger.error('Streaming error:', error);
+ break;
+ }
}
+ } catch (error) {
+ logger.error('Error processing stream:', error);
}
})();
+ // Return the text stream directly since it's already text data
return new Response(result.textStream, {
status: 200,
headers: {
'Content-Type': 'text/event-stream',
Connection: 'keep-alive',
'Cache-Control': 'no-cache',
- 'Text-Encoding': 'chunked',
},
});
} catch (error: unknown) {
diff --git a/app/routes/api.git-proxy.$.ts b/app/routes/api.git-proxy.$.ts
index 9e6cb3b1..a4e8f1e0 100644
--- a/app/routes/api.git-proxy.$.ts
+++ b/app/routes/api.git-proxy.$.ts
@@ -1,6 +1,47 @@
import { json } from '@remix-run/cloudflare';
import type { ActionFunctionArgs, LoaderFunctionArgs } from '@remix-run/cloudflare';
+// Allowed headers to forward to the target server
+const ALLOW_HEADERS = [
+ 'accept-encoding',
+ 'accept-language',
+ 'accept',
+ 'access-control-allow-origin',
+ 'authorization',
+ 'cache-control',
+ 'connection',
+ 'content-length',
+ 'content-type',
+ 'dnt',
+ 'pragma',
+ 'range',
+ 'referer',
+ 'user-agent',
+ 'x-authorization',
+ 'x-http-method-override',
+ 'x-requested-with',
+];
+
+// Headers to expose from the target server's response
+const EXPOSE_HEADERS = [
+ 'accept-ranges',
+ 'age',
+ 'cache-control',
+ 'content-length',
+ 'content-language',
+ 'content-type',
+ 'date',
+ 'etag',
+ 'expires',
+ 'last-modified',
+ 'pragma',
+ 'server',
+ 'transfer-encoding',
+ 'vary',
+ 'x-github-request-id',
+ 'x-redirected-url',
+];
+
// Handle all HTTP methods
export async function action({ request, params }: ActionFunctionArgs) {
return handleProxyRequest(request, params['*']);
@@ -16,50 +57,121 @@ async function handleProxyRequest(request: Request, path: string | undefined) {
return json({ error: 'Invalid proxy URL format' }, { status: 400 });
}
- const url = new URL(request.url);
-
- // Reconstruct the target URL
- const targetURL = `https://${path}${url.search}`;
-
- // Forward the request to the target URL
- const response = await fetch(targetURL, {
- method: request.method,
- headers: {
- ...Object.fromEntries(request.headers),
-
- // Override host header with the target host
- host: new URL(targetURL).host,
- },
- body: ['GET', 'HEAD'].includes(request.method) ? null : await request.arrayBuffer(),
- });
-
- // Create response with CORS headers
- const corsHeaders = {
- 'Access-Control-Allow-Origin': '*',
- 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
- 'Access-Control-Allow-Headers': '*',
- };
-
- // Handle preflight requests
+ // Handle CORS preflight request
if (request.method === 'OPTIONS') {
return new Response(null, {
- headers: corsHeaders,
- status: 204,
+ status: 200,
+ headers: {
+ 'Access-Control-Allow-Origin': '*',
+ 'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
+ 'Access-Control-Allow-Headers': ALLOW_HEADERS.join(', '),
+ 'Access-Control-Expose-Headers': EXPOSE_HEADERS.join(', '),
+ 'Access-Control-Max-Age': '86400',
+ },
});
}
- // Forward the response with CORS headers
- const responseHeaders = new Headers(response.headers);
- Object.entries(corsHeaders).forEach(([key, value]) => {
- responseHeaders.set(key, value);
- });
+ // Extract domain and remaining path
+ const parts = path.match(/([^\/]+)\/?(.*)/);
+ if (!parts) {
+ return json({ error: 'Invalid path format' }, { status: 400 });
+ }
+
+ const domain = parts[1];
+ const remainingPath = parts[2] || '';
+
+ // Reconstruct the target URL with query parameters
+ const url = new URL(request.url);
+ const targetURL = `https://${domain}/${remainingPath}${url.search}`;
+
+ console.log('Target URL:', targetURL);
+
+ // Filter and prepare headers
+ const headers = new Headers();
+
+ // Only forward allowed headers
+ for (const header of ALLOW_HEADERS) {
+ if (request.headers.has(header)) {
+ headers.set(header, request.headers.get(header)!);
+ }
+ }
+
+ // Set the host header
+ headers.set('Host', domain);
+
+ // Set Git user agent if not already present
+ if (!headers.has('user-agent') || !headers.get('user-agent')?.startsWith('git/')) {
+ headers.set('User-Agent', 'git/@isomorphic-git/cors-proxy');
+ }
+
+ console.log('Request headers:', Object.fromEntries(headers.entries()));
+
+ // Prepare fetch options
+ const fetchOptions: RequestInit = {
+ method: request.method,
+ headers,
+ redirect: 'follow',
+ };
+
+ // Add body for non-GET/HEAD requests
+ if (!['GET', 'HEAD'].includes(request.method)) {
+ fetchOptions.body = request.body;
+
+ /*
+ * Note: duplex property is removed to ensure TypeScript compatibility
+ * across different environments and versions
+ */
+ }
+
+ // Forward the request to the target URL
+ const response = await fetch(targetURL, fetchOptions);
+
+ console.log('Response status:', response.status);
+
+ // Create response headers
+ const responseHeaders = new Headers();
+
+ // Add CORS headers
+ responseHeaders.set('Access-Control-Allow-Origin', '*');
+ responseHeaders.set('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
+ responseHeaders.set('Access-Control-Allow-Headers', ALLOW_HEADERS.join(', '));
+ responseHeaders.set('Access-Control-Expose-Headers', EXPOSE_HEADERS.join(', '));
+
+ // Copy exposed headers from the target response
+ for (const header of EXPOSE_HEADERS) {
+ // Skip content-length as we'll use the original response's content-length
+ if (header === 'content-length') {
+ continue;
+ }
+
+ if (response.headers.has(header)) {
+ responseHeaders.set(header, response.headers.get(header)!);
+ }
+ }
+
+ // If the response was redirected, add the x-redirected-url header
+ if (response.redirected) {
+ responseHeaders.set('x-redirected-url', response.url);
+ }
+
+ console.log('Response headers:', Object.fromEntries(responseHeaders.entries()));
+
+ // Return the response with the target's body stream piped directly
return new Response(response.body, {
status: response.status,
+ statusText: response.statusText,
headers: responseHeaders,
});
} catch (error) {
- console.error('Git proxy error:', error);
- return json({ error: 'Proxy error' }, { status: 500 });
+ console.error('Proxy error:', error);
+ return json(
+ {
+ error: 'Proxy error',
+ message: error instanceof Error ? error.message : 'Unknown error',
+ url: path ? `https://${path}` : 'Invalid URL',
+ },
+ { status: 500 },
+ );
}
}
diff --git a/app/routes/api.health.ts b/app/routes/api.health.ts
index 9d3bd839..e5f5a30d 100644
--- a/app/routes/api.health.ts
+++ b/app/routes/api.health.ts
@@ -1,18 +1,8 @@
-import type { LoaderFunctionArgs } from '@remix-run/node';
+import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';
export const loader = async ({ request: _request }: LoaderFunctionArgs) => {
- // Return a simple 200 OK response with some basic health information
- return new Response(
- JSON.stringify({
- status: 'healthy',
- timestamp: new Date().toISOString(),
- uptime: process.uptime(),
- }),
- {
- status: 200,
- headers: {
- 'Content-Type': 'application/json',
- },
- },
- );
+ return json({
+ status: 'healthy',
+ timestamp: new Date().toISOString(),
+ });
};
diff --git a/app/routes/api.system.app-info.ts b/app/routes/api.system.app-info.ts
index d3ed0185..01953b68 100644
--- a/app/routes/api.system.app-info.ts
+++ b/app/routes/api.system.app-info.ts
@@ -1,6 +1,5 @@
import type { ActionFunctionArgs, LoaderFunction } from '@remix-run/cloudflare';
import { json } from '@remix-run/cloudflare';
-import { execSync } from 'child_process';
// These are injected by Vite at build time
declare const __APP_VERSION: string;
@@ -11,34 +10,24 @@ declare const __PKG_DEPENDENCIES: Record<string, string>;
declare const __PKG_DEV_DEPENDENCIES: Record<string, string>;
declare const __PKG_PEER_DEPENDENCIES: Record<string, string>;
declare const __PKG_OPTIONAL_DEPENDENCIES: Record<string, string>;
+declare const __COMMIT_HASH: string;
+declare const __GIT_BRANCH: string;
+declare const __GIT_COMMIT_TIME: string;
+declare const __GIT_AUTHOR: string;
+declare const __GIT_EMAIL: string;
+declare const __GIT_REMOTE_URL: string;
+declare const __GIT_REPO_NAME: string;
const getGitInfo = () => {
- try {
- return {
- commitHash: execSync('git rev-parse --short HEAD').toString().trim(),
- branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
- commitTime: execSync('git log -1 --format=%cd').toString().trim(),
- author: execSync('git log -1 --format=%an').toString().trim(),
- email: execSync('git log -1 --format=%ae').toString().trim(),
- remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
- repoName: execSync('git config --get remote.origin.url')
- .toString()
- .trim()
- .replace(/^.*github.com[:/]/, '')
- .replace(/\.git$/, ''),
- };
- } catch (error) {
- console.error('Failed to get git info:', error);
- return {
- commitHash: 'unknown',
- branch: 'unknown',
- commitTime: 'unknown',
- author: 'unknown',
- email: 'unknown',
- remoteUrl: 'unknown',
- repoName: 'unknown',
- };
- }
+ return {
+ commitHash: __COMMIT_HASH || 'unknown',
+ branch: __GIT_BRANCH || 'unknown',
+ commitTime: __GIT_COMMIT_TIME || 'unknown',
+ author: __GIT_AUTHOR || 'unknown',
+ email: __GIT_EMAIL || 'unknown',
+ remoteUrl: __GIT_REMOTE_URL || 'unknown',
+ repoName: __GIT_REPO_NAME || 'unknown',
+ };
};
const formatDependencies = (
@@ -60,11 +49,11 @@ const getAppResponse = () => {
version: __APP_VERSION || '0.1.0',
description: __PKG_DESCRIPTION || 'A DIY LLM interface',
license: __PKG_LICENSE || 'MIT',
- environment: process.env.NODE_ENV || 'development',
+ environment: 'cloudflare',
gitInfo,
timestamp: new Date().toISOString(),
runtimeInfo: {
- nodeVersion: process.version || 'unknown',
+ nodeVersion: 'cloudflare',
},
dependencies: {
production: formatDependencies(__PKG_DEPENDENCIES, 'production'),
diff --git a/app/routes/api.system.git-info.ts b/app/routes/api.system.git-info.ts
index d6bf9975..63c879ce 100644
--- a/app/routes/api.system.git-info.ts
+++ b/app/routes/api.system.git-info.ts
@@ -1,138 +1,48 @@
-import type { LoaderFunction } from '@remix-run/cloudflare';
-import { json } from '@remix-run/cloudflare';
-import { execSync } from 'child_process';
+import { json, type LoaderFunction } from '@remix-run/cloudflare';
-interface GitHubRepoInfo {
- name: string;
- full_name: string;
- default_branch: string;
- stargazers_count: number;
- forks_count: number;
- open_issues_count: number;
- parent?: {
- full_name: string;
- default_branch: string;
- stargazers_count: number;
- forks_count: number;
+interface GitInfo {
+ local: {
+ commitHash: string;
+ branch: string;
+ commitTime: string;
+ author: string;
+ email: string;
+ remoteUrl: string;
+ repoName: string;
};
+ github?: {
+ currentRepo?: {
+ fullName: string;
+ defaultBranch: string;
+ stars: number;
+ forks: number;
+ openIssues?: number;
+ };
+ };
+ isForked?: boolean;
}
-const getLocalGitInfo = () => {
- try {
- return {
- commitHash: execSync('git rev-parse HEAD').toString().trim(),
- branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
- commitTime: execSync('git log -1 --format=%cd').toString().trim(),
- author: execSync('git log -1 --format=%an').toString().trim(),
- email: execSync('git log -1 --format=%ae').toString().trim(),
- remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
- repoName: execSync('git config --get remote.origin.url')
- .toString()
- .trim()
- .replace(/^.*github.com[:/]/, '')
- .replace(/\.git$/, ''),
- };
- } catch (error) {
- console.error('Failed to get local git info:', error);
- return null;
- }
-};
+// These values will be replaced at build time
+declare const __COMMIT_HASH: string;
+declare const __GIT_BRANCH: string;
+declare const __GIT_COMMIT_TIME: string;
+declare const __GIT_AUTHOR: string;
+declare const __GIT_EMAIL: string;
+declare const __GIT_REMOTE_URL: string;
+declare const __GIT_REPO_NAME: string;
-const getGitHubInfo = async (repoFullName: string) => {
- try {
- // Add GitHub token if available
- const headers: Record<string, string> = {
- Accept: 'application/vnd.github.v3+json',
- };
-
- const githubToken = process.env.GITHUB_TOKEN;
-
- if (githubToken) {
- headers.Authorization = `token ${githubToken}`;
- }
-
- console.log('Fetching GitHub info for:', repoFullName); // Debug log
-
- const response = await fetch(`https://api.github.com/repos/${repoFullName}`, {
- headers,
- });
-
- if (!response.ok) {
- console.error('GitHub API error:', {
- status: response.status,
- statusText: response.statusText,
- repoFullName,
- });
-
- // If we get a 404, try the main repo as fallback
- if (response.status === 404 && repoFullName !== 'stackblitz-labs/bolt.diy') {
- return getGitHubInfo('stackblitz-labs/bolt.diy');
- }
-
- throw new Error(`GitHub API error: ${response.statusText}`);
- }
-
- const data = await response.json();
- console.log('GitHub API response:', data); // Debug log
-
- return data as GitHubRepoInfo;
- } catch (error) {
- console.error('Failed to get GitHub info:', error);
- return null;
- }
-};
-
-export const loader: LoaderFunction = async ({ request: _request }) => {
- const localInfo = getLocalGitInfo();
- console.log('Local git info:', localInfo); // Debug log
-
- // If we have local info, try to get GitHub info for both our fork and upstream
- let githubInfo = null;
-
- if (localInfo?.repoName) {
- githubInfo = await getGitHubInfo(localInfo.repoName);
- }
-
- // If no local info or GitHub info, try the main repo
- if (!githubInfo) {
- githubInfo = await getGitHubInfo('stackblitz-labs/bolt.diy');
- }
-
- const response = {
- local: localInfo || {
- commitHash: 'unknown',
- branch: 'unknown',
- commitTime: 'unknown',
- author: 'unknown',
- email: 'unknown',
- remoteUrl: 'unknown',
- repoName: 'unknown',
+export const loader: LoaderFunction = async () => {
+ const gitInfo: GitInfo = {
+ local: {
+ commitHash: typeof __COMMIT_HASH !== 'undefined' ? __COMMIT_HASH : 'development',
+ branch: typeof __GIT_BRANCH !== 'undefined' ? __GIT_BRANCH : 'main',
+ commitTime: typeof __GIT_COMMIT_TIME !== 'undefined' ? __GIT_COMMIT_TIME : new Date().toISOString(),
+ author: typeof __GIT_AUTHOR !== 'undefined' ? __GIT_AUTHOR : 'development',
+ email: typeof __GIT_EMAIL !== 'undefined' ? __GIT_EMAIL : 'development@local',
+ remoteUrl: typeof __GIT_REMOTE_URL !== 'undefined' ? __GIT_REMOTE_URL : 'local',
+ repoName: typeof __GIT_REPO_NAME !== 'undefined' ? __GIT_REPO_NAME : 'bolt.diy',
},
- github: githubInfo
- ? {
- currentRepo: {
- fullName: githubInfo.full_name,
- defaultBranch: githubInfo.default_branch,
- stars: githubInfo.stargazers_count,
- forks: githubInfo.forks_count,
- openIssues: githubInfo.open_issues_count,
- },
- upstream: githubInfo.parent
- ? {
- fullName: githubInfo.parent.full_name,
- defaultBranch: githubInfo.parent.default_branch,
- stars: githubInfo.parent.stargazers_count,
- forks: githubInfo.parent.forks_count,
- }
- : null,
- }
- : null,
- isForked: Boolean(githubInfo?.parent),
- timestamp: new Date().toISOString(),
};
- console.log('Final response:', response);
-
- // Debug log
- return json(response);
+ return json(gitInfo);
};
diff --git a/app/routes/api.update.ts b/app/routes/api.update.ts
index 9f79d4ae..97d28ce0 100644
--- a/app/routes/api.update.ts
+++ b/app/routes/api.update.ts
@@ -1,573 +1,21 @@
-import { json } from '@remix-run/node';
-import type { ActionFunction } from '@remix-run/node';
-import { exec } from 'child_process';
-import { promisify } from 'util';
-
-const execAsync = promisify(exec);
-
-interface UpdateRequestBody {
- branch: string;
- autoUpdate?: boolean;
-}
-
-interface UpdateProgress {
- stage: 'fetch' | 'pull' | 'install' | 'build' | 'complete';
- message: string;
- progress?: number;
- error?: string;
- details?: {
- changedFiles?: string[];
- additions?: number;
- deletions?: number;
- commitMessages?: string[];
- totalSize?: string;
- currentCommit?: string;
- remoteCommit?: string;
- updateReady?: boolean;
- changelog?: string;
- compareUrl?: string;
- };
-}
+import { json, type ActionFunction } from '@remix-run/cloudflare';
export const action: ActionFunction = async ({ request }) => {
if (request.method !== 'POST') {
return json({ error: 'Method not allowed' }, { status: 405 });
}
- try {
- const body = await request.json();
-
- if (!body || typeof body !== 'object' || !('branch' in body) || typeof body.branch !== 'string') {
- return json({ error: 'Invalid request body: branch is required and must be a string' }, { status: 400 });
- }
-
- const { branch, autoUpdate = false } = body as UpdateRequestBody;
-
- // Create a ReadableStream to send progress updates
- const stream = new ReadableStream({
- async start(controller) {
- const encoder = new TextEncoder();
- const sendProgress = (update: UpdateProgress) => {
- controller.enqueue(encoder.encode(JSON.stringify(update) + '\n'));
- };
-
- try {
- // Initial check stage
- sendProgress({
- stage: 'fetch',
- message: 'Checking repository status...',
- progress: 0,
- });
-
- // Check if remote exists
- let defaultBranch = branch || 'main'; // Make branch mutable
-
- try {
- await execAsync('git remote get-url upstream');
- sendProgress({
- stage: 'fetch',
- message: 'Repository remote verified',
- progress: 10,
- });
- } catch {
- throw new Error(
- 'No upstream repository found. Please set up the upstream repository first by running:\ngit remote add upstream https://github.com/stackblitz-labs/bolt.diy.git',
- );
- }
-
- // Get default branch if not specified
- if (!branch) {
- sendProgress({
- stage: 'fetch',
- message: 'Detecting default branch...',
- progress: 20,
- });
-
- try {
- const { stdout } = await execAsync('git remote show upstream | grep "HEAD branch" | cut -d" " -f5');
- defaultBranch = stdout.trim() || 'main';
- sendProgress({
- stage: 'fetch',
- message: `Using branch: ${defaultBranch}`,
- progress: 30,
- });
- } catch {
- defaultBranch = 'main'; // Fallback to main if we can't detect
- sendProgress({
- stage: 'fetch',
- message: 'Using default branch: main',
- progress: 30,
- });
- }
- }
-
- // Fetch stage
- sendProgress({
- stage: 'fetch',
- message: 'Fetching latest changes...',
- progress: 40,
- });
-
- // Fetch all remotes
- await execAsync('git fetch --all');
- sendProgress({
- stage: 'fetch',
- message: 'Remote changes fetched',
- progress: 50,
- });
-
- // Check if remote branch exists
- try {
- await execAsync(`git rev-parse --verify upstream/${defaultBranch}`);
- sendProgress({
- stage: 'fetch',
- message: 'Remote branch verified',
- progress: 60,
- });
- } catch {
- throw new Error(
- `Remote branch 'upstream/${defaultBranch}' not found. Please ensure the upstream repository is properly configured.`,
- );
- }
-
- // Get current commit hash and remote commit hash
- sendProgress({
- stage: 'fetch',
- message: 'Comparing versions...',
- progress: 70,
- });
-
- const { stdout: currentCommit } = await execAsync('git rev-parse HEAD');
- const { stdout: remoteCommit } = await execAsync(`git rev-parse upstream/${defaultBranch}`);
-
- // If we're on the same commit, no update is available
- if (currentCommit.trim() === remoteCommit.trim()) {
- sendProgress({
- stage: 'complete',
- message: 'No updates available. You are on the latest version.',
- progress: 100,
- details: {
- currentCommit: currentCommit.trim().substring(0, 7),
- remoteCommit: remoteCommit.trim().substring(0, 7),
- },
- });
- return;
- }
-
- sendProgress({
- stage: 'fetch',
- message: 'Analyzing changes...',
- progress: 80,
- });
-
- // Initialize variables
- let changedFiles: string[] = [];
- let commitMessages: string[] = [];
- let stats: RegExpMatchArray | null = null;
- let totalSizeInBytes = 0;
-
- // Format size for display
- const formatSize = (bytes: number) => {
- if (bytes === 0) {
- return '0 B';
- }
-
- const k = 1024;
- const sizes = ['B', 'KB', 'MB', 'GB'];
- const i = Math.floor(Math.log(bytes) / Math.log(k));
-
- return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
- };
-
- // Get list of changed files and their sizes
- try {
- const { stdout: diffOutput } = await execAsync(
- `git diff --name-status ${currentCommit.trim()}..${remoteCommit.trim()}`,
- );
- const files = diffOutput.split('\n').filter(Boolean);
-
- if (files.length === 0) {
- sendProgress({
- stage: 'complete',
- message: `No file changes detected between your version and upstream/${defaultBranch}. You might be on a different branch.`,
- progress: 100,
- details: {
- currentCommit: currentCommit.trim().substring(0, 7),
- remoteCommit: remoteCommit.trim().substring(0, 7),
- },
- });
- return;
- }
-
- sendProgress({
- stage: 'fetch',
- message: `Found ${files.length} changed files, calculating sizes...`,
- progress: 90,
- });
-
- // Get size information for each changed file
- for (const line of files) {
- const [status, file] = line.split('\t');
-
- if (status !== 'D') {
- // Skip deleted files
- try {
- const { stdout: sizeOutput } = await execAsync(`git cat-file -s ${remoteCommit.trim()}:${file}`);
- const size = parseInt(sizeOutput) || 0;
- totalSizeInBytes += size;
- } catch {
- console.debug(`Could not get size for file: ${file}`);
- }
- }
- }
-
- changedFiles = files.map((line) => {
- const [status, file] = line.split('\t');
- return `${status === 'M' ? 'Modified' : status === 'A' ? 'Added' : 'Deleted'}: ${file}`;
- });
- } catch (err) {
- console.debug('Failed to get changed files:', err);
- throw new Error(`Failed to compare changes with upstream/${defaultBranch}. Are you on the correct branch?`);
- }
-
- // Get commit messages between current and remote
- try {
- const { stdout: logOutput } = await execAsync(
- `git log --pretty=format:"%h|%s|%aI" ${currentCommit.trim()}..${remoteCommit.trim()}`,
- );
-
- // Parse and group commits by type
- const commits = logOutput
- .split('\n')
- .filter(Boolean)
- .map((line) => {
- const [hash, subject, timestamp] = line.split('|');
- let type = 'other';
- let message = subject;
-
- if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
- type = 'feature';
- message = subject.replace(/^feat(?:ure)?:/, '').trim();
- } else if (subject.startsWith('fix:')) {
- type = 'fix';
- message = subject.replace(/^fix:/, '').trim();
- } else if (subject.startsWith('docs:')) {
- type = 'docs';
- message = subject.replace(/^docs:/, '').trim();
- } else if (subject.startsWith('style:')) {
- type = 'style';
- message = subject.replace(/^style:/, '').trim();
- } else if (subject.startsWith('refactor:')) {
- type = 'refactor';
- message = subject.replace(/^refactor:/, '').trim();
- } else if (subject.startsWith('perf:')) {
- type = 'perf';
- message = subject.replace(/^perf:/, '').trim();
- } else if (subject.startsWith('test:')) {
- type = 'test';
- message = subject.replace(/^test:/, '').trim();
- } else if (subject.startsWith('build:')) {
- type = 'build';
- message = subject.replace(/^build:/, '').trim();
- } else if (subject.startsWith('ci:')) {
- type = 'ci';
- message = subject.replace(/^ci:/, '').trim();
- }
-
- return {
- hash,
- type,
- message,
- timestamp: new Date(timestamp),
- };
- });
-
- // Group commits by type
- const groupedCommits = commits.reduce(
- (acc, commit) => {
- if (!acc[commit.type]) {
- acc[commit.type] = [];
- }
-
- acc[commit.type].push(commit);
-
- return acc;
- },
- {} as Record,
- );
-
- // Format commit messages with emojis and timestamps
- const formattedMessages = Object.entries(groupedCommits).map(([type, commits]) => {
- const emoji = {
- feature: 'โจ',
- fix: '๐',
- docs: '๐',
- style: '๐',
- refactor: 'โป๏ธ',
- perf: 'โก',
- test: '๐งช',
- build: '๐ ๏ธ',
- ci: 'โ๏ธ',
- other: '๐',
- }[type];
-
- const title = {
- feature: 'Features',
- fix: 'Bug Fixes',
- docs: 'Documentation',
- style: 'Styles',
- refactor: 'Code Refactoring',
- perf: 'Performance',
- test: 'Tests',
- build: 'Build',
- ci: 'CI',
- other: 'Other Changes',
- }[type];
-
- return `### ${emoji} ${title}\n\n${commits
- .map((c) => `* ${c.message} (${c.hash.substring(0, 7)}) - ${c.timestamp.toLocaleString()}`)
- .join('\n')}`;
- });
-
- commitMessages = formattedMessages;
- } catch {
- // Handle silently - empty commitMessages array will be used
- }
-
- // Get diff stats using the specific commits
- try {
- const { stdout: diffStats } = await execAsync(
- `git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`,
- );
- stats = diffStats.match(
- /(\d+) files? changed(?:, (\d+) insertions?\\(\\+\\))?(?:, (\d+) deletions?\\(-\\))?/,
- );
- } catch {
- // Handle silently - null stats will be used
- }
-
- // If we somehow still have no changes detected
- if (!stats && changedFiles.length === 0) {
- sendProgress({
- stage: 'complete',
- message: `No changes detected between your version and upstream/${defaultBranch}. This might be unexpected - please check your git status.`,
- progress: 100,
- });
- return;
- }
-
- // Fetch changelog
- sendProgress({
- stage: 'fetch',
- message: 'Fetching changelog...',
- progress: 95,
- });
-
- const changelog = await fetchChangelog(currentCommit.trim(), remoteCommit.trim());
-
- // We have changes, send the details
- sendProgress({
- stage: 'fetch',
- message: `Changes detected on upstream/${defaultBranch}`,
- progress: 100,
- details: {
- changedFiles,
- additions: stats?.[2] ? parseInt(stats[2]) : 0,
- deletions: stats?.[3] ? parseInt(stats[3]) : 0,
- commitMessages,
- totalSize: formatSize(totalSizeInBytes),
- currentCommit: currentCommit.trim().substring(0, 7),
- remoteCommit: remoteCommit.trim().substring(0, 7),
- updateReady: true,
- changelog,
- compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
- },
- });
-
- // Only proceed with update if autoUpdate is true
- if (!autoUpdate) {
- sendProgress({
- stage: 'complete',
- message: 'Update is ready to be applied. Click "Update Now" to proceed.',
- progress: 100,
- details: {
- changedFiles,
- additions: stats?.[2] ? parseInt(stats[2]) : 0,
- deletions: stats?.[3] ? parseInt(stats[3]) : 0,
- commitMessages,
- totalSize: formatSize(totalSizeInBytes),
- currentCommit: currentCommit.trim().substring(0, 7),
- remoteCommit: remoteCommit.trim().substring(0, 7),
- updateReady: true,
- changelog,
- compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
- },
- });
- return;
- }
-
- // Pull stage
- sendProgress({
- stage: 'pull',
- message: `Pulling changes from upstream/${defaultBranch}...`,
- progress: 0,
- });
-
- await execAsync(`git pull upstream ${defaultBranch}`);
-
- sendProgress({
- stage: 'pull',
- message: 'Changes pulled successfully',
- progress: 100,
- });
-
- // Install stage
- sendProgress({
- stage: 'install',
- message: 'Installing dependencies...',
- progress: 0,
- });
-
- await execAsync('pnpm install');
-
- sendProgress({
- stage: 'install',
- message: 'Dependencies installed successfully',
- progress: 100,
- });
-
- // Build stage
- sendProgress({
- stage: 'build',
- message: 'Building application...',
- progress: 0,
- });
-
- await execAsync('pnpm build');
-
- sendProgress({
- stage: 'build',
- message: 'Build completed successfully',
- progress: 100,
- });
-
- // Complete
- sendProgress({
- stage: 'complete',
- message: 'Update completed successfully! Click Restart to apply changes.',
- progress: 100,
- });
- } catch (err) {
- sendProgress({
- stage: 'complete',
- message: 'Update failed',
- error: err instanceof Error ? err.message : 'Unknown error occurred',
- });
- } finally {
- controller.close();
- }
- },
- });
-
- return new Response(stream, {
- headers: {
- 'Content-Type': 'text/event-stream',
- 'Cache-Control': 'no-cache',
- Connection: 'keep-alive',
- },
- });
- } catch (err) {
- console.error('Update preparation failed:', err);
- return json(
- {
- success: false,
- error: err instanceof Error ? err.message : 'Unknown error occurred while preparing update',
- },
- { status: 500 },
- );
- }
+ return json(
+ {
+ error: 'Updates must be performed manually in a server environment',
+ instructions: [
+ '1. Navigate to the project directory',
+ '2. Run: git fetch upstream',
+ '3. Run: git pull upstream main',
+ '4. Run: pnpm install',
+ '5. Run: pnpm run build',
+ ],
+ },
+ { status: 400 },
+ );
};
-
-// Add this function to fetch the changelog
-async function fetchChangelog(currentCommit: string, remoteCommit: string): Promise<string> {
- try {
- // First try to get the changelog.md content
- const { stdout: changelogContent } = await execAsync('git show upstream/main:changelog.md');
-
- // If we have a changelog, return it
- if (changelogContent) {
- return changelogContent;
- }
-
- // If no changelog.md, generate one in a similar format
- let changelog = '# Changes in this Update\n\n';
-
- // Get commit messages grouped by type
- const { stdout: commitLog } = await execAsync(
- `git log --pretty=format:"%h|%s|%b" ${currentCommit.trim()}..${remoteCommit.trim()}`,
- );
-
- const commits = commitLog.split('\n').filter(Boolean);
- const categorizedCommits: Record<string, string[]> = {
- 'โจ Features': [],
- '๐ Bug Fixes': [],
- '๐ Documentation': [],
- '๐ Styles': [],
- 'โป๏ธ Code Refactoring': [],
- 'โก Performance': [],
- '๐งช Tests': [],
- '๐ ๏ธ Build': [],
- 'โ๏ธ CI': [],
- '๐ Other Changes': [],
- };
-
- // Categorize commits
- for (const commit of commits) {
- const [hash, subject] = commit.split('|');
- let category = '๐ Other Changes';
-
- if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
- category = 'โจ Features';
- } else if (subject.startsWith('fix:')) {
- category = '๐ Bug Fixes';
- } else if (subject.startsWith('docs:')) {
- category = '๐ Documentation';
- } else if (subject.startsWith('style:')) {
- category = '๐ Styles';
- } else if (subject.startsWith('refactor:')) {
- category = 'โป๏ธ Code Refactoring';
- } else if (subject.startsWith('perf:')) {
- category = 'โก Performance';
- } else if (subject.startsWith('test:')) {
- category = '๐งช Tests';
- } else if (subject.startsWith('build:')) {
- category = '๐ ๏ธ Build';
- } else if (subject.startsWith('ci:')) {
- category = 'โ๏ธ CI';
- }
-
- const message = subject.includes(':') ? subject.split(':')[1].trim() : subject.trim();
- categorizedCommits[category].push(`* ${message} (${hash.substring(0, 7)})`);
- }
-
- // Build changelog content
- for (const [category, commits] of Object.entries(categorizedCommits)) {
- if (commits.length > 0) {
- changelog += `\n## ${category}\n\n${commits.join('\n')}\n`;
- }
- }
-
- // Add stats
- const { stdout: stats } = await execAsync(`git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`);
-
- if (stats) {
- changelog += '\n## ๐ Stats\n\n';
- changelog += `${stats.trim()}\n`;
- }
-
- return changelog;
- } catch (error) {
- console.error('Error fetching changelog:', error);
- return 'Unable to fetch changelog';
- }
-}
diff --git a/package.json b/package.json
index e287f0c5..144831fb 100644
--- a/package.json
+++ b/package.json
@@ -123,6 +123,7 @@
"remark-gfm": "^4.0.0",
"remix-island": "^0.2.0",
"remix-utils": "^7.7.0",
+ "rollup-plugin-node-polyfills": "^0.2.1",
"shiki": "^1.24.0",
"tailwind-merge": "^2.2.1",
"unist-util-visit": "^5.0.0",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index fc491791..bb37289b 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -287,6 +287,9 @@ importers:
remix-utils:
specifier: ^7.7.0
version: 7.7.0(@remix-run/cloudflare@2.15.3(@cloudflare/workers-types@4.20250204.0)(typescript@5.7.3))(@remix-run/node@2.15.3(typescript@5.7.3))(@remix-run/react@2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.7.3))(@remix-run/router@1.22.0)(react@18.3.1)(zod@3.24.1)
+ rollup-plugin-node-polyfills:
+ specifier: ^0.2.1
+ version: 0.2.1
shiki:
specifier: ^1.24.0
version: 1.29.2
diff --git a/vite.config.ts b/vite.config.ts
index 01fb3b2e..a9351c12 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -89,14 +89,55 @@ export default defineConfig((config) => {
__PKG_DEV_DEPENDENCIES: JSON.stringify(pkg.devDependencies),
__PKG_PEER_DEPENDENCIES: JSON.stringify(pkg.peerDependencies),
__PKG_OPTIONAL_DEPENDENCIES: JSON.stringify(pkg.optionalDependencies),
+ // Define global values
+ 'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
},
build: {
target: 'esnext',
+ rollupOptions: {
+ output: {
+ format: 'esm',
+ },
+ },
+ commonjsOptions: {
+ transformMixedEsModules: true,
+ },
+ },
+ optimizeDeps: {
+ esbuildOptions: {
+ define: {
+ global: 'globalThis',
+ },
+ },
+ },
+ resolve: {
+ alias: {
+ buffer: 'vite-plugin-node-polyfills/polyfills/buffer',
+ },
},
plugins: [
nodePolyfills({
- include: ['path', 'buffer', 'process'],
+ include: ['buffer', 'process', 'util', 'stream'],
+ globals: {
+ Buffer: true,
+ process: true,
+ global: true,
+ },
+ protocolImports: true,
+ // Exclude Node.js modules that shouldn't be polyfilled in Cloudflare
+ exclude: ['child_process', 'fs', 'path'],
}),
+ {
+ name: 'buffer-polyfill',
+ transform(code, id) {
+ if (id.includes('env.mjs')) {
+ return {
+ code: `import { Buffer } from 'buffer';\n${code}`,
+ map: null,
+ };
+ }
+ },
+ },
config.mode !== 'test' && remixCloudflareDevProxy(),
remixVitePlugin({
future: {
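Note: the api.system.app-info.ts and api.system.git-info.ts routes above read build-time constants such as __COMMIT_HASH and __GIT_BRANCH, while the vite.config.ts hunk shown here only defines the __PKG_* values and process.env.NODE_ENV. A plausible way to supply the git constants — an assumption, since the actual define block falls outside the hunks included in this patch — is to resolve them once at config time and inject them through Vite's define option:

// vite.config.ts (sketch only; reuses the git commands removed from the routes)
import { execSync } from 'node:child_process';
import { defineConfig } from 'vite';

const git = (cmd: string): string => {
  try {
    return execSync(cmd).toString().trim();
  } catch {
    return 'unknown';
  }
};

export default defineConfig({
  define: {
    __COMMIT_HASH: JSON.stringify(git('git rev-parse --short HEAD')),
    __GIT_BRANCH: JSON.stringify(git('git rev-parse --abbrev-ref HEAD')),
    __GIT_COMMIT_TIME: JSON.stringify(git('git log -1 --format=%cd')),
    __GIT_AUTHOR: JSON.stringify(git('git log -1 --format=%an')),
    __GIT_EMAIL: JSON.stringify(git('git log -1 --format=%ae')),
    __GIT_REMOTE_URL: JSON.stringify(git('git config --get remote.origin.url')),
    __GIT_REPO_NAME: JSON.stringify(
      git('git config --get remote.origin.url')
        .replace(/^.*github.com[:/]/, '')
        .replace(/\.git$/, ''),
    ),
  },
});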
From 9c0e898d17b66f1b5ff5fd87b3e589e9b557a164 Mon Sep 17 00:00:00 2001
From: KevIsDev
Date: Mon, 12 May 2025 02:12:32 +0100
Subject: [PATCH 5/7] ci: reorder steps and add env vars for Electron build
#release:major
Reordered the "Commit and Tag Release" step to ensure it runs before uploading release assets. Added environment variables `GH_TOKEN`, `GITHUB_TOKEN`, and `NODE_OPTIONS` to the Electron build, added authentication and memory allocation during the build process.
---
.github/workflows/update-stable.yml | 23 +++++++++++++----------
1 file changed, 13 insertions(+), 10 deletions(-)
diff --git a/.github/workflows/update-stable.yml b/.github/workflows/update-stable.yml
index 5c0ddef3..8488d0f5 100644
--- a/.github/workflows/update-stable.yml
+++ b/.github/workflows/update-stable.yml
@@ -122,6 +122,10 @@ jobs:
sudo apt-get install -y rpm
- name: Build Electron app
+ env:
+ GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ NODE_OPTIONS: "--max_old_space_size=4096"
run: |
if [ "$RUNNER_OS" == "Windows" ]; then
pnpm run electron:build:win
@@ -131,6 +135,15 @@ jobs:
pnpm run electron:build:linux
fi
+ - name: Commit and Tag Release
+ run: |
+ git pull
+ git add package.json pnpm-lock.yaml changelog.md
+ git commit -m "chore: release version ${{ steps.bump_version.outputs.new_version }}"
+ git tag "v${{ steps.bump_version.outputs.new_version }}"
+ git push
+ git push --tags
+
- name: Upload Electron Build as Release Assets
uses: softprops/action-gh-release@v2
with:
@@ -145,16 +158,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- # Commit and Tag Release
- - name: Commit and Tag Release
- run: |
- git pull
- git add package.json pnpm-lock.yaml changelog.md
- git commit -m "chore: release version ${{ steps.bump_version.outputs.new_version }}"
- git tag "v${{ steps.bump_version.outputs.new_version }}"
- git push
- git push --tags
-
- name: Update Stable Branch
run: |
if ! git checkout stable 2>/dev/null; then
From 3779000ec5835b3eead33573eb0c6ac8befaf693 Mon Sep 17 00:00:00 2001
From: KevIsDev
Date: Mon, 12 May 2025 02:15:31 +0100
Subject: [PATCH 6/7] Revert "ci: reorder steps and add env vars for Electron
build #release:major"
This reverts commit 9c0e898d17b66f1b5ff5fd87b3e589e9b557a164.
---
.github/workflows/update-stable.yml | 23 ++++++++++-------------
1 file changed, 10 insertions(+), 13 deletions(-)
diff --git a/.github/workflows/update-stable.yml b/.github/workflows/update-stable.yml
index 8488d0f5..5c0ddef3 100644
--- a/.github/workflows/update-stable.yml
+++ b/.github/workflows/update-stable.yml
@@ -122,10 +122,6 @@ jobs:
sudo apt-get install -y rpm
- name: Build Electron app
- env:
- GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- NODE_OPTIONS: "--max_old_space_size=4096"
run: |
if [ "$RUNNER_OS" == "Windows" ]; then
pnpm run electron:build:win
@@ -135,15 +131,6 @@ jobs:
pnpm run electron:build:linux
fi
- - name: Commit and Tag Release
- run: |
- git pull
- git add package.json pnpm-lock.yaml changelog.md
- git commit -m "chore: release version ${{ steps.bump_version.outputs.new_version }}"
- git tag "v${{ steps.bump_version.outputs.new_version }}"
- git push
- git push --tags
-
- name: Upload Electron Build as Release Assets
uses: softprops/action-gh-release@v2
with:
@@ -158,6 +145,16 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ # Commit and Tag Release
+ - name: Commit and Tag Release
+ run: |
+ git pull
+ git add package.json pnpm-lock.yaml changelog.md
+ git commit -m "chore: release version ${{ steps.bump_version.outputs.new_version }}"
+ git tag "v${{ steps.bump_version.outputs.new_version }}"
+ git push
+ git push --tags
+
- name: Update Stable Branch
run: |
if ! git checkout stable 2>/dev/null; then
From 99d1506011693d9feec0912a5104ab34cfeae5c2 Mon Sep 17 00:00:00 2001
From: ksuilen
Date: Tue, 17 Jun 2025 11:54:02 +0200
Subject: [PATCH 7/7] Add Portkey provider with custom headers and models - Add
Portkey provider for AI gateway functionality - Support for custom headers
via UI (default: x-portkey-debug: false) - Support for custom models
configuration via UI - Integrate HeaderManager and ModelManager components -
Update CloudProvidersTab to support Portkey configuration - Add environment
variables for PORTKEY_API_KEY and PORTKEY_API_BASE_URL - Fix linting issues
and import paths
---
.../tabs/providers/HeaderManager.tsx | 236 ++++++++++++++++
.../@settings/tabs/providers/ModelManager.tsx | 255 ++++++++++++++++++
.../providers/cloud/CloudProvidersTab.tsx | 49 +++-
app/lib/modules/llm/base-provider.ts | 8 +-
app/lib/modules/llm/providers/portkey.ts | 56 ++++
app/lib/modules/llm/registry.ts | 2 +
app/lib/stores/settings.ts | 2 +-
app/types/model.ts | 2 +
worker-configuration.d.ts | 2 +
9 files changed, 608 insertions(+), 4 deletions(-)
create mode 100644 app/components/@settings/tabs/providers/HeaderManager.tsx
create mode 100644 app/components/@settings/tabs/providers/ModelManager.tsx
create mode 100644 app/lib/modules/llm/providers/portkey.ts
diff --git a/app/components/@settings/tabs/providers/HeaderManager.tsx b/app/components/@settings/tabs/providers/HeaderManager.tsx
new file mode 100644
index 00000000..0663f63b
--- /dev/null
+++ b/app/components/@settings/tabs/providers/HeaderManager.tsx
@@ -0,0 +1,236 @@
+import React, { useState } from 'react';
+import { IconButton } from '~/components/ui/IconButton';
+import type { IProviderSetting } from '~/types/model';
+import { classNames } from '~/utils/classNames';
+import { motion, AnimatePresence } from 'framer-motion';
+
+interface HeaderManagerProps {
+ provider: string;
+ settings: IProviderSetting;
+ onUpdateSettings: (settings: IProviderSetting) => void;
+}
+
+interface HeaderFormData {
+ name: string;
+ value: string;
+}
+
+export const HeaderManager: React.FC<HeaderManagerProps> = ({ provider, settings, onUpdateSettings }) => {
+ const [isAddingHeader, setIsAddingHeader] = useState(false);
+ const [editingHeaderKey, setEditingHeaderKey] = useState<string | null>(null);
+ const [headerForm, setHeaderForm] = useState<HeaderFormData>({
+ name: '',
+ value: '',
+ });
+
+ const customHeaders = settings.customHeaders || {};
+ const headerEntries = Object.entries(customHeaders);
+
+ const handleAddHeader = () => {
+ if (!headerForm.name.trim() || !headerForm.value.trim()) {
+ return;
+ }
+
+ const updatedHeaders = {
+ ...customHeaders,
+ [headerForm.name.trim()]: headerForm.value.trim(),
+ };
+
+ const updatedSettings = {
+ ...settings,
+ customHeaders: updatedHeaders,
+ };
+
+ onUpdateSettings(updatedSettings);
+ setHeaderForm({ name: '', value: '' });
+ setIsAddingHeader(false);
+ };
+
+ const handleEditHeader = (key: string) => {
+ setHeaderForm({
+ name: key,
+ value: customHeaders[key],
+ });
+ setEditingHeaderKey(key);
+ };
+
+ const handleUpdateHeader = () => {
+ if (editingHeaderKey === null || !headerForm.name.trim() || !headerForm.value.trim()) {
+ return;
+ }
+
+ const updatedHeaders = { ...customHeaders };
+
+ // Remove old key if name changed
+ if (editingHeaderKey !== headerForm.name.trim()) {
+ delete updatedHeaders[editingHeaderKey];
+ }
+
+ // Add/update with new values
+ updatedHeaders[headerForm.name.trim()] = headerForm.value.trim();
+
+ const updatedSettings = {
+ ...settings,
+ customHeaders: updatedHeaders,
+ };
+
+ onUpdateSettings(updatedSettings);
+ setHeaderForm({ name: '', value: '' });
+ setEditingHeaderKey(null);
+ };
+
+ const handleDeleteHeader = (key: string) => {
+ const updatedHeaders = { ...customHeaders };
+ delete updatedHeaders[key];
+
+ const updatedSettings = {
+ ...settings,
+ customHeaders: updatedHeaders,
+ };
+ onUpdateSettings(updatedSettings);
+ };
+
+ const handleCancelEdit = () => {
+ setHeaderForm({ name: '', value: '' });
+ setIsAddingHeader(false);
+ setEditingHeaderKey(null);
+ };
+
+  return (
+    <div className="space-y-4">
+      <div className="flex items-center justify-between">
+        <h4 className="text-sm font-medium text-bolt-elements-textPrimary">Custom Headers</h4>
+        <IconButton
+          onClick={() => setIsAddingHeader(true)}
+          title="Add Header"
+          className="bg-green-500/10 hover:bg-green-500/20 text-green-500"
+          disabled={isAddingHeader || editingHeaderKey !== null}
+        >
+          <div className="i-ph:plus w-4 h-4" />
+        </IconButton>
+      </div>
+
+      {/* Header Form */}
+      <AnimatePresence>
+        {(isAddingHeader || editingHeaderKey !== null) && (
+          <motion.div
+            initial={{ opacity: 0, height: 0 }}
+            animate={{ opacity: 1, height: 'auto' }}
+            exit={{ opacity: 0, height: 0 }}
+            className="space-y-3"
+          >
+            <div>
+              <label className="block text-xs text-bolt-elements-textSecondary mb-1">Header Name</label>
+              <input
+                type="text"
+                value={headerForm.name}
+                onChange={(e) => setHeaderForm({ ...headerForm, name: e.target.value })}
+                placeholder="e.g., x-api-version"
+                className={classNames(
+                  'w-full px-3 py-2 rounded-lg text-sm',
+                  'bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor',
+                  'text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary',
+                  'focus:outline-none focus:ring-2 focus:ring-purple-500/30',
+                )}
+              />
+            </div>
+            <div>
+              <label className="block text-xs text-bolt-elements-textSecondary mb-1">Header Value</label>
+              <input
+                type="text"
+                value={headerForm.value}
+                onChange={(e) => setHeaderForm({ ...headerForm, value: e.target.value })}
+                placeholder="e.g., v1.0"
+                className={classNames(
+                  'w-full px-3 py-2 rounded-lg text-sm',
+                  'bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor',
+                  'text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary',
+                  'focus:outline-none focus:ring-2 focus:ring-purple-500/30',
+                )}
+              />
+            </div>
+            <div className="flex gap-2">
+              <button
+                onClick={editingHeaderKey !== null ? handleUpdateHeader : handleAddHeader}
+                className="px-3 py-1.5 rounded-lg text-sm bg-purple-500 text-white hover:bg-purple-600"
+              >
+                {editingHeaderKey !== null ? 'Update' : 'Add'}
+              </button>
+              <button
+                onClick={handleCancelEdit}
+                className="px-3 py-1.5 rounded-lg text-sm text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary"
+              >
+                Cancel
+              </button>
+            </div>
+          </motion.div>
+        )}
+      </AnimatePresence>
+
+      {/* Headers List */}
+      <div className="space-y-2">
+        <AnimatePresence>
+          {headerEntries.map(([key, value]) => (
+            <motion.div
+              key={key}
+              initial={{ opacity: 0 }}
+              animate={{ opacity: 1 }}
+              exit={{ opacity: 0 }}
+              className="flex items-center justify-between p-3 rounded-lg bg-bolt-elements-background-depth-2"
+            >
+              <div>
+                <div className="text-sm text-bolt-elements-textPrimary">
+                  <span className="font-medium">{key}</span>{' '}
+                  <span className="text-bolt-elements-textSecondary">{value}</span>
+                </div>
+                <div className="text-xs text-bolt-elements-textTertiary">Custom header for {provider}</div>
+              </div>
+              <div className="flex gap-1">
+                <IconButton
+                  onClick={() => handleEditHeader(key)}
+                  title="Edit Header"
+                  className="bg-blue-500/10 hover:bg-blue-500/20 text-blue-500"
+                  disabled={isAddingHeader || editingHeaderKey !== null}
+                >
+                  <div className="i-ph:pencil-simple w-4 h-4" />
+                </IconButton>
+                <IconButton
+                  onClick={() => handleDeleteHeader(key)}
+                  title="Delete Header"
+                  className="bg-red-500/10 hover:bg-red-500/20 text-red-500"
+                  disabled={isAddingHeader || editingHeaderKey !== null}
+                >
+                  <div className="i-ph:trash w-4 h-4" />
+                </IconButton>
+              </div>
+            </motion.div>
+          ))}
+        </AnimatePresence>
+
+        {headerEntries.length === 0 && !isAddingHeader && (
+          <div className="text-center py-4 text-sm text-bolt-elements-textSecondary">
+            <p>No custom headers configured</p>
+            <p className="text-xs">Click the + button to add a header</p>
+            {provider === 'Portkey' && <p className="text-xs">Default: x-portkey-debug: false</p>}
+          </div>
+        )}
+      </div>
+    </div>
+  );
+};
diff --git a/app/components/@settings/tabs/providers/ModelManager.tsx b/app/components/@settings/tabs/providers/ModelManager.tsx
new file mode 100644
index 00000000..632ec7d0
--- /dev/null
+++ b/app/components/@settings/tabs/providers/ModelManager.tsx
@@ -0,0 +1,255 @@
+import React, { useState } from 'react';
+import { IconButton } from '~/components/ui/IconButton';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import { classNames } from '~/utils/classNames';
+import { motion, AnimatePresence } from 'framer-motion';
+
+interface ModelManagerProps {
+ provider: string;
+ settings: IProviderSetting;
+ onUpdateSettings: (settings: IProviderSetting) => void;
+}
+
+interface ModelFormData {
+ name: string;
+ label: string;
+ maxTokenAllowed: number;
+}
+
+export const ModelManager: React.FC<ModelManagerProps> = ({ provider, settings, onUpdateSettings }) => {
+ const [isAddingModel, setIsAddingModel] = useState(false);
+ const [editingModelIndex, setEditingModelIndex] = useState<number | null>(null);
+ const [modelForm, setModelForm] = useState<ModelFormData>({
+ name: '',
+ label: '',
+ maxTokenAllowed: 8000,
+ });
+
+ const customModels = settings.customModels || [];
+
+ const handleAddModel = () => {
+ if (!modelForm.name.trim() || !modelForm.label.trim()) {
+ return;
+ }
+
+ const newModel: ModelInfo = {
+ name: modelForm.name.trim(),
+ label: modelForm.label.trim(),
+ provider,
+ maxTokenAllowed: modelForm.maxTokenAllowed,
+ };
+
+ const updatedSettings = {
+ ...settings,
+ customModels: [...customModels, newModel],
+ };
+
+ onUpdateSettings(updatedSettings);
+ setModelForm({ name: '', label: '', maxTokenAllowed: 8000 });
+ setIsAddingModel(false);
+ };
+
+ const handleEditModel = (index: number) => {
+ const model = customModels[index];
+ setModelForm({
+ name: model.name,
+ label: model.label,
+ maxTokenAllowed: model.maxTokenAllowed,
+ });
+ setEditingModelIndex(index);
+ };
+
+ const handleUpdateModel = () => {
+ if (editingModelIndex === null || !modelForm.name.trim() || !modelForm.label.trim()) {
+ return;
+ }
+
+ const updatedModels = [...customModels];
+ updatedModels[editingModelIndex] = {
+ name: modelForm.name.trim(),
+ label: modelForm.label.trim(),
+ provider,
+ maxTokenAllowed: modelForm.maxTokenAllowed,
+ };
+
+ const updatedSettings = {
+ ...settings,
+ customModels: updatedModels,
+ };
+
+ onUpdateSettings(updatedSettings);
+ setModelForm({ name: '', label: '', maxTokenAllowed: 8000 });
+ setEditingModelIndex(null);
+ };
+
+ const handleDeleteModel = (index: number) => {
+ const updatedModels = customModels.filter((_, i) => i !== index);
+ const updatedSettings = {
+ ...settings,
+ customModels: updatedModels,
+ };
+ onUpdateSettings(updatedSettings);
+ };
+
+ const handleCancelEdit = () => {
+ setModelForm({ name: '', label: '', maxTokenAllowed: 8000 });
+ setIsAddingModel(false);
+ setEditingModelIndex(null);
+ };
+
+  return (
+    <div className="space-y-4">
+      <div className="flex items-center justify-between">
+        <h4 className="text-sm font-medium text-bolt-elements-textPrimary">Custom Models</h4>
+        <IconButton
+          onClick={() => setIsAddingModel(true)}
+          title="Add Model"
+          className="bg-green-500/10 hover:bg-green-500/20 text-green-500"
+          disabled={isAddingModel || editingModelIndex !== null}
+        >
+          <div className="i-ph:plus w-4 h-4" />
+        </IconButton>
+      </div>
+
+      {/* Model Form */}
+      <AnimatePresence>
+        {(isAddingModel || editingModelIndex !== null) && (
+          <motion.div
+            initial={{ opacity: 0, height: 0 }}
+            animate={{ opacity: 1, height: 'auto' }}
+            exit={{ opacity: 0, height: 0 }}
+            className="space-y-3"
+          >
+            <div>
+              <label className="block text-xs text-bolt-elements-textSecondary mb-1">Model Name</label>
+              <input
+                type="text"
+                value={modelForm.name}
+                onChange={(e) => setModelForm({ ...modelForm, name: e.target.value })}
+                placeholder="e.g., gpt-4o"
+                className={classNames(
+                  'w-full px-3 py-2 rounded-lg text-sm',
+                  'bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor',
+                  'text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary',
+                  'focus:outline-none focus:ring-2 focus:ring-purple-500/30',
+                )}
+              />
+            </div>
+            <div>
+              <label className="block text-xs text-bolt-elements-textSecondary mb-1">Model Label</label>
+              <input
+                type="text"
+                value={modelForm.label}
+                onChange={(e) => setModelForm({ ...modelForm, label: e.target.value })}
+                placeholder="e.g., GPT-4o (via Portkey)"
+                className={classNames(
+                  'w-full px-3 py-2 rounded-lg text-sm',
+                  'bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor',
+                  'text-bolt-elements-textPrimary placeholder-bolt-elements-textTertiary',
+                  'focus:outline-none focus:ring-2 focus:ring-purple-500/30',
+                )}
+              />
+            </div>
+            <div>
+              <label className="block text-xs text-bolt-elements-textSecondary mb-1">Max Tokens</label>
+              <input
+                type="number"
+                value={modelForm.maxTokenAllowed}
+                onChange={(e) => setModelForm({ ...modelForm, maxTokenAllowed: parseInt(e.target.value) || 8000 })}
+                min="1000"
+                max="2000000"
+                className={classNames(
+                  'w-full px-3 py-2 rounded-lg text-sm',
+                  'bg-bolt-elements-background-depth-2 border border-bolt-elements-borderColor',
+                  'text-bolt-elements-textPrimary',
+                  'focus:outline-none focus:ring-2 focus:ring-purple-500/30',
+                )}
+              />
+            </div>
+            <div className="flex gap-2">
+              <button
+                onClick={editingModelIndex !== null ? handleUpdateModel : handleAddModel}
+                className="px-3 py-1.5 rounded-lg text-sm bg-purple-500 text-white hover:bg-purple-600"
+              >
+                {editingModelIndex !== null ? 'Update' : 'Add'}
+              </button>
+              <button
+                onClick={handleCancelEdit}
+                className="px-3 py-1.5 rounded-lg text-sm text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary"
+              >
+                Cancel
+              </button>
+            </div>
+          </motion.div>
+        )}
+      </AnimatePresence>
+
+      {/* Model List */}
+      <div className="space-y-2">
+        <AnimatePresence>
+          {customModels.map((model, index) => (
+            <motion.div
+              key={model.name}
+              initial={{ opacity: 0 }}
+              animate={{ opacity: 1 }}
+              exit={{ opacity: 0 }}
+              className="flex items-center justify-between p-3 rounded-lg bg-bolt-elements-background-depth-2"
+            >
+              <div>
+                <div className="text-sm text-bolt-elements-textPrimary">
+                  <span className="font-medium">{model.label}</span>{' '}
+                  <span className="text-bolt-elements-textSecondary">{model.name}</span>
+                </div>
+                <div className="text-xs text-bolt-elements-textTertiary">
+                  Max tokens: {model.maxTokenAllowed.toLocaleString()}
+                </div>
+              </div>
+              <div className="flex gap-1">
+                <IconButton
+                  onClick={() => handleEditModel(index)}
+                  title="Edit Model"
+                  className="bg-blue-500/10 hover:bg-blue-500/20 text-blue-500"
+                  disabled={isAddingModel || editingModelIndex !== null}
+                >
+                  <div className="i-ph:pencil-simple w-4 h-4" />
+                </IconButton>
+                <IconButton
+                  onClick={() => handleDeleteModel(index)}
+                  title="Delete Model"
+                  className="bg-red-500/10 hover:bg-red-500/20 text-red-500"
+                  disabled={isAddingModel || editingModelIndex !== null}
+                >
+                  <div className="i-ph:trash w-4 h-4" />
+                </IconButton>
+              </div>
+            </motion.div>
+          ))}
+        </AnimatePresence>
+
+        {customModels.length === 0 && !isAddingModel && (
+          <div className="text-center py-4 text-sm text-bolt-elements-textSecondary">
+            <p>No custom models configured</p>
+            <p className="text-xs">Click the + button to add a model</p>
+          </div>
+        )}
+      </div>
+    </div>
+  );
+};
+};
diff --git a/app/components/@settings/tabs/providers/cloud/CloudProvidersTab.tsx b/app/components/@settings/tabs/providers/cloud/CloudProvidersTab.tsx
index 9f85b766..99242f29 100644
--- a/app/components/@settings/tabs/providers/cloud/CloudProvidersTab.tsx
+++ b/app/components/@settings/tabs/providers/cloud/CloudProvidersTab.tsx
@@ -2,7 +2,7 @@ import React, { useEffect, useState, useCallback } from 'react';
import { Switch } from '~/components/ui/Switch';
import { useSettings } from '~/lib/hooks/useSettings';
import { URL_CONFIGURABLE_PROVIDERS } from '~/lib/stores/settings';
-import type { IProviderConfig } from '~/types/model';
+import type { IProviderConfig, ProviderInfo } from '~/types/model';
import { logStore } from '~/lib/stores/logs';
import { motion } from 'framer-motion';
import { classNames } from '~/utils/classNames';
@@ -14,6 +14,9 @@ import { TbBrain, TbCloudComputing } from 'react-icons/tb';
import { BiCodeBlock, BiChip } from 'react-icons/bi';
import { FaCloud, FaBrain } from 'react-icons/fa';
import type { IconType } from 'react-icons';
+import { APIKeyManager, getApiKeysFromCookies } from '~/components/chat/APIKeyManager';
+import { ModelManager } from '~/components/@settings/tabs/providers/ModelManager';
+import { HeaderManager } from '~/components/@settings/tabs/providers/HeaderManager';
// Add type for provider names to ensure type safety
type ProviderName =
@@ -30,7 +33,8 @@ type ProviderName =
| 'OpenRouter'
| 'Perplexity'
| 'Together'
- | 'XAI';
+ | 'XAI'
+ | 'Portkey';
// Update the PROVIDER_ICONS type to use the ProviderName type
const PROVIDER_ICONS: Record<ProviderName, IconType> = {
@@ -48,12 +52,14 @@ const PROVIDER_ICONS: Record<ProviderName, IconType> = {
Perplexity: SiPerplexity,
Together: BsCloud,
XAI: BsRobot,
+ Portkey: BsCloud,
};
// Update PROVIDER_DESCRIPTIONS to use the same type
const PROVIDER_DESCRIPTIONS: Partial<Record<ProviderName, string>> = {
Anthropic: 'Access Claude and other Anthropic models',
OpenAI: 'Use GPT-4, GPT-3.5, and other OpenAI models',
+ Portkey: 'AI gateway with custom model configuration',
};
const CloudProvidersTab = () => {
@@ -61,6 +67,13 @@ const CloudProvidersTab = () => {
const [editingProvider, setEditingProvider] = useState<string | null>(null);
const [filteredProviders, setFilteredProviders] = useState<IProviderConfig[]>([]);
const [categoryEnabled, setCategoryEnabled] = useState(false);
+ const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
+
+ // Load API keys from cookies on mount
+ useEffect(() => {
+ const savedApiKeys = getApiKeysFromCookies();
+ setApiKeys(savedApiKeys);
+ }, []);
// Load and filter providers
useEffect(() => {
@@ -283,6 +296,38 @@ const CloudProvidersTab = () => {
)}
)}
+
+                {/* Special Portkey configuration */}
+                {provider.settings.enabled && provider.name === 'Portkey' && (
+                  <div className="mt-4 space-y-4">
+                    {/* API Key Manager */}
+                    <APIKeyManager
+                      provider={provider as ProviderInfo}
+                      apiKey={apiKeys[provider.name] || ''}
+                      setApiKey={(key) => setApiKeys((prev) => ({ ...prev, [provider.name]: key }))}
+                    />
+
+                    {/* Model Manager */}
+                    <ModelManager
+                      provider={provider.name}
+                      settings={provider.settings}
+                      onUpdateSettings={(newSettings) => settings.updateProviderSettings(provider.name, newSettings)}
+                    />
+
+                    {/* Header Manager */}
+                    <HeaderManager
+                      provider={provider.name}
+                      settings={provider.settings}
+                      onUpdateSettings={(newSettings) => settings.updateProviderSettings(provider.name, newSettings)}
+                    />
+                  </div>
+                )}
diff --git a/app/lib/modules/llm/base-provider.ts b/app/lib/modules/llm/base-provider.ts
index 9cb23403..08ee17af 100644
--- a/app/lib/modules/llm/base-provider.ts
+++ b/app/lib/modules/llm/base-provider.ts
@@ -119,10 +119,16 @@ export abstract class BaseProvider implements ProviderInfo {
type OptionalApiKey = string | undefined;
-export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
+export function getOpenAILikeModel(
+ baseURL: string,
+ apiKey: OptionalApiKey,
+ model: string,
+ customHeaders?: Record<string, string>,
+) {
const openai = createOpenAI({
baseURL,
apiKey,
+ headers: customHeaders,
});
return openai(model);
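
For context, a hedged usage sketch of the extended helper (the base URL, key lookup, model id, and header below are placeholders, not values from this patch); the headers map is passed to the @ai-sdk/openai client and sent with outgoing requests:

    // Hypothetical call site for the new optional customHeaders parameter.
    const portkeyModel = getOpenAILikeModel(
      'https://api.portkey.ai/v1',
      process.env.PORTKEY_API_KEY,
      'gpt-4o',
      { 'x-portkey-debug': 'false' },
    );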
diff --git a/app/lib/modules/llm/providers/portkey.ts b/app/lib/modules/llm/providers/portkey.ts
new file mode 100644
index 00000000..d21221ce
--- /dev/null
+++ b/app/lib/modules/llm/providers/portkey.ts
@@ -0,0 +1,56 @@
+import { BaseProvider, getOpenAILikeModel } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import type { LanguageModelV1 } from 'ai';
+
+export default class PortkeyProvider extends BaseProvider {
+ name = 'Portkey';
+ getApiKeyLink = 'https://portkey.ai/docs/api-reference/authentication';
+
+ config = {
+ baseUrlKey: 'PORTKEY_API_BASE_URL',
+ apiTokenKey: 'PORTKEY_API_KEY',
+ };
+
+ // No static models - all models are user-configurable
+ staticModels: ModelInfo[] = [];
+
+ // Get custom models from user settings
+ async getDynamicModels(
+ apiKeys?: Record<string, string>,
+ settings?: IProviderSetting,
+ _serverEnv: Record<string, string> = {},
+ ): Promise<ModelInfo[]> {
+ // Return user-configured custom models
+ return settings?.customModels || [];
+ }
+
+ getModelInstance(options: {
+ model: string;
+ serverEnv: Env;
+ apiKeys?: Record<string, string>;
+ providerSettings?: Record<string, IProviderSetting>;
+ }): LanguageModelV1 {
+ const { model, serverEnv, apiKeys, providerSettings } = options;
+
+ const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
+ apiKeys,
+ providerSettings: providerSettings?.[this.name],
+ serverEnv: serverEnv as any,
+ defaultBaseUrlKey: 'PORTKEY_API_BASE_URL',
+ defaultApiTokenKey: 'PORTKEY_API_KEY',
+ });
+
+ if (!baseUrl || !apiKey) {
+ throw new Error(`Missing configuration for ${this.name} provider`);
+ }
+
+ // Get custom headers from settings, with Portkey defaults
+ const customHeaders = {
+ 'x-portkey-debug': 'false', // Default Portkey header
+ ...providerSettings?.[this.name]?.customHeaders, // User-defined headers override defaults
+ };
+
+ return getOpenAILikeModel(baseUrl, apiKey, model, customHeaders);
+ }
+}
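
A note on the header merge above: because the user's customHeaders are spread after the default, a header with the same name overrides it. A minimal sketch of that precedence (the trace header is a hypothetical example, not part of the patch):

    const defaults = { 'x-portkey-debug': 'false' };
    const userHeaders = { 'x-portkey-debug': 'true', 'x-portkey-trace-id': 'demo-run' };
    const merged = { ...defaults, ...userHeaders };
    // merged['x-portkey-debug'] === 'true' -- the UI-configured value wins over the default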
diff --git a/app/lib/modules/llm/registry.ts b/app/lib/modules/llm/registry.ts
index 6edba6d8..9842ca86 100644
--- a/app/lib/modules/llm/registry.ts
+++ b/app/lib/modules/llm/registry.ts
@@ -16,6 +16,7 @@ import XAIProvider from './providers/xai';
import HyperbolicProvider from './providers/hyperbolic';
import AmazonBedrockProvider from './providers/amazon-bedrock';
import GithubProvider from './providers/github';
+import PortkeyProvider from './providers/portkey';
export {
AnthropicProvider,
@@ -36,4 +37,5 @@ export {
LMStudioProvider,
AmazonBedrockProvider,
GithubProvider,
+ PortkeyProvider,
};
diff --git a/app/lib/stores/settings.ts b/app/lib/stores/settings.ts
index 917f6e0a..ae1358ad 100644
--- a/app/lib/stores/settings.ts
+++ b/app/lib/stores/settings.ts
@@ -29,7 +29,7 @@ export interface Shortcuts {
toggleTerminal: Shortcut;
}
-export const URL_CONFIGURABLE_PROVIDERS = ['Ollama', 'LMStudio', 'OpenAILike'];
+export const URL_CONFIGURABLE_PROVIDERS = ['Ollama', 'LMStudio', 'OpenAILike', 'Portkey'];
export const LOCAL_PROVIDERS = ['OpenAILike', 'LMStudio', 'Ollama'];
export type ProviderSetting = Record<string, IProviderConfig>;
diff --git a/app/types/model.ts b/app/types/model.ts
index d16b10ae..8fb99ac8 100644
--- a/app/types/model.ts
+++ b/app/types/model.ts
@@ -17,6 +17,8 @@ export type ProviderInfo = {
export interface IProviderSetting {
enabled?: boolean;
baseUrl?: string;
+ customModels?: ModelInfo[];
+ customHeaders?: Record<string, string>;
}
export type IProviderConfig = ProviderInfo & {
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index b2ae1ce7..2437adaf 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -18,4 +18,6 @@ interface Env {
XAI_API_KEY: string;
PERPLEXITY_API_KEY: string;
AWS_BEDROCK_CONFIG: string;
+ PORTKEY_API_KEY: string;
+ PORTKEY_API_BASE_URL: string;
}
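
For completeness, a hedged sketch of reading the two new bindings server-side; the helper name is hypothetical, and in the patch itself the lookup actually goes through getProviderBaseUrlAndKey in portkey.ts:

    // Illustrative only: resolving Portkey configuration from the worker environment.
    function resolvePortkeyConfig(env: Env) {
      return {
        apiKey: env.PORTKEY_API_KEY, // e.g. set in .env.local or as a deployment secret
        baseUrl: env.PORTKEY_API_BASE_URL, // e.g. https://api.portkey.ai/v1
      };
    }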