From 3178504069363b5b3cfe45307b4751b5e9c84198 Mon Sep 17 00:00:00 2001
From: Jason Laster
Date: Thu, 13 Mar 2025 15:33:54 -0400
Subject: [PATCH] playwright

---
 .github/workflows/ci.yaml        |  9 +---
 .github/workflows/playwright.yml | 23 +++++------
 CONTRIBUTING.md                  | 69 -------------------------------
 app/api/[...path]/route.ts       | 31 ++++++++------
 app/entry.server.tsx             | 20 +++++++--
 app/lib/.server/otel.ts          |  2 +-
 app/lib/remix-types.ts           | 22 +++++-----
 app/routes/api.check-env-key.ts  |  3 +-
 app/routes/api.ping-telemetry.ts |  2 +-
 docs/docs/CONTRIBUTING.md        | 70 --------------------------------
 docs/docs/index.md               |  2 +-
 playwright.config.ts             | 20 +++++----
 12 files changed, 75 insertions(+), 198 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 4e2f365b..f60b76dd 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -69,11 +69,4 @@ jobs:
       - name: Setup and Build
         uses: ./.github/actions/setup-and-build
         with:
-          sentry-auth-token: ${{ secrets.SENTRY_AUTH_TOKEN }}
-
-      - name: Deploy to Cloudflare Pages
-        uses: cloudflare/wrangler-action@v3
-        with:
-          command: pages deploy
-          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
-          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+          sentry-auth-token: ${{ secrets.SENTRY_AUTH_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/playwright.yml b/.github/workflows/playwright.yml
index c7b2be16..1885f93d 100644
--- a/.github/workflows/playwright.yml
+++ b/.github/workflows/playwright.yml
@@ -1,11 +1,12 @@
 name: Playwright Tests
 
 on:
-  pull_request:
+  deployment_status:
 
 jobs:
   test:
     name: 'Playwright Tests'
     runs-on: ubuntu-latest
+    if: ${{ github.event_name == 'pull_request' || (github.event_name == 'deployment_status' && github.event.deployment_status.state == 'success') }}
     steps:
       - uses: actions/checkout@v4
@@ -26,19 +27,15 @@ jobs:
       - name: Install Playwright browsers
         run: npx playwright install chromium
 
-      - name: Run Playwright tests with supabase
-        run: pnpm test:e2e:supabase
+      - name: Get Vercel Preview URL
+        if: ${{ github.event_name == 'deployment_status' }}
+        run: |
+          echo "PLAYWRIGHT_TEST_BASE_URL=${{ github.event.deployment_status.target_url }}" >> $GITHUB_ENV
+          echo "Testing against Vercel Preview URL: ${{ github.event.deployment_status.target_url }}"
 
-      - name: Upload test results
-        if: always()
-        uses: actions/upload-artifact@v4
-        with:
-          name: playwright-report
-          path: playwright-report/
-          retention-days: 30
-
-      - name: Run Playwright tests without supabase
-        run: pnpm test:e2e:legacy
+      - name: Run Playwright tests
+        if: ${{ github.event_name == 'deployment_status' }}
+        run: npx playwright test
 
       - name: Upload test results
         if: always()
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3a8d5be8..6341b151 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -109,75 +109,6 @@ Run the test suite with:
 pnpm test
 ```
 
----
-
-## 🚀 Deployment
-
-### Deploy to Cloudflare Pages
-```bash
-pnpm run deploy
-```
-Ensure you have required permissions and that Wrangler is configured.
-
----
-
-## 🐳 Docker Deployment
-
-This section outlines the methods for deploying the application using Docker. The processes for **Development** and **Production** are provided separately for clarity.
-
----
-
-### 🧑‍💻 Development Environment
-
-#### Build Options
-
-**Option 1: Helper Scripts**
-```bash
-# Development build
-npm run dockerbuild
-```
-
-**Option 2: Direct Docker Build Command**
-```bash
-docker build . --target bolt-ai-development
-```
-
-**Option 3: Docker Compose Profile**
-```bash
-docker-compose --profile development up
-```
-
-#### Running the Development Container
-```bash
-docker run -p 5173:5173 --env-file .env.local bolt-ai:development
-```
-
----
-
-### 🏭 Production Environment
-
-#### Build Options
-
-**Option 1: Helper Scripts**
-```bash
-# Production build
-npm run dockerbuild:prod
-```
-
-**Option 2: Direct Docker Build Command**
-```bash
-docker build . --target bolt-ai-production
-```
-
-**Option 3: Docker Compose Profile**
-```bash
-docker-compose --profile production up
-```
-
-#### Running the Production Container
-```bash
-docker run -p 5173:5173 --env-file .env.local bolt-ai:production
-```
 
 ---
 
diff --git a/app/api/[...path]/route.ts b/app/api/[...path]/route.ts
index a5327660..8a2b84ce 100644
--- a/app/api/[...path]/route.ts
+++ b/app/api/[...path]/route.ts
@@ -1,31 +1,38 @@
 import * as Sentry from '@sentry/nextjs';
 import { createRequestHandler } from '~/lib/remix-types';
-// We'll import the server build at runtime, not during compilation
-// Build path will be available after the build is complete
+
+/*
+ * We'll import the server build at runtime, not during compilation
+ * Build path will be available after the build is complete
+ */
 
 // Add Sentry's request handler to wrap the Remix request handler
const handleRequest = async (request: Request) => {
   try {
-    // Dynamically import the server build at runtime
-    // In a real Vercel deployment, the server build will be available
-    // This is just a placeholder for type checking
-    const build = { /* production build will be available at runtime */ };
-
+    /*
+     * Dynamically import the server build at runtime
+     * In a real Vercel deployment, the server build will be available
+     * This is just a placeholder for type checking
+     */
+    const build = {
+      /* production build will be available at runtime */
+    };
+
     // Create the request handler
     const handler = createRequestHandler({
       build: build as any,
       mode: process.env.NODE_ENV,
       getLoadContext: () => ({
-        env: process.env
-      })
+        env: process.env,
+      }),
     });
-
+
     // Handle the request
     return handler(request);
   } catch (error) {
     // Log the error with Sentry
     Sentry.captureException(error);
-
+
     // Return a basic error response
     return new Response('Server Error', { status: 500 });
   }
@@ -39,4 +46,4 @@ export const DELETE = handleRequest;
 export const HEAD = handleRequest;
 export const OPTIONS = handleRequest;
 
-export const runtime = 'edge';
\ No newline at end of file
+export const runtime = 'edge';
diff --git a/app/entry.server.tsx b/app/entry.server.tsx
index d4289f4c..539b1113 100644
--- a/app/entry.server.tsx
+++ b/app/entry.server.tsx
@@ -4,7 +4,7 @@ import { sentryHandleError } from '~/lib/sentry';
  * Using our conditional Sentry implementation instead of direct import
  * This avoids loading Sentry in development environments
  */
-import type { AppLoadContext, EntryContext } from '~/lib/remix-types';
+import type { AppLoadContext } from '~/lib/remix-types';
 import { RemixServer } from '@remix-run/react';
 import { isbot } from 'isbot';
 import { renderToString } from 'react-dom/server';
@@ -26,9 +26,21 @@ export default async function handleRequest(
   const isBot = isbot(userAgent || '');
 
   // Create the HTML string
-  const markup = renderToString(
-    <RemixServer context={remixContext} url={request.url} />
-  );
+  const markup = renderToString(<RemixServer context={remixContext} url={request.url} />);
+
+  // If this is a bot request, we can wait for all data to be ready
+  if (isBot) {
+    /*
+     * In Cloudflare, we had:
+     * await readable.allReady;
+     *
+     * For Vercel, we could do additional processing for bots
+     * such as waiting for all data fetching to complete.
+     * Future enhancement: add mechanism to ensure all data is loaded
+     * before rendering for bots (important for SEO)
+     */
+    console.log(`Bot detected: ${userAgent}`);
+  }
 
   // @ts-ignore - Fix for incompatible EntryContext types between different remix versions
   const head = renderHeadToString({ request, remixContext, Head });
diff --git a/app/lib/.server/otel.ts b/app/lib/.server/otel.ts
index a4e6fc01..96bede7d 100644
--- a/app/lib/.server/otel.ts
+++ b/app/lib/.server/otel.ts
@@ -200,7 +200,7 @@ async function loadAsyncHooksContextManager() {
   }
 }
 
-export async function createTracer(appContext: AppLoadContext) {
+export async function createTracer(_appContext: AppLoadContext) {
   const honeycombApiKey = process.env.HONEYCOMB_API_KEY;
   const honeycombDataset = process.env.HONEYCOMB_DATASET;
 
diff --git a/app/lib/remix-types.ts b/app/lib/remix-types.ts
index 35bbf4c7..08292073 100644
--- a/app/lib/remix-types.ts
+++ b/app/lib/remix-types.ts
@@ -1,10 +1,10 @@
 // This file provides compatibility types to smoothly migrate from Cloudflare to Vercel
 
-import type { 
+import type {
   ActionFunctionArgs as VercelActionFunctionArgs,
   LoaderFunctionArgs as VercelLoaderFunctionArgs,
   AppLoadContext as VercelAppLoadContext,
-  EntryContext as VercelEntryContext
+  EntryContext as VercelEntryContext,
 } from '@vercel/remix';
 
 // Re-export necessary types with compatible names
@@ -14,7 +14,7 @@ export type LoaderFunction = (args: LoaderFunctionArgs) => Promise<Response> | Response;
 export type ActionFunction = (args: ActionFunctionArgs) => Promise<Response> | Response;
 export type AppLoadContext = VercelAppLoadContext;
 export type EntryContext = VercelEntryContext;
-export type MetaFunction = () => Array<{ 
+export type MetaFunction = () => Array<{
   title?: string;
   name?: string;
   content?: string;
@@ -22,7 +22,7 @@ export type MetaFunction = () => Array<{
   [key: string]: string | undefined;
 }>;
 export type LinksFunction = () => Array<{ rel: string; href: string }>;
-// Re-export json function 
+// Re-export json function
 export function json<T>(data: T, init?: ResponseInit): Response {
   return new Response(JSON.stringify(data), {
     ...init,
@@ -34,14 +34,16 @@ export function json<T>(data: T, init?: ResponseInit): Response {
 }
 
 // Export a createRequestHandler function
-export function createRequestHandler(options: {
+export function createRequestHandler(_options: {
   build: any;
   mode?: string;
   getLoadContext?: (req: Request) => AppLoadContext;
 }) {
-  return async (request: Request) => {
-    // This is a simplified handler for type checking
-    // The real implementation will use Vercel's handler
-    return new Response("Not implemented", { status: 501 });
+  return async (_request: Request) => {
+    /*
+     * This is a simplified handler for type checking
+     * The real implementation will use Vercel's handler
+     */
+    return new Response('Not implemented', { status: 501 });
   };
-}
\ No newline at end of file
+}
diff --git a/app/routes/api.check-env-key.ts b/app/routes/api.check-env-key.ts
index 49f60b94..2002c337 100644
--- a/app/routes/api.check-env-key.ts
+++ b/app/routes/api.check-env-key.ts
@@ -1,7 +1,7 @@
 import type { LoaderFunction } from '~/lib/remix-types';
 import { providerBaseUrlEnvKeys } from '~/utils/constants';
 
-export const loader: LoaderFunction = async ({ context, request }) => {
+export const loader: LoaderFunction = async ({ context: _, request }) => {
   const url = new URL(request.url);
   const provider = url.searchParams.get('provider');
 
@@ -10,6 +10,7 @@ export const loader: LoaderFunction = async ({ context, request }) => {
   }
 
   const envVarName = providerBaseUrlEnvKeys[provider].apiTokenKey;
+  // Use only process.env since context.env might be undefined
   const isSet = !!process.env[envVarName];
 
diff --git a/app/routes/api.ping-telemetry.ts b/app/routes/api.ping-telemetry.ts
index f06db4fc..5d504fc3 100644
--- a/app/routes/api.ping-telemetry.ts
+++ b/app/routes/api.ping-telemetry.ts
@@ -29,7 +29,7 @@ export async function action(args: ActionFunctionArgs) {
 }
 
 async function pingTelemetryAction({ request }: ActionFunctionArgs) {
-  const { event, data } = await request.json() as {
+  const { event, data } = (await request.json()) as {
     event: string;
     data: any;
   };
diff --git a/docs/docs/CONTRIBUTING.md b/docs/docs/CONTRIBUTING.md
index 3a8d5be8..12ea54b3 100644
--- a/docs/docs/CONTRIBUTING.md
+++ b/docs/docs/CONTRIBUTING.md
@@ -111,76 +111,6 @@ pnpm test
 ```
 
 ---
-
-## 🚀 Deployment
-
-### Deploy to Cloudflare Pages
-```bash
-pnpm run deploy
-```
-Ensure you have required permissions and that Wrangler is configured.
-
----
-
-## 🐳 Docker Deployment
-
-This section outlines the methods for deploying the application using Docker. The processes for **Development** and **Production** are provided separately for clarity.
-
----
-
-### 🧑‍💻 Development Environment
-
-#### Build Options
-
-**Option 1: Helper Scripts**
-```bash
-# Development build
-npm run dockerbuild
-```
-
-**Option 2: Direct Docker Build Command**
-```bash
-docker build . --target bolt-ai-development
-```
-
-**Option 3: Docker Compose Profile**
-```bash
-docker-compose --profile development up
-```
-
-#### Running the Development Container
-```bash
-docker run -p 5173:5173 --env-file .env.local bolt-ai:development
-```
-
----
-
-### 🏭 Production Environment
-
-#### Build Options
-
-**Option 1: Helper Scripts**
-```bash
-# Production build
-npm run dockerbuild:prod
-```
-
-**Option 2: Direct Docker Build Command**
-```bash
-docker build . --target bolt-ai-production
-```
-
-**Option 3: Docker Compose Profile**
-```bash
-docker-compose --profile production up
-```
-
-#### Running the Production Container
-```bash
-docker run -p 5173:5173 --env-file .env.local bolt-ai:production
-```
-
----
 ### Coolify Deployment
 
 For an easy deployment process, use [Coolify](https://github.com/coollabsio/coolify):
diff --git a/docs/docs/index.md b/docs/docs/index.md
index 7f12f5dd..69032743 100644
--- a/docs/docs/index.md
+++ b/docs/docs/index.md
@@ -219,7 +219,7 @@ When you add a new model to the MODEL_LIST array, it will immediately be availab
 - `pnpm test`: Runs the test suite using Vitest.
 - `pnpm run typecheck`: Runs TypeScript type checking.
 - `pnpm run typegen`: Generates TypeScript types using Wrangler.
-- `pnpm run deploy`: Builds the project and deploys it to Cloudflare Pages.
+- `pnpm run deploy`: Builds the project and deploys it to Vercel.
 
 ---
 
diff --git a/playwright.config.ts b/playwright.config.ts
index 32be66a7..a582500f 100644
--- a/playwright.config.ts
+++ b/playwright.config.ts
@@ -1,6 +1,8 @@
 import { defineConfig, devices } from '@playwright/test';
 
 const port = 5175;
+const usePreviewUrl = !!process.env.PLAYWRIGHT_TEST_BASE_URL;
+const baseURL = usePreviewUrl ? process.env.PLAYWRIGHT_TEST_BASE_URL : `http://localhost:${port}`;
 
 export default defineConfig({
   testDir: './tests/e2e',
@@ -11,7 +13,7 @@ export default defineConfig({
   reporter: 'html',
   timeout: 60000, // Increase global timeout to 60 seconds
   use: {
-    baseURL: `http://localhost:${port}`,
+    baseURL,
     trace: 'on',
   },
   projects: [
@@ -20,11 +22,13 @@ export default defineConfig({
       use: { ...devices['Desktop Chrome'] },
     },
   ],
-  webServer: {
-    command: `pnpm dev --port ${port}`,
-    port,
-    timeout: 120000, // 2 minutes
-    stdout: 'pipe',
-    stderr: 'pipe',
-  },
+  webServer: usePreviewUrl
+    ? undefined
+    : {
+        command: `pnpm dev --port ${port}`,
+        port,
+        timeout: 120000, // 2 minutes
+        stdout: 'pipe',
+        stderr: 'pipe',
+      },
 });
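
Usage note (not part of the patch): with the updated playwright.config.ts, the suite targets whatever PLAYWRIGHT_TEST_BASE_URL points at and skips starting a local web server, which is what the deployment_status workflow relies on; without the variable, Playwright starts `pnpm dev --port 5175` itself. A rough local equivalent, with the preview URL below being a placeholder:

    # run against a deployed Vercel preview (no local server is started)
    PLAYWRIGHT_TEST_BASE_URL=https://example-preview.vercel.app npx playwright test

    # run against a local dev server that Playwright boots on port 5175
    npx playwright test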