Merge remote-tracking branch 'upstream/main'

This commit is contained in:
Brian Hackett
2025-01-16 08:02:49 -08:00
85 changed files with 4011 additions and 918 deletions

View File

@@ -32,7 +32,8 @@ OPEN_ROUTER_API_KEY=
GOOGLE_GENERATIVE_AI_API_KEY=
# You only need this environment variable set if you want to use oLLAMA models
# EXAMPLE http://localhost:11434
# DON'T USE http://localhost:11434 due to IPv6 issues
# USE EXAMPLE http://127.0.0.1:11434
OLLAMA_API_BASE_URL=
# You only need this environment variable set if you want to use OpenAI Like models
@@ -50,6 +51,12 @@ OPENAI_LIKE_API_KEY=
# Get your Together API Key
TOGETHER_API_KEY=
# You only need this environment variable set if you want to use Hyperbolic models
#Get your Hyperbolics API Key at https://app.hyperbolic.xyz/settings
#baseURL="https://api.hyperbolic.xyz/v1/chat/completions"
HYPERBOLIC_API_KEY=
HYPERBOLIC_API_BASE_URL=
# Get your Mistral API Key by following these instructions -
# https://console.mistral.ai/api-keys/
# You only need this environment variable set if you want to use Mistral models
@@ -62,7 +69,8 @@ COHERE_API_KEY=
# Get LMStudio Base URL from LM Studio Developer Console
# Make sure to enable CORS
# Example: http://localhost:1234
# DON'T USE http://localhost:1234 due to IPv6 issues
# Example: http://127.0.0.1:1234
LMSTUDIO_API_BASE_URL=
# Get your xAI API key
@@ -75,6 +83,17 @@ XAI_API_KEY=
# You only need this environment variable set if you want to use Perplexity models
PERPLEXITY_API_KEY=
# Get your AWS configuration
# https://console.aws.amazon.com/iam/home
# The JSON should include the following keys:
# - region: The AWS region where Bedrock is available.
# - accessKeyId: Your AWS access key ID.
# - secretAccessKey: Your AWS secret access key.
# - sessionToken (optional): Temporary session token if using an IAM role or temporary credentials.
# Example JSON:
# {"region": "us-east-1", "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "sessionToken": "yourSessionToken"}
AWS_BEDROCK_CONFIG=
# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug

View File

@@ -6,8 +6,8 @@ body:
value: |
Thank you for reporting an issue :pray:.
This issue tracker is for bugs and issues found with [Bolt.new](https://bolt.new).
If you experience issues related to WebContainer, please file an issue in our [WebContainer repo](https://github.com/stackblitz/webcontainer-core), or file an issue in our [StackBlitz core repo](https://github.com/stackblitz/core) for issues with StackBlitz.
This issue tracker is for bugs and issues found with [Bolt.diy](https://bolt.diy).
If you experience issues related to WebContainer, please file an issue in the official [StackBlitz WebContainer repo](https://github.com/stackblitz/webcontainer-core).
The more information you fill in, the better we can help you.
- type: textarea

23
.github/ISSUE_TEMPLATE/epic.md vendored Normal file
View File

@@ -0,0 +1,23 @@
---
name: Epic
about: Epics define long-term vision and capabilities of the software. They will never be finished but serve as umbrella for features.
title: ''
labels:
- epic
assignees: ''
---
# Strategic Impact
<!-- Why does this area matter? How is it integrated into the product or the development process? What would happen if we ignore it? -->
# Target Audience
<!-- Who benefits most from improvements in this area?
Usual values: Software Developers using the IDE | Contributors -->
# Capabilities
<!-- Which existing capabilities or future features can be imagined as belonging to this epic? This list serves as an illustration to sketch the boundaries of this epic.
Once features are actually being planned / described in detail, they can be linked here. -->

28
.github/ISSUE_TEMPLATE/feature.md vendored Normal file
View File

@@ -0,0 +1,28 @@
---
name: Feature
about: A pretty vague description of how a capability of our software can be added or improved.
title: ''
labels:
- feature
assignees: ''
---
# Motivation
<!-- What capability should be either established or improved? How is life of the target audience better after it's been done? -->
# Scope
<!-- This is kind-of the definition-of-done for a feature.
Try to keep the scope as small as possible and prefer creating multiple, small features which each solve a single problem / make something better
-->
# Options
<!-- If you already have an idea how this can be implemented, please describe it here.
This allows other potential contributors to join forces and provide meaningful feedback prior to even starting work on it.
-->
# Related
<!-- Link to the epic or other issues or PRs which are related to this feature. -->

261
.github/scripts/generate-changelog.sh vendored Executable file
View File

@@ -0,0 +1,261 @@
#!/usr/bin/env bash
#
# generate-changelog.sh
# Builds changelog.md from conventional commits since the latest tag and
# appends the result to $GITHUB_OUTPUT (for the release workflow) as well
# as printing it to stdout.
# NOTE(review): expects NEW_VERSION to be set by the caller — no default is
# provided below; confirm the workflow always exports it.
#
# Ensure we're running in bash
if [ -z "$BASH_VERSION" ]; then
echo "This script requires bash. Please run with: bash $0" >&2
exit 1
fi
# Ensure we're using bash 4.0 or later for associative arrays
if ((BASH_VERSINFO[0] < 4)); then
echo "This script requires bash version 4 or later" >&2
echo "Current bash version: $BASH_VERSION" >&2
exit 1
fi
# Set default values for required environment variables if not in GitHub Actions
if [ -z "$GITHUB_ACTIONS" ]; then
: "${GITHUB_SERVER_URL:=https://github.com}"
: "${GITHUB_REPOSITORY:=stackblitz-labs/bolt.diy}"
: "${GITHUB_OUTPUT:=/tmp/github_output}"
touch "$GITHUB_OUTPUT"
# Running locally
echo "Running locally - checking for upstream remote..."
MAIN_REMOTE="origin"
if git remote -v | grep -q "upstream"; then
MAIN_REMOTE="upstream"
fi
MAIN_BRANCH="main" # or "master" depending on your repository
# Ensure we have latest tags
git fetch ${MAIN_REMOTE} --tags
# Use the remote reference for git log
GITLOG_REF="${MAIN_REMOTE}/${MAIN_BRANCH}"
else
# Running in GitHub Actions
GITLOG_REF="HEAD"
fi
# Get the latest tag; an empty string means this is the first release
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")
# Start changelog file (truncates any pre-existing changelog.md)
echo "# 🚀 Release v${NEW_VERSION}" > changelog.md
echo "" >> changelog.md
echo "## What's Changed 🌟" >> changelog.md
echo "" >> changelog.md
if [ -z "$LATEST_TAG" ]; then
echo "### 🎉 First Release" >> changelog.md
echo "" >> changelog.md
echo "Exciting times! This marks our first release. Thanks to everyone who contributed! 🙌" >> changelog.md
echo "" >> changelog.md
# No tag yet: compare from the repository's root commit
COMPARE_BASE="$(git rev-list --max-parents=0 HEAD)"
else
echo "### 🔄 Changes since $LATEST_TAG" >> changelog.md
echo "" >> changelog.md
COMPARE_BASE="$LATEST_TAG"
fi
# Function to extract conventional commit type and associated emoji.
# Maps a commit subject (optionally with a "(scope)") to a changelog section
# heading. Returns an empty string for chore commits so callers can skip them;
# anything that is not a conventional commit falls into "Other Changes".
get_commit_type() {
local msg="$1"
# The "(\(.+\))?" part makes a scope like "feat(ui):" optional
if [[ $msg =~ ^feat(\(.+\))?:|^feature(\(.+\))?: ]]; then echo "✨ Features"
elif [[ $msg =~ ^fix(\(.+\))?: ]]; then echo "🐛 Bug Fixes"
elif [[ $msg =~ ^docs(\(.+\))?: ]]; then echo "📚 Documentation"
elif [[ $msg =~ ^style(\(.+\))?: ]]; then echo "💎 Styles"
elif [[ $msg =~ ^refactor(\(.+\))?: ]]; then echo "♻️ Code Refactoring"
elif [[ $msg =~ ^perf(\(.+\))?: ]]; then echo "⚡ Performance Improvements"
elif [[ $msg =~ ^test(\(.+\))?: ]]; then echo "🧪 Tests"
elif [[ $msg =~ ^build(\(.+\))?: ]]; then echo "🛠️ Build System"
elif [[ $msg =~ ^ci(\(.+\))?: ]]; then echo "⚙️ CI"
elif [[ $msg =~ ^chore(\(.+\))?: ]]; then echo "" # Skip chore commits
else echo "🔍 Other Changes" # Default category with emoji
fi
}
# Initialize associative arrays (requires bash >= 4, checked above)
declare -A CATEGORIES # set of category headings seen in this run
declare -A COMMITS_BY_CATEGORY # heading -> accumulated markdown bullet lines
declare -A ALL_AUTHORS # author email -> 1; used to detect first contributions
declare -A NEW_CONTRIBUTORS # GitHub username (or git author name) -> 1
# Get all historical authors before the compare base, so anyone already in
# the history is not reported as a first-time contributor
while IFS= read -r author; do
ALL_AUTHORS["$author"]=1
done < <(git log "${COMPARE_BASE}" --pretty=format:"%ae" | sort -u)
# Process all commits since the last tag. Each input line is
# "<full-hash>|<subject>|<body>" (see the git log at the bottom of the loop).
while IFS= read -r commit_line; do
  # Skip anything that does not start with "<hex-hash>|"
  if [[ ! $commit_line =~ ^[a-f0-9]+\| ]]; then
    echo "WARNING: Skipping invalid commit line format: $commit_line" >&2
    continue
  fi

  # Extract the three fields once. (This extraction and the hash validation
  # were previously duplicated verbatim, running cut twice per commit and
  # validating the hash twice — once silently, once with a warning.)
  HASH=$(echo "$commit_line" | cut -d'|' -f1)
  COMMIT_MSG=$(echo "$commit_line" | cut -d'|' -f2)
  BODY=$(echo "$commit_line" | cut -d'|' -f3)

  # Validate hash format (full 40-character SHA-1)
  if [[ ! $HASH =~ ^[a-f0-9]{40}$ ]]; then
    echo "WARNING: Invalid commit hash format: $HASH" >&2
    continue
  fi

  # Reset per iteration: the regular-commit branch below only assigns this
  # conditionally, so without a reset a previous commit's GitHub username
  # could be mis-attributed to this commit when the gh lookup is skipped
  # or fails.
  GITHUB_USERNAME=""

  # Check if it's a merge commit
  if [[ $COMMIT_MSG =~ Merge\ pull\ request\ #([0-9]+) ]]; then
    PR_NUM="${BASH_REMATCH[1]}"
    # Extract the PR title from the merge commit body
    PR_TITLE=$(echo "$BODY" | grep -v "^Merge pull request" | head -n 1)
    # Only process if it follows conventional commit format
    CATEGORY=$(get_commit_type "$PR_TITLE")
    if [ -n "$CATEGORY" ]; then
      # Get PR author's GitHub username
      GITHUB_USERNAME=$(gh pr view "$PR_NUM" --json author --jq '.author.login')
      if [ -n "$GITHUB_USERNAME" ]; then
        # Check if this is a first-time contributor
        AUTHOR_EMAIL=$(git show -s --format='%ae' "$HASH")
        if [ -z "${ALL_AUTHORS[$AUTHOR_EMAIL]}" ]; then
          NEW_CONTRIBUTORS["$GITHUB_USERNAME"]=1
          ALL_AUTHORS["$AUTHOR_EMAIL"]=1
        fi
        CATEGORIES["$CATEGORY"]=1
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* ${PR_TITLE#*: } ([#$PR_NUM](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/pull/$PR_NUM)) by @$GITHUB_USERNAME"$'\n'
      else
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* ${PR_TITLE#*: } ([#$PR_NUM](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/pull/$PR_NUM))"$'\n'
      fi
    fi

  # Check if it's a squash merge by looking for (#NUMBER) pattern
  elif [[ $COMMIT_MSG =~ \(#([0-9]+)\) ]]; then
    PR_NUM="${BASH_REMATCH[1]}"
    # Only process if it follows conventional commit format
    CATEGORY=$(get_commit_type "$COMMIT_MSG")
    if [ -n "$CATEGORY" ]; then
      # Get PR author's GitHub username
      GITHUB_USERNAME=$(gh pr view "$PR_NUM" --json author --jq '.author.login')
      if [ -n "$GITHUB_USERNAME" ]; then
        # Check if this is a first-time contributor
        AUTHOR_EMAIL=$(git show -s --format='%ae' "$HASH")
        if [ -z "${ALL_AUTHORS[$AUTHOR_EMAIL]}" ]; then
          NEW_CONTRIBUTORS["$GITHUB_USERNAME"]=1
          ALL_AUTHORS["$AUTHOR_EMAIL"]=1
        fi
        CATEGORIES["$CATEGORY"]=1
        COMMIT_TITLE=${COMMIT_MSG%% (#*} # Remove the PR number suffix
        COMMIT_TITLE=${COMMIT_TITLE#*: } # Remove the type prefix
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* $COMMIT_TITLE ([#$PR_NUM](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/pull/$PR_NUM)) by @$GITHUB_USERNAME"$'\n'
      else
        COMMIT_TITLE=${COMMIT_MSG%% (#*} # Remove the PR number suffix
        COMMIT_TITLE=${COMMIT_TITLE#*: } # Remove the type prefix
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* $COMMIT_TITLE ([#$PR_NUM](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/pull/$PR_NUM))"$'\n'
      fi
    fi
  else
    # Process conventional commits without PR numbers
    CATEGORY=$(get_commit_type "$COMMIT_MSG")
    if [ -n "$CATEGORY" ]; then
      # Get commit author info
      AUTHOR_EMAIL=$(git show -s --format='%ae' "$HASH")
      # Try to get GitHub username using gh api
      if [ -n "$GITHUB_ACTIONS" ] || command -v gh >/dev/null 2>&1; then
        GITHUB_USERNAME=$(gh api "/repos/${GITHUB_REPOSITORY}/commits/${HASH}" --jq '.author.login' 2>/dev/null)
      fi
      if [ -n "$GITHUB_USERNAME" ]; then
        # If we got GitHub username, use it
        if [ -z "${ALL_AUTHORS[$AUTHOR_EMAIL]}" ]; then
          NEW_CONTRIBUTORS["$GITHUB_USERNAME"]=1
          ALL_AUTHORS["$AUTHOR_EMAIL"]=1
        fi
        CATEGORIES["$CATEGORY"]=1
        COMMIT_TITLE=${COMMIT_MSG#*: } # Remove the type prefix
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* $COMMIT_TITLE (${HASH:0:7}) by @$GITHUB_USERNAME"$'\n'
      else
        # Fallback to git author name if no GitHub username found
        AUTHOR_NAME=$(git show -s --format='%an' "$HASH")
        if [ -z "${ALL_AUTHORS[$AUTHOR_EMAIL]}" ]; then
          NEW_CONTRIBUTORS["$AUTHOR_NAME"]=1
          ALL_AUTHORS["$AUTHOR_EMAIL"]=1
        fi
        CATEGORIES["$CATEGORY"]=1
        COMMIT_TITLE=${COMMIT_MSG#*: } # Remove the type prefix
        COMMITS_BY_CATEGORY["$CATEGORY"]+="* $COMMIT_TITLE (${HASH:0:7}) by $AUTHOR_NAME"$'\n'
      fi
    fi
  fi
done < <(git log "${COMPARE_BASE}..${GITLOG_REF}" --pretty=format:"%H|%s|%b" --reverse --first-parent)
# Write categorized commits to changelog with their emojis, in a fixed
# section order (must match the headings produced by get_commit_type)
for category in "✨ Features" "🐛 Bug Fixes" "📚 Documentation" "💎 Styles" "♻️ Code Refactoring" "⚡ Performance Improvements" "🧪 Tests" "🛠️ Build System" "⚙️ CI" "🔍 Other Changes"; do
if [ -n "${COMMITS_BY_CATEGORY[$category]}" ]; then
echo "### $category" >> changelog.md
echo "" >> changelog.md
echo "${COMMITS_BY_CATEGORY[$category]}" >> changelog.md
echo "" >> changelog.md
fi
done
# Add first-time contributors section if there are any
if [ ${#NEW_CONTRIBUTORS[@]} -gt 0 ]; then
echo "## ✨ First-time Contributors" >> changelog.md
echo "" >> changelog.md
echo "A huge thank you to our amazing new contributors! Your first contribution marks the start of an exciting journey! 🌟" >> changelog.md
echo "" >> changelog.md
# Use readarray to sort the keys (associative-array iteration order is unspecified)
readarray -t sorted_contributors < <(printf '%s\n' "${!NEW_CONTRIBUTORS[@]}" | sort)
for github_username in "${sorted_contributors[@]}"; do
echo "* 🌟 [@$github_username](https://github.com/$github_username)" >> changelog.md
done
echo "" >> changelog.md
fi
# Add compare link if not first release
if [ -n "$LATEST_TAG" ]; then
echo "## 📈 Stats" >> changelog.md
echo "" >> changelog.md
echo "**Full Changelog**: [\`$LATEST_TAG..v${NEW_VERSION}\`](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/compare/$LATEST_TAG...v${NEW_VERSION})" >> changelog.md
fi
# Output the changelog as a multi-line GitHub Actions output named "content",
# using the heredoc-style delimiter syntax required for multi-line values
CHANGELOG_CONTENT=$(cat changelog.md)
{
echo "content<<EOF"
echo "$CHANGELOG_CONTENT"
echo "EOF"
} >> "$GITHUB_OUTPUT"
# Also print to stdout for local testing
echo "Generated changelog:"
echo "==================="
cat changelog.md
echo "==================="

81
.github/workflows/docker.yaml vendored Normal file
View File

@@ -0,0 +1,81 @@
---
name: Docker Publish
on:
workflow_dispatch:
push:
branches:
- main
tags:
- v*
- "*"
permissions:
packages: write
contents: read
env:
REGISTRY: ghcr.io
DOCKER_IMAGE: ghcr.io/${{ github.repository }}
BUILD_TARGET: bolt-ai-production # bolt-ai-development
jobs:
docker-build-publish:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- id: string
uses: ASzc/change-string-case-action@v6
with:
string: ${{ env.DOCKER_IMAGE }}
- name: Docker meta
id: meta
uses: crazy-max/ghaction-docker-meta@v5
with:
images: ${{ steps.string.outputs.lowercase }}
flavor: |
latest=true
prefix=
suffix=
tags: |
type=semver,pattern={{version}}
type=pep440,pattern={{version}}
type=ref,event=tag
type=raw,value={{sha}}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }} # ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.GITHUB_TOKEN }} # ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
uses: docker/build-push-action@v6
with:
context: .
file: ./Dockerfile
target: ${{ env.BUILD_TARGET }}
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=registry,ref=${{ steps.string.outputs.lowercase }}:latest
cache-to: type=inline
- name: Check manifest
run: |
docker buildx imagetools inspect ${{ steps.string.outputs.lowercase }}:${{ steps.meta.outputs.version }}
- name: Dump context
if: always()
uses: crazy-max/ghaction-dump-context@v2

View File

@@ -80,83 +80,17 @@ jobs:
NEW_VERSION=${{ steps.bump_version.outputs.new_version }}
pnpm version $NEW_VERSION --no-git-tag-version --allow-same-version
- name: Prepare changelog script
run: chmod +x .github/scripts/generate-changelog.sh
- name: Generate Changelog
id: changelog
run: |
# Get the latest tag
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")
env:
NEW_VERSION: ${{ steps.bump_version.outputs.new_version }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Start changelog file
echo "# Release v${{ steps.bump_version.outputs.new_version }}" > changelog.md
echo "" >> changelog.md
if [ -z "$LATEST_TAG" ]; then
echo "### 🎉 First Release" >> changelog.md
echo "" >> changelog.md
COMPARE_BASE="$(git rev-list --max-parents=0 HEAD)"
else
echo "### 🔄 Changes since $LATEST_TAG" >> changelog.md
echo "" >> changelog.md
COMPARE_BASE="$LATEST_TAG"
fi
# Function to extract conventional commit type
get_commit_type() {
if [[ $1 =~ ^feat:|^feature: ]]; then echo "✨ Features";
elif [[ $1 =~ ^fix: ]]; then echo "🐛 Bug Fixes";
elif [[ $1 =~ ^docs: ]]; then echo "📚 Documentation";
elif [[ $1 =~ ^style: ]]; then echo "💎 Styles";
elif [[ $1 =~ ^refactor: ]]; then echo "♻️ Code Refactoring";
elif [[ $1 =~ ^perf: ]]; then echo "⚡️ Performance Improvements";
elif [[ $1 =~ ^test: ]]; then echo "✅ Tests";
elif [[ $1 =~ ^build: ]]; then echo "🛠️ Build System";
elif [[ $1 =~ ^ci: ]]; then echo "⚙️ CI";
elif [[ $1 =~ ^chore: ]]; then echo "🔧 Chores";
else echo "🔍 Other Changes";
fi
}
# Generate categorized changelog
declare -A CATEGORIES
declare -A COMMITS_BY_CATEGORY
# Get commits since last tag or all commits if no tag exists
while IFS= read -r commit_line; do
HASH=$(echo "$commit_line" | cut -d'|' -f1)
MSG=$(echo "$commit_line" | cut -d'|' -f2)
PR_NUM=$(echo "$commit_line" | cut -d'|' -f3)
CATEGORY=$(get_commit_type "$MSG")
CATEGORIES["$CATEGORY"]=1
# Format commit message with PR link if available
if [ -n "$PR_NUM" ]; then
COMMITS_BY_CATEGORY["$CATEGORY"]+="- ${MSG#*: } ([#$PR_NUM](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/pull/$PR_NUM))"$'\n'
else
COMMITS_BY_CATEGORY["$CATEGORY"]+="- ${MSG#*: }"$'\n'
fi
done < <(git log "${COMPARE_BASE}..HEAD" --pretty=format:"%H|%s|%(trailers:key=PR-Number,valueonly)" --reverse)
# Write categorized commits to changelog
for category in "✨ Features" "🐛 Bug Fixes" "📚 Documentation" "💎 Styles" "♻️ Code Refactoring" "⚡️ Performance Improvements" "✅ Tests" "🛠️ Build System" "⚙️ CI" "🔧 Chores" "🔍 Other Changes"; do
if [ -n "${COMMITS_BY_CATEGORY[$category]}" ]; then
echo "#### $category" >> changelog.md
echo "" >> changelog.md
echo "${COMMITS_BY_CATEGORY[$category]}" >> changelog.md
echo "" >> changelog.md
fi
done
# Add compare link if not first release
if [ -n "$LATEST_TAG" ]; then
echo "**Full Changelog**: [\`$LATEST_TAG..v${{ steps.bump_version.outputs.new_version }}\`](${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/compare/$LATEST_TAG...v${{ steps.bump_version.outputs.new_version }})" >> changelog.md
fi
# Save changelog content for the release
CHANGELOG_CONTENT=$(cat changelog.md)
echo "content<<EOF" >> $GITHUB_OUTPUT
echo "$CHANGELOG_CONTENT" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
run: .github/scripts/generate-changelog.sh
- name: Get the latest commit hash and version tag
run: |

View File

@@ -25,8 +25,10 @@ ARG ANTHROPIC_API_KEY
ARG OPEN_ROUTER_API_KEY
ARG GOOGLE_GENERATIVE_AI_API_KEY
ARG OLLAMA_API_BASE_URL
ARG XAI_API_KEY
ARG TOGETHER_API_KEY
ARG TOGETHER_API_BASE_URL
ARG AWS_BEDROCK_CONFIG
ARG VITE_LOG_LEVEL=debug
ARG DEFAULT_NUM_CTX
@@ -38,16 +40,19 @@ ENV WRANGLER_SEND_METRICS=false \
OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
XAI_API_KEY=${XAI_API_KEY} \
TOGETHER_API_KEY=${TOGETHER_API_KEY} \
TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}\
RUNNING_IN_DOCKER=true
# Pre-configure wrangler to disable metrics
RUN mkdir -p /root/.config/.wrangler && \
echo '{"enabled":false}' > /root/.config/.wrangler/metrics.json
RUN npm run build
RUN pnpm run build
CMD [ "pnpm", "run", "dockerstart"]
@@ -62,6 +67,7 @@ ARG ANTHROPIC_API_KEY
ARG OPEN_ROUTER_API_KEY
ARG GOOGLE_GENERATIVE_AI_API_KEY
ARG OLLAMA_API_BASE_URL
ARG XAI_API_KEY
ARG TOGETHER_API_KEY
ARG TOGETHER_API_BASE_URL
ARG VITE_LOG_LEVEL=debug
@@ -74,10 +80,13 @@ ENV GROQ_API_KEY=${GROQ_API_KEY} \
OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} \
GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY} \
OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL} \
XAI_API_KEY=${XAI_API_KEY} \
TOGETHER_API_KEY=${TOGETHER_API_KEY} \
TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL} \
AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG} \
VITE_LOG_LEVEL=${VITE_LOG_LEVEL} \
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}
DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX}\
RUNNING_IN_DOCKER=true
RUN mkdir -p ${WORKDIR}/run
CMD pnpm run dev --host

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2024 StackBlitz, Inc.
Copyright (c) 2024 StackBlitz, Inc. and bolt.diy contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

57
PROJECT.md Normal file
View File

@@ -0,0 +1,57 @@
# Project management of bolt.diy
First off: this sounds funny, we know. "Project management" comes from a world of enterprise stuff and this project is
far from being enterprisey — it's still anarchy all over the place 😉
But we need to organize ourselves somehow, right?
> tl;dr: We've got a project board with epics and features. We use PRs as change log and as materialized features. Find it [here](https://github.com/orgs/stackblitz-labs/projects/4).
Here's how we structure long-term vision, mid-term capabilities of the software and short term improvements.
## Strategic epics (long-term)
Strategic epics define areas in which the product evolves. Usually, these epics don't overlap. They shall allow the core
team to define what they believe is most important and should be worked on with the highest priority.
You can find the [epics as issues](https://github.com/stackblitz-labs/bolt.diy/labels/epic) which are probably never
going to be closed.
What's the benefit / purpose of epics?
1. Prioritization
E.g. we could say “managing files is currently more important than quality”. Then, we could think about which features
would bring “managing files” forward. It may be different features, such as “upload local files”, “import from a repo”
or also undo/redo/commit.
In a more-or-less regular meeting dedicated for that, the core team discusses which epics matter most, sketch features
and then check who can work on them. After the meeting, they update the roadmap (at least for the next development turn)
and this way communicate where the focus currently is.
2. Grouping of features
By linking features with epics, we can keep them together and document *why* we invest work into a particular thing.
## Features (mid-term)
We all know probably a dozen of methodologies following which features are being described (User story, business
function, you name it).
However, we intentionally describe features in a more vague manner. Why? Everybody loves crisp, well-defined
acceptance criteria, no? Well, every product owner loves them, because they know what they'll get once it's done.
But: **there is no owner of this product**. Therefore, we grant *maximum flexibility to the developer contributing a feature* so that they can bring in their ideas and have the most fun implementing it.
The feature therefore tries to describe *what* should be improved but not in detail *how*.
## PRs as materialized features (short-term)
Once a developer starts working on a feature, a draft-PR *can* be opened asap to share, describe and discuss, how the feature shall be implemented. But: this is not a must. It just helps to get early feedback and get other developers involved. Sometimes, the developer just wants to get started and then open a PR later.
In a loosely organized project, it may as well happen that multiple PRs are opened for the same feature. This is no real issue: Usually, people who are passionate about a solution are willing to join forces and get it done together. And if a second developer was just faster getting the same feature realized: Be happy that it's been done, close the PR and look out for the next feature to implement 🤓
## PRs as change log
Once a PR is merged, a squashed commit contains the whole PR description which allows for a good change log.
All authors of commits in the PR are mentioned in the squashed commit message and become contributors 🙌

View File

@@ -3,7 +3,9 @@
Welcome to bolt.diy, the official open source version of Bolt.new (previously known as oTToDev and bolt.new ANY LLM), which allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
Check the [bolt.diy Docs](https://stackblitz-labs.github.io/bolt.diy/) for more information.
Check the [bolt.diy Docs](https://stackblitz-labs.github.io/bolt.diy/) for more information.
Also [this pinned post in our community](https://thinktank.ottomator.ai/t/videos-tutorial-helpful-content/3243) has a bunch of incredible resources for running and deploying bolt.diy yourself!
We have also launched an experimental agent called the "bolt.diy Expert" that can answer common questions about bolt.diy. Find it here on the [oTTomator Live Agent Studio](https://studio.ottomator.ai/).
@@ -23,8 +25,15 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
## Join the community
[Join the bolt.diy community here, in the thinktank on ottomator.ai!](https://thinktank.ottomator.ai)
[Join the bolt.diy community here, in the oTTomator Think Tank!](https://thinktank.ottomator.ai)
## Project management
Bolt.diy is a community effort! Still, the core team of contributors aims at organizing the project in way that allows
you to understand where the current areas of focus are.
If you want to know what we are working on, what we are planning to work on, or if you want to contribute to the
project, please check the [project management guide](./PROJECT.md) to get started easily.
## Requested Additions
@@ -47,6 +56,7 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
- ✅ Bolt terminal to see the output of LLM run commands (@thecodacus)
- ✅ Streaming of code output (@thecodacus)
- ✅ Ability to revert code to earlier version (@wonderwhy-er)
- ✅ Chat history backup and restore functionality (@sidbetatester)
- ✅ Cohere Integration (@hasanraiyan)
- ✅ Dynamic model max token length (@hasanraiyan)
- ✅ Better prompt enhancing (@SujalXplores)
@@ -61,6 +71,11 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
- ✅ PromptLibrary to have different variations of prompts for different use cases (@thecodacus)
- ✅ Detect package.json and commands to auto install & run preview for folder and git import (@wonderwhy-er)
- ✅ Selection tool to target changes visually (@emcconnell)
- ✅ Detect terminal Errors and ask bolt to fix it (@thecodacus)
- ✅ Detect preview Errors and ask bolt to fix it (@wonderwhy-er)
- ✅ Add Starter Template Options (@thecodacus)
- ✅ Perplexity Integration (@meetpateltech)
- ✅ AWS Bedrock Integration (@kunjabijukchhe)
-**HIGH PRIORITY** - Prevent bolt from rewriting files as often (file locking and diffs)
-**HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
-**HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
@@ -70,8 +85,9 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
- ⬜ Voice prompting
- ⬜ Azure Open AI API Integration
- ⬜ Perplexity Integration
- ⬜ Vertex AI Integration
- ⬜ Granite Integration
- ⬜ Popout Window for Web Container
## Features

View File

@@ -1,6 +1,7 @@
import React, { useState } from 'react';
import React, { useState, useEffect, useCallback } from 'react';
import { IconButton } from '~/components/ui/IconButton';
import type { ProviderInfo } from '~/types/model';
import Cookies from 'js-cookie';
interface APIKeyManagerProps {
provider: ProviderInfo;
@@ -10,58 +11,159 @@ interface APIKeyManagerProps {
labelForGetApiKey?: string;
}
// cache which stores whether the provider's API key is set via environment variable
const providerEnvKeyStatusCache: Record<string, boolean> = {};
const apiKeyMemoizeCache: { [k: string]: Record<string, string> } = {};
// Reads the per-provider API keys persisted in the 'apiKeys' cookie.
// Results are memoized on the raw cookie string (apiKeyMemoizeCache), so
// repeated calls with an unchanged cookie skip the JSON.parse and return
// the same object instance. Returns an empty record when the cookie is absent.
// NOTE(review): assumes the cookie value is valid JSON — JSON.parse will throw
// on a corrupted cookie; confirm callers tolerate that or add a try/catch.
export function getApiKeysFromCookies() {
const storedApiKeys = Cookies.get('apiKeys');
let parsedKeys: Record<string, string> = {};
if (storedApiKeys) {
// Cache hit: same serialized cookie -> reuse the previously parsed object
parsedKeys = apiKeyMemoizeCache[storedApiKeys];
if (!parsedKeys) {
parsedKeys = apiKeyMemoizeCache[storedApiKeys] = JSON.parse(storedApiKeys);
}
}
return parsedKeys;
}
// eslint-disable-next-line @typescript-eslint/naming-convention
export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
const [isEditing, setIsEditing] = useState(false);
const [tempKey, setTempKey] = useState(apiKey);
const [isEnvKeySet, setIsEnvKeySet] = useState(false);
// Reset states and load saved key when provider changes
useEffect(() => {
// Load saved API key from cookies for this provider
const savedKeys = getApiKeysFromCookies();
const savedKey = savedKeys[provider.name] || '';
setTempKey(savedKey);
setApiKey(savedKey);
setIsEditing(false);
}, [provider.name]);
const checkEnvApiKey = useCallback(async () => {
// Check cache first
if (providerEnvKeyStatusCache[provider.name] !== undefined) {
setIsEnvKeySet(providerEnvKeyStatusCache[provider.name]);
return;
}
try {
const response = await fetch(`/api/check-env-key?provider=${encodeURIComponent(provider.name)}`);
const data = await response.json();
const isSet = (data as { isSet: boolean }).isSet;
// Cache the result
providerEnvKeyStatusCache[provider.name] = isSet;
setIsEnvKeySet(isSet);
} catch (error) {
console.error('Failed to check environment API key:', error);
setIsEnvKeySet(false);
}
}, [provider.name]);
useEffect(() => {
checkEnvApiKey();
}, [checkEnvApiKey]);
const handleSave = () => {
// Save to parent state
setApiKey(tempKey);
// Save to cookies
const currentKeys = getApiKeysFromCookies();
const newKeys = { ...currentKeys, [provider.name]: tempKey };
Cookies.set('apiKeys', JSON.stringify(newKeys));
setIsEditing(false);
};
return (
<div className="flex items-start sm:items-center mt-2 mb-2 flex-col sm:flex-row">
<div>
<span className="text-sm text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
{!isEditing && (
<div className="flex items-center mb-4">
<span className="flex-1 text-xs text-bolt-elements-textPrimary mr-2">
{apiKey ? '••••••••' : 'Not set (will still work if set in .env file)'}
</span>
<IconButton onClick={() => setIsEditing(true)} title="Edit API Key">
<div className="i-ph:pencil-simple" />
</IconButton>
</div>
)}
<div className="flex items-center justify-between py-3 px-1">
<div className="flex items-center gap-2 flex-1">
<div className="flex items-center gap-2">
<span className="text-sm font-medium text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
{!isEditing && (
<div className="flex items-center gap-2">
{apiKey ? (
<>
<div className="i-ph:check-circle-fill text-green-500 w-4 h-4" />
<span className="text-xs text-green-500">Set via UI</span>
</>
) : isEnvKeySet ? (
<>
<div className="i-ph:check-circle-fill text-green-500 w-4 h-4" />
<span className="text-xs text-green-500">Set via environment variable</span>
</>
) : (
<>
<div className="i-ph:x-circle-fill text-red-500 w-4 h-4" />
<span className="text-xs text-red-500">Not Set (Please set via UI or ENV_VAR)</span>
</>
)}
</div>
)}
</div>
</div>
{isEditing ? (
<div className="flex items-center gap-3 mt-2">
<input
type="password"
value={tempKey}
placeholder="Your API Key"
onChange={(e) => setTempKey(e.target.value)}
className="flex-1 px-2 py-1 text-xs lg:text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
/>
<IconButton onClick={handleSave} title="Save API Key">
<div className="i-ph:check" />
</IconButton>
<IconButton onClick={() => setIsEditing(false)} title="Cancel">
<div className="i-ph:x" />
</IconButton>
</div>
) : (
<>
{provider?.getApiKeyLink && (
<IconButton className="ml-auto" onClick={() => window.open(provider?.getApiKeyLink)} title="Edit API Key">
<span className="mr-2 text-xs lg:text-sm">{provider?.labelForGetApiKey || 'Get API Key'}</span>
<div className={provider?.icon || 'i-ph:key'} />
<div className="flex items-center gap-2 shrink-0">
{isEditing ? (
<div className="flex items-center gap-2">
<input
type="password"
value={tempKey}
placeholder="Enter API Key"
onChange={(e) => setTempKey(e.target.value)}
className="w-[300px] px-3 py-1.5 text-sm rounded border border-bolt-elements-borderColor
bg-bolt-elements-prompt-background text-bolt-elements-textPrimary
focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
/>
<IconButton
onClick={handleSave}
title="Save API Key"
className="bg-green-500/10 hover:bg-green-500/20 text-green-500"
>
<div className="i-ph:check w-4 h-4" />
</IconButton>
)}
</>
)}
<IconButton
onClick={() => setIsEditing(false)}
title="Cancel"
className="bg-red-500/10 hover:bg-red-500/20 text-red-500"
>
<div className="i-ph:x w-4 h-4" />
</IconButton>
</div>
) : (
<>
{
<IconButton
onClick={() => setIsEditing(true)}
title="Edit API Key"
className="bg-blue-500/10 hover:bg-blue-500/20 text-blue-500"
>
<div className="i-ph:pencil-simple w-4 h-4" />
</IconButton>
}
{provider?.getApiKeyLink && !apiKey && (
<IconButton
onClick={() => window.open(provider?.getApiKeyLink)}
title="Get API Key"
className="bg-purple-500/10 hover:bg-purple-500/20 text-purple-500 flex items-center gap-2"
>
<span className="text-xs whitespace-nowrap">{provider?.labelForGetApiKey || 'Get API Key'}</span>
<div className={`${provider?.icon || 'i-ph:key'} w-4 h-4`} />
</IconButton>
)}
</>
)}
</div>
</div>
);
};

View File

@@ -0,0 +1,108 @@
import { AnimatePresence, motion } from 'framer-motion';
import type { ActionAlert } from '~/types/actions';
import { classNames } from '~/utils/classNames';
// Props for ChatAlert.
interface Props {
  alert: ActionAlert; // the failed action being surfaced to the user
  clearAlert: () => void; // dismisses the alert without taking further action
  postMessage: (message: string) => void; // posts a message into the chat (used by the "Ask Bolt" button)
}
/**
 * Inline chat alert shown when an action fails. Renders the error description,
 * an "Ask Bolt" button that posts the raw error back into the chat as a fix
 * request, and a "Dismiss" button that clears the alert.
 */
export default function ChatAlert({ alert, clearAlert, postMessage }: Props) {
  const { description, content, source } = alert;

  // Alerts originate either from the in-browser preview or from terminal commands;
  // the source selects the title, message copy, and code-fence language below.
  const isPreview = source === 'preview';
  const title = isPreview ? 'Preview Error' : 'Terminal Error';
  const message = isPreview
    ? 'We encountered an error while running the preview. Would you like Bolt to analyze and help resolve this issue?'
    : 'We encountered an error while running terminal commands. Would you like Bolt to analyze and help resolve this issue?';

  return (
    <AnimatePresence>
      <motion.div
        initial={{ opacity: 0, y: -20 }}
        animate={{ opacity: 1, y: 0 }}
        exit={{ opacity: 0, y: -20 }}
        transition={{ duration: 0.3 }}
        className={`rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-background-depth-2 p-4`}
      >
        <div className="flex items-start">
          {/* Icon */}
          <motion.div
            className="flex-shrink-0"
            initial={{ scale: 0 }}
            animate={{ scale: 1 }}
            transition={{ delay: 0.2 }}
          >
            <div className={`i-ph:warning-duotone text-xl text-bolt-elements-button-danger-text`}></div>
          </motion.div>
          {/* Content */}
          <div className="ml-3 flex-1">
            <motion.h3
              initial={{ opacity: 0 }}
              animate={{ opacity: 1 }}
              transition={{ delay: 0.1 }}
              className={`text-sm font-medium text-bolt-elements-textPrimary`}
            >
              {title}
            </motion.h3>
            <motion.div
              initial={{ opacity: 0 }}
              animate={{ opacity: 1 }}
              transition={{ delay: 0.2 }}
              className={`mt-2 text-sm text-bolt-elements-textSecondary`}
            >
              <p>{message}</p>
              {/* Show the raw error description only when one was provided. */}
              {description && (
                <div className="text-xs text-bolt-elements-textSecondary p-2 bg-bolt-elements-background-depth-3 rounded mt-4 mb-4">
                  Error: {description}
                </div>
              )}
            </motion.div>
            {/* Actions */}
            <motion.div
              className="mt-4"
              initial={{ opacity: 0, y: 10 }}
              animate={{ opacity: 1, y: 0 }}
              transition={{ delay: 0.3 }}
            >
              <div className={classNames(' flex gap-2')}>
                {/* Posts the error content into the chat wrapped in a fenced code block. */}
                <button
                  onClick={() =>
                    postMessage(
                      `*Fix this ${isPreview ? 'preview' : 'terminal'} error* \n\`\`\`${isPreview ? 'js' : 'sh'}\n${content}\n\`\`\`\n`,
                    )
                  }
                  className={classNames(
                    `px-2 py-1.5 rounded-md text-sm font-medium`,
                    'bg-bolt-elements-button-primary-background',
                    'hover:bg-bolt-elements-button-primary-backgroundHover',
                    'focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-bolt-elements-button-danger-background',
                    'text-bolt-elements-button-primary-text',
                    'flex items-center gap-1.5',
                  )}
                >
                  <div className="i-ph:chat-circle-duotone"></div>
                  Ask Bolt
                </button>
                <button
                  onClick={clearAlert}
                  className={classNames(
                    `px-2 py-1.5 rounded-md text-sm font-medium`,
                    'bg-bolt-elements-button-secondary-background',
                    'hover:bg-bolt-elements-button-secondary-backgroundHover',
                    'focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-bolt-elements-button-secondary-background',
                    'text-bolt-elements-button-secondary-text',
                  )}
                >
                  Dismiss
                </button>
              </div>
            </motion.div>
          </div>
        </div>
      </motion.div>
    </AnimatePresence>
  );
}

View File

@@ -3,6 +3,9 @@ import { useGit } from '~/lib/hooks/useGit';
import type { Message } from 'ai';
import { detectProjectCommands, createCommandsMessage } from '~/utils/projectCommands';
import { generateId } from '~/utils/fileUtils';
import { useState } from 'react';
import { toast } from 'react-toastify';
import { LoadingOverlay } from '~/components/ui/LoadingOverlay';
const IGNORE_PATTERNS = [
'node_modules/**',
@@ -37,6 +40,8 @@ interface GitCloneButtonProps {
export default function GitCloneButton({ importChat }: GitCloneButtonProps) {
const { ready, gitClone } = useGit();
const [loading, setLoading] = useState(false);
const onClick = async (_e: any) => {
if (!ready) {
return;
@@ -45,33 +50,34 @@ export default function GitCloneButton({ importChat }: GitCloneButtonProps) {
const repoUrl = prompt('Enter the Git url');
if (repoUrl) {
const { workdir, data } = await gitClone(repoUrl);
setLoading(true);
if (importChat) {
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
console.log(filePaths);
try {
const { workdir, data } = await gitClone(repoUrl);
const textDecoder = new TextDecoder('utf-8');
if (importChat) {
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
console.log(filePaths);
// Convert files to common format for command detection
const fileContents = filePaths
.map((filePath) => {
const { data: content, encoding } = data[filePath];
return {
path: filePath,
content: encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
};
})
.filter((f) => f.content);
const textDecoder = new TextDecoder('utf-8');
// Detect and create commands message
const commands = await detectProjectCommands(fileContents);
const commandsMessage = createCommandsMessage(commands);
const fileContents = filePaths
.map((filePath) => {
const { data: content, encoding } = data[filePath];
return {
path: filePath,
content:
encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
};
})
.filter((f) => f.content);
// Create files message
const filesMessage: Message = {
role: 'assistant',
content: `Cloning the repo ${repoUrl} into ${workdir}
const commands = await detectProjectCommands(fileContents);
const commandsMessage = createCommandsMessage(commands);
const filesMessage: Message = {
role: 'assistant',
content: `Cloning the repo ${repoUrl} into ${workdir}
<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
${fileContents
.map(
@@ -82,29 +88,38 @@ ${file.content}
)
.join('\n')}
</boltArtifact>`,
id: generateId(),
createdAt: new Date(),
};
id: generateId(),
createdAt: new Date(),
};
const messages = [filesMessage];
const messages = [filesMessage];
if (commandsMessage) {
messages.push(commandsMessage);
if (commandsMessage) {
messages.push(commandsMessage);
}
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
}
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
} catch (error) {
console.error('Error during import:', error);
toast.error('Failed to import repository');
} finally {
setLoading(false);
}
}
};
return (
<button
onClick={onClick}
title="Clone a Git Repo"
className="px-4 py-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary hover:bg-bolt-elements-background-depth-3 transition-all flex items-center gap-2"
>
<span className="i-ph:git-branch" />
Clone a Git Repo
</button>
<>
<button
onClick={onClick}
title="Clone a Git Repo"
className="px-4 py-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary hover:bg-bolt-elements-background-depth-3 transition-all flex items-center gap-2"
>
<span className="i-ph:git-branch" />
Clone a Git Repo
</button>
{loading && <LoadingOverlay message="Please wait while we clone the repository..." />}
</>
);
}

View File

@@ -16,35 +16,40 @@ export const ImportFolderButton: React.FC<ImportFolderButtonProps> = ({ classNam
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const allFiles = Array.from(e.target.files || []);
if (allFiles.length > MAX_FILES) {
const error = new Error(`Too many files: ${allFiles.length}`);
const filteredFiles = allFiles.filter((file) => {
const path = file.webkitRelativePath.split('/').slice(1).join('/');
const include = shouldIncludeFile(path);
return include;
});
if (filteredFiles.length === 0) {
const error = new Error('No valid files found');
logStore.logError('File import failed - no valid files', error, { folderName: 'Unknown Folder' });
toast.error('No files found in the selected folder');
return;
}
if (filteredFiles.length > MAX_FILES) {
const error = new Error(`Too many files: ${filteredFiles.length}`);
logStore.logError('File import failed - too many files', error, {
fileCount: allFiles.length,
fileCount: filteredFiles.length,
maxFiles: MAX_FILES,
});
toast.error(
`This folder contains ${allFiles.length.toLocaleString()} files. This product is not yet optimized for very large projects. Please select a folder with fewer than ${MAX_FILES.toLocaleString()} files.`,
`This folder contains ${filteredFiles.length.toLocaleString()} files. This product is not yet optimized for very large projects. Please select a folder with fewer than ${MAX_FILES.toLocaleString()} files.`,
);
return;
}
const folderName = allFiles[0]?.webkitRelativePath.split('/')[0] || 'Unknown Folder';
const folderName = filteredFiles[0]?.webkitRelativePath.split('/')[0] || 'Unknown Folder';
setIsLoading(true);
const loadingToast = toast.loading(`Importing ${folderName}...`);
try {
const filteredFiles = allFiles.filter((file) => shouldIncludeFile(file.webkitRelativePath));
if (filteredFiles.length === 0) {
const error = new Error('No valid files found');
logStore.logError('File import failed - no valid files', error, { folderName });
toast.error('No files found in the selected folder');
return;
}
const fileChecks = await Promise.all(
filteredFiles.map(async (file) => ({
file,

View File

@@ -10,6 +10,7 @@ interface ModelSelectorProps {
modelList: ModelInfo[];
providerList: ProviderInfo[];
apiKeys: Record<string, string>;
modelLoading?: string;
}
export const ModelSelector = ({
@@ -19,6 +20,7 @@ export const ModelSelector = ({
setProvider,
modelList,
providerList,
modelLoading,
}: ModelSelectorProps) => {
// Load enabled providers from cookies
@@ -83,14 +85,21 @@ export const ModelSelector = ({
value={model}
onChange={(e) => setModel?.(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all lg:max-w-[70%]"
disabled={modelLoading === 'all' || modelLoading === provider?.name}
>
{[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption, index) => (
<option key={index} value={modelOption.name}>
{modelOption.label}
</option>
))}
{modelLoading == 'all' || modelLoading == provider?.name ? (
<option key={0} value="">
Loading...
</option>
) : (
[...modelList]
.filter((e) => e.provider == provider?.name && e.name)
.map((modelOption, index) => (
<option key={index} value={modelOption.name}>
{modelOption.label}
</option>
))
)}
</select>
</div>
);

View File

@@ -0,0 +1,37 @@
import React from 'react';
import type { Template } from '~/types/template';
import { STARTER_TEMPLATES } from '~/utils/constants';
interface FrameworkLinkProps {
template: Template;
}
// Icon link that kicks off a git import of the template's starter repository.
const FrameworkLink: React.FC<FrameworkLinkProps> = ({ template }) => {
  // Route through the /git import flow with the template's GitHub repo as the source.
  const importUrl = `/git?url=https://github.com/${template.githubRepo}.git`;

  return (
    <a href={importUrl} data-state="closed" data-discover="true" className="items-center justify-center ">
      <div
        className={`inline-block ${template.icon} w-8 h-8 text-4xl transition-theme opacity-25 hover:opacity-75 transition-all`}
      />
    </a>
  );
};
// Grid of starter-template links rendered below the chat prompt.
const StarterTemplates: React.FC = () => {
  // One icon link per registered starter template, keyed by template name.
  const templateLinks = STARTER_TEMPLATES.map((template) => <FrameworkLink key={template.name} template={template} />);

  return (
    <div className="flex flex-col items-center gap-4">
      <span className="text-sm text-gray-500">or start a blank app with your favorite stack</span>
      <div className="flex justify-center">
        <div className="flex w-70 flex-wrap items-center justify-center gap-4">{templateLinks}</div>
      </div>
    </div>
  );
};

export default StarterTemplates;

View File

@@ -3,6 +3,11 @@ import { toast } from 'react-toastify';
import { ImportFolderButton } from '~/components/chat/ImportFolderButton';
import { LoadProblemButton } from '~/components/chat/LoadProblemButton';
type ChatData = {
messages?: Message[]; // Standard Bolt format
description?: string; // Optional description
};
export function ImportButtons(importChat: ((description: string, messages: Message[]) => Promise<void>) | undefined) {
return (
<div className="flex flex-col items-center justify-center w-auto">
@@ -21,14 +26,17 @@ export function ImportButtons(importChat: ((description: string, messages: Messa
reader.onload = async (e) => {
try {
const content = e.target?.result as string;
const data = JSON.parse(content);
const data = JSON.parse(content) as ChatData;
if (!Array.isArray(data.messages)) {
toast.error('Invalid chat file format');
// Standard format
if (Array.isArray(data.messages)) {
await importChat(data.description || 'Imported Chat', data.messages);
toast.success('Chat imported successfully');
return;
}
await importChat(data.description, data.messages);
toast.success('Chat imported successfully');
toast.error('Invalid chat file format');
} catch (error: unknown) {
if (error instanceof Error) {
toast.error('Failed to parse chat file: ' + error.message);

View File

@@ -49,33 +49,32 @@ export function GitUrlImport() {
if (repoUrl) {
const ig = ignore().add(IGNORE_PATTERNS);
const { workdir, data } = await gitClone(repoUrl);
if (importChat) {
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
try {
const { workdir, data } = await gitClone(repoUrl);
const textDecoder = new TextDecoder('utf-8');
if (importChat) {
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
const textDecoder = new TextDecoder('utf-8');
// Convert files to common format for command detection
const fileContents = filePaths
.map((filePath) => {
const { data: content, encoding } = data[filePath];
return {
path: filePath,
content: encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
};
})
.filter((f) => f.content);
const fileContents = filePaths
.map((filePath) => {
const { data: content, encoding } = data[filePath];
return {
path: filePath,
content:
encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
};
})
.filter((f) => f.content);
// Detect and create commands message
const commands = await detectProjectCommands(fileContents);
const commandsMessage = createCommandsMessage(commands);
const commands = await detectProjectCommands(fileContents);
const commandsMessage = createCommandsMessage(commands);
// Create files message
const filesMessage: Message = {
role: 'assistant',
content: `Cloning the repo ${repoUrl} into ${workdir}
<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
const filesMessage: Message = {
role: 'assistant',
content: `Cloning the repo ${repoUrl} into ${workdir}
<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
${fileContents
.map(
(file) =>
@@ -85,17 +84,25 @@ ${file.content}
)
.join('\n')}
</boltArtifact>`,
id: generateId(),
createdAt: new Date(),
};
id: generateId(),
createdAt: new Date(),
};
const messages = [filesMessage];
const messages = [filesMessage];
if (commandsMessage) {
messages.push(commandsMessage);
if (commandsMessage) {
messages.push(commandsMessage);
}
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
}
} catch (error) {
console.error('Error during import:', error);
toast.error('Failed to import repository');
setLoading(false);
window.location.href = '/';
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
return;
}
}
};

View File

@@ -2,10 +2,10 @@ import React, { useState } from 'react';
import { useNavigate } from '@remix-run/react';
import Cookies from 'js-cookie';
import { toast } from 'react-toastify';
import { db, deleteById, getAll } from '~/lib/persistence';
import { db, deleteById, getAll, setMessages } from '~/lib/persistence';
import { logStore } from '~/lib/stores/logs';
import { classNames } from '~/utils/classNames';
import styles from '~/components/settings/Settings.module.scss';
import type { Message } from 'ai';
// List of supported providers that can have API keys
const API_KEY_PROVIDERS = [
@@ -23,10 +23,9 @@ const API_KEY_PROVIDERS = [
'Perplexity',
'Cohere',
'AzureOpenAI',
'AmazonBedrock',
] as const;
type Provider = typeof API_KEY_PROVIDERS[number];
interface ApiKeys {
[key: string]: string;
}
@@ -52,6 +51,7 @@ export default function DataTab() {
const error = new Error('Database is not available');
logStore.logError('Failed to export chats - DB unavailable', error);
toast.error('Database is not available');
return;
}
@@ -83,11 +83,13 @@ export default function DataTab() {
const error = new Error('Database is not available');
logStore.logError('Failed to delete chats - DB unavailable', error);
toast.error('Database is not available');
return;
}
try {
setIsDeleting(true);
const allChats = await getAll(db);
await Promise.all(allChats.map((chat) => deleteById(db!, chat.id)));
logStore.logSystem('All chats deleted successfully', { count: allChats.length });
@@ -125,16 +127,22 @@ export default function DataTab() {
const handleImportSettings = (event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0];
if (!file) return;
if (!file) {
return;
}
const reader = new FileReader();
reader.onload = (e) => {
try {
const settings = JSON.parse(e.target?.result as string);
Object.entries(settings).forEach(([key, value]) => {
if (key === 'bolt_theme') {
if (value) localStorage.setItem(key, value as string);
if (value) {
localStorage.setItem(key, value as string);
}
} else if (value) {
Cookies.set(key, value as string);
}
@@ -152,14 +160,14 @@ export default function DataTab() {
const handleExportApiKeyTemplate = () => {
const template: ApiKeys = {};
API_KEY_PROVIDERS.forEach(provider => {
API_KEY_PROVIDERS.forEach((provider) => {
template[`${provider}_API_KEY`] = '';
});
template['OPENAI_LIKE_API_BASE_URL'] = '';
template['LMSTUDIO_API_BASE_URL'] = '';
template['OLLAMA_API_BASE_URL'] = '';
template['TOGETHER_API_BASE_URL'] = '';
template.OPENAI_LIKE_API_BASE_URL = '';
template.LMSTUDIO_API_BASE_URL = '';
template.OLLAMA_API_BASE_URL = '';
template.TOGETHER_API_BASE_URL = '';
downloadAsJson(template, 'api-keys-template.json');
toast.success('API keys template exported successfully');
@@ -167,17 +175,22 @@ export default function DataTab() {
const handleImportApiKeys = (event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0];
if (!file) return;
if (!file) {
return;
}
const reader = new FileReader();
reader.onload = (e) => {
try {
const apiKeys = JSON.parse(e.target?.result as string);
let importedCount = 0;
const consolidatedKeys: Record<string, string> = {};
API_KEY_PROVIDERS.forEach(provider => {
API_KEY_PROVIDERS.forEach((provider) => {
const keyName = `${provider}_API_KEY`;
if (apiKeys[keyName]) {
consolidatedKeys[provider] = apiKeys[keyName];
importedCount++;
@@ -187,13 +200,14 @@ export default function DataTab() {
if (importedCount > 0) {
// Store all API keys in a single cookie as JSON
Cookies.set('apiKeys', JSON.stringify(consolidatedKeys));
// Also set individual cookies for backward compatibility
Object.entries(consolidatedKeys).forEach(([provider, key]) => {
Cookies.set(`${provider}_API_KEY`, key);
});
toast.success(`Successfully imported ${importedCount} API keys/URLs. Refreshing page to apply changes...`);
// Reload the page after a short delay to allow the toast to be seen
setTimeout(() => {
window.location.reload();
@@ -203,12 +217,13 @@ export default function DataTab() {
}
// Set base URLs if they exist
['OPENAI_LIKE_API_BASE_URL', 'LMSTUDIO_API_BASE_URL', 'OLLAMA_API_BASE_URL', 'TOGETHER_API_BASE_URL'].forEach(baseUrl => {
if (apiKeys[baseUrl]) {
Cookies.set(baseUrl, apiKeys[baseUrl]);
}
});
['OPENAI_LIKE_API_BASE_URL', 'LMSTUDIO_API_BASE_URL', 'OLLAMA_API_BASE_URL', 'TOGETHER_API_BASE_URL'].forEach(
(baseUrl) => {
if (apiKeys[baseUrl]) {
Cookies.set(baseUrl, apiKeys[baseUrl]);
}
},
);
} catch (error) {
toast.error('Failed to import API keys. Make sure the file is a valid JSON file.');
console.error('Failed to import API keys:', error);
@@ -218,6 +233,81 @@ export default function DataTab() {
event.target.value = '';
};
/**
 * Normalize imported chat JSON into a uniform list of chats ready to persist.
 *
 * Supported shapes:
 *   - Bolt standard format: a single chat object with a `messages` array
 *   - Bolt export format: `{ chats: [...] }` containing multiple chats
 *
 * Throws `Error('Unsupported chat format')` for anything else, including
 * null/primitive payloads (which previously crashed with a TypeError on
 * property access before reaching the uniform error).
 */
const processChatData = (
  data: any,
): Array<{
  id: string;
  messages: Message[];
  description: string;
  urlId?: string;
}> => {
  // Guard against null/undefined/primitive payloads so the property accesses
  // below cannot throw a raw TypeError.
  if (data && typeof data === 'object') {
    // Handle Bolt standard format (single chat)
    if (Array.isArray(data.messages)) {
      const chatId = crypto.randomUUID();

      return [
        {
          id: chatId,
          messages: data.messages,
          description: data.description || 'Imported Chat',
          urlId: chatId, // reuse the generated id as the URL slug
        },
      ];
    }

    // Handle Bolt export format (multiple chats)
    if (Array.isArray(data.chats)) {
      return data.chats.map((chat: { id?: string; messages: Message[]; description?: string; urlId?: string }) => ({
        id: chat.id || crypto.randomUUID(),
        messages: chat.messages,
        description: chat.description || 'Imported Chat',
        urlId: chat.urlId,
      }));
    }
  }

  console.error('No matching format found for:', data);
  throw new Error('Unsupported chat format');
};
// Open a file picker, parse the selected JSON file, and import the chats it
// contains into the local database, reloading the page on success.
const handleImportChats = () => {
  const picker = document.createElement('input');
  picker.type = 'file';
  picker.accept = '.json';

  picker.onchange = async (event) => {
    const file = (event.target as HTMLInputElement).files?.[0];

    // Bail out when no file was chosen or the database handle is unavailable.
    if (!file || !db) {
      toast.error('Something went wrong');
      return;
    }

    try {
      const parsed = JSON.parse(await file.text());
      const chatsToImport = processChatData(parsed);

      for (const chat of chatsToImport) {
        await setMessages(db, chat.id, chat.messages, chat.urlId, chat.description);
      }

      logStore.logSystem('Chats imported successfully', { count: chatsToImport.length });
      toast.success(`Successfully imported ${chatsToImport.length} chat${chatsToImport.length > 1 ? 's' : ''}`);

      // Reload so the imported chats appear in the sidebar immediately.
      window.location.reload();
    } catch (error) {
      if (error instanceof Error) {
        logStore.logError('Failed to import chats:', error);
        toast.error('Failed to import chats: ' + error.message);
      } else {
        toast.error('Failed to import chats');
      }

      console.error(error);
    }
  };

  picker.click();
};
return (
<div className="p-4 bg-bolt-elements-bg-depth-2 border border-bolt-elements-borderColor rounded-lg mb-4">
<div className="mb-6">
@@ -226,9 +316,7 @@ export default function DataTab() {
<div className="flex flex-col gap-4">
<div>
<h4 className="text-bolt-elements-textPrimary mb-2">Chat History</h4>
<p className="text-sm text-bolt-elements-textSecondary mb-4">
Export or delete all your chat history.
</p>
<p className="text-sm text-bolt-elements-textSecondary mb-4">Export or delete all your chat history.</p>
<div className="flex gap-4">
<button
onClick={handleExportAllChats}
@@ -236,12 +324,18 @@ export default function DataTab() {
>
Export All Chats
</button>
<button
onClick={handleImportChats}
className="px-4 py-2 bg-bolt-elements-button-primary-background hover:bg-bolt-elements-button-primary-backgroundHover text-bolt-elements-textPrimary rounded-lg transition-colors"
>
Import Chats
</button>
<button
onClick={handleDeleteAllChats}
disabled={isDeleting}
className={classNames(
'px-4 py-2 bg-bolt-elements-button-danger-background hover:bg-bolt-elements-button-danger-backgroundHover text-bolt-elements-button-danger-text rounded-lg transition-colors',
isDeleting ? 'opacity-50 cursor-not-allowed' : ''
isDeleting ? 'opacity-50 cursor-not-allowed' : '',
)}
>
{isDeleting ? 'Deleting...' : 'Delete All Chats'}
@@ -263,12 +357,7 @@ export default function DataTab() {
</button>
<label className="px-4 py-2 bg-bolt-elements-button-primary-background hover:bg-bolt-elements-button-primary-backgroundHover text-bolt-elements-textPrimary rounded-lg transition-colors cursor-pointer">
Import Settings
<input
type="file"
accept=".json"
onChange={handleImportSettings}
className="hidden"
/>
<input type="file" accept=".json" onChange={handleImportSettings} className="hidden" />
</label>
</div>
</div>
@@ -287,12 +376,7 @@ export default function DataTab() {
</button>
<label className="px-4 py-2 bg-bolt-elements-button-primary-background hover:bg-bolt-elements-button-primary-backgroundHover text-bolt-elements-textPrimary rounded-lg transition-colors cursor-pointer">
Import API Keys
<input
type="file"
accept=".json"
onChange={handleImportApiKeys}
className="hidden"
/>
<input type="file" accept=".json" onChange={handleImportApiKeys} className="hidden" />
</label>
</div>
</div>
@@ -301,4 +385,4 @@ export default function DataTab() {
</div>
</div>
);
}
}

View File

@@ -56,8 +56,25 @@ const versionTag = connitJson.version;
const GITHUB_URLS = {
original: 'https://api.github.com/repos/stackblitz-labs/bolt.diy/commits/main',
fork: 'https://api.github.com/repos/Stijnus/bolt.new-any-llm/commits/main',
commitJson: (branch: string) =>
`https://raw.githubusercontent.com/stackblitz-labs/bolt.diy/${branch}/app/commit.json`,
commitJson: async (branch: string) => {
try {
const response = await fetch(`https://api.github.com/repos/stackblitz-labs/bolt.diy/commits/${branch}`);
const data: { sha: string } = await response.json();
const packageJsonResp = await fetch(
`https://raw.githubusercontent.com/stackblitz-labs/bolt.diy/${branch}/package.json`,
);
const packageJson: { version: string } = await packageJsonResp.json();
return {
commit: data.sha.slice(0, 7),
version: packageJson.version,
};
} catch (error) {
console.log('Failed to fetch local commit info:', error);
throw new Error('Failed to fetch local commit info');
}
},
};
function getSystemInfo(): SystemInfo {
@@ -373,14 +390,9 @@ export default function DebugTab() {
const branchToCheck = isLatestBranch ? 'main' : 'stable';
console.log(`[Debug] Checking for updates against ${branchToCheck} branch`);
const localCommitResponse = await fetch(GITHUB_URLS.commitJson(branchToCheck));
const latestCommitResp = await GITHUB_URLS.commitJson(branchToCheck);
if (!localCommitResponse.ok) {
throw new Error('Failed to fetch local commit info');
}
const localCommitData = (await localCommitResponse.json()) as CommitData;
const remoteCommitHash = localCommitData.commit;
const remoteCommitHash = latestCommitResp.commit;
const currentCommitHash = versionHash;
if (remoteCommitHash !== currentCommitHash) {

View File

@@ -14,6 +14,10 @@ export default function FeaturesTab() {
enableLatestBranch,
promptId,
setPromptId,
autoSelectTemplate,
setAutoSelectTemplate,
enableContextOptimization,
contextOptimizationEnabled,
} = useSettings();
const handleToggle = (enabled: boolean) => {
@@ -33,29 +37,55 @@ export default function FeaturesTab() {
<div className="flex items-center justify-between">
<div>
<span className="text-bolt-elements-textPrimary">Use Main Branch</span>
<p className="text-sm text-bolt-elements-textSecondary">
<p className="text-xs text-bolt-elements-textTertiary">
Check for updates against the main branch instead of stable
</p>
</div>
<Switch className="ml-auto" checked={isLatestBranch} onCheckedChange={enableLatestBranch} />
</div>
<div className="flex items-center justify-between">
<div>
<span className="text-bolt-elements-textPrimary">Auto Select Code Template</span>
<p className="text-xs text-bolt-elements-textTertiary">
Let Bolt select the best starter template for your project.
</p>
</div>
<Switch className="ml-auto" checked={autoSelectTemplate} onCheckedChange={setAutoSelectTemplate} />
</div>
<div className="flex items-center justify-between">
<div>
<span className="text-bolt-elements-textPrimary">Use Context Optimization</span>
<p className="text-sm text-bolt-elements-textSecondary">
redact file contents form chat and puts the latest file contents on the system prompt
</p>
</div>
<Switch
className="ml-auto"
checked={contextOptimizationEnabled}
onCheckedChange={enableContextOptimization}
/>
</div>
</div>
</div>
<div className="mb-6 border-t border-bolt-elements-borderColor pt-4">
<h3 className="text-lg font-medium text-bolt-elements-textPrimary mb-4">Experimental Features</h3>
<p className="text-sm text-bolt-elements-textSecondary mb-4">
<p className="text-sm text-bolt-elements-textSecondary mb-10">
Disclaimer: Experimental features may be unstable and are subject to change.
</p>
<div className="flex items-center justify-between mb-2">
<span className="text-bolt-elements-textPrimary">Experimental Providers</span>
<Switch className="ml-auto" checked={isLocalModel} onCheckedChange={enableLocalModels} />
<div className="flex flex-col">
<div className="flex items-center justify-between mb-2">
<span className="text-bolt-elements-textPrimary">Experimental Providers</span>
<Switch className="ml-auto" checked={isLocalModel} onCheckedChange={enableLocalModels} />
</div>
<p className="text-xs text-bolt-elements-textTertiary mb-4">
Enable experimental providers such as Ollama, LMStudio, and OpenAILike.
</p>
</div>
<div className="flex items-start justify-between pt-4 mb-2 gap-2">
<div className="flex-1 max-w-[200px]">
<span className="text-bolt-elements-textPrimary">Prompt Library</span>
<p className="text-sm text-bolt-elements-textSecondary mb-4">
<p className="text-xs text-bolt-elements-textTertiary mb-4">
Choose a prompt from the library to use as the system prompt.
</p>
</div>

View File

@@ -6,9 +6,10 @@ import type { IProviderConfig } from '~/types/model';
import { logStore } from '~/lib/stores/logs';
// Import a default fallback icon
import DefaultIcon from '/icons/Default.svg'; // Adjust the path as necessary
import { providerBaseUrlEnvKeys } from '~/utils/constants';
const DefaultIcon = '/icons/Default.svg'; // Adjust the path as necessary
export default function ProvidersTab() {
const { providers, updateProviderSettings, isLocalModel } = useSettings();
const [filteredProviders, setFilteredProviders] = useState<IProviderConfig[]>([]);

View File

@@ -1,13 +1,31 @@
export const LoadingOverlay = ({ message = 'Loading...' }) => {
export const LoadingOverlay = ({
message = 'Loading...',
progress,
progressText,
}: {
message?: string;
progress?: number;
progressText?: string;
}) => {
return (
<div className="fixed inset-0 flex items-center justify-center bg-black/80 z-50 backdrop-blur-sm">
{/* Loading content */}
<div className="relative flex flex-col items-center gap-4 p-8 rounded-lg bg-bolt-elements-background-depth-2 shadow-lg">
<div
className={'i-svg-spinners:90-ring-with-bg text-bolt-elements-loader-progress'}
style={{ fontSize: '2rem' }}
></div>
<p className="text-lg text-bolt-elements-textTertiary">{message}</p>
{progress !== undefined && (
<div className="w-64 flex flex-col gap-2">
<div className="w-full h-2 bg-bolt-elements-background-depth-1 rounded-full overflow-hidden">
<div
className="h-full bg-bolt-elements-loader-progress transition-all duration-300 ease-out rounded-full"
style={{ width: `${Math.min(100, Math.max(0, progress))}%` }}
/>
</div>
{progressText && <p className="text-sm text-bolt-elements-textTertiary text-center">{progressText}</p>}
</div>
)}
</div>
</div>
);

View File

@@ -5,7 +5,6 @@ import { renderToReadableStream } from 'react-dom/server';
import { renderHeadToString } from 'remix-island';
import { Head } from './root';
import { themeStore } from '~/lib/stores/theme';
import { initializeModelList } from '~/utils/constants';
export default async function handleRequest(
request: Request,
@@ -14,7 +13,7 @@ export default async function handleRequest(
remixContext: EntryContext,
_loadContext: AppLoadContext,
) {
await initializeModelList({});
// await initializeModelList({});
const readable = await renderToReadableStream(<RemixServer context={remixContext} url={request.url} />, {
signal: request.signal,

View File

@@ -3,10 +3,10 @@ import { useCallback, useEffect, useState } from 'react';
import { toast } from 'react-toastify';
import {
chatId as chatIdStore,
description as descriptionStore,
db,
updateChatDescription,
description as descriptionStore,
getMessages,
updateChatDescription,
} from '~/lib/persistence';
interface EditChatDescriptionOptions {

View File

@@ -49,50 +49,68 @@ export function useGit() {
}
fileData.current = {};
await git.clone({
fs,
http,
dir: webcontainer.workdir,
url,
depth: 1,
singleBranch: true,
corsProxy: 'https://cors.isomorphic-git.org',
onAuth: (url) => {
// let domain=url.split("/")[2]
let auth = lookupSavedPassword(url);
const headers: {
[x: string]: string;
} = {
'User-Agent': 'bolt.diy',
};
if (auth) {
return auth;
}
const auth = lookupSavedPassword(url);
if (confirm('This repo is password protected. Ready to enter a username & password?')) {
auth = {
username: prompt('Enter username'),
password: prompt('Enter password'),
};
return auth;
} else {
return { cancel: true };
}
},
onAuthFailure: (url, _auth) => {
toast.error(`Error Authenticating with ${url.split('/')[2]}`);
},
onAuthSuccess: (url, auth) => {
saveGitAuth(url, auth);
},
});
const data: Record<string, { data: any; encoding?: string }> = {};
for (const [key, value] of Object.entries(fileData.current)) {
data[key] = value;
if (auth) {
headers.Authorization = `Basic ${Buffer.from(`${auth.username}:${auth.password}`).toString('base64')}`;
}
return { workdir: webcontainer.workdir, data };
try {
await git.clone({
fs,
http,
dir: webcontainer.workdir,
url,
depth: 1,
singleBranch: true,
corsProxy: '/api/git-proxy',
headers,
onAuth: (url) => {
let auth = lookupSavedPassword(url);
if (auth) {
return auth;
}
if (confirm('This repo is password protected. Ready to enter a username & password?')) {
auth = {
username: prompt('Enter username'),
password: prompt('Enter password'),
};
return auth;
} else {
return { cancel: true };
}
},
onAuthFailure: (url, _auth) => {
toast.error(`Error Authenticating with ${url.split('/')[2]}`);
},
onAuthSuccess: (url, auth) => {
saveGitAuth(url, auth);
},
});
const data: Record<string, { data: any; encoding?: string }> = {};
for (const [key, value] of Object.entries(fileData.current)) {
data[key] = value;
}
return { workdir: webcontainer.workdir, data };
} catch (error) {
console.error('Git clone error:', error);
throw error;
}
},
[webcontainer],
[webcontainer, fs, ready],
);
return { ready, gitClone };
@@ -104,55 +122,86 @@ const getFs = (
) => ({
promises: {
readFile: async (path: string, options: any) => {
const encoding = options.encoding;
const encoding = options?.encoding;
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('readFile', relativePath, encoding);
return await webcontainer.fs.readFile(relativePath, encoding);
try {
const result = await webcontainer.fs.readFile(relativePath, encoding);
return result;
} catch (error) {
throw error;
}
},
writeFile: async (path: string, data: any, options: any) => {
const encoding = options.encoding;
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('writeFile', { relativePath, data, encoding });
if (record.current) {
record.current[relativePath] = { data, encoding };
}
return await webcontainer.fs.writeFile(relativePath, data, { ...options, encoding });
try {
const result = await webcontainer.fs.writeFile(relativePath, data, { ...options, encoding });
return result;
} catch (error) {
throw error;
}
},
mkdir: async (path: string, options: any) => {
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('mkdir', relativePath, options);
return await webcontainer.fs.mkdir(relativePath, { ...options, recursive: true });
try {
const result = await webcontainer.fs.mkdir(relativePath, { ...options, recursive: true });
return result;
} catch (error) {
throw error;
}
},
readdir: async (path: string, options: any) => {
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('readdir', relativePath, options);
return await webcontainer.fs.readdir(relativePath, options);
try {
const result = await webcontainer.fs.readdir(relativePath, options);
return result;
} catch (error) {
throw error;
}
},
rm: async (path: string, options: any) => {
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('rm', relativePath, options);
return await webcontainer.fs.rm(relativePath, { ...(options || {}) });
try {
const result = await webcontainer.fs.rm(relativePath, { ...(options || {}) });
return result;
} catch (error) {
throw error;
}
},
rmdir: async (path: string, options: any) => {
const relativePath = pathUtils.relative(webcontainer.workdir, path);
console.log('rmdir', relativePath, options);
return await webcontainer.fs.rm(relativePath, { recursive: true, ...options });
try {
const result = await webcontainer.fs.rm(relativePath, { recursive: true, ...options });
return result;
} catch (error) {
throw error;
}
},
// Mock implementations for missing functions
unlink: async (path: string) => {
// unlink is just removing a single file
const relativePath = pathUtils.relative(webcontainer.workdir, path);
return await webcontainer.fs.rm(relativePath, { recursive: false });
},
try {
return await webcontainer.fs.rm(relativePath, { recursive: false });
} catch (error) {
throw error;
}
},
stat: async (path: string) => {
try {
const relativePath = pathUtils.relative(webcontainer.workdir, path);
@@ -185,23 +234,12 @@ const getFs = (
throw err;
}
},
lstat: async (path: string) => {
/*
* For basic usage, lstat can return the same as stat
* since we're not handling symbolic links
*/
return await getFs(webcontainer, record).promises.stat(path);
},
readlink: async (path: string) => {
/*
* Since WebContainer doesn't support symlinks,
* we'll throw a "not a symbolic link" error
*/
throw new Error(`EINVAL: invalid argument, readlink '${path}'`);
},
symlink: async (target: string, path: string) => {
/*
* Since WebContainer doesn't support symlinks,

View File

@@ -7,6 +7,8 @@ import {
promptStore,
providersStore,
latestBranchStore,
autoSelectStarterTemplate,
enableContextOptimizationStore,
} from '~/lib/stores/settings';
import { useCallback, useEffect, useState } from 'react';
import Cookies from 'js-cookie';
@@ -30,23 +32,19 @@ export function useSettings() {
const promptId = useStore(promptStore);
const isLocalModel = useStore(isLocalModelsEnabled);
const isLatestBranch = useStore(latestBranchStore);
const autoSelectTemplate = useStore(autoSelectStarterTemplate);
const [activeProviders, setActiveProviders] = useState<ProviderInfo[]>([]);
const contextOptimizationEnabled = useStore(enableContextOptimizationStore);
// Function to check if we're on stable version
const checkIsStableVersion = async () => {
try {
const stableResponse = await fetch(
`https://raw.githubusercontent.com/stackblitz-labs/bolt.diy/refs/tags/v${versionData.version}/app/commit.json`,
const response = await fetch(
`https://api.github.com/repos/stackblitz-labs/bolt.diy/git/refs/tags/v${versionData.version}`,
);
const data: { object: { sha: string } } = await response.json();
if (!stableResponse.ok) {
console.warn('Failed to fetch stable commit info');
return false;
}
const stableData = (await stableResponse.json()) as CommitData;
return versionData.commit === stableData.commit;
return versionData.commit.slice(0, 7) === data.object.sha.slice(0, 7);
} catch (error) {
console.warn('Error checking stable version:', error);
return false;
@@ -124,6 +122,18 @@ export function useSettings() {
} else {
latestBranchStore.set(savedLatestBranch === 'true');
}
const autoSelectTemplate = Cookies.get('autoSelectTemplate');
if (autoSelectTemplate) {
autoSelectStarterTemplate.set(autoSelectTemplate === 'true');
}
const savedContextOptimizationEnabled = Cookies.get('contextOptimizationEnabled');
if (savedContextOptimizationEnabled) {
enableContextOptimizationStore.set(savedContextOptimizationEnabled === 'true');
}
}, []);
// writing values to cookies on change
@@ -185,6 +195,18 @@ export function useSettings() {
Cookies.set('isLatestBranch', String(enabled));
}, []);
const setAutoSelectTemplate = useCallback((enabled: boolean) => {
autoSelectStarterTemplate.set(enabled);
logStore.logSystem(`Auto select template ${enabled ? 'enabled' : 'disabled'}`);
Cookies.set('autoSelectTemplate', String(enabled));
}, []);
const enableContextOptimization = useCallback((enabled: boolean) => {
enableContextOptimizationStore.set(enabled);
logStore.logSystem(`Context optimization ${enabled ? 'enabled' : 'disabled'}`);
Cookies.set('contextOptimizationEnabled', String(enabled));
}, []);
return {
providers,
activeProviders,
@@ -199,5 +221,9 @@ export function useSettings() {
setPromptId,
isLatestBranch,
enableLatestBranch,
autoSelectTemplate,
setAutoSelectTemplate,
contextOptimizationEnabled,
enableContextOptimization,
};
}

View File

@@ -8,6 +8,10 @@ export abstract class BaseProvider implements ProviderInfo {
abstract name: string;
abstract staticModels: ModelInfo[];
abstract config: ProviderConfig;
cachedDynamicModels?: {
cacheId: string;
models: ModelInfo[];
};
getApiKeyLink?: string;
labelForGetApiKey?: string;
@@ -29,7 +33,12 @@ export abstract class BaseProvider implements ProviderInfo {
}
const baseUrlKey = this.config.baseUrlKey || defaultBaseUrlKey;
let baseUrl = settingsBaseUrl || serverEnv?.[baseUrlKey] || process?.env?.[baseUrlKey] || manager.env?.[baseUrlKey];
let baseUrl =
settingsBaseUrl ||
serverEnv?.[baseUrlKey] ||
process?.env?.[baseUrlKey] ||
manager.env?.[baseUrlKey] ||
this.config.baseUrl;
if (baseUrl && baseUrl.endsWith('/')) {
baseUrl = baseUrl.slice(0, -1);
@@ -37,13 +46,61 @@ export abstract class BaseProvider implements ProviderInfo {
const apiTokenKey = this.config.apiTokenKey || defaultApiTokenKey;
const apiKey =
apiKeys?.[this.name] || serverEnv?.[apiTokenKey] || process?.env?.[apiTokenKey] || manager.env?.[baseUrlKey];
apiKeys?.[this.name] || serverEnv?.[apiTokenKey] || process?.env?.[apiTokenKey] || manager.env?.[apiTokenKey];
return {
baseUrl,
apiKey,
};
}
getModelsFromCache(options: {
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
serverEnv?: Record<string, string>;
}): ModelInfo[] | null {
if (!this.cachedDynamicModels) {
// console.log('no dynamic models',this.name);
return null;
}
const cacheKey = this.cachedDynamicModels.cacheId;
const generatedCacheKey = this.getDynamicModelsCacheKey(options);
if (cacheKey !== generatedCacheKey) {
// console.log('cache key mismatch',this.name,cacheKey,generatedCacheKey);
this.cachedDynamicModels = undefined;
return null;
}
return this.cachedDynamicModels.models;
}
getDynamicModelsCacheKey(options: {
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
serverEnv?: Record<string, string>;
}) {
return JSON.stringify({
apiKeys: options.apiKeys?.[this.name],
providerSettings: options.providerSettings?.[this.name],
serverEnv: options.serverEnv,
});
}
storeDynamicModels(
options: {
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
serverEnv?: Record<string, string>;
},
models: ModelInfo[],
) {
const cacheId = this.getDynamicModelsCacheKey(options);
// console.log('caching dynamic models',this.name,cacheId);
this.cachedDynamicModels = {
cacheId,
models,
};
}
// Declare the optional getDynamicModels method
getDynamicModels?(

View File

@@ -2,7 +2,9 @@ import type { IProviderSetting } from '~/types/model';
import { BaseProvider } from './base-provider';
import type { ModelInfo, ProviderInfo } from './types';
import * as providers from './registry';
import { createScopedLogger } from '~/utils/logger';
const logger = createScopedLogger('LLMManager');
export class LLMManager {
private static _instance: LLMManager;
private _providers: Map<string, BaseProvider> = new Map();
@@ -40,22 +42,22 @@ export class LLMManager {
try {
this.registerProvider(provider);
} catch (error: any) {
console.log('Failed To Register Provider: ', provider.name, 'error:', error.message);
logger.warn('Failed To Register Provider: ', provider.name, 'error:', error.message);
}
}
}
} catch (error) {
console.error('Error registering providers:', error);
logger.error('Error registering providers:', error);
}
}
registerProvider(provider: BaseProvider) {
if (this._providers.has(provider.name)) {
console.warn(`Provider ${provider.name} is already registered. Skipping.`);
logger.warn(`Provider ${provider.name} is already registered. Skipping.`);
return;
}
console.log('Registering Provider: ', provider.name);
logger.info('Registering Provider: ', provider.name);
this._providers.set(provider.name, provider);
this._modelList = [...this._modelList, ...provider.staticModels];
}
@@ -79,19 +81,42 @@ export class LLMManager {
}): Promise<ModelInfo[]> {
const { apiKeys, providerSettings, serverEnv } = options;
let enabledProviders = Array.from(this._providers.values()).map((p) => p.name);
if (providerSettings) {
enabledProviders = enabledProviders.filter((p) => providerSettings[p].enabled);
}
// Get dynamic models from all providers that support them
const dynamicModels = await Promise.all(
Array.from(this._providers.values())
.filter((provider) => enabledProviders.includes(provider.name))
.filter(
(provider): provider is BaseProvider & Required<Pick<ProviderInfo, 'getDynamicModels'>> =>
!!provider.getDynamicModels,
)
.map((provider) =>
provider.getDynamicModels(apiKeys, providerSettings?.[provider.name], serverEnv).catch((err) => {
console.error(`Error getting dynamic models ${provider.name} :`, err);
return [];
}),
),
.map(async (provider) => {
const cachedModels = provider.getModelsFromCache(options);
if (cachedModels) {
return cachedModels;
}
const dynamicModels = await provider
.getDynamicModels(apiKeys, providerSettings?.[provider.name], serverEnv)
.then((models) => {
logger.info(`Caching ${models.length} dynamic models for ${provider.name}`);
provider.storeDynamicModels(options, models);
return models;
})
.catch((err) => {
logger.error(`Error getting dynamic models ${provider.name} :`, err);
return [];
});
return dynamicModels;
}),
);
// Combine static and dynamic models
@@ -103,6 +128,68 @@ export class LLMManager {
return modelList;
}
getStaticModelList() {
return [...this._providers.values()].flatMap((p) => p.staticModels || []);
}
async getModelListFromProvider(
providerArg: BaseProvider,
options: {
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
serverEnv?: Record<string, string>;
},
): Promise<ModelInfo[]> {
const provider = this._providers.get(providerArg.name);
if (!provider) {
throw new Error(`Provider ${providerArg.name} not found`);
}
const staticModels = provider.staticModels || [];
if (!provider.getDynamicModels) {
return staticModels;
}
const { apiKeys, providerSettings, serverEnv } = options;
const cachedModels = provider.getModelsFromCache({
apiKeys,
providerSettings,
serverEnv,
});
if (cachedModels) {
logger.info(`Found ${cachedModels.length} cached models for ${provider.name}`);
return [...cachedModels, ...staticModels];
}
logger.info(`Getting dynamic models for ${provider.name}`);
const dynamicModels = await provider
.getDynamicModels?.(apiKeys, providerSettings?.[provider.name], serverEnv)
.then((models) => {
logger.info(`Got ${models.length} dynamic models for ${provider.name}`);
provider.storeDynamicModels(options, models);
return models;
})
.catch((err) => {
logger.error(`Error getting dynamic models ${provider.name} :`, err);
return [];
});
return [...dynamicModels, ...staticModels];
}
getStaticModelListFromProvider(providerArg: BaseProvider) {
const provider = this._providers.get(providerArg.name);
if (!provider) {
throw new Error(`Provider ${providerArg.name} not found`);
}
return [...(provider.staticModels || [])];
}
getDefaultProvider(): BaseProvider {
const firstProvider = this._providers.values().next().value;

View File

@@ -0,0 +1,113 @@
import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { LanguageModelV1 } from 'ai';
import type { IProviderSetting } from '~/types/model';
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
interface AWSBedRockConfig {
region: string;
accessKeyId: string;
secretAccessKey: string;
sessionToken?: string;
}
export default class AmazonBedrockProvider extends BaseProvider {
name = 'AmazonBedrock';
getApiKeyLink = 'https://console.aws.amazon.com/iam/home';
config = {
apiTokenKey: 'AWS_BEDROCK_CONFIG',
};
staticModels: ModelInfo[] = [
{
name: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
label: 'Claude 3.5 Sonnet (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 4096,
},
{
name: 'anthropic.claude-3-sonnet-20240229-v1:0',
label: 'Claude 3 Sonnet (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 4096,
},
{
name: 'anthropic.claude-3-haiku-20240307-v1:0',
label: 'Claude 3 Haiku (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 4096,
},
{
name: 'amazon.nova-pro-v1:0',
label: 'Amazon Nova Pro (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 5120,
},
{
name: 'amazon.nova-lite-v1:0',
label: 'Amazon Nova Lite (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 5120,
},
{
name: 'mistral.mistral-large-2402-v1:0',
label: 'Mistral Large 24.02 (Bedrock)',
provider: 'AmazonBedrock',
maxTokenAllowed: 8192,
},
];
private _parseAndValidateConfig(apiKey: string): AWSBedRockConfig {
let parsedConfig: AWSBedRockConfig;
try {
parsedConfig = JSON.parse(apiKey);
} catch {
throw new Error(
'Invalid AWS Bedrock configuration format. Please provide a valid JSON string containing region, accessKeyId, and secretAccessKey.',
);
}
const { region, accessKeyId, secretAccessKey, sessionToken } = parsedConfig;
if (!region || !accessKeyId || !secretAccessKey) {
throw new Error(
'Missing required AWS credentials. Configuration must include region, accessKeyId, and secretAccessKey.',
);
}
return {
region,
accessKeyId,
secretAccessKey,
...(sessionToken && { sessionToken }),
};
}
getModelInstance(options: {
model: string;
serverEnv: any;
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
}): LanguageModelV1 {
const { model, serverEnv, apiKeys, providerSettings } = options;
const { apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: providerSettings?.[this.name],
serverEnv: serverEnv as any,
defaultBaseUrlKey: '',
defaultApiTokenKey: 'AWS_BEDROCK_CONFIG',
});
if (!apiKey) {
throw new Error(`Missing API key for ${this.name} provider`);
}
const config = this._parseAndValidateConfig(apiKey);
const bedrock = createAmazonBedrock(config);
return bedrock(model);
}
}

View File

@@ -25,6 +25,30 @@ export default class HuggingFaceProvider extends BaseProvider {
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'codellama/CodeLlama-34b-Instruct-hf',
label: 'CodeLlama-34b-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'NousResearch/Hermes-3-Llama-3.1-8B',
label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'Qwen/Qwen2.5-72B-Instruct',
label: 'Qwen2.5-72B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'meta-llama/Llama-3.1-70B-Instruct',
label: 'Llama-3.1-70B-Instruct (HuggingFace)',
@@ -37,6 +61,24 @@ export default class HuggingFaceProvider extends BaseProvider {
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: '01-ai/Yi-1.5-34B-Chat',
label: 'Yi-1.5-34B-Chat (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'codellama/CodeLlama-34b-Instruct-hf',
label: 'CodeLlama-34b-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'NousResearch/Hermes-3-Llama-3.1-8B',
label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
];
getModelInstance(options: {

View File

@@ -0,0 +1,111 @@
import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { IProviderSetting } from '~/types/model';
import type { LanguageModelV1 } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';
export default class HyperbolicProvider extends BaseProvider {
name = 'Hyperbolic';
getApiKeyLink = 'https://app.hyperbolic.xyz/settings';
config = {
apiTokenKey: 'HYPERBOLIC_API_KEY',
};
staticModels: ModelInfo[] = [
{
name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
label: 'Qwen 2.5 Coder 32B Instruct',
provider: 'Hyperbolic',
maxTokenAllowed: 8192,
},
{
name: 'Qwen/Qwen2.5-72B-Instruct',
label: 'Qwen2.5-72B-Instruct',
provider: 'Hyperbolic',
maxTokenAllowed: 8192,
},
{
name: 'deepseek-ai/DeepSeek-V2.5',
label: 'DeepSeek-V2.5',
provider: 'Hyperbolic',
maxTokenAllowed: 8192,
},
{
name: 'Qwen/QwQ-32B-Preview',
label: 'QwQ-32B-Preview',
provider: 'Hyperbolic',
maxTokenAllowed: 8192,
},
{
name: 'Qwen/Qwen2-VL-72B-Instruct',
label: 'Qwen2-VL-72B-Instruct',
provider: 'Hyperbolic',
maxTokenAllowed: 8192,
},
];
async getDynamicModels(
apiKeys?: Record<string, string>,
settings?: IProviderSetting,
serverEnv: Record<string, string> = {},
): Promise<ModelInfo[]> {
const { baseUrl: fetchBaseUrl, apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: '',
defaultApiTokenKey: 'HYPERBOLIC_API_KEY',
});
const baseUrl = fetchBaseUrl || 'https://api.hyperbolic.xyz/v1';
if (!apiKey) {
throw `Missing Api Key configuration for ${this.name} provider`;
}
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
const data = res.data.filter((model: any) => model.object === 'model' && model.supports_chat);
return data.map((m: any) => ({
name: m.id,
label: `${m.id} - context ${m.context_length ? Math.floor(m.context_length / 1000) + 'k' : 'N/A'}`,
provider: this.name,
maxTokenAllowed: m.context_length || 8000,
}));
}
getModelInstance(options: {
model: string;
serverEnv: Env;
apiKeys?: Record<string, string>;
providerSettings?: Record<string, IProviderSetting>;
}): LanguageModelV1 {
const { model, serverEnv, apiKeys, providerSettings } = options;
const { apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: providerSettings?.[this.name],
serverEnv: serverEnv as any,
defaultBaseUrlKey: '',
defaultApiTokenKey: 'HYPERBOLIC_API_KEY',
});
if (!apiKey) {
throw `Missing Api Key configuration for ${this.name} provider`;
}
const openai = createOpenAI({
baseURL: 'https://api.hyperbolic.xyz/v1/',
apiKey,
});
return openai(model);
}
}

View File

@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
import type { IProviderSetting } from '~/types/model';
import { createOpenAI } from '@ai-sdk/openai';
import type { LanguageModelV1 } from 'ai';
import { logger } from '~/utils/logger';
export default class LMStudioProvider extends BaseProvider {
name = 'LMStudio';
@@ -12,6 +13,7 @@ export default class LMStudioProvider extends BaseProvider {
config = {
baseUrlKey: 'LMSTUDIO_API_BASE_URL',
baseUrl: 'http://localhost:1234/',
};
staticModels: ModelInfo[] = [];
@@ -21,33 +23,38 @@ export default class LMStudioProvider extends BaseProvider {
settings?: IProviderSetting,
serverEnv: Record<string, string> = {},
): Promise<ModelInfo[]> {
try {
const { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
defaultApiTokenKey: '',
});
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
defaultApiTokenKey: '',
});
if (!baseUrl) {
return [];
}
const response = await fetch(`${baseUrl}/v1/models`);
const data = (await response.json()) as { data: Array<{ id: string }> };
return data.data.map((model) => ({
name: model.id,
label: model.id,
provider: this.name,
maxTokenAllowed: 8000,
}));
} catch (error: any) {
console.log('Error getting LMStudio models:', error.message);
return [];
if (!baseUrl) {
throw new Error('No baseUrl found for LMStudio provider');
}
if (typeof window === 'undefined') {
/*
* Running in Server
* Backend: Check if we're running in Docker
*/
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
}
const response = await fetch(`${baseUrl}/v1/models`);
const data = (await response.json()) as { data: Array<{ id: string }> };
return data.data.map((model) => ({
name: model.id,
label: model.id,
provider: this.name,
maxTokenAllowed: 8000,
}));
}
getModelInstance: (options: {
model: string;
@@ -56,13 +63,26 @@ export default class LMStudioProvider extends BaseProvider {
providerSettings?: Record<string, IProviderSetting>;
}) => LanguageModelV1 = (options) => {
const { apiKeys, providerSettings, serverEnv, model } = options;
const { baseUrl } = this.getProviderBaseUrlAndKey({
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings,
providerSettings: providerSettings?.[this.name],
serverEnv: serverEnv as any,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
defaultApiTokenKey: '',
});
if (!baseUrl) {
throw new Error('No baseUrl found for LMStudio provider');
}
if (typeof window === 'undefined') {
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
}
logger.debug('LMStudio Base Url used: ', baseUrl);
const lmstudio = createOpenAI({
baseUrl: `${baseUrl}/v1`,
apiKey: '',

View File

@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
import type { IProviderSetting } from '~/types/model';
import type { LanguageModelV1 } from 'ai';
import { ollama } from 'ollama-ai-provider';
import { logger } from '~/utils/logger';
interface OllamaModelDetails {
parent_model: string;
@@ -45,34 +46,40 @@ export default class OllamaProvider extends BaseProvider {
settings?: IProviderSetting,
serverEnv: Record<string, string> = {},
): Promise<ModelInfo[]> {
try {
const { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultApiTokenKey: '',
});
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultApiTokenKey: '',
});
if (!baseUrl) {
return [];
}
const response = await fetch(`${baseUrl}/api/tags`);
const data = (await response.json()) as OllamaApiResponse;
// console.log({ ollamamodels: data.models });
return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
provider: this.name,
maxTokenAllowed: 8000,
}));
} catch (e) {
console.error('Failed to get Ollama models:', e);
return [];
if (!baseUrl) {
throw new Error('No baseUrl found for OLLAMA provider');
}
if (typeof window === 'undefined') {
/*
* Running in Server
* Backend: Check if we're running in Docker
*/
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
}
const response = await fetch(`${baseUrl}/api/tags`);
const data = (await response.json()) as OllamaApiResponse;
// console.log({ ollamamodels: data.models });
return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
provider: this.name,
maxTokenAllowed: 8000,
}));
}
getModelInstance: (options: {
model: string;
@@ -83,18 +90,23 @@ export default class OllamaProvider extends BaseProvider {
const { apiKeys, providerSettings, serverEnv, model } = options;
let { baseUrl } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings,
providerSettings: providerSettings?.[this.name],
serverEnv: serverEnv as any,
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
defaultApiTokenKey: '',
});
// Backend: Check if we're running in Docker
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
if (!baseUrl) {
throw new Error('No baseUrl found for OLLAMA provider');
}
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
logger.debug('Ollama Base Url used: ', baseUrl);
const ollamaInstance = ollama(model, {
numCtx: DEFAULT_NUM_CTX,
}) as LanguageModelV1 & { config: any };

View File

@@ -27,7 +27,6 @@ export default class OpenRouterProvider extends BaseProvider {
};
staticModels: ModelInfo[] = [
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
{
name: 'anthropic/claude-3.5-sonnet',
label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',

View File

@@ -19,37 +19,32 @@ export default class OpenAILikeProvider extends BaseProvider {
settings?: IProviderSetting,
serverEnv: Record<string, string> = {},
): Promise<ModelInfo[]> {
try {
const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'OPENAI_LIKE_API_BASE_URL',
defaultApiTokenKey: 'OPENAI_LIKE_API_KEY',
});
const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'OPENAI_LIKE_API_BASE_URL',
defaultApiTokenKey: 'OPENAI_LIKE_API_KEY',
});
if (!baseUrl || !apiKey) {
return [];
}
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: this.name,
maxTokenAllowed: 8000,
}));
} catch (error) {
console.error('Error getting OpenAILike models:', error);
if (!baseUrl || !apiKey) {
return [];
}
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: this.name,
maxTokenAllowed: 8000,
}));
}
getModelInstance(options: {

View File

@@ -13,6 +13,7 @@ export default class OpenAIProvider extends BaseProvider {
};
staticModels: ModelInfo[] = [
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },

View File

@@ -38,41 +38,36 @@ export default class TogetherProvider extends BaseProvider {
settings?: IProviderSetting,
serverEnv: Record<string, string> = {},
): Promise<ModelInfo[]> {
try {
const { baseUrl: fetchBaseUrl, apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
defaultApiTokenKey: 'TOGETHER_API_KEY',
});
const baseUrl = fetchBaseUrl || 'https://api.together.xyz/v1';
const { baseUrl: fetchBaseUrl, apiKey } = this.getProviderBaseUrlAndKey({
apiKeys,
providerSettings: settings,
serverEnv,
defaultBaseUrlKey: 'TOGETHER_API_BASE_URL',
defaultApiTokenKey: 'TOGETHER_API_KEY',
});
const baseUrl = fetchBaseUrl || 'https://api.together.xyz/v1';
if (!baseUrl || !apiKey) {
return [];
}
// console.log({ baseUrl, apiKey });
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
const data = (res || []).filter((model: any) => model.type === 'chat');
return data.map((m: any) => ({
name: m.id,
label: `${m.display_name} - in:$${m.pricing.input.toFixed(2)} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
provider: this.name,
maxTokenAllowed: 8000,
}));
} catch (error: any) {
console.error('Error getting Together models:', error.message);
if (!baseUrl || !apiKey) {
return [];
}
// console.log({ baseUrl, apiKey });
const response = await fetch(`${baseUrl}/models`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
const res = (await response.json()) as any;
const data = (res || []).filter((model: any) => model.type === 'chat');
return data.map((m: any) => ({
name: m.id,
label: `${m.display_name} - in:$${m.pricing.input.toFixed(2)} out:$${m.pricing.output.toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
provider: this.name,
maxTokenAllowed: 8000,
}));
}
getModelInstance(options: {

View File

@@ -13,6 +13,8 @@ import OpenAIProvider from './providers/openai';
import PerplexityProvider from './providers/perplexity';
import TogetherProvider from './providers/together';
import XAIProvider from './providers/xai';
import HyperbolicProvider from './providers/hyperbolic';
import AmazonBedrockProvider from './providers/amazon-bedrock';
export {
AnthropicProvider,
@@ -21,6 +23,7 @@ export {
GoogleProvider,
GroqProvider,
HuggingFaceProvider,
HyperbolicProvider,
MistralProvider,
OllamaProvider,
OpenAIProvider,
@@ -30,4 +33,5 @@ export {
XAIProvider,
TogetherProvider,
LMStudioProvider,
AmazonBedrockProvider,
};

View File

@@ -28,5 +28,6 @@ export interface ProviderInfo {
}
export interface ProviderConfig {
baseUrlKey?: string;
baseUrl?: string;
apiTokenKey?: string;
}

View File

@@ -1,7 +1,7 @@
import { WebContainer } from '@webcontainer/api';
import { atom, map, type MapStore } from 'nanostores';
import * as nodePath from 'node:path';
import type { BoltAction } from '~/types/actions';
import type { ActionAlert, BoltAction } from '~/types/actions';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';
import type { ActionCallbackData } from './message-parser';
@@ -35,16 +35,51 @@ export type ActionStateUpdate =
type ActionsMap = MapStore<Record<string, ActionState>>;
/**
 * Error raised when a shell command executed by the ActionRunner exits with a
 * non-zero code. Carries the short failure header and the captured terminal
 * output separately so callers can surface both to the user.
 */
class ActionCommandError extends Error {
  readonly _output: string;
  readonly _header: string;

  constructor(message: string, output: string) {
    // The Error message combines the header and the captured output.
    super(`Failed To Execute Shell Command: ${message}\n\nOutput:\n${output}`);

    this._header = message;
    this._output = output;

    // Restore the prototype chain so `instanceof ActionCommandError`
    // keeps working when built-ins are subclassed under transpilation.
    Object.setPrototypeOf(this, ActionCommandError.prototype);

    // Named for clearer stack traces and logs.
    this.name = 'ActionCommandError';
  }

  /** Captured terminal output of the failed command. */
  get output() {
    return this._output;
  }

  /** Short, human-readable description of the failure. */
  get header() {
    return this._header;
  }
}
export class ActionRunner {
#webcontainer: Promise<WebContainer>;
#currentExecutionPromise: Promise<void> = Promise.resolve();
#shellTerminal: () => BoltShell;
runnerId = atom<string>(`${Date.now()}`);
actions: ActionsMap = map({});
onAlert?: (alert: ActionAlert) => void;
constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
constructor(
webcontainerPromise: Promise<WebContainer>,
getShellTerminal: () => BoltShell,
onAlert?: (alert: ActionAlert) => void,
) {
this.#webcontainer = webcontainerPromise;
this.#shellTerminal = getShellTerminal;
this.onAlert = onAlert;
}
addAction(data: ActionCallbackData) {
@@ -127,7 +162,25 @@ export class ActionRunner {
this.#runStartAction(action)
.then(() => this.#updateAction(actionId, { status: 'complete' }))
.catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }));
.catch((err: Error) => {
if (action.abortSignal.aborted) {
return;
}
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
logger.error(`[${action.type}]:Action failed\n\n`, err);
if (!(err instanceof ActionCommandError)) {
return;
}
this.onAlert?.({
type: 'error',
title: 'Dev Server Failed',
description: err.header,
content: err.output,
});
});
/*
* adding a delay to avoid any race condition between 2 start actions
@@ -143,9 +196,24 @@ export class ActionRunner {
status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete',
});
} catch (error) {
if (action.abortSignal.aborted) {
return;
}
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
logger.error(`[${action.type}]:Action failed\n\n`, error);
if (!(error instanceof ActionCommandError)) {
return;
}
this.onAlert?.({
type: 'error',
title: 'Dev Server Failed',
description: error.header,
content: error.output,
});
// re-throw the error to be caught in the promise chain
throw error;
}
@@ -163,11 +231,14 @@ export class ActionRunner {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content);
const resp = await shell.executeCommand(this.runnerId.get(), action.content, () => {
logger.debug(`[${action.type}]:Aborting Action\n\n`, action);
action.abort();
});
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) {
throw new Error('Failed To Execute Shell Command');
throw new ActionCommandError(`Failed To Execute Shell Command`, resp?.output || 'No Output Available');
}
}
@@ -187,11 +258,14 @@ export class ActionRunner {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content);
const resp = await shell.executeCommand(this.runnerId.get(), action.content, () => {
logger.debug(`[${action.type}]:Aborting Action\n\n`, action);
action.abort();
});
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) {
throw new Error('Failed To Start Application');
throw new ActionCommandError('Failed To Start Application', resp?.output || 'No Output Available');
}
return resp;

View File

@@ -52,6 +52,18 @@ interface MessageState {
actionId: number;
}
/**
 * Strips a single wrapping markdown fenced code block (``` ... ```) from
 * `content`, returning only the inner text. Content that is not wrapped in a
 * fence is returned unchanged.
 */
function cleanoutMarkdownSyntax(content: string) {
  // Matches an optional language tag after the opening fence and captures
  // everything between the opening and closing fence lines.
  const fenced = content.match(/^\s*```\w*\n([\s\S]*?)\n\s*```\s*$/);

  return fenced ? fenced[1] : content;
}
export class StreamingMessageParser {
#messages = new Map<string, MessageState>();
@@ -95,6 +107,11 @@ export class StreamingMessageParser {
let content = currentAction.content.trim();
if ('type' in currentAction && currentAction.type === 'file') {
// Remove markdown code block syntax if present and file is not markdown
if (!currentAction.filePath.endsWith('.md')) {
content = cleanoutMarkdownSyntax(content);
}
content += '\n';
}
@@ -120,7 +137,11 @@ export class StreamingMessageParser {
i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
} else {
if ('type' in currentAction && currentAction.type === 'file') {
const content = input.slice(i);
let content = input.slice(i);
if (!currentAction.filePath.endsWith('.md')) {
content = cleanoutMarkdownSyntax(content);
}
this._options.callbacks?.onActionStream?.({
artifactId: currentArtifact.id,

View File

@@ -39,6 +39,9 @@ PROVIDER_LIST.forEach((provider) => {
},
};
});
//TODO: need to create one single map for all these flags
export const providersStore = map<ProviderSetting>(initialProviderSettings);
export const isDebugMode = atom(false);
@@ -50,3 +53,6 @@ export const isLocalModelsEnabled = atom(true);
export const promptStore = atom<string>('default');
export const latestBranchStore = atom(false);
export const autoSelectStarterTemplate = atom(false);
export const enableContextOptimizationStore = atom(false);

View File

@@ -19,6 +19,7 @@ import Cookies from 'js-cookie';
import { createSampler } from '~/utils/sampler';
import { removeRecordingMessageHandler } from '~/lib/replay/Recording';
import { uint8ArrayToBase64 } from '../replay/ReplayProtocolClient';
import type { ActionAlert } from '~/types/actions';
export interface ArtifactState {
id: string;
@@ -40,11 +41,15 @@ export class WorkbenchStore {
#editorStore = new EditorStore(this.#filesStore);
#terminalStore = new TerminalStore(webcontainer);
#reloadedMessages = new Set<string>();
artifacts: Artifacts = import.meta.hot?.data.artifacts ?? map({});
showWorkbench: WritableAtom<boolean> = import.meta.hot?.data.showWorkbench ?? atom(false);
currentView: WritableAtom<WorkbenchViewType> = import.meta.hot?.data.currentView ?? atom('code');
unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
actionAlert: WritableAtom<ActionAlert | undefined> =
import.meta.hot?.data.unsavedFiles ?? atom<ActionAlert | undefined>(undefined);
modifiedFiles = new Set<string>();
artifactIdList: string[] = [];
#globalExecutionQueue = Promise.resolve();
@@ -54,6 +59,7 @@ export class WorkbenchStore {
import.meta.hot.data.unsavedFiles = this.unsavedFiles;
import.meta.hot.data.showWorkbench = this.showWorkbench;
import.meta.hot.data.currentView = this.currentView;
import.meta.hot.data.actionAlert = this.actionAlert;
}
}
@@ -91,6 +97,12 @@ export class WorkbenchStore {
get boltTerminal() {
return this.#terminalStore.boltTerminal;
}
get alert() {
return this.actionAlert;
}
clearAlert() {
this.actionAlert.set(undefined);
}
toggleTerminal(value?: boolean) {
this.#terminalStore.toggleTerminal(value);
@@ -239,6 +251,10 @@ export class WorkbenchStore {
// TODO: what do we wanna do and how do we wanna recover from this?
}
setReloadedMessages(messages: string[]) {
this.#reloadedMessages = new Set(messages);
}
addArtifact({ messageId, title, id, type }: ArtifactCallbackData) {
const artifact = this.#getArtifact(messageId);
@@ -255,7 +271,17 @@ export class WorkbenchStore {
title,
closed: false,
type,
runner: new ActionRunner(webcontainer, () => this.boltTerminal),
runner: new ActionRunner(
webcontainer,
() => this.boltTerminal,
(alert) => {
if (this.#reloadedMessages.has(messageId)) {
return;
}
this.actionAlert.set(alert);
},
),
});
}

View File

@@ -1,5 +1,6 @@
import { WebContainer } from '@webcontainer/api';
import { WORK_DIR_NAME } from '~/utils/constants';
import { cleanStackTrace } from '~/utils/stacktrace';
interface WebContainerContext {
loaded: boolean;
@@ -22,10 +23,34 @@ if (!import.meta.env.SSR) {
import.meta.hot?.data.webcontainer ??
Promise.resolve()
.then(() => {
return WebContainer.boot({ workdirName: WORK_DIR_NAME });
return WebContainer.boot({
coep: 'credentialless',
workdirName: WORK_DIR_NAME,
forwardPreviewErrors: true, // Enable error forwarding from iframes
});
})
.then((webcontainer) => {
.then(async (webcontainer) => {
webcontainerContext.loaded = true;
const { workbenchStore } = await import('~/lib/stores/workbench');
// Listen for preview errors
webcontainer.on('preview-message', (message) => {
console.log('WebContainer preview message:', message);
// Handle both uncaught exceptions and unhandled promise rejections
if (message.type === 'PREVIEW_UNCAUGHT_EXCEPTION' || message.type === 'PREVIEW_UNHANDLED_REJECTION') {
const isPromise = message.type === 'PREVIEW_UNHANDLED_REJECTION';
workbenchStore.actionAlert.set({
type: 'preview',
title: isPromise ? 'Unhandled Promise Rejection' : 'Uncaught Exception',
description: message.message,
content: `Error occurred at ${message.pathname}${message.search}${message.hash}\nPort: ${message.port}\n\nStack trace:\n${cleanStackTrace(message.stack || '')}`,
source: 'preview',
});
}
});
return webcontainer;
});

View File

@@ -0,0 +1,16 @@
import type { LoaderFunction } from '@remix-run/cloudflare';
import { providerBaseUrlEnvKeys } from '~/utils/constants';
/**
 * GET /api/check-env-key?provider=<name>
 *
 * Reports whether the API-key environment variable for the given provider is
 * set, without leaking its value. Responds with `{ isSet: boolean }`.
 */
export const loader: LoaderFunction = async ({ context, request }) => {
  const url = new URL(request.url);
  const provider = url.searchParams.get('provider');

  // Guard with optional chaining: an unknown provider name must yield
  // `isSet: false` instead of throwing a TypeError on the missing map entry.
  const envVarName = provider ? providerBaseUrlEnvKeys[provider]?.apiTokenKey : undefined;

  if (!envVarName) {
    return Response.json({ isSet: false });
  }

  // Check both the node process env and the Cloudflare worker env binding.
  const isSet = !!(process.env[envVarName] || (context?.cloudflare?.env as Record<string, any>)?.[envVarName]);

  return Response.json({ isSet });
};

View File

@@ -107,7 +107,10 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
return new Response(result.textStream, {
status: 200,
headers: {
'Content-Type': 'text/plain; charset=utf-8',
'Content-Type': 'text/event-stream',
Connection: 'keep-alive',
'Cache-Control': 'no-cache',
'Text-Encoding': 'chunked',
},
});
} catch (error: unknown) {

View File

@@ -0,0 +1,65 @@
import { json } from '@remix-run/cloudflare';
import type { ActionFunctionArgs, LoaderFunctionArgs } from '@remix-run/cloudflare';
// Handle all HTTP methods
// Remix action: forwards every non-GET method (POST/PUT/DELETE/...) through
// the proxy; `params['*']` is the splat segment naming the target host+path.
export async function action({ request, params }: ActionFunctionArgs) {
  return handleProxyRequest(request, params['*']);
}
// Remix loader: forwards GET/HEAD requests through the same proxy handler.
export async function loader({ request, params }: LoaderFunctionArgs) {
  return handleProxyRequest(request, params['*']);
}
/**
 * Proxies a request to `https://<path><search>`, forwarding the method,
 * headers and body, and decorating the response with permissive CORS headers.
 *
 * @param request The incoming request to forward.
 * @param path    Splat route segment identifying the target host and path.
 * @returns The upstream response with CORS headers applied, or a JSON error.
 */
async function handleProxyRequest(request: Request, path: string | undefined) {
  try {
    if (!path) {
      return json({ error: 'Invalid proxy URL format' }, { status: 400 });
    }

    // Create response with CORS headers
    const corsHeaders = {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
      'Access-Control-Allow-Headers': '*',
    };

    /*
     * Answer CORS preflight locally, BEFORE contacting the target — the
     * previous flow forwarded the OPTIONS request upstream and then threw
     * the upstream response away, wasting a round trip.
     */
    if (request.method === 'OPTIONS') {
      return new Response(null, {
        headers: corsHeaders,
        status: 204,
      });
    }

    const url = new URL(request.url);

    // Reconstruct the target URL from the splat path plus the query string.
    const targetURL = `https://${path}${url.search}`;

    // Forward the request to the target URL
    const response = await fetch(targetURL, {
      method: request.method,
      headers: {
        ...Object.fromEntries(request.headers),
        // Override host header with the target host
        host: new URL(targetURL).host,
      },
      // GET/HEAD requests must not carry a body.
      body: ['GET', 'HEAD'].includes(request.method) ? null : await request.arrayBuffer(),
    });

    // Forward the response with CORS headers
    const responseHeaders = new Headers(response.headers);
    Object.entries(corsHeaders).forEach(([key, value]) => {
      responseHeaders.set(key, value);
    });

    return new Response(response.body, {
      status: response.status,
      headers: responseHeaders,
    });
  } catch (error) {
    console.error('Git proxy error:', error);
    return json({ error: 'Proxy error' }, { status: 500 });
  }
}

163
app/routes/api.llmcall.ts Normal file
View File

@@ -0,0 +1,163 @@
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
//import { StreamingTextResponse, parseStreamPart } from 'ai';
import { streamText } from '~/lib/.server/llm/stream-text';
import type { IProviderSetting, ProviderInfo } from '~/types/model';
import { generateText } from 'ai';
import { getModelList, PROVIDER_LIST } from '~/utils/constants';
import { MAX_TOKENS } from '~/lib/.server/llm/constants';
// Route entry point: every request to this endpoint is delegated to
// llmCallAction below.
export async function action(args: ActionFunctionArgs) {
  return llmCallAction(args);
}
/**
 * Parses a `Cookie` request header into a name → value map.
 *
 * Names and values are URI-decoded, and a value may itself contain `=`
 * characters (only the first `=` separates name from value).
 *
 * @param cookieHeader Raw value of the `Cookie` header.
 * @returns Map of decoded cookie names to decoded values.
 */
function parseCookies(cookieHeader: string): Record<string, string> {
  const cookies: Record<string, string> = {};

  // Split the cookie string by semicolons and trim surrounding whitespace.
  const items = cookieHeader.split(';').map((cookie) => cookie.trim());

  items.forEach((item) => {
    const [name, ...rest] = item.split('=');

    /*
     * `rest` is always an array (truthy even when empty), so the check must
     * be on its length — a bare token with no '=' (e.g. "flag") is not a
     * valid name/value pair and is skipped.
     */
    if (name && rest.length > 0) {
      // Decode the name and value, and join value parts in case it contains '='
      const decodedName = decodeURIComponent(name.trim());
      const decodedValue = decodeURIComponent(rest.join('=').trim());
      cookies[decodedName] = decodedValue;
    }
  });

  return cookies;
}
/**
 * Handles a single LLM call for the given model/provider pair.
 *
 * Reads API keys and provider settings from the request cookies, then either
 * streams the model output (`streamOutput: true`) as an event stream or runs
 * a one-shot `generateText` call and returns the full result as JSON.
 *
 * Throws `Response` objects (Remix convention) for validation errors (400),
 * missing/invalid API keys (401), and unexpected failures (500).
 */
async function llmCallAction({ context, request }: ActionFunctionArgs) {
  const { system, message, model, provider, streamOutput } = await request.json<{
    system: string;
    message: string;
    model: string;
    provider: ProviderInfo;
    streamOutput?: boolean;
  }>();

  const { name: providerName } = provider;

  // validate 'model' and 'provider' fields
  if (!model || typeof model !== 'string') {
    throw new Response('Invalid or missing model', {
      status: 400,
      statusText: 'Bad Request',
    });
  }

  if (!providerName || typeof providerName !== 'string') {
    throw new Response('Invalid or missing provider', {
      status: 400,
      statusText: 'Bad Request',
    });
  }

  const cookieHeader = request.headers.get('Cookie');

  // Parse the cookie's value (returns an object or null if no cookie exists)
  const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
  const providerSettings: Record<string, IProviderSetting> = JSON.parse(
    parseCookies(cookieHeader || '').providers || '{}',
  );

  if (streamOutput) {
    // Streaming path: pipe the model's token stream straight to the client.
    try {
      const result = await streamText({
        options: {
          system,
        },
        messages: [
          {
            role: 'user',
            content: `${message}`,
          },
        ],
        env: context.cloudflare.env,
        apiKeys,
        providerSettings,
      });

      return new Response(result.textStream, {
        status: 200,
        headers: {
          'Content-Type': 'text/plain; charset=utf-8',
        },
      });
    } catch (error: unknown) {
      console.log(error);

      // Map API-key problems to 401 so the client can prompt for a key.
      if (error instanceof Error && error.message?.includes('API key')) {
        throw new Response('Invalid or missing API key', {
          status: 401,
          statusText: 'Unauthorized',
        });
      }

      throw new Response(null, {
        status: 500,
        statusText: 'Internal Server Error',
      });
    }
  } else {
    // Non-streaming path: resolve the model, run one generateText call, and
    // return the complete result object as JSON.
    try {
      const MODEL_LIST = await getModelList({ apiKeys, providerSettings, serverEnv: context.cloudflare.env as any });
      const modelDetails = MODEL_LIST.find((m) => m.name === model);

      if (!modelDetails) {
        throw new Error('Model not found');
      }

      // Respect the model's own token ceiling when it declares one.
      const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;

      const providerInfo = PROVIDER_LIST.find((p) => p.name === provider.name);

      if (!providerInfo) {
        throw new Error('Provider not found');
      }

      const result = await generateText({
        system,
        messages: [
          {
            role: 'user',
            content: `${message}`,
          },
        ],
        model: providerInfo.getModelInstance({
          model: modelDetails.name,
          serverEnv: context.cloudflare.env as any,
          apiKeys,
          providerSettings,
        }),
        maxTokens: dynamicMaxTokens,
        toolChoice: 'none',
      });

      return new Response(JSON.stringify(result), {
        status: 200,
        headers: {
          'Content-Type': 'application/json',
        },
      });
    } catch (error: unknown) {
      console.log(error);

      // Map API-key problems to 401 so the client can prompt for a key.
      if (error instanceof Error && error.message?.includes('API key')) {
        throw new Response('Invalid or missing API key', {
          status: 401,
          statusText: 'Unauthorized',
        });
      }

      throw new Response(null, {
        status: 500,
        statusText: 'Internal Server Error',
      });
    }
  }
}

View File

@@ -20,3 +20,11 @@ export interface StartAction extends BaseAction {
export type BoltAction = FileAction | ShellAction | StartAction;
export type BoltActionData = BoltAction | BaseAction;
/**
 * Error alert raised while running actions, surfaced to the user in the UI.
 */
export interface ActionAlert {
  // Alert category; values seen in this codebase are 'error' and 'preview'.
  type: string;
  // Short heading shown in the alert (e.g. 'Dev Server Failed').
  title: string;
  // One-line human-readable summary of what failed.
  description: string;
  // Detailed payload, e.g. captured terminal output or a stack trace.
  content: string;
  source?: 'terminal' | 'preview'; // Add source to differentiate between terminal and preview errors
}

8
app/types/template.ts Normal file
View File

@@ -0,0 +1,8 @@
/**
 * Metadata describing a starter project template that can be cloned from
 * GitHub to bootstrap a new workspace.
 */
export interface Template {
  // Unique identifier of the template (e.g. 'bolt-astro-basic').
  name: string;
  // Human-readable name shown in the template picker.
  label: string;
  // Short description of what the template provides.
  description: string;
  // GitHub repository hosting the template — typically 'owner/repo' form.
  githubRepo: string;
  // Keywords used for matching/filtering templates.
  tags?: string[];
  // Icon class for the template (e.g. 'i-bolt:astro').
  icon?: string;
}

View File

@@ -2,6 +2,7 @@ import type { IProviderSetting } from '~/types/model';
import { LLMManager } from '~/lib/modules/llm/manager';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { Template } from '~/types/template';
export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
@@ -18,312 +19,6 @@ export const DEFAULT_PROVIDER = llmManager.getDefaultProvider();
let MODEL_LIST = llmManager.getModelList();
/*
*const PROVIDER_LIST_OLD: ProviderInfo[] = [
* {
* name: 'Anthropic',
* staticModels: [
* {
* name: 'claude-3-5-sonnet-latest',
* label: 'Claude 3.5 Sonnet (new)',
* provider: 'Anthropic',
* maxTokenAllowed: 8000,
* },
* {
* name: 'claude-3-5-sonnet-20240620',
* label: 'Claude 3.5 Sonnet (old)',
* provider: 'Anthropic',
* maxTokenAllowed: 8000,
* },
* {
* name: 'claude-3-5-haiku-latest',
* label: 'Claude 3.5 Haiku (new)',
* provider: 'Anthropic',
* maxTokenAllowed: 8000,
* },
* { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
* { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
* { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
* ],
* getApiKeyLink: 'https://console.anthropic.com/settings/keys',
* },
* {
* name: 'Ollama',
* staticModels: [],
* getDynamicModels: getOllamaModels,
* getApiKeyLink: 'https://ollama.com/download',
* labelForGetApiKey: 'Download Ollama',
* icon: 'i-ph:cloud-arrow-down',
* },
* {
* name: 'OpenAILike',
* staticModels: [],
* getDynamicModels: getOpenAILikeModels,
* },
* {
* name: 'Cohere',
* staticModels: [
* { name: 'command-r-plus-08-2024', label: 'Command R plus Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-r-08-2024', label: 'Command R Latest', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-r-plus', label: 'Command R plus', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-r', label: 'Command R', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command', label: 'Command', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-nightly', label: 'Command Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-light', label: 'Command Light', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'command-light-nightly', label: 'Command Light Nightly', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
* { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
* ],
* getApiKeyLink: 'https://dashboard.cohere.com/api-keys',
* },
* {
* name: 'OpenRouter',
* staticModels: [
* { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI', maxTokenAllowed: 8000 },
* {
* name: 'anthropic/claude-3.5-sonnet',
* label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* {
* name: 'anthropic/claude-3-haiku',
* label: 'Anthropic: Claude 3 Haiku (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* {
* name: 'deepseek/deepseek-coder',
* label: 'Deepseek-Coder V2 236B (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* {
* name: 'google/gemini-flash-1.5',
* label: 'Google Gemini Flash 1.5 (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* {
* name: 'google/gemini-pro-1.5',
* label: 'Google Gemini Pro 1.5 (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
* {
* name: 'mistralai/mistral-nemo',
* label: 'OpenRouter Mistral Nemo (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* {
* name: 'qwen/qwen-110b-chat',
* label: 'OpenRouter Qwen 110b Chat (OpenRouter)',
* provider: 'OpenRouter',
* maxTokenAllowed: 8000,
* },
* { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 },
* ],
* getDynamicModels: getOpenRouterModels,
* getApiKeyLink: 'https://openrouter.ai/settings/keys',
* },
* {
* name: 'Google',
* staticModels: [
* { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-2.0-flash-exp', label: 'Gemini 2.0 Flash', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-1.5-flash-002', label: 'Gemini 1.5 Flash-002', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
* { name: 'gemini-exp-1206', label: 'Gemini exp-1206', provider: 'Google', maxTokenAllowed: 8192 },
* ],
* getApiKeyLink: 'https://aistudio.google.com/app/apikey',
* },
* {
* name: 'Groq',
* staticModels: [
* { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* { name: 'llama-3.2-90b-vision-preview', label: 'Llama 3.2 90b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* { name: 'llama-3.3-70b-versatile', label: 'Llama 3.3 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
* ],
* getApiKeyLink: 'https://console.groq.com/keys',
* },
* {
* name: 'HuggingFace',
* staticModels: [
* {
* name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
* label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: '01-ai/Yi-1.5-34B-Chat',
* label: 'Yi-1.5-34B-Chat (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'codellama/CodeLlama-34b-Instruct-hf',
* label: 'CodeLlama-34b-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'NousResearch/Hermes-3-Llama-3.1-8B',
* label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
* label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'Qwen/Qwen2.5-72B-Instruct',
* label: 'Qwen2.5-72B-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'meta-llama/Llama-3.1-70B-Instruct',
* label: 'Llama-3.1-70B-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'meta-llama/Llama-3.1-405B',
* label: 'Llama-3.1-405B (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: '01-ai/Yi-1.5-34B-Chat',
* label: 'Yi-1.5-34B-Chat (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'codellama/CodeLlama-34b-Instruct-hf',
* label: 'CodeLlama-34b-Instruct (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* {
* name: 'NousResearch/Hermes-3-Llama-3.1-8B',
* label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
* provider: 'HuggingFace',
* maxTokenAllowed: 8000,
* },
* ],
* getApiKeyLink: 'https://huggingface.co/settings/tokens',
* },
* {
* name: 'OpenAI',
* staticModels: [
* { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
* { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
* { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
* { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
* ],
* getApiKeyLink: 'https://platform.openai.com/api-keys',
* },
* {
* name: 'xAI',
* staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }],
* getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key',
* },
* {
* name: 'Deepseek',
* staticModels: [
* { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
* { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
* ],
* getApiKeyLink: 'https://platform.deepseek.com/apiKeys',
* },
* {
* name: 'Mistral',
* staticModels: [
* { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
* { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
* ],
* getApiKeyLink: 'https://console.mistral.ai/api-keys/',
* },
* {
* name: 'LMStudio',
* staticModels: [],
* getDynamicModels: getLMStudioModels,
* getApiKeyLink: 'https://lmstudio.ai/',
* labelForGetApiKey: 'Get LMStudio',
* icon: 'i-ph:cloud-arrow-down',
* },
* {
* name: 'Together',
* getDynamicModels: getTogetherModels,
* staticModels: [
* {
* name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
* label: 'Qwen/Qwen2.5-Coder-32B-Instruct',
* provider: 'Together',
* maxTokenAllowed: 8000,
* },
* {
* name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
* label: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
* provider: 'Together',
* maxTokenAllowed: 8000,
* },
*
* {
* name: 'mistralai/Mixtral-8x7B-Instruct-v0.1',
* label: 'Mixtral 8x7B Instruct',
* provider: 'Together',
* maxTokenAllowed: 8192,
* },
* ],
* getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
* },
* {
* name: 'Perplexity',
* staticModels: [
* {
* name: 'llama-3.1-sonar-small-128k-online',
* label: 'Sonar Small Online',
* provider: 'Perplexity',
* maxTokenAllowed: 8192,
* },
* {
* name: 'llama-3.1-sonar-large-128k-online',
* label: 'Sonar Large Online',
* provider: 'Perplexity',
* maxTokenAllowed: 8192,
* },
* {
* name: 'llama-3.1-sonar-huge-128k-online',
* label: 'Sonar Huge Online',
* provider: 'Perplexity',
* maxTokenAllowed: 8192,
* },
* ],
* getApiKeyLink: 'https://www.perplexity.ai/settings/api',
* },
*];
*/
const providerBaseUrlEnvKeys: Record<string, { baseUrlKey?: string; apiTokenKey?: string }> = {};
PROVIDER_LIST.forEach((provider) => {
providerBaseUrlEnvKeys[provider.name] = {
@@ -359,3 +54,96 @@ async function initializeModelList(options: {
// initializeModelList({})
export { initializeModelList, providerBaseUrlEnvKeys, MODEL_LIST };
// starter Templates
/**
 * Starter templates offered in the template picker.
 *
 * `githubRepo` must be in "owner/repo" form: the GitHub contents API
 * (`GET /repos/{owner}/{repo}/contents/...`) used to import these templates
 * cannot resolve a bare repo name.
 */
export const STARTER_TEMPLATES: Template[] = [
  {
    name: 'bolt-astro-basic',
    label: 'Astro Basic',
    description: 'Lightweight Astro starter template for building fast static websites',
    githubRepo: 'thecodacus/bolt-astro-basic-template',
    tags: ['astro', 'blog', 'performance'],
    icon: 'i-bolt:astro',
  },
  {
    name: 'bolt-nextjs-shadcn',
    label: 'Next.js with shadcn/ui',
    description: 'Next.js starter fullstack template integrated with shadcn/ui components and styling system',
    githubRepo: 'thecodacus/bolt-nextjs-shadcn-template',
    tags: ['nextjs', 'react', 'typescript', 'shadcn', 'tailwind'],
    icon: 'i-bolt:nextjs',
  },
  {
    name: 'bolt-qwik-ts',
    label: 'Qwik TypeScript',
    description: 'Qwik framework starter with TypeScript for building resumable applications',
    githubRepo: 'thecodacus/bolt-qwik-ts-template',
    tags: ['qwik', 'typescript', 'performance', 'resumable'],
    icon: 'i-bolt:qwik',
  },
  {
    name: 'bolt-remix-ts',
    label: 'Remix TypeScript',
    description: 'Remix framework starter with TypeScript for full-stack web applications',
    githubRepo: 'thecodacus/bolt-remix-ts-template',
    tags: ['remix', 'typescript', 'fullstack', 'react'],
    icon: 'i-bolt:remix',
  },
  {
    name: 'bolt-slidev',
    label: 'Slidev Presentation',
    description: 'Slidev starter template for creating developer-friendly presentations using Markdown',
    githubRepo: 'thecodacus/bolt-slidev-template',
    tags: ['slidev', 'presentation', 'markdown'],
    icon: 'i-bolt:slidev',
  },
  {
    name: 'bolt-sveltekit',
    label: 'SvelteKit',
    description: 'SvelteKit starter template for building fast, efficient web applications',

    // fix: was 'bolt-sveltekit-template' (no owner), which 404s against the
    // GitHub contents API; every other entry uses the 'thecodacus/' owner.
    githubRepo: 'thecodacus/bolt-sveltekit-template',
    tags: ['svelte', 'sveltekit', 'typescript'],
    icon: 'i-bolt:svelte',
  },
  {
    name: 'vanilla-vite',
    label: 'Vanilla + Vite',
    description: 'Minimal Vite starter template for vanilla JavaScript projects',
    githubRepo: 'thecodacus/vanilla-vite-template',
    tags: ['vite', 'vanilla-js', 'minimal'],
    icon: 'i-bolt:vite',
  },
  {
    name: 'bolt-vite-react',
    label: 'React + Vite + typescript',
    description: 'React starter template powered by Vite for fast development experience',
    githubRepo: 'thecodacus/bolt-vite-react-ts-template',
    tags: ['react', 'vite', 'frontend'],
    icon: 'i-bolt:react',
  },
  {
    name: 'bolt-vite-ts',
    label: 'Vite + TypeScript',
    description: 'Vite starter template with TypeScript configuration for type-safe development',
    githubRepo: 'thecodacus/bolt-vite-ts-template',
    tags: ['vite', 'typescript', 'minimal'],
    icon: 'i-bolt:typescript',
  },
  {
    name: 'bolt-vue',
    label: 'Vue.js',
    description: 'Vue.js starter template with modern tooling and best practices',
    githubRepo: 'thecodacus/bolt-vue-template',
    tags: ['vue', 'typescript', 'frontend'],
    icon: 'i-bolt:vue',
  },
  {
    name: 'bolt-angular',
    label: 'Angular Starter',
    description: 'A modern Angular starter template with TypeScript support and best practices configuration',
    githubRepo: 'thecodacus/bolt-angular-template',
    tags: ['angular', 'typescript', 'frontend', 'spa'],
    icon: 'i-bolt:angular',
  },
];

View File

@@ -1,4 +1,7 @@
export type DebugLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error';
import { Chalk } from 'chalk';
const chalk = new Chalk({ level: 3 });
type LoggerFunction = (...messages: any[]) => void;
@@ -13,9 +16,6 @@ interface Logger {
let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';
const isWorker = 'HTMLRewriter' in globalThis;
const supportsColor = !isWorker;
export const logger: Logger = {
trace: (...messages: any[]) => log('trace', undefined, messages),
debug: (...messages: any[]) => log('debug', undefined, messages),
@@ -63,14 +63,8 @@ function log(level: DebugLevel, scope: string | undefined, messages: any[]) {
return `${acc} ${current}`;
}, '');
if (!supportsColor) {
console.log(`[${level.toUpperCase()}]`, allMessages);
return;
}
const labelBackgroundColor = getColorForLevel(level);
const labelTextColor = level === 'warn' ? 'black' : 'white';
const labelTextColor = level === 'warn' ? '#000000' : '#FFFFFF';
const labelStyles = getLabelStyles(labelBackgroundColor, labelTextColor);
const scopeStyles = getLabelStyles('#77828D', 'white');
@@ -81,7 +75,21 @@ function log(level: DebugLevel, scope: string | undefined, messages: any[]) {
styles.push('', scopeStyles);
}
console.log(`%c${level.toUpperCase()}${scope ? `%c %c${scope}` : ''}`, ...styles, allMessages);
let labelText = formatText(` ${level.toUpperCase()} `, labelTextColor, labelBackgroundColor);
if (scope) {
labelText = `${labelText} ${formatText(` ${scope} `, '#FFFFFF', '77828D')}`;
}
if (typeof window !== 'undefined') {
console.log(`%c${level.toUpperCase()}${scope ? `%c %c${scope}` : ''}`, ...styles, allMessages);
} else {
console.log(`${labelText}`, allMessages);
}
}
// Renders `text` with a foreground hex color on a hex background using chalk
// (used for the non-browser logging path).
function formatText(text: string, color: string, bg: string) {
  const colored = chalk.hex(color)(text);

  return chalk.bgHex(bg)(colored);
}
function getLabelStyles(color: string, textColor: string) {
@@ -104,7 +112,7 @@ function getColorForLevel(level: DebugLevel): string {
return '#EE4744';
}
default: {
return 'black';
return '#000000';
}
}
}

View File

@@ -0,0 +1,304 @@
import ignore from 'ignore';
import type { ProviderInfo } from '~/types/model';
import type { Template } from '~/types/template';
import { STARTER_TEMPLATES } from './constants';
import Cookies from 'js-cookie';
/**
 * Builds the system prompt that asks the LLM to pick the best starter
 * template for a user's request.
 *
 * A synthetic "blank" template is always listed first for trivial tasks;
 * the provided templates are appended after it. The model is instructed to
 * answer only with a <selection> XML fragment, which parseSelectedTemplate
 * extracts via regex — keep the format here and that parser in sync.
 */
const starterTemplateSelectionPrompt = (templates: Template[]) => `
You are an experienced developer who helps people choose the best starter template for their projects.
Available templates:
<template>
<name>blank</name>
<description>Empty starter for simple scripts and trivial tasks that don't require a full template setup</description>
<tags>basic, script</tags>
</template>
${templates
.map(
(template) => `
<template>
<name>${template.name}</name>
<description>${template.description}</description>
${template.tags ? `<tags>${template.tags.join(', ')}</tags>` : ''}
</template>
`,
)
.join('\n')}
Response Format:
<selection>
<templateName>{selected template name}</templateName>
<title>{a proper title for the project}</title>
</selection>
Examples:
<example>
User: I need to build a todo app
Response:
<selection>
<templateName>react-basic-starter</templateName>
<title>Simple React todo application</title>
</selection>
</example>
<example>
User: Write a script to generate numbers from 1 to 100
Response:
<selection>
<templateName>blank</templateName>
<title>script to generate numbers from 1 to 100</title>
</selection>
</example>
Instructions:
1. For trivial tasks and simple scripts, always recommend the blank template
2. For more complex projects, recommend templates from the provided list
3. Follow the exact XML format
4. Consider both technical requirements and tags
5. If no perfect match exists, recommend the closest option
Important: Provide only the selection tags in your response, no additional text.
`;
const templates: Template[] = STARTER_TEMPLATES.filter((t) => !t.name.includes('shadcn'));
/**
 * Extracts the template name and project title from the LLM's XML reply.
 * Returns null when no <templateName> tag is present; a missing or empty
 * <title> falls back to 'Untitled Project'.
 */
const parseSelectedTemplate = (llmOutput: string): { template: string; title: string } | null => {
  try {
    const nameMatch = llmOutput.match(/<templateName>(.*?)<\/templateName>/);

    if (!nameMatch) {
      return null;
    }

    const titleMatch = llmOutput.match(/<title>(.*?)<\/title>/);

    // `||` (not `??`) so that an empty <title></title> also gets the fallback.
    return {
      template: nameMatch[1].trim(),
      title: titleMatch?.[1].trim() || 'Untitled Project',
    };
  } catch (error) {
    console.error('Error parsing template selection:', error);
    return null;
  }
};
export const selectStarterTemplate = async (options: { message: string; model: string; provider: ProviderInfo }) => {
const { message, model, provider } = options;
const requestBody = {
message,
model,
provider,
system: starterTemplateSelectionPrompt(templates),
};
const response = await fetch('/api/llmcall', {
method: 'POST',
body: JSON.stringify(requestBody),
});
const respJson: { text: string } = await response.json();
console.log(respJson);
const { text } = respJson;
const selectedTemplate = parseSelectedTemplate(text);
if (selectedTemplate) {
return selectedTemplate;
} else {
console.log('No template selected, using blank template');
return {
template: 'blank',
title: '',
};
}
};
/**
 * Recursively fetches all files of a GitHub repo (or a subpath of it) via the
 * contents API and returns them with their base64-decoded text content.
 *
 * @param repoName - Repository in "owner/repo" form.
 * @param path - Path within the repo; '' fetches the repo root.
 * @returns Flat list of { name, path, content } for every file found.
 * @throws Error when the API responds with a non-OK status.
 */
const getGitHubRepoContent = async (
  repoName: string,
  path: string = '',
): Promise<{ name: string; path: string; content: string }[]> => {
  const baseUrl = 'https://api.github.com';

  try {
    const token = Cookies.get('githubToken') || import.meta.env.VITE_GITHUB_ACCESS_TOKEN;

    const headers: HeadersInit = {
      Accept: 'application/vnd.github.v3+json',
    };

    // Authenticated requests get a much higher rate limit.
    if (token) {
      headers.Authorization = 'token ' + token;
    }

    // Fetch contents of the path.
    const response = await fetch(`${baseUrl}/repos/${repoName}/contents/${path}`, {
      headers,
    });

    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }

    const data: any = await response.json();

    // A non-array response describes a single entry (file, symlink, submodule, ...).
    if (!Array.isArray(data)) {
      if (data.type === 'file') {
        const content = atob(data.content); // content is base64-encoded

        return [
          {
            name: data.name,
            path: data.path,
            content,
          },
        ];
      }

      // Fix: symlinks/submodules have no fetchable content. Previously this
      // fell through to data.map() on a plain object and threw a TypeError.
      return [];
    }

    // Directory listing: recurse into subdirectories and fetch files in parallel.
    const contents = await Promise.all(
      data.map(async (item: any) => {
        if (item.type === 'dir') {
          return await getGitHubRepoContent(repoName, item.path);
        } else if (item.type === 'file') {
          const fileResponse = await fetch(item.url, {
            headers,
          });
          const fileData: any = await fileResponse.json();
          const content = atob(fileData.content); // content is base64-encoded

          return [
            {
              name: item.name,
              path: item.path,
              content,
            },
          ];
        }

        return [];
      }),
    );

    // Flatten the per-entry arrays into a single file list.
    return contents.flat();
  } catch (error) {
    console.error('Error fetching repo contents:', error);
    throw error;
  }
};
/**
 * Imports a starter template's files from GitHub and builds the synthetic
 * assistant/user messages that seed the chat with those files.
 *
 * @param templateName - Name of an entry in STARTER_TEMPLATES.
 * @param title - Optional artifact title shown while importing.
 * @returns { assistantMessage, userMessage }, or null for an unknown template.
 */
export async function getTemplates(templateName: string, title?: string) {
  const template = STARTER_TEMPLATES.find((t) => t.name == templateName);

  if (!template) {
    return null;
  }

  const githubRepo = template.githubRepo;
  const files = await getGitHubRepoContent(githubRepo);

  let filteredFiles = files;

  /*
   * ignoring common unwanted files
   * exclude .git
   */
  filteredFiles = filteredFiles.filter((x) => x.path.startsWith('.git') == false);

  // exclude lock files (regenerated on install)
  const commonLockFiles = ['package-lock.json', 'yarn.lock', 'pnpm-lock.yaml'];
  filteredFiles = filteredFiles.filter((x) => commonLockFiles.includes(x.name) == false);

  // exclude the .bolt metadata folder from the imported project
  filteredFiles = filteredFiles.filter((x) => x.path.startsWith('.bolt') == false);

  // check for an ignore file in the .bolt folder (lists read-only files)
  const templateIgnoreFile = files.find((x) => x.path.startsWith('.bolt') && x.name == 'ignore');

  const filesToImport = {
    files: filteredFiles,
    ignoreFile: [] as typeof filteredFiles,
  };

  if (templateIgnoreFile) {
    // Files matched by the ignore patterns are still imported, but are
    // flagged as read-only in the user message built below.
    const ignorePatterns = templateIgnoreFile.content.split('\n').map((x) => x.trim());
    const ig = ignore().add(ignorePatterns);
    const ignoredFiles = filteredFiles.filter((x) => ig.ignores(x.path));

    filesToImport.files = filteredFiles;
    filesToImport.ignoreFile = ignoredFiles;
  }

  const assistantMessage = `
<boltArtifact id="imported-files" title="${title || 'Importing Starter Files'}" type="bundled">
${filesToImport.files
  .map(
    (file) =>
      `<boltAction type="file" filePath="${file.path}">
${file.content}
</boltAction>`,
  )
  .join('\n')}
</boltArtifact>
`;

  let userMessage = ``;
  const templatePromptFile = files.filter((x) => x.path.startsWith('.bolt')).find((x) => x.name == 'prompt');

  if (templatePromptFile) {
    userMessage = `
TEMPLATE INSTRUCTIONS:
${templatePromptFile.content}
IMPORTANT: Dont Forget to install the dependencies before running the app
---
`;
  }

  if (filesToImport.ignoreFile.length > 0) {
    userMessage =
      userMessage +
      `
STRICT FILE ACCESS RULES - READ CAREFULLY:
The following files are READ-ONLY and must never be modified:
${filesToImport.ignoreFile.map((file) => `- ${file.path}`).join('\n')}
Permitted actions:
✓ Import these files as dependencies
✓ Read from these files
✓ Reference these files
Strictly forbidden actions:
❌ Modify any content within these files
❌ Delete these files
❌ Rename these files
❌ Move these files
❌ Create new versions of these files
❌ Suggest changes to these files
Any attempt to modify these protected files will result in immediate termination of the operation.
If you need to make changes to functionality, create new files instead of modifying the protected ones listed above.
---
`;
  }

  // Fix: instruction previously read "NO NOT EDIT/WRITE ...", inverting the
  // intended "DO NOT" directive sent to the model.
  userMessage += `
---
template import is done, and you can now use the imported files,
edit only the files that need to be changed, and you can create new files as needed.
DO NOT EDIT/WRITE ANY FILES THAT ALREADY EXIST IN THE PROJECT AND DOES NOT NEED TO BE MODIFIED
---
Now that the Template is imported please continue with my original request
`;

  return {
    assistantMessage,
    userMessage,
  };
}

View File

@@ -60,7 +60,9 @@ export class BoltShell {
#webcontainer: WebContainer | undefined;
#terminal: ITerminal | undefined;
#process: WebContainerProcess | undefined;
executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();
executionState = atom<
{ sessionId: string; active: boolean; executionPrms?: Promise<any>; abort?: () => void } | undefined
>();
#outputStream: ReadableStreamDefaultReader<string> | undefined;
#shellInputStream: WritableStreamDefaultWriter<string> | undefined;
@@ -93,13 +95,17 @@ export class BoltShell {
return this.#process;
}
async executeCommand(sessionId: string, command: string): Promise<ExecutionResult> {
async executeCommand(sessionId: string, command: string, abort?: () => void): Promise<ExecutionResult> {
if (!this.process || !this.terminal) {
return undefined;
}
const state = this.executionState.get();
if (state?.active && state.abort) {
state.abort();
}
/*
* interrupt the current execution
* this.#shellInputStream?.write('\x03');
@@ -116,11 +122,19 @@ export class BoltShell {
//wait for the execution to finish
const executionPromise = this.getCurrentExecutionResult();
this.executionState.set({ sessionId, active: true, executionPrms: executionPromise });
this.executionState.set({ sessionId, active: true, executionPrms: executionPromise, abort });
const resp = await executionPromise;
this.executionState.set({ sessionId, active: false });
if (resp) {
try {
resp.output = cleanTerminalOutput(resp.output);
} catch (error) {
console.log('failed to format terminal output', error);
}
}
return resp;
}
@@ -216,6 +230,65 @@ export class BoltShell {
}
}
/**
 * Cleans and formats terminal output while preserving structure and paths.
 * Strips OSC sequences, ANSI escape/color codes and other control
 * sequences, then normalizes line breaks and whitespace so the output
 * reads as plain text (e.g. for feeding terminal output back to an LLM).
 *
 * NOTE(review): the replace chain is order-sensitive — each step assumes
 * the previous one already ran. Do not reorder steps 1-6.
 */
export function cleanTerminalOutput(input: string): string {
  // Step 1: Remove OSC sequences (including those with parameters)
  const removeOsc = input
    .replace(/\x1b\](\d+;[^\x07\x1b]*|\d+[^\x07\x1b]*)\x07/g, '')
    .replace(/\](\d+;[^\n]*|\d+[^\n]*)/g, '');

  // Step 2: Remove ANSI escape sequences and color codes more thoroughly
  const removeAnsi = removeOsc
    // Remove all escape sequences with parameters (\u001b and \x1b are the same byte; both spellings kept defensively)
    .replace(/\u001b\[[\?]?[0-9;]*[a-zA-Z]/g, '')
    .replace(/\x1b\[[\?]?[0-9;]*[a-zA-Z]/g, '')
    // Remove color codes
    .replace(/\u001b\[[0-9;]*m/g, '')
    .replace(/\x1b\[[0-9;]*m/g, '')
    // Clean up any remaining escape characters
    .replace(/\u001b/g, '')
    .replace(/\x1b/g, '');

  // Step 3: Normalize carriage returns to newlines and collapse 3+ blank lines
  const cleanNewlines = removeAnsi
    .replace(/\r\n/g, '\n')
    .replace(/\r/g, '\n')
    .replace(/\n{3,}/g, '\n\n');

  // Step 4: Add newlines at key breakpoints while preserving paths
  const formatOutput = cleanNewlines
    // Preserve prompt line
    .replace(/^([~\/][^\n]+)/m, '$1\n')
    // Add newline before command output indicators
    .replace(/(?<!^|\n)>/g, '\n>')
    // Add newline before error keywords without breaking paths
    .replace(/(?<!^|\n|\w)(error|failed|warning|Error|Failed|Warning):/g, '\n$1:')
    // Add newline before 'at' in stack traces without breaking paths
    .replace(/(?<!^|\n|\/)(at\s+(?!async|sync))/g, '\nat ')
    // Ensure 'at async' stays on same line
    .replace(/\bat\s+async/g, 'at async')
    // Add newline before npm error indicators
    .replace(/(?<!^|\n)(npm ERR!)/g, '\n$1');

  // Step 5: Trim each line and drop empty ones
  const cleanSpaces = formatOutput
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
    .join('\n');

  // Step 6: Final cleanup
  return cleanSpaces
    .replace(/\n{3,}/g, '\n\n') // Replace multiple newlines with double newlines
    .replace(/:\s+/g, ': ') // Normalize spacing after colons
    .replace(/\s{2,}/g, ' ') // Remove multiple spaces
    .replace(/^\s+|\s+$/g, '') // Trim start and end
    .replace(/\u0000/g, ''); // Remove null characters
}
/**
 * Factory returning a fresh BoltShell instance (one interactive shell
 * session wrapper per call).
 */
export function newBoltShellProcess() {
  return new BoltShell();
}

27
app/utils/stacktrace.ts Normal file
View File

@@ -0,0 +1,27 @@
/**
 * Strips webcontainer host prefixes from stack traces so URLs appear as
 * relative paths, e.g. "https://xyz.webcontainer-api.io/src/a.ts:1:2"
 * becomes "src/a.ts:1:2". URLs for other hosts are left untouched.
 */
export function cleanStackTrace(stackTrace: string): string {
  const hostPattern = /^https?:\/\/[^\/]+\.webcontainer-api\.io(\/.*)?$/;
  const pathPattern = /^https?:\/\/[^\/]+\.webcontainer-api\.io\/(.*?)$/;

  // Reduce one matched URL to its path portion; pass anything else through.
  const toRelative = (url: string): string => {
    if (!hostPattern.test(url)) {
      return url;
    }

    const pathMatch = url.match(pathPattern);

    return pathMatch?.[1] || '';
  };

  const cleanedLines = stackTrace
    .split('\n')
    .map((line) => line.replace(/(https?:\/\/[^\/]+\.webcontainer-api\.io\/[^\s\)]+)/g, (url) => toRelative(url)));

  return cleanedLines.join('\n');
}

View File

@@ -1,31 +1,14 @@
# Release v0.0.3
# 🚀 Release v0.0.5
### 🔄 Changes since v0.0.2
## What's Changed 🌟
#### 🐛 Bug Fixes
### 🔄 Changes since v0.0.4
- Prompt Enhance
### 🐛 Bug Fixes
* hotfix auto select starter template works without github token #release ([#959](https://github.com/stackblitz-labs/bolt.diy/pull/959)) by @thecodacus
#### 📚 Documentation
## 📈 Stats
- miniflare error knowledge
#### 🔧 Chores
- adding back semantic pull pr check for better changelog system
- update commit hash to 1e72d52278730f7d22448be9d5cf2daf12559486
- update commit hash to 282beb96e2ee92ba8b1174aaaf9f270e03a288e8
#### 🔍 Other Changes
- Merge remote-tracking branch 'upstream/main'
- Merge pull request #781 from thecodacus/semantic-pull-pr
- miniflare and wrangler error
- simplified the fix
- Merge branch 'main' into fix/prompt-enhance
**Full Changelog**: [`v0.0.2..v0.0.3`](https://github.com/stackblitz-labs/bolt.diy/compare/v0.0.2...v0.0.3)
**Full Changelog**: [`v0.0.4..v0.0.5`](https://github.com/stackblitz-labs/bolt.diy/compare/v0.0.4...v0.0.5)

View File

@@ -20,8 +20,10 @@ services:
- OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
- XAI_API_KEY=${XAI_API_KEY}
- TOGETHER_API_KEY=${TOGETHER_API_KEY}
- TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
- AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
- DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
- RUNNING_IN_DOCKER=true
@@ -48,10 +50,12 @@ services:
- OPENAI_API_KEY=${OPENAI_API_KEY}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
- OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY}
- XAI_API_KEY=${XAI_API_KEY}
- GOOGLE_GENERATIVE_AI_API_KEY=${GOOGLE_GENERATIVE_AI_API_KEY}
- OLLAMA_API_BASE_URL=${OLLAMA_API_BASE_URL}
- TOGETHER_API_KEY=${TOGETHER_API_KEY}
- TOGETHER_API_BASE_URL=${TOGETHER_API_BASE_URL}
- AWS_BEDROCK_CONFIG=${AWS_BEDROCK_CONFIG}
- VITE_LOG_LEVEL=${VITE_LOG_LEVEL:-debug}
- DEFAULT_NUM_CTX=${DEFAULT_NUM_CTX:-32768}
- RUNNING_IN_DOCKER=true

3
docs/.gitignore vendored
View File

@@ -1,2 +1,3 @@
.venv
site/
site/
.python-version

1
docs/.python-version Normal file
View File

@@ -0,0 +1 @@
3.12.0

View File

@@ -1,91 +1,95 @@
# Frequently Asked Questions (FAQ)
<details>
<summary><strong>What are the best models for bolt.diy?</strong></summary>
## Models and Setup
For the best experience with bolt.diy, we recommend using the following models:
??? question "What are the best models for bolt.diy?"
For the best experience with bolt.diy, we recommend using the following models:
- **Claude 3.5 Sonnet (old)**: Best overall coder, providing excellent results across all use cases
- **Gemini 2.0 Flash**: Exceptional speed while maintaining good performance
- **GPT-4o**: Strong alternative to Claude 3.5 Sonnet with comparable capabilities
- **DeepSeekCoder V2 236b**: Best open source model (available through OpenRouter, DeepSeek API, or self-hosted)
- **Qwen 2.5 Coder 32b**: Best model for self-hosting with reasonable hardware requirements
- **Claude 3.5 Sonnet (old)**: Best overall coder, providing excellent results across all use cases
- **Gemini 2.0 Flash**: Exceptional speed while maintaining good performance
- **GPT-4o**: Strong alternative to Claude 3.5 Sonnet with comparable capabilities
- **DeepSeekCoder V3**: Best open source model (available through OpenRouter, DeepSeek API, or self-hosted)
- **DeepSeekCoder V2 236b**: available through OpenRouter, DeepSeek API, or self-hosted
- **Qwen 2.5 Coder 32b**: Best model for self-hosting with reasonable hardware requirements
**Note**: Models with less than 7b parameters typically lack the capability to properly interact with bolt!
</details>
!!! warning
Models with less than 7b parameters typically lack the capability to properly interact with bolt!
<details>
<summary><strong>How do I get the best results with bolt.diy?</strong></summary>
## Best Practices
- **Be specific about your stack**:
Mention the frameworks or libraries you want to use (e.g., Astro, Tailwind, ShadCN) in your initial prompt. This ensures that bolt.diy scaffolds the project according to your preferences.
??? question "How do I get the best results with bolt.diy?"
- **Be specific about your stack**:
Mention the frameworks or libraries you want to use (e.g., Astro, Tailwind, ShadCN) in your initial prompt. This ensures that bolt.diy scaffolds the project according to your preferences.
- **Use the enhance prompt icon**:
Before sending your prompt, click the *enhance* icon to let the AI refine your prompt. You can edit the suggested improvements before submitting.
- **Use the enhance prompt icon**:
Before sending your prompt, click the *enhance* icon to let the AI refine your prompt. You can edit the suggested improvements before submitting.
- **Scaffold the basics first, then add features**:
Ensure the foundational structure of your application is in place before introducing advanced functionality. This helps bolt.diy establish a solid base to build on.
- **Scaffold the basics first, then add features**:
Ensure the foundational structure of your application is in place before introducing advanced functionality. This helps bolt.diy establish a solid base to build on.
- **Batch simple instructions**:
Combine simple tasks into a single prompt to save time and reduce API credit consumption. For example:
*"Change the color scheme, add mobile responsiveness, and restart the dev server."*
</details>
- **Batch simple instructions**:
Combine simple tasks into a single prompt to save time and reduce API credit consumption. For example:
*"Change the color scheme, add mobile responsiveness, and restart the dev server."*
<details>
<summary><strong>How do I contribute to bolt.diy?</strong></summary>
## Project Information
Check out our [Contribution Guide](CONTRIBUTING.md) for more details on how to get involved!
</details>
??? question "How do I contribute to bolt.diy?"
Check out our [Contribution Guide](CONTRIBUTING.md) for more details on how to get involved!
<details>
<summary><strong>What are the future plans for bolt.diy?</strong></summary>
??? question "What are the future plans for bolt.diy?"
Visit our [Roadmap](https://roadmap.sh/r/ottodev-roadmap-2ovzo) for the latest updates.
New features and improvements are on the way!
Visit our [Roadmap](https://roadmap.sh/r/ottodev-roadmap-2ovzo) for the latest updates.
New features and improvements are on the way!
</details>
??? question "Why are there so many open issues/pull requests?"
bolt.diy began as a small showcase project on @ColeMedin's YouTube channel to explore editing open-source projects with local LLMs. However, it quickly grew into a massive community effort!
<details>
<summary><strong>Why are there so many open issues/pull requests?</strong></summary>
We're forming a team of maintainers to manage demand and streamline issue resolution. The maintainers are rockstars, and we're also exploring partnerships to help the project thrive.
bolt.diy began as a small showcase project on @ColeMedin's YouTube channel to explore editing open-source projects with local LLMs. However, it quickly grew into a massive community effort!
## Model Comparisons
We're forming a team of maintainers to manage demand and streamline issue resolution. The maintainers are rockstars, and we're also exploring partnerships to help the project thrive.
</details>
??? question "How do local LLMs compare to larger models like Claude 3.5 Sonnet for bolt.diy?"
While local LLMs are improving rapidly, larger models like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b still offer the best results for complex applications. Our ongoing focus is to improve prompts, agents, and the platform to better support smaller local LLMs.
<details>
<summary><strong>How do local LLMs compare to larger models like Claude 3.5 Sonnet for bolt.diy?</strong></summary>
## Troubleshooting
While local LLMs are improving rapidly, larger models like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b still offer the best results for complex applications. Our ongoing focus is to improve prompts, agents, and the platform to better support smaller local LLMs.
</details>
??? error "There was an error processing this request"
This generic error message means something went wrong. Check both:
<details>
<summary><strong>Common Errors and Troubleshooting</strong></summary>
- The terminal (if you started the app with Docker or `pnpm`).
### **"There was an error processing this request"**
This generic error message means something went wrong. Check both:
- The terminal (if you started the app with Docker or `pnpm`).
- The developer console in your browser (press `F12` or right-click > *Inspect*, then go to the *Console* tab).
- The developer console in your browser (press `F12` or right-click > *Inspect*, then go to the *Console* tab).
### **"x-api-key header missing"**
This error is sometimes resolved by restarting the Docker container.
If that doesn't work, try switching from Docker to `pnpm` or vice versa. We're actively investigating this issue.
??? error "x-api-key header missing"
This error is sometimes resolved by restarting the Docker container.
If that doesn't work, try switching from Docker to `pnpm` or vice versa. We're actively investigating this issue.
### **Blank preview when running the app**
A blank preview often occurs due to hallucinated bad code or incorrect commands.
To troubleshoot:
- Check the developer console for errors.
- Remember, previews are core functionality, so the app isn't broken! We're working on making these errors more transparent.
??? error "Blank preview when running the app"
A blank preview often occurs due to hallucinated bad code or incorrect commands.
To troubleshoot:
### **"Everything works, but the results are bad"**
Local LLMs like Qwen-2.5-Coder are powerful for small applications but still experimental for larger projects. For better results, consider using larger models like GPT-4o, Claude 3.5 Sonnet, or DeepSeek Coder V2 236b.
- Check the developer console for errors.
### **"Received structured exception #0xc0000005: access violation"**
If you are getting this, you are probably on Windows. The fix is generally to update the [Visual C++ Redistributable](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170)
- Remember, previews are core functionality, so the app isn't broken! We're working on making these errors more transparent.
### **"Miniflare or Wrangler errors in Windows"**
You will need to make sure you have the latest version of Visual Studio C++ installed (14.40.33816), more information here https://github.com/stackblitz-labs/bolt.diy/issues/19.
</details>
??? error "Everything works, but the results are bad"
Local LLMs like Qwen-2.5-Coder are powerful for small applications but still experimental for larger projects. For better results, consider using larger models like
- GPT-4o
- Claude 3.5 Sonnet
- DeepSeek Coder V2 236b
??? error "Received structured exception #0xc0000005: access violation"
If you are getting this, you are probably on Windows. The fix is generally to update the [Visual C++ Redistributable](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170)
??? error "Miniflare or Wrangler errors in Windows"
You will need to make sure you have the latest version of Visual Studio C++ installed (14.40.33816), more information here <a href="https://github.com/stackblitz-labs/bolt.diy/issues/19">Github Issues</a>
---
Got more questions? Feel free to reach out or open an issue in our GitHub repo!
## Get Help & Support
!!! tip "Community Support"
[Join the bolt.diy Community](https://thinktank.ottomator.ai/c/bolt-diy/17){target=_blank} for discussions and help
!!! bug "Report Issues"
[Open an Issue](https://github.com/stackblitz-labs/bolt.diy/issues/19){target=_blank} in our GitHub Repository

View File

@@ -25,6 +25,8 @@ bolt.diy allows you to choose the LLM that you use for each prompt! Currently, y
[Join the community!](https://thinktank.ottomator.ai)
Also [this pinned post in our community](https://thinktank.ottomator.ai/t/videos-tutorial-helpful-content/3243) has a bunch of incredible resources for running and deploying bolt.diy yourself!
---
## Features

View File

@@ -65,4 +65,12 @@ markdown_extensions:
- pymdownx.details
- pymdownx.superfences
- pymdownx.mark
- attr_list
- attr_list
- md_in_html
- tables
- def_list
- admonition
- pymdownx.tasklist:
custom_checkbox: true
- toc:
permalink: true

1
icons/angular.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='27' height='28' fill='none'><g clip-path='url(#a)'><path fill='#7B7B7B' d='m26.45 4.668-.955 14.998L16.363 0zM20.125 24.06l-6.9 3.937-6.9-3.937 1.403-3.401h10.994zm-6.9-16.596 3.616 8.79H9.609zm-12.28 12.2L0 4.669 10.087 0z'/></g><defs><clipPath id='a'><path fill='#fff' d='M0 0h26.45v28H0z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 364 B

1
icons/astro.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><path fill='#fff' d='M10.22 23.848c-1.265-1.156-1.635-3.586-1.108-5.346.914 1.11 2.18 1.461 3.492 1.66 2.024.306 4.013.191 5.893-.734.216-.106.415-.247.65-.39.176.512.222 1.03.16 1.555-.15 1.282-.787 2.271-1.801 3.022-.406.3-.835.568-1.254.85-1.286.87-1.634 1.89-1.151 3.373l.048.161a3.383 3.383 0 0 1-1.503-1.285 3.612 3.612 0 0 1-.58-1.962c-.004-.346-.004-.695-.05-1.036-.114-.832-.505-1.204-1.24-1.226-.755-.022-1.352.445-1.51 1.18-.013.056-.03.112-.048.177h.002Z'/><path fill='#7B7B7B' d='M3 18.21s3.746-1.825 7.502-1.825l2.832-8.765c.106-.424.415-.712.765-.712.35 0 .659.288.765.712l2.832 8.765c4.449 0 7.502 1.825 7.502 1.825L18.823.842C18.64.33 18.333 0 17.917 0h-7.635c-.416 0-.712.33-.907.842L3 18.21Z'/></svg>

After

Width:  |  Height:  |  Size: 794 B

1
icons/nativescript.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='29' height='28' fill='none'><g clip-path='url(#a)'><path fill='#7B7B7B' d='M26.523 2.051c1.317 1.317 2 2.966 2.05 4.949v14c-.05 1.982-.733 3.632-2.05 4.949-1.317 1.317-2.967 2-4.95 2.05h-14c-1.982-.05-3.631-.733-4.948-2.05C1.308 24.632.624 22.982.574 21V7c.05-1.982.734-3.632 2.05-4.949C3.943.734 5.592.051 7.575 0h14c1.982.05 3.632.734 4.949 2.051Zm-1.931 11.266c-.44-.438-.669-.987-.687-1.648V7c-.014-.66-.241-1.211-.68-1.65-.44-.44-.99-.667-1.652-.68h-2.33V16.33L9.904 4.67H7.574c-.661.014-1.211.24-1.651.68-.44.44-.666.99-.68 1.65v4.67c-.019.66-.247 1.21-.687 1.648-.44.437-.99.665-1.65.683.66.018 1.21.246 1.65.683.44.438.668.987.687 1.648V21c.014.66.24 1.211.68 1.65.44.44.99.667 1.65.68h2.332V11.67l9.337 11.662h2.331c.661-.014 1.212-.24 1.651-.68.44-.44.667-.99.68-1.65v-4.67c.019-.66.248-1.21.688-1.647.44-.438.99-.665 1.65-.684-.66-.018-1.21-.246-1.65-.683Z'/></g><defs><clipPath id='a'><path fill='#fff' d='M.574 0h28v28h-28z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1010 B

1
icons/nextjs.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='29' height='28' fill='none'><g clip-path='url(#a)'><mask id='b' width='29' height='28' x='0' y='0' maskUnits='userSpaceOnUse' style='mask-type:alpha'><path fill='#000' d='M14.573 28c7.732 0 14-6.268 14-14s-6.268-14-14-14-14 6.268-14 14 6.268 14 14 14Z'/></mask><g mask='url(#b)'><path fill='#7B7B7B' d='M14.573 28c7.732 0 14-6.268 14-14s-6.268-14-14-14-14 6.268-14 14 6.268 14 14 14Z'/><path fill='#fff' d='M23.83 24.503 11.33 8.4H8.973v11.196h1.884v-8.803l11.493 14.85a14.047 14.047 0 0 0 1.48-1.14Z'/><path fill='#fff' d='M20.33 8.4h-1.867v11.2l1.866.526V8.4Z'/></g></g><defs><clipPath id='a'><path fill='#fff' d='M.574 0h28v28h-28z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 708 B

1
icons/nuxt.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><path fill='#7B7B7B' d='M15.68 23.667h10.36c.33 0 .647-.116.933-.28.287-.164.582-.37.747-.654.165-.284.28-.605.28-.933 0-.328-.114-.65-.28-.933l-7-12.04a1.702 1.702 0 0 0-.653-.654 2.256 2.256 0 0 0-1.027-.28c-.33 0-.647.117-.933.28-.287.164-.488.37-.654.654l-1.773 3.08-3.547-5.974c-.165-.284-.367-.583-.653-.746-.286-.164-.603-.187-.933-.187-.33 0-.647.023-.934.187a2.213 2.213 0 0 0-.746.746L.187 20.867C.02 21.15 0 21.472 0 21.8c0 .328.021.65.187.933.165.284.46.49.746.654.287.164.603.28.934.28H8.4c2.589 0 4.473-1.162 5.787-3.36L17.36 14.8l1.68-2.893 5.133 8.773H17.36l-1.68 2.987ZM8.307 20.68H3.733l6.814-11.76L14 14.8l-2.287 3.988c-.873 1.426-1.867 1.892-3.406 1.892Z'/></svg>

After

Width:  |  Height:  |  Size: 758 B

1
icons/qwik.svg Normal file
View File

@@ -0,0 +1 @@
<svg width='30' height='30' viewBox='0 0 30 30' fill='none' xmlns='http://www.w3.org/2000/svg'><path d='M24.8544 29.4143L19.6799 24.2665L19.6069 24.2793V24.2248L8.6028 13.3425L11.3199 10.7239L9.72188 1.56836L2.16084 10.9489C0.877089 12.2454 0.63008 14.3653 1.56013 15.9216L6.28578 23.7656C7.00825 24.9681 8.14357 25.7416 9.72884 25.6848C13.0849 25.5654 14.56 25.5654 14.56 25.5654L24.8521 29.412L24.8544 29.4155V29.4143Z' fill='#848484'/><path d='M27.4114 14.9893C28.1559 13.4527 28.4227 12.1087 27.6874 10.7576L26.6414 8.83259L26.0987 7.84455L25.8876 7.45954L25.8679 7.48158L23.0221 2.54487C22.3043 1.29591 20.9683 0.532847 19.5292 0.549082L17.0336 0.619822L9.58386 0.639537C8.17719 0.648814 6.87952 1.3968 6.16632 2.60865L1.64014 11.5984L9.74042 1.52088L20.3665 13.2057L18.4646 15.1331L19.5999 24.2747L19.6161 24.2539V24.2805H19.5999L19.6219 24.3026L20.5079 25.1654L24.7941 29.3518C24.9738 29.5257 25.2661 29.317 25.1466 29.1024L22.4968 23.8886' fill='#B4B4B4'/><path d='M20.3918 13.1765L9.73798 1.55078L11.2513 10.6542L8.54004 13.2855L19.5801 24.2571L18.5851 15.149L20.3918 13.1799V13.1765Z' fill='black'/></svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

1
icons/react.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><path fill='#7B7B7B' d='M26.573 13.944c0-1.628-2.039-3.17-5.164-4.127.721-3.186.4-5.72-1.012-6.532a2.196 2.196 0 0 0-1.122-.28v1.117c.23 0 .416.045.571.13.681.39.977 1.878.746 3.792-.055.47-.145.966-.255 1.472a24.273 24.273 0 0 0-3.18-.546 24.425 24.425 0 0 0-2.084-2.504c1.633-1.518 3.165-2.349 4.207-2.349V3c-1.377 0-3.18.982-5.004 2.685C12.453 3.992 10.65 3.02 9.273 3.02v1.117c1.036 0 2.574.826 4.207 2.334a23.639 23.639 0 0 0-2.069 2.5c-1.132.12-2.203.305-3.185.55-.115-.5-.2-.986-.26-1.452-.236-1.914.054-3.401.73-3.797.15-.09.346-.13.577-.13V3.025c-.421 0-.802.09-1.132.28-1.408.812-1.723 3.341-.997 6.517C4.029 10.784 2 12.322 2 13.944c0 1.628 2.039 3.17 5.164 4.127-.721 3.186-.4 5.72 1.012 6.532.325.19.706.28 1.127.28 1.377 0 3.18-.982 5.003-2.685 1.824 1.693 3.627 2.665 5.004 2.665.42 0 .802-.09 1.132-.28 1.407-.812 1.723-3.341.997-6.517 3.105-.956 5.134-2.5 5.134-4.122Zm-6.522-3.34a22.533 22.533 0 0 1-.676 1.978 27.086 27.086 0 0 0-1.377-2.374c.711.105 1.397.235 2.053.395Zm-2.294 5.334a26.71 26.71 0 0 1-1.207 1.913 26.066 26.066 0 0 1-4.518.005 26.128 26.128 0 0 1-2.254-3.897 26.685 26.685 0 0 1 2.244-3.912 26.051 26.051 0 0 1 4.518-.005 26.117 26.117 0 0 1 2.254 3.897 28.091 28.091 0 0 1-1.037 1.998Zm1.618-.652c.27.672.501 1.343.691 1.994-.656.16-1.347.295-2.063.4a27.72 27.72 0 0 0 1.372-2.394Zm-5.079 5.345a20.648 20.648 0 0 1-1.392-1.603c.45.02.912.035 1.377.035.471 0 .937-.01 1.393-.035-.451.586-.917 1.122-1.378 1.603Zm-3.726-2.95a22.6 22.6 0 0 1-2.054-.396c.186-.646.416-1.312.677-1.979.205.401.42.802.656 1.203.235.4.476.79.72 1.172ZM14.27 7.257c.466.481.932 1.017 1.393 1.603-.451-.02-.912-.035-1.378-.035-.47 0-.936.01-1.392.035.45-.586.916-1.122 1.377-1.603Zm-3.706 2.95a27.624 27.624 0 0 0-1.372 2.39c-.271-.671-.501-1.343-.692-1.994a24.32 24.32 0 0 1 2.064-.395Zm-4.533 6.271c-1.773-.756-2.92-1.748-2.92-2.534 0-.786 1.147-1.783 2.92-2.534.43-.186.902-.351 1.387-.506.286.982.662 2.003 
1.127 3.05a23.715 23.715 0 0 0-1.112 3.035 15.23 15.23 0 0 1-1.402-.51Zm2.695 7.158c-.681-.39-.977-1.878-.747-3.792.055-.47.146-.966.256-1.472.982.24 2.053.425 3.18.546a24.43 24.43 0 0 0 2.084 2.504c-1.633 1.518-3.165 2.35-4.207 2.35a1.195 1.195 0 0 1-.566-.136Zm11.88-3.817c.236 1.914-.055 3.401-.73 3.797-.151.09-.346.13-.577.13-1.037 0-2.574-.826-4.207-2.334a23.668 23.668 0 0 0 2.068-2.5 23.393 23.393 0 0 0 3.186-.55c.115.506.205.992.26 1.457Zm1.929-3.34c-.431.185-.902.35-1.388.505a24.059 24.059 0 0 0-1.127-3.05c.461-1.042.832-2.058 1.112-3.035.496.155.967.325 1.408.51 1.773.757 2.92 1.749 2.92 2.535-.005.786-1.152 1.783-2.925 2.534Z'/><path fill='#7B7B7B' d='M14.281 16.236a2.289 2.289 0 1 0 0-4.578 2.289 2.289 0 0 0 0 4.578Z'/></svg>

After

Width:  |  Height:  |  Size: 2.7 KiB

1
icons/remix.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><g fill='#7B7B7B' clip-path='url(#a)'><path fill-rule='evenodd' d='M25.261 21.593c.252 3.235.252 4.752.252 6.407h-7.485c0-.36.006-.69.013-1.025.02-1.04.041-2.124-.127-4.313-.223-3.206-1.603-3.918-4.142-3.918H2V12.91h12.129c3.206 0 4.809-.975 4.809-3.557 0-2.27-1.603-3.647-4.81-3.647H2V0h13.465C22.723 0 26.33 3.428 26.33 8.904c0 4.096-2.538 6.768-5.967 7.213 2.894.579 4.586 2.226 4.898 5.476Z' clip-rule='evenodd'/><path d='M2 28v-4.348h7.914c1.322 0 1.61.98 1.61 1.566V28H2Z'/></g><defs><clipPath id='a'><path fill='#fff' d='M0 0h28v28H0z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 643 B

1
icons/remotion.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><g clip-path='url(#a)'><path fill='#4B4B4B' d='M6.087.002a4.874 4.874 0 0 0-1.531.306c-.237.089-.626.283-.84.417a4.72 4.72 0 0 0-1.93 2.384c-.07.194-.262.813-.386 1.24C.578 7.196.112 10.311.012 13.608a55.16 55.16 0 0 0 0 2.292 39.2 39.2 0 0 0 .654 6.172c.154.814.4 1.908.544 2.405a4.712 4.712 0 0 0 1.735 2.5 4.695 4.695 0 0 0 3.06.92c.463-.03 1.381-.153 2.127-.286 3.363-.6 6.47-1.673 9.287-3.21a26.336 26.336 0 0 0 4.78-3.305 23.893 23.893 0 0 0 3.817-4.177 5.32 5.32 0 0 0 .512-.825 4.6 4.6 0 0 0 .484-2.094c0-.698-.132-1.319-.417-1.954-.136-.306-.267-.526-.56-.944a23.833 23.833 0 0 0-3.738-4.178c-2.264-2.017-4.953-3.672-7.956-4.897a30.401 30.401 0 0 0-2.063-.75A32.404 32.404 0 0 0 6.91.05a5.412 5.412 0 0 0-.824-.048Z'/><path fill='#7B7B7B' d='M7.67 2.98c-.46.025-.83.098-1.204.24-.187.07-.492.224-.66.33a3.715 3.715 0 0 0-1.52 1.875c-.055.153-.206.64-.303.975-.647 2.242-1.014 4.693-1.093 7.288a43.377 43.377 0 0 0 0 1.803c.053 1.72.217 3.273.515 4.857a26.7 26.7 0 0 0 .428 1.893c.23.794.698 1.47 1.365 1.966a3.697 3.697 0 0 0 2.408.725 18.771 18.771 0 0 0 1.674-.226c2.646-.472 5.09-1.316 7.308-2.525a20.72 20.72 0 0 0 3.761-2.601 18.8 18.8 0 0 0 3.004-3.287c.201-.28.302-.443.403-.649a3.62 3.62 0 0 0 .38-1.648c0-.55-.103-1.038-.327-1.537-.108-.242-.21-.415-.441-.743a18.759 18.759 0 0 0-2.942-3.288c-1.781-1.588-3.897-2.89-6.26-3.853a23.91 23.91 0 0 0-1.624-.591 25.495 25.495 0 0 0-4.223-.966 4.259 4.259 0 0 0-.648-.038Z'/><path fill='#fff' d='M9.31 6.068c-.33.018-.597.07-.866.173a2.671 2.671 0 0 0-1.567 1.585c-.04.11-.149.46-.218.701-.465 1.612-.73 3.375-.786 5.24a31.225 31.225 0 0 0 0 1.298 22.18 22.18 0 0 0 .37 3.492 19.2 19.2 0 0 0 .308 1.362c.165.57.502 1.057.982 1.414a2.656 2.656 0 0 0 1.731.521c.262-.017.782-.087 1.204-.162 1.903-.34 3.661-.947 5.255-1.816a14.9 14.9 0 0 0 2.706-1.87 13.52 13.52 0 0 0 2.16-2.365c.144-.201.217-.319.29-.466.186-.382.274-.761.273-1.185 
0-.396-.075-.747-.236-1.106a2.889 2.889 0 0 0-.317-.534 13.485 13.485 0 0 0-2.115-2.365c-1.281-1.141-2.803-2.078-4.502-2.77-.368-.15-.731-.283-1.168-.426a18.339 18.339 0 0 0-3.037-.694 3.063 3.063 0 0 0-.466-.027Z'/></g><defs><clipPath id='a'><path fill='#fff' d='M0 0h28v28H0z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 2.2 KiB

1
icons/slidev.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='29' height='28' fill='none'><g clip-path='url(#a)'><mask id='b' width='29' height='29' x='0' y='-1' maskUnits='userSpaceOnUse' style='mask-type:luminance'><path fill='#fff' d='M28.573-.002h-28v28h28v-28Z'/></mask><g mask='url(#b)'><path fill='#4B4B4B' d='M22.243 3.408H7.634A3.652 3.652 0 0 0 3.982 7.06v14.61a3.652 3.652 0 0 0 3.652 3.651h14.609a3.652 3.652 0 0 0 3.652-3.652V7.06a3.652 3.652 0 0 0-3.652-3.652Z'/><path fill='#7B7B7B' d='M22.486 10.955c0-6.052-4.905-10.957-10.956-10.957C5.479-.002.573 4.903.573 10.955c0 6.05 4.906 10.956 10.957 10.956 6.05 0 10.956-4.905 10.956-10.956Z'/><path fill='#fff' d='M14.239 15.563c-.287-1.07-.43-1.604-.288-1.974.123-.322.378-.576.7-.7.37-.141.904.002 1.974.288l5.315 1.425c1.07.286 1.604.43 1.853.737.217.268.31.616.256.956-.062.391-.453.782-1.236 1.565l-3.891 3.892c-.783.782-1.174 1.174-1.565 1.236-.34.054-.688-.04-.957-.257-.307-.248-.45-.783-.737-1.853l-1.424-5.315Z'/></g></g><defs><clipPath id='a'><path fill='#fff' d='M.574 0h28v28h-28z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

1
icons/svelte.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><g clip-path='url(#a)'><path fill='#7B7B7B' fill-rule='evenodd' d='M12.352 1.237c3.701-2.349 8.85-1.258 11.437 2.468a7.818 7.818 0 0 1 1.352 6.05 7.164 7.164 0 0 1-1.115 2.8c.83 1.543 1.092 3.323.783 5.055a7.417 7.417 0 0 1-3.37 5.007l-6.525 4.152c-3.701 2.35-8.827 1.258-11.437-2.467a7.984 7.984 0 0 1-1.352-6.028 7.164 7.164 0 0 1 1.115-2.8c-.83-1.542-1.092-3.322-.783-5.054a7.416 7.416 0 0 1 3.37-5.007l6.525-4.176Zm-6.193 21.36a5.172 5.172 0 0 0 5.553 2.065 5.166 5.166 0 0 0 1.329-.593l6.525-4.153a4.493 4.493 0 0 0 2.04-3.014c.214-1.28-.07-2.586-.83-3.63a5.172 5.172 0 0 0-5.552-2.065 5.166 5.166 0 0 0-1.329.594l-2.492 1.59c-.118.07-.26.118-.403.166a1.557 1.557 0 0 1-1.685-.617 1.473 1.473 0 0 1-.237-1.092c.071-.38.285-.688.617-.902l6.502-4.152c.118-.071.26-.119.403-.166a1.557 1.557 0 0 1 1.685.617c.19.285.285.64.26.973l-.023.237.237.071c.926.285 1.78.712 2.563 1.282l.332.237.119-.38a5.95 5.95 0 0 0 .166-.617c.214-1.281-.071-2.586-.83-3.63a5.172 5.172 0 0 0-5.553-2.065 5.164 5.164 0 0 0-1.329.594L7.702 8.099a4.493 4.493 0 0 0-2.04 3.014 4.793 4.793 0 0 0 .806 3.63 5.172 5.172 0 0 0 5.552 2.065 4.413 4.413 0 0 0 1.33-.57l2.49-1.59c.12-.071.262-.118.404-.166a1.557 1.557 0 0 1 1.685.617c.213.309.308.712.237 1.092-.071.38-.285.688-.617.901l-6.502 4.153a2.047 2.047 0 0 1-.403.166 1.57 1.57 0 0 1-1.685-.64 1.588 1.588 0 0 1-.26-.974l.023-.237-.237-.071a8.654 8.654 0 0 1-2.563-1.282l-.332-.237-.119.38c-.023.107-.047.208-.07.308-.025.101-.048.202-.072.309-.214 1.281.071 2.586.83 3.63Z' clip-rule='evenodd'/></g><defs><clipPath id='a'><path fill='#fff' d='M0 0h28v28H0z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

1
icons/typescript.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='29' height='29' fill='none'><g clip-path='url(#a)'><path fill='#7B7B7B' d='M.607 14.5V.5h28v28h-28'/><path fill='#fff' d='M6.747 14.55v1.14h3.64v10.36h2.583V15.69h3.64v-1.12c0-.63 0-1.14-.028-1.155 0-.02-2.22-.028-4.914-.028l-4.9.021v1.148l-.021-.007Zm16.359-1.17a3.39 3.39 0 0 1 1.75 1.001c.258.28.643.77.671.896 0 .042-1.21.861-1.945 1.316-.028.021-.14-.098-.252-.28-.364-.518-.735-.742-1.316-.784-.84-.056-1.4.385-1.4 1.12 0 .224.042.35.126.532.189.385.539.616 1.624 1.092 2.002.861 2.87 1.428 3.395 2.24.595.91.727 2.338.328 3.41-.447 1.168-1.54 1.96-3.1 2.218-.49.084-1.61.07-2.135-.02-1.12-.21-2.191-.77-2.85-1.492-.258-.28-.755-1.029-.727-1.078l.266-.168 1.05-.609.79-.462.183.245c.23.364.749.854 1.05 1.022.91.47 2.128.406 2.73-.14.259-.238.37-.49.37-.84 0-.322-.048-.469-.21-.714-.223-.308-.671-.56-1.931-1.12-1.45-.616-2.065-1.008-2.64-1.61-.328-.364-.63-.93-.77-1.4-.104-.406-.14-1.4-.041-1.799.3-1.4 1.358-2.38 2.87-2.66.49-.098 1.645-.056 2.128.07l-.014.014Z'/></g><defs><clipPath id='a'><path fill='#fff' d='M.607.5h28v28h-28z'/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

1
icons/vite.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><path fill='#7B7B7B' d='M26.914 4.865 14.7 26.771a.663.663 0 0 1-1.155.005L1.088 4.867a.665.665 0 0 1 .693-.985L14.01 6.074a.662.662 0 0 0 .236 0l11.97-2.189a.665.665 0 0 1 .699.98Z'/><path fill='#fff' d='m19.833 1.006-9.038 1.777a.332.332 0 0 0-.268.307l-.556 9.418a.332.332 0 0 0 .406.344l2.517-.582a.332.332 0 0 1 .4.39l-.748 3.673a.332.332 0 0 0 .421.385l1.555-.474a.332.332 0 0 1 .421.386l-1.188 5.768c-.074.36.404.558.604.248l.133-.206 7.365-14.743a.333.333 0 0 0-.36-.476l-2.59.502a.333.333 0 0 1-.382-.42l1.69-5.878a.333.333 0 0 0-.382-.419Z'/></svg>

After

Width:  |  Height:  |  Size: 633 B

1
icons/vue.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns='http://www.w3.org/2000/svg' width='28' height='28' fill='none'><path fill='#7B7B7B' d='M22.398 2h5.6l-14 24.148L0 2h10.709l3.29 5.6 3.22-5.6h5.179Z'/><path fill='#fff' d='m0 2 13.999 24.148L27.997 2h-5.6L14 16.489 5.529 2H0Z'/></svg>

After

Width:  |  Height:  |  Size: 245 B

View File

@@ -5,7 +5,7 @@
"license": "MIT",
"sideEffects": false,
"type": "module",
"version": "0.0.3",
"version": "0.0.5",
"scripts": {
"deploy": "npm run build && wrangler pages deploy",
"build": "remix vite:build",
@@ -36,6 +36,7 @@
"@ai-sdk/mistral": "^0.0.43",
"@ai-sdk/openai": "^0.0.66",
"@anthropic-ai/sdk": "^0.33.1",
"@ai-sdk/amazon-bedrock": "1.0.6",
"@codemirror/autocomplete": "^6.18.3",
"@codemirror/commands": "^6.7.1",
"@codemirror/lang-cpp": "^6.0.2",
@@ -75,8 +76,10 @@
"@xterm/addon-web-links": "^0.11.0",
"@xterm/xterm": "^5.5.0",
"ai": "^4.0.13",
"chalk": "^5.4.1",
"date-fns": "^3.6.0",
"diff": "^5.2.0",
"dotenv": "^16.4.7",
"file-saver": "^2.0.5",
"framer-motion": "^11.12.0",
"ignore": "^6.0.2",

1097
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Bedrock</title><path d="M13.05 15.513h3.08c.214 0 .389.177.389.394v1.82a1.704 1.704 0 011.296 1.661c0 .943-.755 1.708-1.685 1.708-.931 0-1.686-.765-1.686-1.708 0-.807.554-1.484 1.297-1.662v-1.425h-2.69v4.663a.395.395 0 01-.188.338l-2.69 1.641a.385.385 0 01-.405-.002l-4.926-3.086a.395.395 0 01-.185-.336V16.3L2.196 14.87A.395.395 0 012 14.555L2 14.528V9.406c0-.14.073-.27.192-.34l2.465-1.462V4.448c0-.129.062-.249.165-.322l.021-.014L9.77 1.058a.385.385 0 01.407 0l2.69 1.675a.395.395 0 01.185.336V7.6h3.856V5.683a1.704 1.704 0 01-1.296-1.662c0-.943.755-1.708 1.685-1.708.931 0 1.685.765 1.685 1.708 0 .807-.553 1.484-1.296 1.662v2.311a.391.391 0 01-.389.394h-4.245v1.806h6.624a1.69 1.69 0 011.64-1.313c.93 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708a1.69 1.69 0 01-1.64-1.314H13.05v1.937h4.953l.915 1.18a1.66 1.66 0 01.84-.227c.931 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708-.93 0-1.685-.765-1.685-1.708 0-.346.102-.668.276-.937l-.724-.935H13.05v1.806zM9.973 1.856L7.93 3.122V6.09h-.778V3.604L5.435 4.669v2.945l2.11 1.36L9.712 7.61V5.334h.778V7.83c0 .136-.07.263-.184.335L7.963 9.638v2.081l1.422 1.009-.446.646-1.406-.998-1.53 1.005-.423-.66 1.605-1.055v-1.99L5.038 8.29l-2.26 1.34v1.676l1.972-1.189.398.677-2.37 1.429V14.3l2.166 1.258 2.27-1.368.397.677-2.176 1.311V19.3l1.876 1.175 2.365-1.426.398.678-2.017 1.216 1.918 1.201 2.298-1.403v-5.78l-4.758 2.893-.4-.675 5.158-3.136V3.289L9.972 1.856zM16.13 18.47a.913.913 0 00-.908.92c0 .507.406.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zm3.63-3.81a.913.913 0 00-.908.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92zm1.555-4.99a.913.913 0 00-.908.92c0 .507.407.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zM17.296 3.1a.913.913 0 00-.907.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92z"></path></svg>

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@@ -0,0 +1,3 @@
<svg width="50" height="50" viewBox="0 0 50 50" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1.17374 40.438C0.920158 41.0455 0.788964 41.6972 0.787668 42.3556C0.787668 45.0847 3.09359 47.5748 6.90525 49.482C7.25319 49.6512 7.63267 49.7456 8.01927 49.7593C8.40589 49.7729 8.79108 49.7054 9.15007 49.5612C9.50907 49.417 9.8339 49.1992 10.1037 48.9218C10.3736 48.6444 10.5824 48.3136 10.7169 47.9507L14.9639 37.73C15.7076 35.9512 16.2948 34.1109 16.7187 32.2298C8.8497 33.5505 2.91813 36.5675 1.27554 40.2588L1.20883 40.4169L1.17374 40.438ZM16.7052 17.533C16.2814 15.652 15.6941 13.8116 14.9503 12.0328L10.7034 1.80865C10.5682 1.44582 10.3587 1.11534 10.0884 0.838345C9.81807 0.561352 9.49284 0.343933 9.13362 0.200047C8.77438 0.05616 8.38911 -0.0108795 8.00244 0.00294113C7.61577 0.0169029 7.23626 0.111584 6.88827 0.280903C3.08013 2.18447 0.77417 4.67824 0.77417 7.40712C0.775742 8.06561 0.906925 8.71717 1.16024 9.32493V9.34594L1.22695 9.50406C2.90464 13.1954 8.83621 16.2089 16.7052 17.533ZM44.4138 0.280903C48.2255 2.18447 50.5314 4.67824 50.5314 7.40712C50.5214 8.06685 50.3831 8.71814 50.1242 9.32493L50.0681 9.45486C48.4396 13.1708 42.4939 16.2018 34.6004 17.533C35.0242 15.652 35.6113 13.8116 36.3552 12.0328L40.6021 1.80865C40.7364 1.44569 40.9453 1.11478 41.2152 0.837513C41.4851 0.560246 41.8101 0.342552 42.1692 0.198662C42.5284 0.0547752 42.9136 -0.0122643 43.3003 0.00183487C43.6869 0.015934 44.0661 0.111031 44.4138 0.280903ZM34.6246 32.2298C35.0405 34.1093 35.6206 35.9487 36.3584 37.7265L40.6054 47.9507C40.7397 48.3136 40.9487 48.6444 41.2185 48.9218C41.4883 49.1992 41.8131 49.417 42.1722 49.5612C42.5312 49.7054 42.9164 49.7729 43.303 49.7593C43.6896 49.7456 44.069 49.6512 44.4169 49.482C48.2286 47.5748 50.5346 45.0847 50.5346 42.3556C50.5315 41.6974 50.4005 41.046 50.1485 40.438L50.0924 40.308C48.4708 36.5921 42.5041 33.5574 34.6246 32.2298ZM34.3566 19.7084C39.9443 18.848 44.5876 17.1727 47.7921 14.9845L46.8831 17.1656C44.8472 22.1033 44.8472 27.6467 46.8831 32.5844L47.7851 34.7584C44.5771 32.5703 39.9336 30.9126 34.3531 30.0415L34.2056 30.0204C31.3759 
29.5919 28.5177 29.3794 25.6557 29.3847C22.8008 29.3804 19.9497 29.593 17.1269 30.0204L16.9795 30.0415C11.3954 30.8985 6.75195 32.5739 3.54398 34.762L4.44949 32.5844C6.48536 27.6467 6.48536 22.1033 4.44949 17.1656L3.54398 14.9845C6.7379 17.1832 11.3814 18.848 16.9654 19.7084L17.1129 19.7296C22.7805 20.5725 28.5415 20.5725 34.2092 19.7296L34.3566 19.7084Z" fill="#000000" style="translate: none; rotate: none; scale: none; transform-origin: 0px 0px;" data-svg-origin="0.7741699814796448 -0.000005409121513366699" transform="matrix(1,0,0,1,0,0)"></path>
</svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

@@ -98,6 +98,9 @@ const COLOR_PRIMITIVES = {
};
export default defineConfig({
safelist: [
...Object.keys(customIconCollection[collectionName]||{}).map(x=>`i-bolt:${x}`)
],
shortcuts: {
'bolt-ease-cubic-bezier': 'ease-[cubic-bezier(0.4,0,0.2,1)]',
'transition-theme': 'transition-[background-color,border-color,color] duration-150 bolt-ease-cubic-bezier',

View File

@@ -4,9 +4,11 @@ import { defineConfig, type ViteDevServer } from 'vite';
import { nodePolyfills } from 'vite-plugin-node-polyfills';
import { optimizeCssModules } from 'vite-plugin-optimize-css-modules';
import tsconfigPaths from 'vite-tsconfig-paths';
import * as dotenv from 'dotenv';
import { execSync } from 'child_process';
dotenv.config();
// Get git hash with fallback
const getGitHash = () => {
try {
@@ -17,18 +19,21 @@ const getGitHash = () => {
};
export default defineConfig((config) => {
return {
define: {
__COMMIT_HASH: JSON.stringify(getGitHash()),
__APP_VERSION: JSON.stringify(process.env.npm_package_version),
// 'process.env': JSON.stringify(process.env)
},
build: {
target: 'esnext',
},
plugins: [
nodePolyfills({
include: ['path', 'buffer'],
include: ['path', 'buffer', 'process'],
}),
config.mode !== 'test' && remixCloudflareDevProxy(),
remixVitePlugin({

View File

@@ -16,4 +16,5 @@ interface Env {
MISTRAL_API_KEY: string;
XAI_API_KEY: string;
PERPLEXITY_API_KEY: string;
AWS_BEDROCK_CONFIG: string;
}