Merge branch 'main' into main

Cole Medin authored on 2024-10-24 08:25:07 -05:00, committed by GitHub
19 changed files with 615 additions and 99 deletions


@@ -1,7 +1,7 @@
import { useStore } from '@nanostores/react';
import { motion, type HTMLMotionProps, type Variants } from 'framer-motion';
import { computed } from 'nanostores';
import { memo, useCallback, useEffect } from 'react';
import { memo, useCallback, useEffect, useState } from 'react';
import { toast } from 'react-toastify';
import {
type OnChangeCallback as OnEditorChange,
@@ -55,6 +55,8 @@ const workbenchVariants = {
export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) => {
renderLogger.trace('Workbench');
const [isSyncing, setIsSyncing] = useState(false);
const hasPreview = useStore(computed(workbenchStore.previews, (previews) => previews.length > 0));
const showWorkbench = useStore(workbenchStore.showWorkbench);
const selectedFile = useStore(workbenchStore.selectedFile);
@@ -99,6 +101,21 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
workbenchStore.resetCurrentDocument();
}, []);
const handleSyncFiles = useCallback(async () => {
setIsSyncing(true);
try {
const directoryHandle = await window.showDirectoryPicker();
await workbenchStore.syncFiles(directoryHandle);
toast.success('Files synced successfully');
} catch (error) {
console.error('Error syncing files:', error);
toast.error('Failed to sync files');
} finally {
setIsSyncing(false);
}
}, []);
return (
chatStarted && (
<motion.div
@@ -132,6 +149,10 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
<div className="i-ph:code" />
Download Code
</PanelHeaderButton>
<PanelHeaderButton className="mr-1 text-sm" onClick={handleSyncFiles} disabled={isSyncing}>
{isSyncing ? <div className="i-ph:spinner" /> : <div className="i-ph:cloud-arrow-down" />}
{isSyncing ? 'Syncing...' : 'Sync Files'}
</PanelHeaderButton>
<PanelHeaderButton
className="mr-1 text-sm"
onClick={() => {
@@ -141,6 +162,31 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
<div className="i-ph:terminal" />
Toggle Terminal
</PanelHeaderButton>
<PanelHeaderButton
className="mr-1 text-sm"
onClick={() => {
const repoName = prompt("Please enter a name for your new GitHub repository:", "bolt-generated-project");
if (!repoName) {
alert("Repository name is required. Push to GitHub cancelled.");
return;
}
const githubUsername = prompt("Please enter your GitHub username:");
if (!githubUsername) {
alert("GitHub username is required. Push to GitHub cancelled.");
return;
}
const githubToken = prompt("Please enter your GitHub personal access token:");
if (!githubToken) {
alert("GitHub token is required. Push to GitHub cancelled.");
return;
}
workbenchStore.pushToGitHub(repoName, githubUsername, githubToken);
}}
>
<div className="i-ph:github-logo" />
Push to GitHub
</PanelHeaderButton>
</>
)}
<IconButton
@@ -184,7 +230,6 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
)
);
});
interface ViewProps extends HTMLMotionProps<'div'> {
children: JSX.Element;
}
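
The new Sync Files button calls window.showDirectoryPicker, which is currently only implemented in Chromium-based browsers. A minimal guard, as a sketch (the supportsFileSystemAccess name is illustrative, not part of this commit):

const supportsFileSystemAccess =
  typeof window !== 'undefined' && 'showDirectoryPicker' in window;

When this is false, the button (or at least handleSyncFiles) could bail out early with a clear toast instead of surfacing the generic "Failed to sync files" error.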


@@ -19,8 +19,12 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
case "OpenAILike":
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
case 'Deepseek':
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
case 'Mistral':
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
case "OpenAILike":
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
default:
return "";
}
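
For the new cases to type-check against the Cloudflare bindings, the Env interface needs matching keys. A sketch of the additions, assuming the project keeps its bindings in a worker-configuration.d.ts-style declaration (the exact file is not shown in this diff):

// assumed additions alongside the existing keys
interface Env {
  DEEPSEEK_API_KEY: string;
  MISTRAL_API_KEY: string;
}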


@@ -1,5 +1,5 @@
// see https://docs.anthropic.com/en/docs/about-claude/models
export const MAX_TOKENS = 8192;
export const MAX_TOKENS = 8000;
// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;


@@ -6,6 +6,8 @@ import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { mistral } from '@ai-sdk/mistral';
import { createMistral } from '@ai-sdk/mistral';
export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
@@ -30,6 +32,14 @@ export function getOpenAIModel(apiKey: string, model: string) {
return openai(model);
}
export function getMistralModel(apiKey: string, model: string) {
const mistral = createMistral({
apiKey
});
return mistral(model);
}
export function getGoogleModel(apiKey: string, model: string) {
const google = createGoogleGenerativeAI(
apiKey,
@@ -53,6 +63,15 @@ export function getOllamaModel(baseURL: string, model: string) {
return Ollama;
}
export function getDeepseekModel(apiKey: string, model: string) {
const openai = createOpenAI({
baseURL: 'https://api.deepseek.com/beta',
apiKey,
});
return openai(model);
}
export function getOpenRouterModel(apiKey: string, model: string) {
const openRouter = createOpenRouter({
apiKey
@@ -78,6 +97,10 @@ export function getModel(provider: string, model: string, env: Env) {
return getGoogleModel(apiKey, model)
case 'OpenAILike':
return getOpenAILikeModel(baseURL,apiKey, model);
case 'Deepseek':
return getDeepseekModel(apiKey, model);
case 'Mistral':
return getMistralModel(apiKey, model);
default:
return getOllamaModel(baseURL, model);
}
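
For reference, a minimal usage sketch of the new providers (not part of this commit), assuming env carries the relevant API key; the provider and model names come from the MODEL_LIST entries added later in this diff, and the returned model plugs into the Vercel AI SDK like the existing providers:

import { generateText } from 'ai';

const model = getModel('Mistral', 'open-mistral-7b', env);
const { text } = await generateText({ model, prompt: 'Say hello' });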


@@ -29,7 +29,32 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w
IMPORTANT: When choosing databases or npm packages, prefer options that don't rely on native binaries. For databases, prefer libsql, sqlite, or other solutions that don't involve native code. WebContainer CANNOT execute arbitrary native binaries.
Available shell commands: cat, chmod, cp, echo, hostname, kill, ln, ls, mkdir, mv, ps, pwd, rm, rmdir, xxd, alias, cd, clear, curl, env, false, getconf, head, sort, tail, touch, true, uptime, which, code, jq, loadenv, node, python3, wasm, xdg-open, command, exit, export, source
Available shell commands:
File Operations:
- cat: Display file contents
- cp: Copy files/directories
- ls: List directory contents
- mkdir: Create directory
- mv: Move/rename files
- rm: Remove files
- rmdir: Remove empty directories
- touch: Create empty file/update timestamp
System Information:
- hostname: Show system name
- ps: Display running processes
- pwd: Print working directory
- uptime: Show system uptime
- env: Environment variables
Development Tools:
- node: Execute Node.js code
- python3: Run Python scripts
- code: VSCode operations
- jq: Process JSON
Other Utilities:
- curl, head, sort, tail, clear, which, export, chmod, echo, hostname, kill, ln, xxd, alias, false, getconf, true, loadenv, wasm, xdg-open, command, exit, source
</system_constraints>
<code_formatting_info>
@@ -84,6 +109,36 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w
</${MODIFICATIONS_TAG_NAME}>
</diff_spec>
<chain_of_thought_instructions>
Before providing a solution, BRIEFLY outline your implementation steps. This helps ensure systematic thinking and clear communication. Your planning should:
- List concrete steps you'll take
- Identify key components needed
- Note potential challenges
- Be concise (2-4 lines maximum)
Example responses:
User: "Create a todo list app with local storage"
Assistant: "Sure. I'll start by:
1. Set up Vite + React
2. Create TodoList and TodoItem components
3. Implement localStorage for persistence
4. Add CRUD operations
Let's start now.
[Rest of response...]"
User: "Help debug why my API calls aren't working"
Assistant: "Great. My first steps will be:
1. Check network requests
2. Verify API endpoint format
3. Examine error handling
[Rest of response...]"
</chain_of_thought_instructions>
<artifact_info>
Bolt creates a SINGLE, comprehensive artifact for each project. The artifact contains all necessary steps and components, including:


@@ -11,6 +11,7 @@ import { PreviewsStore } from './previews';
import { TerminalStore } from './terminal';
import JSZip from 'jszip';
import { saveAs } from 'file-saver';
import { Octokit } from "@octokit/rest";
export interface ArtifactState {
id: string;
@@ -280,21 +281,22 @@ export class WorkbenchStore {
for (const [filePath, dirent] of Object.entries(files)) {
if (dirent?.type === 'file' && !dirent.isBinary) {
// Remove '/home/project/' from the beginning of the path
// remove '/home/project/' from the beginning of the path
const relativePath = filePath.replace(/^\/home\/project\//, '');
// Split the path into segments
// split the path into segments
const pathSegments = relativePath.split('/');
// If there's more than one segment, we need to create folders
// if there's more than one segment, we need to create folders
if (pathSegments.length > 1) {
let currentFolder = zip;
for (let i = 0; i < pathSegments.length - 1; i++) {
currentFolder = currentFolder.folder(pathSegments[i])!;
}
currentFolder.file(pathSegments[pathSegments.length - 1], dirent.content);
} else {
// If there's only one segment, it's a file in the root
// if there's only one segment, it's a file in the root
zip.file(relativePath, dirent.content);
}
}
@@ -303,6 +305,140 @@ export class WorkbenchStore {
const content = await zip.generateAsync({ type: 'blob' });
saveAs(content, 'project.zip');
}
async syncFiles(targetHandle: FileSystemDirectoryHandle) {
const files = this.files.get();
const syncedFiles = [];
for (const [filePath, dirent] of Object.entries(files)) {
if (dirent?.type === 'file' && !dirent.isBinary) {
const relativePath = filePath.replace(/^\/home\/project\//, '');
const pathSegments = relativePath.split('/');
let currentHandle = targetHandle;
for (let i = 0; i < pathSegments.length - 1; i++) {
currentHandle = await currentHandle.getDirectoryHandle(pathSegments[i], { create: true });
}
// create or get the file
const fileHandle = await currentHandle.getFileHandle(pathSegments[pathSegments.length - 1], { create: true });
// write the file content
const writable = await fileHandle.createWritable();
await writable.write(dirent.content);
await writable.close();
syncedFiles.push(relativePath);
}
}
return syncedFiles;
}
async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
try {
// Use the GitHub token and username supplied by the caller
const githubToken = ghToken;
const owner = githubUsername;
if (!githubToken) {
throw new Error('GitHub token is required');
}
// Initialize Octokit with the auth token
const octokit = new Octokit({ auth: githubToken });
// Check if the repository already exists before creating it
let repo;
try {
repo = await octokit.repos.get({ owner: owner, repo: repoName });
} catch (error) {
if (error instanceof Error && 'status' in error && error.status === 404) {
// Repository doesn't exist, so create a new one
const { data: newRepo } = await octokit.repos.createForAuthenticatedUser({
name: repoName,
private: false,
auto_init: true,
});
repo = newRepo;
} else {
console.error('Failed to get or create the repository:', error);
throw error; // Some other error occurred
}
}
// Get all files
const files = this.files.get();
if (!files || Object.keys(files).length === 0) {
throw new Error('No files found to push');
}
// Create blobs for each file
const blobs = await Promise.all(
Object.entries(files).map(async ([filePath, dirent]) => {
if (dirent?.type === 'file' && dirent.content) {
const { data: blob } = await octokit.git.createBlob({
owner: repo.owner.login,
repo: repo.name,
content: Buffer.from(dirent.content).toString('base64'),
encoding: 'base64',
});
return { path: filePath.replace(/^\/home\/project\//, ''), sha: blob.sha };
}
})
);
const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs
if (validBlobs.length === 0) {
throw new Error('No valid files to push');
}
// Get the latest commit SHA (assuming main branch, update dynamically if needed)
const { data: ref } = await octokit.git.getRef({
owner: repo.owner.login,
repo: repo.name,
ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
});
const latestCommitSha = ref.object.sha;
// Create a new tree
const { data: newTree } = await octokit.git.createTree({
owner: repo.owner.login,
repo: repo.name,
base_tree: latestCommitSha,
tree: validBlobs.map((blob) => ({
path: blob!.path,
mode: '100644',
type: 'blob',
sha: blob!.sha,
})),
});
// Create a new commit
const { data: newCommit } = await octokit.git.createCommit({
owner: repo.owner.login,
repo: repo.name,
message: 'Initial commit from your app',
tree: newTree.sha,
parents: [latestCommitSha],
});
// Update the reference
await octokit.git.updateRef({
owner: repo.owner.login,
repo: repo.name,
ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
sha: newCommit.sha,
});
alert(`Repository created and code pushed: ${repo.html_url}`);
} catch (error) {
console.error('Error pushing to GitHub:', error instanceof Error ? error.message : String(error));
}
}
}
export const workbenchStore = new WorkbenchStore();
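
Two caveats on the new methods, with sketches that are not part of this commit. First, createWritable() in syncFiles fails unless the user has granted write access to the picked directory, so callers could verify or request it up front (queryPermission and requestPermission are File System Access API methods and may need ambient typings, like the showDirectoryPicker declaration below):

async function ensureWritable(handle: FileSystemDirectoryHandle): Promise<boolean> {
  const options = { mode: 'readwrite' } as const;
  if ((await handle.queryPermission(options)) === 'granted') {
    return true;
  }
  return (await handle.requestPermission(options)) === 'granted';
}

Second, pushToGitHub encodes file contents with Buffer, a Node global that only exists in the browser if the bundler polyfills it; a dependency-free alternative for the blob encoding could stand in for Buffer.from(dirent.content).toString('base64') without changing the payload:

// browser-safe base64 for UTF-8 content, avoiding the Node Buffer global
function toBase64(content: string): string {
  const bytes = new TextEncoder().encode(content);
  let binary = '';
  for (const byte of bytes) {
    binary += String.fromCharCode(byte);
  }
  return btoa(binary);
}

Either way, the token entered at the prompt needs permission to create repositories and push contents, e.g. a classic personal access token with the repo scope.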

app/types/global.d.ts (new file)

@@ -0,0 +1,3 @@
interface Window {
showDirectoryPicker(): Promise<FileSystemDirectoryHandle>;
}
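
If the sync flow later needs to request write access up front or remember the last-used directory, this declaration could be widened to the picker's option bag (a possible extension, not part of the commit):

interface Window {
  showDirectoryPicker(options?: {
    id?: string;
    mode?: 'read' | 'readwrite';
    startIn?: 'desktop' | 'documents' | 'downloads' | 'music' | 'pictures' | 'videos';
  }): Promise<FileSystemDirectoryHandle>;
}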


@@ -1,4 +1,4 @@
import type { ModelInfo } from './types';
import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';
export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
@@ -10,6 +10,8 @@ export const DEFAULT_PROVIDER = 'Anthropic';
const staticModels: ModelInfo[] = [
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
{ name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' },
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
@@ -30,18 +32,29 @@ const staticModels: ModelInfo[] = [
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'},
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'},
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
{ name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
{ name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
{ name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
{ name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
{ name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
{ name: 'ministral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
{ name: 'ministral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' },
];
export let MODEL_LIST: ModelInfo[] = [...staticModels];
async function getOllamaModels(): Promise<ModelInfo[]> {
try {
const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
const url = new URL(base_url).toString();
const response = await fetch(`${url}api/tags`);
const data = await response.json();
const response = await fetch(`${url}/api/tags`);
const data = await response.json() as OllamaApiResponse;
return data.models.map((model: any) => ({
return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
provider: 'Ollama',
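
The as OllamaApiResponse cast relies on the /api/tags payload wrapping the model list; the type is defined elsewhere in this commit and, inferred from this usage, looks roughly like:

interface OllamaApiResponse {
  models: OllamaModel[];
}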


@@ -8,7 +8,7 @@ interface OllamaModelDetails {
quantization_level: string;
}
interface OllamaModel {
export interface OllamaModel {
name: string;
model: string;
modified_at: string;