Merge branch 'coleam00:main' into main

armfuls 2024-11-13 22:27:56 +01:00 committed by GitHub
commit f0d823dc24
25 changed files with 561 additions and 215 deletions

View File

@ -43,6 +43,12 @@ OPENAI_LIKE_API_KEY=
# You only need this environment variable set if you want to use Mistral models
MISTRAL_API_KEY=
# Get LMStudio Base URL from LM Studio Developer Console
# Make sure to enable CORS
# Example: http://localhost:1234
LMSTUDIO_API_BASE_URL=
# Get your xAI API key
# https://x.ai/api
# You only need this environment variable set if you want to use xAI models

View File

@ -1,39 +0,0 @@
name: Build and Push Container
on:
push:
branches:
- main
# paths:
# - 'Dockerfile'
workflow_dispatch:
jobs:
build-and-push:
runs-on: [ubuntu-latest]
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and Push Containers
uses: docker/build-push-action@v2
with:
context: .
file: Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
ghcr.io/${{ github.repository }}:latest
ghcr.io/${{ github.repository }}:${{ github.sha }}

View File

@ -1,32 +0,0 @@
name: Semantic Pull Request
on:
pull_request_target:
types: [opened, reopened, edited, synchronize]
permissions:
pull-requests: read
jobs:
main:
name: Validate PR Title
runs-on: ubuntu-latest
steps:
# https://github.com/amannn/action-semantic-pull-request/releases/tag/v5.5.3
- uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
subjectPattern: ^(?![A-Z]).+$
subjectPatternError: |
The subject "{subject}" found in the pull request title "{title}"
didn't match the configured pattern. Please ensure that the subject
doesn't start with an uppercase character.
types: |
fix
feat
chore
build
ci
perf
docs
refactor
revert
test

View File

@ -18,7 +18,9 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt!
- ✅ Ability to sync files (one way sync) to local folder (@muzafferkadir)
- ✅ Containerize the application with Docker for easy installation (@aaronbolton)
- ✅ Publish projects directly to GitHub (@goncaloalves)
- ⬜ Prevent Bolt from rewriting files as often (Done but need to review PR still)
- ✅ Ability to enter API keys in the UI (@ali00209)
- ✅ xAI Grok Beta Integration (@milutinke)
- ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs)
- ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ⬜ **HIGH PRIORITY** - Load local projects into the app
- ⬜ **HIGH PRIORITY** - Attach images to prompts
@ -34,7 +36,6 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt!
- ⬜ Ability to revert code to earlier version
- ⬜ Prompt caching
- ⬜ Better prompt enhancing
- ⬜ Ability to enter API keys in the UI
- ⬜ Have LLM plan the project in an MD file for better results/transparency
- ⬜ VSCode Integration with git-like confirmations
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
@ -85,7 +86,7 @@ If you see usr/local/bin in the output then you're good to go.
git clone https://github.com/coleam00/bolt.new-any-llm.git
```
3. Rename .env.example to .env and add your LLM API keys. You will find this file on a Mac at "[your name]/bolt.new-any-llm/.env.example". For Windows and Linux the path will be similar.
3. Rename .env.example to .env.local and add your LLM API keys. You will find this file on a Mac at "[your name]/bolt.new-any-llm/.env.example". For Windows and Linux the path will be similar.
![image](https://github.com/user-attachments/assets/7e6a532c-2268-401f-8310-e8d20c731328)
@ -115,7 +116,7 @@ Optionally, you can set the debug level:
VITE_LOG_LEVEL=debug
```
**Important**: Never commit your `.env` file to version control. It's already included in .gitignore.
**Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
## Run with Docker

View File

@ -151,7 +151,13 @@ const ActionList = memo(({ actions }: ActionListProps) => {
<div className="flex items-center gap-1.5 text-sm">
<div className={classNames('text-lg', getIconColor(action.status))}>
{status === 'running' ? (
<div className="i-svg-spinners:90-ring-with-bg"></div>
<>
{type !== 'start' ? (
<div className="i-svg-spinners:90-ring-with-bg"></div>
) : (
<div className="i-ph:terminal-window-duotone"></div>
)}
</>
) : status === 'pending' ? (
<div className="i-ph:circle-duotone"></div>
) : status === 'complete' ? (
@ -171,9 +177,19 @@ const ActionList = memo(({ actions }: ActionListProps) => {
<div className="flex items-center w-full min-h-[28px]">
<span className="flex-1">Run command</span>
</div>
) : type === 'start' ? (
<a
onClick={(e) => {
e.preventDefault();
workbenchStore.currentView.set('preview');
}}
className="flex items-center w-full min-h-[28px]"
>
<span className="flex-1">Start Application</span>
</a>
) : null}
</div>
{type === 'shell' && (
{(type === 'shell' || type === 'start') && (
<ShellCodeBlock
classsName={classNames('mt-1', {
'mb-3.5': !isLast,

View File

@ -24,16 +24,16 @@ const EXAMPLE_PROMPTS = [
{ text: 'How do I center a div?' },
];
const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))];
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
return (
<div className="mb-2 flex gap-2">
<select
<select
value={provider}
onChange={(e) => {
setProvider(e.target.value);
const firstModel = [...modelList].find(m => m.provider == e.target.value);
const firstModel = [...modelList].find((m) => m.provider == e.target.value);
setModel(firstModel ? firstModel.name : '');
}}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
@ -49,17 +49,22 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
<option key="OpenAILike" value="OpenAILike">
OpenAILike
</option>
<option key="LMStudio" value="LMStudio">
LMStudio
</option>
</select>
<select
value={model}
onChange={(e) => setModel(e.target.value)}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
>
{[...modelList].filter(e => e.provider == provider && e.name).map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
{[...modelList]
.filter((e) => e.provider == provider && e.name)
.map((modelOption) => (
<option key={modelOption.name} value={modelOption.name}>
{modelOption.label}
</option>
))}
</select>
</div>
);
@ -78,10 +83,10 @@ interface BaseChatProps {
enhancingPrompt?: boolean;
promptEnhanced?: boolean;
input?: string;
model: string;
setModel: (model: string) => void;
provider: string;
setProvider: (provider: string) => void;
model?: string;
setModel?: (model: string) => void;
provider?: string;
setProvider?: (provider: string) => void;
handleStop?: () => void;
sendMessage?: (event: React.UIEvent, messageInput?: string) => void;
handleInputChange?: (event: React.ChangeEvent<HTMLTextAreaElement>) => void;
@ -141,7 +146,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
expires: 30, // 30 days
secure: true, // Only send over HTTPS
sameSite: 'strict', // Protect against CSRF
path: '/' // Accessible across the site
path: '/', // Accessible across the site
});
} catch (error) {
console.error('Error saving API keys to cookies:', error);
@ -278,7 +283,9 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</div>
{input.length > 3 ? (
<div className="text-xs text-bolt-elements-textTertiary">
Use <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Shift</kbd> + <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Return</kbd> for a new line
Use <kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Shift</kbd> +{' '}
<kbd className="kdb px-1.5 py-0.5 rounded bg-bolt-elements-background-depth-2">Return</kbd> for
a new line
</div>
) : null}
</div>
@ -312,4 +319,4 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</div>
);
},
);
);

View File

@ -74,8 +74,14 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
const textareaRef = useRef<HTMLTextAreaElement>(null);
const [chatStarted, setChatStarted] = useState(initialMessages.length > 0);
const [model, setModel] = useState(DEFAULT_MODEL);
const [provider, setProvider] = useState(DEFAULT_PROVIDER);
const [model, setModel] = useState(() => {
const savedModel = Cookies.get('selectedModel');
return savedModel || DEFAULT_MODEL;
});
const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider');
return savedProvider || DEFAULT_PROVIDER;
});
const { showChat } = useStore(chatStore);
@ -216,6 +222,16 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
}
}, []);
const handleModelChange = (newModel: string) => {
setModel(newModel);
Cookies.set('selectedModel', newModel, { expires: 30 });
};
const handleProviderChange = (newProvider: string) => {
setProvider(newProvider);
Cookies.set('selectedProvider', newProvider, { expires: 30 });
};
return (
<BaseChat
ref={animationScope}
@ -228,9 +244,9 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
promptEnhanced={promptEnhanced}
sendMessage={sendMessage}
model={model}
setModel={setModel}
setModel={handleModelChange}
provider={provider}
setProvider={setProvider}
setProvider={handleProviderChange}
messageRef={messageRef}
scrollRef={scrollRef}
handleInputChange={handleInputChange}
@ -246,10 +262,16 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
};
})}
enhancePrompt={() => {
enhancePrompt(input, (input) => {
setInput(input);
scrollTextArea();
});
enhancePrompt(
input,
(input) => {
setInput(input);
scrollTextArea();
},
model,
provider,
apiKeys
);
}}
/>
);

View File

@ -18,7 +18,7 @@ import { themeStore } from '~/lib/stores/theme';
import { workbenchStore } from '~/lib/stores/workbench';
import { classNames } from '~/utils/classNames';
import { WORK_DIR } from '~/utils/constants';
import { renderLogger } from '~/utils/logger';
import { logger, renderLogger } from '~/utils/logger';
import { isMobile } from '~/utils/mobile';
import { FileBreadcrumb } from './FileBreadcrumb';
import { FileTree } from './FileTree';
@ -199,25 +199,48 @@ export const EditorPanel = memo(
<div className="h-full">
<div className="bg-bolt-elements-terminals-background h-full flex flex-col">
<div className="flex items-center bg-bolt-elements-background-depth-2 border-y border-bolt-elements-borderColor gap-1.5 min-h-[34px] p-2">
{Array.from({ length: terminalCount + 1 }, (_, index) => {
const isActive = activeTerminal === index;
return (
<>
{index == 0 ? (
<button
key={index}
className={classNames(
'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
{
'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textSecondary hover:text-bolt-elements-textPrimary':
isActive,
'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
!isActive,
},
)}
onClick={() => setActiveTerminal(index)}
>
<div className="i-ph:terminal-window-duotone text-lg" />
Bolt Terminal
</button>
) : (
<>
<button
key={index}
className={classNames(
'flex items-center text-sm cursor-pointer gap-1.5 px-3 py-2 h-full whitespace-nowrap rounded-full',
{
'bg-bolt-elements-terminals-buttonBackground text-bolt-elements-textPrimary': isActive,
'bg-bolt-elements-background-depth-2 text-bolt-elements-textSecondary hover:bg-bolt-elements-terminals-buttonBackground':
!isActive,
},
)}
onClick={() => setActiveTerminal(index)}
>
<div className="i-ph:terminal-window-duotone text-lg" />
Terminal {terminalCount > 1 && index}
</button>
</>
)}
</>
);
})}
{terminalCount < MAX_TERMINALS && <IconButton icon="i-ph:plus" size="md" onClick={addTerminal} />}
@ -229,9 +252,26 @@ export const EditorPanel = memo(
onClick={() => workbenchStore.toggleTerminal(false)}
/>
</div>
{Array.from({ length: terminalCount }, (_, index) => {
{Array.from({ length: terminalCount + 1 }, (_, index) => {
const isActive = activeTerminal === index;
if (index == 0) {
logger.info('Starting bolt terminal');
return (
<Terminal
key={index}
className={classNames('h-full overflow-hidden', {
hidden: !isActive,
})}
ref={(ref) => {
terminalRefs.current.push(ref);
}}
onTerminalReady={(terminal) => workbenchStore.attachBoltTerminal(terminal)}
onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)}
theme={theme}
/>
);
}
return (
<Terminal
key={index}

View File

@ -42,6 +42,8 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
switch (provider) {
case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'LMStudio':
return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
case 'Ollama':
let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
if (env.RUNNING_IN_DOCKER === 'true') {

View File

@ -83,6 +83,15 @@ export function getOpenRouterModel(apiKey: string, model: string) {
return openRouter.chat(model);
}
export function getLMStudioModel(baseURL: string, model: string) {
const lmstudio = createOpenAI({
baseURL: `${baseURL}/v1`,
apiKey: "",
});
return lmstudio(model);
}
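For orientation, here is a minimal usage sketch of the LM Studio wiring added in this commit. It is illustrative only: the import paths, the `env` binding, and the model id `qwen2.5-coder-7b-instruct` are assumptions rather than part of the diff; LM Studio exposes an OpenAI-compatible API under `/v1`, which is why `createOpenAI` is reused above.

```ts
// Hypothetical sketch: resolve the LM Studio base URL, then obtain a model handle.
// getBaseURL and getLMStudioModel are the functions touched by this commit; the paths are assumed.
import { getBaseURL } from '~/lib/.server/llm/api-key';
import { getLMStudioModel } from '~/lib/.server/llm/model';

declare const env: Env; // Cloudflare worker bindings, provided by the runtime

const baseURL = getBaseURL(env, 'LMStudio'); // falls back to http://localhost:1234
const lmStudioModel = getLMStudioModel(baseURL, 'qwen2.5-coder-7b-instruct'); // model id is an assumption
```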
export function getXAIModel(apiKey: string, model: string) {
const openai = createOpenAI({
baseURL: 'https://api.x.ai/v1',
@ -105,13 +114,15 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
case 'OpenRouter':
return getOpenRouterModel(apiKey, model);
case 'Google':
return getGoogleModel(apiKey, model)
return getGoogleModel(apiKey, model);
case 'OpenAILike':
return getOpenAILikeModel(baseURL, apiKey, model);
case 'Deepseek':
return getDeepseekModel(apiKey, model)
return getDeepseekModel(apiKey, model);
case 'Mistral':
return getMistralModel(apiKey, model);
case 'LMStudio':
return getLMStudioModel(baseURL, model);
case 'xAI':
return getXAIModel(apiKey, model);
default:

View File

@ -174,10 +174,16 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w
- When Using \`npx\`, ALWAYS provide the \`--yes\` flag.
- When running multiple shell commands, use \`&&\` to run them sequentially.
- ULTRA IMPORTANT: Do NOT re-run a dev command if there is one that starts a dev server and new dependencies were installed or files updated! If a dev server has started already, assume that installing dependencies will be executed in a different process and will be picked up by the dev server.
- ULTRA IMPORTANT: Do NOT re-run a dev command with a shell action; use the start action to run dev commands
- file: For writing new files or updating existing files. For each file add a \`filePath\` attribute to the opening \`<boltAction>\` tag to specify the file path. The content of the file artifact is the file contents. All file paths MUST BE relative to the current working directory.
- start: For starting a development server.
- Use this to start the application if it is not already started or when NEW dependencies have been added
- Only use this action when you need to run a dev server or start the application
- ULTRA IMPORTANT: do NOT re-run a dev server if files were updated; the existing dev server can automatically detect changes and execute the file changes
9. The order of the actions is VERY IMPORTANT. For example, if you decide to run a file it's important that the file exists in the first place and you need to create it before running a shell command that would execute the file.
10. ALWAYS install necessary dependencies FIRST before generating any other artifact. If that requires a \`package.json\` then you should create that first!
@ -265,7 +271,7 @@ Here are some examples of correct usage of artifacts:
...
</boltAction>
<boltAction type="shell">
<boltAction type="start">
npm run dev
</boltAction>
</boltArtifact>
@ -322,7 +328,7 @@ Here are some examples of correct usage of artifacts:
...
</boltAction>
<boltAction type="shell">
<boltAction type="start">
npm run dev
</boltAction>
</boltArtifact>

View File

@ -36,6 +36,10 @@ const messageParser = new StreamingMessageParser({
workbenchStore.runAction(data);
},
onActionStream: (data) => {
logger.trace('onActionStream', data.action);
workbenchStore.runAction(data, true);
},
},
});

View File

@ -12,41 +12,55 @@ export function usePromptEnhancer() {
setPromptEnhanced(false);
};
const enhancePrompt = async (input: string, setInput: (value: string) => void) => {
const enhancePrompt = async (
input: string,
setInput: (value: string) => void,
model: string,
provider: string,
apiKeys?: Record<string, string>
) => {
setEnhancingPrompt(true);
setPromptEnhanced(false);
const requestBody: any = {
message: input,
model,
provider,
};
if (apiKeys) {
requestBody.apiKeys = apiKeys;
}
const response = await fetch('/api/enhancer', {
method: 'POST',
body: JSON.stringify({
message: input,
}),
body: JSON.stringify(requestBody),
});
const reader = response.body?.getReader();
const originalInput = input;
if (reader) {
const decoder = new TextDecoder();
let _input = '';
let _error;
try {
setInput('');
while (true) {
const { value, done } = await reader.read();
if (done) {
break;
}
_input += decoder.decode(value);
logger.trace('Set input', _input);
setInput(_input);
}
} catch (error) {
@ -56,10 +70,10 @@ export function usePromptEnhancer() {
if (_error) {
logger.error(_error);
}
setEnhancingPrompt(false);
setPromptEnhanced(true);
setTimeout(() => {
setInput(_input);
});

View File

@ -1,10 +1,12 @@
import { WebContainer } from '@webcontainer/api';
import { map, type MapStore } from 'nanostores';
import { WebContainer, type WebContainerProcess } from '@webcontainer/api';
import { atom, map, type MapStore } from 'nanostores';
import * as nodePath from 'node:path';
import type { BoltAction } from '~/types/actions';
import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable';
import type { ActionCallbackData } from './message-parser';
import type { ITerminal } from '~/types/terminal';
import type { BoltShell } from '~/utils/shell';
const logger = createScopedLogger('ActionRunner');
@ -36,11 +38,14 @@ type ActionsMap = MapStore<Record<string, ActionState>>;
export class ActionRunner {
#webcontainer: Promise<WebContainer>;
#currentExecutionPromise: Promise<void> = Promise.resolve();
#shellTerminal: () => BoltShell;
runnerId = atom<string>(`${Date.now()}`);
actions: ActionsMap = map({});
constructor(webcontainerPromise: Promise<WebContainer>) {
constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
this.#webcontainer = webcontainerPromise;
this.#shellTerminal = getShellTerminal;
}
addAction(data: ActionCallbackData) {
@ -72,7 +77,7 @@ export class ActionRunner {
});
}
async runAction(data: ActionCallbackData) {
async runAction(data: ActionCallbackData, isStreaming: boolean = false) {
const { actionId } = data;
const action = this.actions.get()[actionId];
@ -83,19 +88,22 @@ export class ActionRunner {
if (action.executed) {
return;
}
if (isStreaming && action.type !== 'file') {
return;
}
this.#updateAction(actionId, { ...action, ...data.action, executed: true });
this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
this.#currentExecutionPromise = this.#currentExecutionPromise
.then(() => {
return this.#executeAction(actionId);
return this.#executeAction(actionId, isStreaming);
})
.catch((error) => {
console.error('Action failed:', error);
});
}
async #executeAction(actionId: string) {
async #executeAction(actionId: string, isStreaming: boolean = false) {
const action = this.actions.get()[actionId];
this.#updateAction(actionId, { status: 'running' });
@ -110,11 +118,16 @@ export class ActionRunner {
await this.#runFileAction(action);
break;
}
case 'start': {
await this.#runStartAction(action)
break;
}
}
this.#updateAction(actionId, { status: action.abortSignal.aborted ? 'aborted' : 'complete' });
this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' });
} catch (error) {
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
logger.error(`[${action.type}]:Action failed\n\n`, error);
// re-throw the error to be caught in the promise chain
throw error;
@ -125,28 +138,38 @@ export class ActionRunner {
if (action.type !== 'shell') {
unreachable('Expected shell action');
}
const shell = this.#shellTerminal()
await shell.ready()
if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
if (resp?.exitCode != 0) {
throw new Error("Failed To Execute Shell Command");
}
}

async #runStartAction(action: ActionState) {
if (action.type !== 'start') {
unreachable('Expected shell action');
}
if (!this.#shellTerminal) {
unreachable('Shell terminal not found');
}
const shell = this.#shellTerminal()
await shell.ready()
if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
if (resp?.exitCode != 0) {
throw new Error("Failed To Start Application");
}
return resp
}
async #runFileAction(action: ActionState) {
@ -177,7 +200,6 @@ export class ActionRunner {
logger.error('Failed to write file\n\n', error);
}
}
#updateAction(id: string, newState: ActionStateUpdate) {
const actions = this.actions.get();

View File

@ -28,6 +28,7 @@ export interface ParserCallbacks {
onArtifactOpen?: ArtifactCallback;
onArtifactClose?: ArtifactCallback;
onActionOpen?: ActionCallback;
onActionStream?: ActionCallback;
onActionClose?: ActionCallback;
}
@ -54,7 +55,7 @@ interface MessageState {
export class StreamingMessageParser {
#messages = new Map<string, MessageState>();
constructor(private _options: StreamingMessageParserOptions = {}) {}
constructor(private _options: StreamingMessageParserOptions = {}) { }
parse(messageId: string, input: string) {
let state = this.#messages.get(messageId);
@ -118,6 +119,21 @@ export class StreamingMessageParser {
i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
} else {
if ('type' in currentAction && currentAction.type === 'file') {
let content = input.slice(i);
this._options.callbacks?.onActionStream?.({
artifactId: currentArtifact.id,
messageId,
actionId: String(state.actionId - 1),
action: {
...currentAction as FileAction,
content,
filePath: currentAction.filePath,
},
});
}
break;
}
} else {
@ -256,7 +272,7 @@ export class StreamingMessageParser {
}
(actionAttributes as FileAction).filePath = filePath;
} else if (actionType !== 'shell') {
} else if (!(['shell', 'start'].includes(actionType))) {
logger.warn(`Unknown action type '${actionType}'`);
}

View File

@ -1,14 +1,15 @@
import type { WebContainer, WebContainerProcess } from '@webcontainer/api';
import { atom, type WritableAtom } from 'nanostores';
import type { ITerminal } from '~/types/terminal';
import { newShellProcess } from '~/utils/shell';
import { newBoltShellProcess, newShellProcess } from '~/utils/shell';
import { coloredText } from '~/utils/terminal';
export class TerminalStore {
#webcontainer: Promise<WebContainer>;
#terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
#boltTerminal = newBoltShellProcess()
showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(false);
showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true);
constructor(webcontainerPromise: Promise<WebContainer>) {
this.#webcontainer = webcontainerPromise;
@ -17,10 +18,22 @@ export class TerminalStore {
import.meta.hot.data.showTerminal = this.showTerminal;
}
}
get boltTerminal() {
return this.#boltTerminal;
}
toggleTerminal(value?: boolean) {
this.showTerminal.set(value !== undefined ? value : !this.showTerminal.get());
}
async attachBoltTerminal(terminal: ITerminal) {
try {
let wc = await this.#webcontainer
await this.#boltTerminal.init(wc, terminal)
} catch (error: any) {
terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message);
return;
}
}
async attachTerminal(terminal: ITerminal) {
try {

View File

@ -11,7 +11,9 @@ import { PreviewsStore } from './previews';
import { TerminalStore } from './terminal';
import JSZip from 'jszip';
import { saveAs } from 'file-saver';
import { Octokit } from "@octokit/rest";
import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
import * as nodePath from 'node:path';
import type { WebContainerProcess } from '@webcontainer/api';
export interface ArtifactState {
id: string;
@ -39,6 +41,7 @@ export class WorkbenchStore {
unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
modifiedFiles = new Set<string>();
artifactIdList: string[] = [];
#boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined;
constructor() {
if (import.meta.hot) {
@ -76,6 +79,9 @@ export class WorkbenchStore {
get showTerminal() {
return this.#terminalStore.showTerminal;
}
get boltTerminal() {
return this.#terminalStore.boltTerminal;
}
toggleTerminal(value?: boolean) {
this.#terminalStore.toggleTerminal(value);
@ -84,6 +90,10 @@ export class WorkbenchStore {
attachTerminal(terminal: ITerminal) {
this.#terminalStore.attachTerminal(terminal);
}
attachBoltTerminal(terminal: ITerminal) {
this.#terminalStore.attachBoltTerminal(terminal);
}
onTerminalResize(cols: number, rows: number) {
this.#terminalStore.onTerminalResize(cols, rows);
@ -232,7 +242,7 @@ export class WorkbenchStore {
id,
title,
closed: false,
runner: new ActionRunner(webcontainer),
runner: new ActionRunner(webcontainer, () => this.boltTerminal),
});
}
@ -258,7 +268,7 @@ export class WorkbenchStore {
artifact.runner.addAction(data);
}
async runAction(data: ActionCallbackData) {
async runAction(data: ActionCallbackData, isStreaming: boolean = false) {
const { messageId } = data;
const artifact = this.#getArtifact(messageId);
@ -266,8 +276,29 @@ export class WorkbenchStore {
if (!artifact) {
unreachable('Artifact not found');
}
if (data.action.type === 'file') {
let wc = await webcontainer
const fullPath = nodePath.join(wc.workdir, data.action.filePath);
if (this.selectedFile.value !== fullPath) {
this.setSelectedFile(fullPath);
}
if (this.currentView.value !== 'code') {
this.currentView.set('code');
}
const doc = this.#editorStore.documents.get()[fullPath];
if (!doc) {
await artifact.runner.runAction(data, isStreaming);
}
artifact.runner.runAction(data);
this.#editorStore.updateFile(fullPath, data.action.content);
if (!isStreaming) {
this.resetCurrentDocument();
await artifact.runner.runAction(data);
}
} else {
artifact.runner.runAction(data);
}
}
#getArtifact(id: string) {
@ -336,24 +367,25 @@ export class WorkbenchStore {
}
async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
try {
// Get the GitHub auth token from environment variables
const githubToken = ghToken;
const owner = githubUsername;
if (!githubToken) {
throw new Error('GitHub token is not set in environment variables');
}
// Initialize Octokit with the auth token
const octokit = new Octokit({ auth: githubToken });
// Check if the repository already exists before creating it
let repo
let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data']
try {
repo = await octokit.repos.get({ owner: owner, repo: repoName });
let resp = await octokit.repos.get({ owner: owner, repo: repoName });
repo = resp.data
} catch (error) {
if (error instanceof Error && 'status' in error && error.status === 404) {
// Repository doesn't exist, so create a new one
@ -368,13 +400,13 @@ export class WorkbenchStore {
throw error; // Some other error occurred
}
}
// Get all files
const files = this.files.get();
if (!files || Object.keys(files).length === 0) {
throw new Error('No files found to push');
}
// Create blobs for each file
const blobs = await Promise.all(
Object.entries(files).map(async ([filePath, dirent]) => {
@ -389,13 +421,13 @@ export class WorkbenchStore {
}
})
);
const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs
if (validBlobs.length === 0) {
throw new Error('No valid files to push');
}
// Get the latest commit SHA (assuming main branch, update dynamically if needed)
const { data: ref } = await octokit.git.getRef({
owner: repo.owner.login,
@ -403,7 +435,7 @@ export class WorkbenchStore {
ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
});
const latestCommitSha = ref.object.sha;
// Create a new tree
const { data: newTree } = await octokit.git.createTree({
owner: repo.owner.login,
@ -416,7 +448,7 @@ export class WorkbenchStore {
sha: blob!.sha,
})),
});
// Create a new commit
const { data: newCommit } = await octokit.git.createCommit({
owner: repo.owner.login,
@ -425,7 +457,7 @@ export class WorkbenchStore {
tree: newTree.sha,
parents: [latestCommitSha],
});
// Update the reference
await octokit.git.updateRef({
owner: repo.owner.login,
@ -433,7 +465,7 @@ export class WorkbenchStore {
ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch
sha: newCommit.sha,
});
alert(`Repository created and code pushed: ${repo.html_url}`);
} catch (error) {
console.error('Error pushing to GitHub:', error instanceof Error ? error.message : String(error));

View File

@ -2,6 +2,7 @@ import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { StreamingTextResponse, parseStreamPart } from 'ai';
import { streamText } from '~/lib/.server/llm/stream-text';
import { stripIndents } from '~/utils/stripIndent';
import type { StreamingOptions } from '~/lib/.server/llm/stream-text';
const encoder = new TextEncoder();
const decoder = new TextDecoder();
@ -11,14 +12,34 @@ export async function action(args: ActionFunctionArgs) {
}
async function enhancerAction({ context, request }: ActionFunctionArgs) {
const { message } = await request.json<{ message: string }>();
const { message, model, provider, apiKeys } = await request.json<{
message: string;
model: string;
provider: string;
apiKeys?: Record<string, string>;
}>();
// Validate 'model' and 'provider' fields
if (!model || typeof model !== 'string') {
throw new Response('Invalid or missing model', {
status: 400,
statusText: 'Bad Request'
});
}
if (!provider || typeof provider !== 'string') {
throw new Response('Invalid or missing provider', {
status: 400,
statusText: 'Bad Request'
});
}
try {
const result = await streamText(
[
{
role: 'user',
content: stripIndents`
content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n` + stripIndents`
I want you to improve the user prompt that is wrapped in \`<original_prompt>\` tags.
IMPORTANT: Only respond with the improved prompt and nothing else!
@ -30,28 +51,42 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
},
],
context.cloudflare.env,
undefined,
apiKeys
);
const transformStream = new TransformStream({
transform(chunk, controller) {
const processedChunk = decoder
.decode(chunk)
.split('\n')
.filter((line) => line !== '')
.map(parseStreamPart)
.map((part) => part.value)
.join('');
controller.enqueue(encoder.encode(processedChunk));
const text = decoder.decode(chunk);
const lines = text.split('\n').filter(line => line.trim() !== '');
for (const line of lines) {
try {
const parsed = parseStreamPart(line);
if (parsed.type === 'text') {
controller.enqueue(encoder.encode(parsed.value));
}
} catch (e) {
// Skip invalid JSON lines
console.warn('Failed to parse stream part:', line);
}
}
},
});
const transformedStream = result.toAIStream().pipeThrough(transformStream);
return new StreamingTextResponse(transformedStream);
} catch (error) {
} catch (error: unknown) {
console.log(error);
if (error instanceof Error && error.message?.includes('API key')) {
throw new Response('Invalid or missing API key', {
status: 401,
statusText: 'Unauthorized'
});
}
throw new Response(null, {
status: 500,
statusText: 'Internal Server Error',

View File

@ -13,6 +13,10 @@ export interface ShellAction extends BaseAction {
type: 'shell';
}
export type BoltAction = FileAction | ShellAction;
export interface StartAction extends BaseAction {
type: 'start';
}
export type BoltAction = FileAction | ShellAction | StartAction;
export type BoltActionData = BoltAction | BaseAction;

View File

@ -5,4 +5,5 @@ export interface ITerminal {
reset: () => void;
write: (data: string) => void;
onData: (cb: (data: string) => void) => void;
input: (data: string) => void;
}

View File

@ -5,11 +5,10 @@ export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
export const DEFAULT_PROVIDER = 'Anthropic';
const staticModels: ModelInfo[] = [
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
{ name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
{ name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' },
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
@ -27,7 +26,10 @@ const staticModels: ModelInfo[] = [
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' },
{ name: 'claude-3-opus-20240229', label: 'Claude 3 Opus', provider: 'Anthropic' },
{ name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
{ name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
{ name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
{ name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' },
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
@ -105,10 +107,28 @@ async function getOpenAILikeModels(): Promise<ModelInfo[]> {
}
}
async function getLMStudioModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
const response = await fetch(`${base_url}/v1/models`);
const data = await response.json() as any;
return data.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'LMStudio',
}));
} catch (e) {
return [];
}
}
async function initializeModelList(): Promise<void> {
const ollamaModels = await getOllamaModels();
const openAiLikeModels = await getOpenAILikeModels();
MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
const lmstudioModels = await getLMStudioModels();
MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels, ...lmstudioModels];
}
initializeModelList().then();
export { getOllamaModels, getOpenAILikeModels, initializeModelList };
export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList };
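As a quick illustration of how the merged list is consumed elsewhere in this commit (the provider derivation and the per-provider filter both appear in the BaseChat hunks above), a small sketch follows; the import path is assumed, and `initializeModelList` already runs once at module load.

```ts
// Hypothetical consumption sketch: derive the provider dropdown and the per-provider model list.
import { MODEL_LIST, initializeModelList } from '~/utils/constants'; // assumed path

await initializeModelList(); // static + Ollama + OpenAILike + LMStudio models

const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))];
const lmStudioModels = MODEL_LIST.filter((m) => m.provider === 'LMStudio' && m.name);
```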

View File

@ -1,6 +1,7 @@
import type { WebContainer } from '@webcontainer/api';
import type { WebContainer, WebContainerProcess } from '@webcontainer/api';
import type { ITerminal } from '~/types/terminal';
import { withResolvers } from './promises';
import { atom } from 'nanostores';
export async function newShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
const args: string[] = [];
@ -19,7 +20,6 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
const jshReady = withResolvers<void>();
let isInteractive = false;
output.pipeTo(
new WritableStream({
write(data) {
@ -40,6 +40,8 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
);
terminal.onData((data) => {
// console.log('terminal onData', { data, isInteractive });
if (isInteractive) {
input.write(data);
}
@ -49,3 +51,145 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
return process;
}
export class BoltShell {
#initialized: (() => void) | undefined
#readyPromise: Promise<void>
#webcontainer: WebContainer | undefined
#terminal: ITerminal | undefined
#process: WebContainerProcess | undefined
executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise<any> } | undefined>()
#outputStream: ReadableStreamDefaultReader<string> | undefined
#shellInputStream: WritableStreamDefaultWriter<string> | undefined
constructor() {
this.#readyPromise = new Promise((resolve) => {
this.#initialized = resolve
})
}
ready() {
return this.#readyPromise;
}
async init(webcontainer: WebContainer, terminal: ITerminal) {
this.#webcontainer = webcontainer
this.#terminal = terminal
let callback = (data: string) => {
console.log(data)
}
let { process, output } = await this.newBoltShellProcess(webcontainer, terminal)
this.#process = process
this.#outputStream = output.getReader()
await this.waitTillOscCode('interactive')
this.#initialized?.()
}
get terminal() {
return this.#terminal
}
get process() {
return this.#process
}
async executeCommand(sessionId: string, command: string) {
if (!this.process || !this.terminal) {
return
}
let state = this.executionState.get()
//interrupt the current execution
// this.#shellInputStream?.write('\x03');
this.terminal.input('\x03');
if (state && state.executionPrms) {
await state.executionPrms
}
//start a new execution
this.terminal.input(command.trim() + '\n');
//wait for the execution to finish
let executionPrms = this.getCurrentExecutionResult()
this.executionState.set({ sessionId, active: true, executionPrms })
let resp = await executionPrms
this.executionState.set({ sessionId, active: false })
return resp
}
async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
const args: string[] = [];
// we spawn a JSH process with a fallback cols and rows in case the process is not attached yet to a visible terminal
const process = await webcontainer.spawn('/bin/jsh', ['--osc', ...args], {
terminal: {
cols: terminal.cols ?? 80,
rows: terminal.rows ?? 15,
},
});
const input = process.input.getWriter();
this.#shellInputStream = input;
const [internalOutput, terminalOutput] = process.output.tee();
const jshReady = withResolvers<void>();
let isInteractive = false;
terminalOutput.pipeTo(
new WritableStream({
write(data) {
if (!isInteractive) {
const [, osc] = data.match(/\x1b\]654;([^\x07]+)\x07/) || [];
if (osc === 'interactive') {
// wait until we see the interactive OSC
isInteractive = true;
jshReady.resolve();
}
}
terminal.write(data);
},
}),
);
terminal.onData((data) => {
// console.log('terminal onData', { data, isInteractive });
if (isInteractive) {
input.write(data);
}
});
await jshReady.promise;
return { process, output: internalOutput };
}
async getCurrentExecutionResult() {
let { output, exitCode } = await this.waitTillOscCode('exit')
return { output, exitCode };
}
async waitTillOscCode(waitCode: string) {
let fullOutput = '';
let exitCode: number = 0;
if (!this.#outputStream) return { output: fullOutput, exitCode };
let tappedStream = this.#outputStream
while (true) {
const { value, done } = await tappedStream.read();
if (done) break;
const text = value || '';
fullOutput += text;
// Check if command completion signal with exit code
const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
if (osc === 'exit') {
exitCode = parseInt(code, 10);
}
if (osc === waitCode) {
break;
}
}
return { output: fullOutput, exitCode };
}
}
export function newBoltShellProcess() {
return new BoltShell();
}
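For reference, a small sketch of the OSC-654 handshake that `executeCommand`/`waitTillOscCode` rely on. The sample payload is an assumption inferred from the regex above (jsh's `--osc` mode emits markers of the form `\x1b]654;<code>[=<pid>:<exit>]\x07`); it only shows how the capture groups fall out.

```ts
// Hypothetical sample of an "exit" marker appearing in the shell output stream.
const sample = 'npm install\r\n\x1b]654;exit=42:0\x07';

// Same pattern used by BoltShell.waitTillOscCode: capture the OSC code name, pid, and exit status.
const [, osc, , pid, code] = sample.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];

console.log(osc);                 // "exit"
console.log(pid);                 // "42" (hypothetical pid)
console.log(parseInt(code, 10));  // 0 -> executeCommand treats the command as successful
```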

View File

@ -16,7 +16,7 @@
"start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
"dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
"dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",
"dockerbuild:prod": "docker build -t bolt-ai:production bolt-ai:latest --target bolt-ai-production .",
"dockerbuild:prod": "docker build -t bolt-ai:production -t bolt-ai:latest --target bolt-ai-production .",
"dockerbuild": "docker build -t bolt-ai:development -t bolt-ai:latest --target bolt-ai-development .",
"typecheck": "tsc",
"typegen": "wrangler types",
@ -117,5 +117,5 @@
"resolutions": {
"@typescript-eslint/utils": "^8.0.0-alpha.30"
},
"packageManager": "pnpm@9.12.2+sha512.22721b3a11f81661ae1ec68ce1a7b879425a1ca5b991c975b074ac220b187ce56c708fe5db69f4c962c989452eee76c82877f4ee80f474cebd61ee13461b6228"
"packageManager": "pnpm@9.4.0"
}

View File

@ -27,7 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"],
envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"],
css: {
preprocessorOptions: {
scss: {

View File

@ -7,4 +7,5 @@ interface Env {
OPENAI_LIKE_API_KEY: string;
OPENAI_LIKE_API_BASE_URL: string;
DEEPSEEK_API_KEY: string;
LMSTUDIO_API_BASE_URL: string;
}