diff --git a/.env.example b/.env.example index 446034c6..f00e2ce9 100644 --- a/.env.example +++ b/.env.example @@ -43,6 +43,12 @@ OPENAI_LIKE_API_KEY= # You only need this environment variable set if you want to use Mistral models MISTRAL_API_KEY= + +# Get LMStudio Base URL from LM Studio Developer Console +# Make sure to enable CORS +# Example: http://localhost:1234 +LMSTUDIO_API_BASE_URL= + # Get your xAI API key # https://x.ai/api # You only need this environment variable set if you want to use xAI models diff --git a/.github/workflows/github-build-push.yml b/.github/workflows/github-build-push.yml deleted file mode 100644 index 4d4db05d..00000000 --- a/.github/workflows/github-build-push.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Build and Push Container - -on: - push: - branches: - - main - # paths: - # - 'Dockerfile' - workflow_dispatch: -jobs: - build-and-push: - runs-on: [ubuntu-latest] - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - - name: Login to GitHub Container Registry - uses: docker/login-action@v1 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build and Push Containers - uses: docker/build-push-action@v2 - with: - context: . - file: Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: | - ghcr.io/${{ github.repository }}:latest - ghcr.io/${{ github.repository }}:${{ github.sha }} diff --git a/.github/workflows/semantic-pr.yaml b/.github/workflows/semantic-pr.yaml deleted file mode 100644 index 503b0455..00000000 --- a/.github/workflows/semantic-pr.yaml +++ /dev/null @@ -1,32 +0,0 @@ -name: Semantic Pull Request -on: - pull_request_target: - types: [opened, reopened, edited, synchronize] -permissions: - pull-requests: read -jobs: - main: - name: Validate PR Title - runs-on: ubuntu-latest - steps: - # https://github.com/amannn/action-semantic-pull-request/releases/tag/v5.5.3 - - uses: amannn/action-semantic-pull-request@0723387faaf9b38adef4775cd42cfd5155ed6017 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - subjectPattern: ^(?![A-Z]).+$ - subjectPatternError: | - The subject "{subject}" found in the pull request title "{title}" - didn't match the configured pattern. Please ensure that the subject - doesn't start with an uppercase character. - types: | - fix - feat - chore - build - ci - perf - docs - refactor - revert - test diff --git a/README.md b/README.md index fb70e756..54ae824e 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,9 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt! - ✅ Ability to sync files (one way sync) to local folder (@muzafferkadir) - ✅ Containerize the application with Docker for easy installation (@aaronbolton) - ✅ Publish projects directly to GitHub (@goncaloalves) -- ⬜ Prevent Bolt from rewriting files as often (Done but need to review PR still) +- ✅ Ability to enter API keys in the UI (@ali00209) +- ✅ xAI Grok Beta Integration (@milutinke) +- ⬜ **HIGH PRIORITY** - Prevent Bolt from rewriting files as often (file locking and diffs) - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start) - ⬜ **HIGH PRIORITY** Load local projects into the app - ⬜ **HIGH PRIORITY** - Attach images to prompts @@ -34,7 +36,6 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt! 
- ⬜ Ability to revert code to earlier version
- ⬜ Prompt caching
- ⬜ Better prompt enhancing
-- ⬜ Ability to enter API keys in the UI
- ⬜ Have LLM plan the project in a MD file for better results/transparency
- ⬜ VSCode Integration with git-like confirmations
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
@@ -85,7 +86,7 @@ If you see usr/local/bin in the output then you're good to go.
git clone https://github.com/coleam00/bolt.new-any-llm.git
```
-3. Rename .env.example to .env and add your LLM API keys. You will find this file on a Mac at "[your name]/bold.new-any-llm/.env.example". For Windows and Linux the path will be similar.
+3. Rename .env.example to .env.local and add your LLM API keys. You will find this file on a Mac at "[your name]/bolt.new-any-llm/.env.example". For Windows and Linux the path will be similar.
![image](https://github.com/user-attachments/assets/7e6a532c-2268-401f-8310-e8d20c731328)
@@ -115,7 +116,7 @@ Optionally, you can set the debug level:
VITE_LOG_LEVEL=debug
```
-**Important**: Never commit your `.env` file to version control. It's already included in .gitignore.
+**Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore.
## Run with Docker
diff --git a/app/components/chat/Artifact.tsx b/app/components/chat/Artifact.tsx
index 9de52dd9..62020fd8 100644
--- a/app/components/chat/Artifact.tsx
+++ b/app/components/chat/Artifact.tsx
@@ -151,7 +151,13 @@ const ActionList = memo(({ actions }: ActionListProps) => {
{status === 'running' ? ( -
+ <> + {type !== 'start' ? ( +
+ ) : ( +
+ )} + ) : status === 'pending' ? (
) : status === 'complete' ? ( @@ -171,9 +177,19 @@ const ActionList = memo(({ actions }: ActionListProps) => {
Run command
+ ) : type === 'start' ? ( + { + e.preventDefault(); + workbenchStore.currentView.set('preview'); + }} + className="flex items-center w-full min-h-[28px]" + > + Start Application + ) : null}
- {type === 'shell' && ( + {(type === 'shell' || type === 'start') && ( model.provider))] +const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]; const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => { return (
-
); @@ -78,10 +83,10 @@ interface BaseChatProps { enhancingPrompt?: boolean; promptEnhanced?: boolean; input?: string; - model: string; - setModel: (model: string) => void; - provider: string; - setProvider: (provider: string) => void; + model?: string; + setModel?: (model: string) => void; + provider?: string; + setProvider?: (provider: string) => void; handleStop?: () => void; sendMessage?: (event: React.UIEvent, messageInput?: string) => void; handleInputChange?: (event: React.ChangeEvent) => void; @@ -141,7 +146,7 @@ export const BaseChat = React.forwardRef( expires: 30, // 30 days secure: true, // Only send over HTTPS sameSite: 'strict', // Protect against CSRF - path: '/' // Accessible across the site + path: '/', // Accessible across the site }); } catch (error) { console.error('Error saving API keys to cookies:', error); @@ -278,7 +283,9 @@ export const BaseChat = React.forwardRef(
{input.length > 3 ? (
- Use Shift + Return for a new line
+ Use Shift +{' '}
+ Return for
+ a new line
) : null} @@ -312,4 +319,4 @@ export const BaseChat = React.forwardRef( ); }, -); \ No newline at end of file +); diff --git a/app/components/chat/Chat.client.tsx b/app/components/chat/Chat.client.tsx index 102c4c21..a8f94f0a 100644 --- a/app/components/chat/Chat.client.tsx +++ b/app/components/chat/Chat.client.tsx @@ -74,8 +74,14 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp const textareaRef = useRef(null); const [chatStarted, setChatStarted] = useState(initialMessages.length > 0); - const [model, setModel] = useState(DEFAULT_MODEL); - const [provider, setProvider] = useState(DEFAULT_PROVIDER); + const [model, setModel] = useState(() => { + const savedModel = Cookies.get('selectedModel'); + return savedModel || DEFAULT_MODEL; + }); + const [provider, setProvider] = useState(() => { + const savedProvider = Cookies.get('selectedProvider'); + return savedProvider || DEFAULT_PROVIDER; + }); const { showChat } = useStore(chatStore); @@ -216,6 +222,16 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp } }, []); + const handleModelChange = (newModel: string) => { + setModel(newModel); + Cookies.set('selectedModel', newModel, { expires: 30 }); + }; + + const handleProviderChange = (newProvider: string) => { + setProvider(newProvider); + Cookies.set('selectedProvider', newProvider, { expires: 30 }); + }; + return ( { - enhancePrompt(input, (input) => { - setInput(input); - scrollTextArea(); - }); + enhancePrompt( + input, + (input) => { + setInput(input); + scrollTextArea(); + }, + model, + provider, + apiKeys + ); }} /> ); diff --git a/app/components/workbench/EditorPanel.tsx b/app/components/workbench/EditorPanel.tsx index d1a265a6..e789f1d6 100644 --- a/app/components/workbench/EditorPanel.tsx +++ b/app/components/workbench/EditorPanel.tsx @@ -18,7 +18,7 @@ import { themeStore } from '~/lib/stores/theme'; import { workbenchStore } from '~/lib/stores/workbench'; import { classNames } from '~/utils/classNames'; import { WORK_DIR } from '~/utils/constants'; -import { renderLogger } from '~/utils/logger'; +import { logger, renderLogger } from '~/utils/logger'; import { isMobile } from '~/utils/mobile'; import { FileBreadcrumb } from './FileBreadcrumb'; import { FileTree } from './FileTree'; @@ -199,25 +199,48 @@ export const EditorPanel = memo(
- {Array.from({ length: terminalCount }, (_, index) => { + {Array.from({ length: terminalCount + 1 }, (_, index) => { const isActive = activeTerminal === index; return ( - + ) : ( + <> + + )} - onClick={() => setActiveTerminal(index)} - > -
- Terminal {terminalCount > 1 && index + 1} - + ); })} {terminalCount < MAX_TERMINALS && } @@ -229,9 +252,26 @@ export const EditorPanel = memo( onClick={() => workbenchStore.toggleTerminal(false)} />
- {Array.from({ length: terminalCount }, (_, index) => { + {Array.from({ length: terminalCount + 1 }, (_, index) => { const isActive = activeTerminal === index; + if (index == 0) { + logger.info('Starting bolt terminal'); + return ( + { + terminalRefs.current.push(ref); + }} + onTerminalReady={(terminal) => workbenchStore.attachBoltTerminal(terminal)} + onTerminalResize={(cols, rows) => workbenchStore.onTerminalResize(cols, rows)} + theme={theme} + /> + ); + } return ( \` tag to specify the file path. The content of the file artifact is the file contents. All file paths MUST BE relative to the current working directory. + - start: For starting a development server. + - Use to start the application if it is not already started or if NEW dependencies were added + - Only use this action when you need to run a dev server or start the application + - ULTRA IMPORTANT: do NOT re-run a dev server if files were updated; the existing dev server can automatically detect changes and execute the file changes + + 9. The order of the actions is VERY IMPORTANT. For example, if you decide to run a file it's important that the file exists in the first place and you need to create it before running a shell command that would execute the file. 10. ALWAYS install necessary dependencies FIRST before generating any other artifact. If that requires a \`package.json\` then you should create that first!
@@ -265,7 +271,7 @@ Here are some examples of correct usage of artifacts: ... - + npm run dev
@@ -322,7 +328,7 @@ Here are some examples of correct usage of artifacts: ... - + npm run dev
diff --git a/app/lib/hooks/useMessageParser.ts b/app/lib/hooks/useMessageParser.ts
index a70fb82f..97a063da 100644
--- a/app/lib/hooks/useMessageParser.ts
+++ b/app/lib/hooks/useMessageParser.ts
@@ -36,6 +36,10 @@ const messageParser = new StreamingMessageParser({ workbenchStore.runAction(data); }, + onActionStream: (data) => { + logger.trace('onActionStream', data.action); + workbenchStore.runAction(data, true); + }, }, });
diff --git a/app/lib/hooks/usePromptEnhancer.ts b/app/lib/hooks/usePromptEnhancer.ts
index f376cc0c..ee449992 100644
--- a/app/lib/hooks/usePromptEnhancer.ts
+++ b/app/lib/hooks/usePromptEnhancer.ts
@@ -12,41 +12,55 @@ export function usePromptEnhancer() { setPromptEnhanced(false); }; - const enhancePrompt = async (input: string, setInput: (value: string) => void) => { + const enhancePrompt = async ( + input: string, + setInput: (value: string) => void, + model: string, + provider: string, + apiKeys?: Record + ) => { setEnhancingPrompt(true); setPromptEnhanced(false); - + + const requestBody: any = { + message: input, + model, + provider, + }; + + if (apiKeys) { + requestBody.apiKeys = apiKeys; + } + const response = await fetch('/api/enhancer', { method: 'POST', - body: JSON.stringify({ - message: input, - }), + body: JSON.stringify(requestBody), }); - + const reader = response.body?.getReader(); - + const originalInput = input; - + if (reader) { const decoder = new TextDecoder(); - + let _input = ''; let _error; - + try { setInput(''); - + while (true) { const { value, done } = await reader.read(); - + if (done) { break; } - + _input += decoder.decode(value); - + logger.trace('Set input', _input); - + setInput(_input); } } catch (error) {
@@ -56,10 +70,10 @@ export function usePromptEnhancer() { if (_error) { logger.error(_error); } - + setEnhancingPrompt(false); setPromptEnhanced(true); - + setTimeout(() => { setInput(_input); });
diff --git a/app/lib/runtime/action-runner.ts b/app/lib/runtime/action-runner.ts
index e2ea6a22..f94390be
100644 --- a/app/lib/runtime/action-runner.ts +++ b/app/lib/runtime/action-runner.ts @@ -1,10 +1,12 @@ -import { WebContainer } from '@webcontainer/api'; -import { map, type MapStore } from 'nanostores'; +import { WebContainer, type WebContainerProcess } from '@webcontainer/api'; +import { atom, map, type MapStore } from 'nanostores'; import * as nodePath from 'node:path'; import type { BoltAction } from '~/types/actions'; import { createScopedLogger } from '~/utils/logger'; import { unreachable } from '~/utils/unreachable'; import type { ActionCallbackData } from './message-parser'; +import type { ITerminal } from '~/types/terminal'; +import type { BoltShell } from '~/utils/shell'; const logger = createScopedLogger('ActionRunner'); @@ -36,11 +38,14 @@ type ActionsMap = MapStore>; export class ActionRunner { #webcontainer: Promise; #currentExecutionPromise: Promise = Promise.resolve(); - + #shellTerminal: () => BoltShell; + runnerId = atom(`${Date.now()}`); actions: ActionsMap = map({}); - constructor(webcontainerPromise: Promise) { + constructor(webcontainerPromise: Promise, getShellTerminal: () => BoltShell) { this.#webcontainer = webcontainerPromise; + this.#shellTerminal = getShellTerminal; + } addAction(data: ActionCallbackData) { @@ -72,7 +77,7 @@ export class ActionRunner { }); } - async runAction(data: ActionCallbackData) { + async runAction(data: ActionCallbackData, isStreaming: boolean = false) { const { actionId } = data; const action = this.actions.get()[actionId]; @@ -83,19 +88,22 @@ export class ActionRunner { if (action.executed) { return; } + if (isStreaming && action.type !== 'file') { + return; + } - this.#updateAction(actionId, { ...action, ...data.action, executed: true }); + this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming }); this.#currentExecutionPromise = this.#currentExecutionPromise .then(() => { - return this.#executeAction(actionId); + return this.#executeAction(actionId, isStreaming); }) .catch((error) => { console.error('Action failed:', error); }); } - async #executeAction(actionId: string) { + async #executeAction(actionId: string, isStreaming: boolean = false) { const action = this.actions.get()[actionId]; this.#updateAction(actionId, { status: 'running' }); @@ -110,11 +118,16 @@ export class ActionRunner { await this.#runFileAction(action); break; } + case 'start': { + await this.#runStartAction(action) + break; + } } - this.#updateAction(actionId, { status: action.abortSignal.aborted ? 'aborted' : 'complete' }); + this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 
'aborted' : 'complete' }); } catch (error) { this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }); + logger.error(`[${action.type}]:Action failed\n\n`, error); // re-throw the error to be caught in the promise chain throw error; @@ -125,28 +138,38 @@ export class ActionRunner { if (action.type !== 'shell') { unreachable('Expected shell action'); } + const shell = this.#shellTerminal() + await shell.ready() + if (!shell || !shell.terminal || !shell.process) { + unreachable('Shell terminal not found'); + } + const resp = await shell.executeCommand(this.runnerId.get(), action.content) + logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`) + if (resp?.exitCode != 0) { + throw new Error("Failed To Execute Shell Command"); - const webcontainer = await this.#webcontainer; + } + } - const process = await webcontainer.spawn('jsh', ['-c', action.content], { - env: { npm_config_yes: true }, - }); + async #runStartAction(action: ActionState) { + if (action.type !== 'start') { + unreachable('Expected shell action'); + } + if (!this.#shellTerminal) { + unreachable('Shell terminal not found'); + } + const shell = this.#shellTerminal() + await shell.ready() + if (!shell || !shell.terminal || !shell.process) { + unreachable('Shell terminal not found'); + } + const resp = await shell.executeCommand(this.runnerId.get(), action.content) + logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`) - action.abortSignal.addEventListener('abort', () => { - process.kill(); - }); - - process.output.pipeTo( - new WritableStream({ - write(data) { - console.log(data); - }, - }), - ); - - const exitCode = await process.exit; - - logger.debug(`Process terminated with code ${exitCode}`); + if (resp?.exitCode != 0) { + throw new Error("Failed To Start Application"); + } + return resp } async #runFileAction(action: ActionState) { @@ -177,7 +200,6 @@ export class ActionRunner { logger.error('Failed to write file\n\n', error); } } - #updateAction(id: string, newState: ActionStateUpdate) { const actions = this.actions.get(); diff --git a/app/lib/runtime/message-parser.ts b/app/lib/runtime/message-parser.ts index 317f81df..4b564da1 100644 --- a/app/lib/runtime/message-parser.ts +++ b/app/lib/runtime/message-parser.ts @@ -28,6 +28,7 @@ export interface ParserCallbacks { onArtifactOpen?: ArtifactCallback; onArtifactClose?: ArtifactCallback; onActionOpen?: ActionCallback; + onActionStream?: ActionCallback; onActionClose?: ActionCallback; } @@ -54,7 +55,7 @@ interface MessageState { export class StreamingMessageParser { #messages = new Map(); - constructor(private _options: StreamingMessageParserOptions = {}) {} + constructor(private _options: StreamingMessageParserOptions = {}) { } parse(messageId: string, input: string) { let state = this.#messages.get(messageId); @@ -118,6 +119,21 @@ export class StreamingMessageParser { i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length; } else { + if ('type' in currentAction && currentAction.type === 'file') { + let content = input.slice(i); + + this._options.callbacks?.onActionStream?.({ + artifactId: currentArtifact.id, + messageId, + actionId: String(state.actionId - 1), + action: { + ...currentAction as FileAction, + content, + filePath: currentAction.filePath, + }, + + }); + } break; } } else { @@ -256,7 +272,7 @@ export class StreamingMessageParser { } (actionAttributes as FileAction).filePath = filePath; - } else if (actionType !== 'shell') { + } else if (!(['shell', 'start'].includes(actionType))) { logger.warn(`Unknown 
action type '${actionType}'`); } diff --git a/app/lib/stores/terminal.ts b/app/lib/stores/terminal.ts index 419320e3..b2537ccb 100644 --- a/app/lib/stores/terminal.ts +++ b/app/lib/stores/terminal.ts @@ -1,14 +1,15 @@ import type { WebContainer, WebContainerProcess } from '@webcontainer/api'; import { atom, type WritableAtom } from 'nanostores'; import type { ITerminal } from '~/types/terminal'; -import { newShellProcess } from '~/utils/shell'; +import { newBoltShellProcess, newShellProcess } from '~/utils/shell'; import { coloredText } from '~/utils/terminal'; export class TerminalStore { #webcontainer: Promise; #terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = []; + #boltTerminal = newBoltShellProcess() - showTerminal: WritableAtom = import.meta.hot?.data.showTerminal ?? atom(false); + showTerminal: WritableAtom = import.meta.hot?.data.showTerminal ?? atom(true); constructor(webcontainerPromise: Promise) { this.#webcontainer = webcontainerPromise; @@ -17,10 +18,22 @@ export class TerminalStore { import.meta.hot.data.showTerminal = this.showTerminal; } } + get boltTerminal() { + return this.#boltTerminal; + } toggleTerminal(value?: boolean) { this.showTerminal.set(value !== undefined ? value : !this.showTerminal.get()); } + async attachBoltTerminal(terminal: ITerminal) { + try { + let wc = await this.#webcontainer + await this.#boltTerminal.init(wc, terminal) + } catch (error: any) { + terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message); + return; + } + } async attachTerminal(terminal: ITerminal) { try { diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts index c42cc627..8589391c 100644 --- a/app/lib/stores/workbench.ts +++ b/app/lib/stores/workbench.ts @@ -11,7 +11,9 @@ import { PreviewsStore } from './previews'; import { TerminalStore } from './terminal'; import JSZip from 'jszip'; import { saveAs } from 'file-saver'; -import { Octokit } from "@octokit/rest"; +import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest"; +import * as nodePath from 'node:path'; +import type { WebContainerProcess } from '@webcontainer/api'; export interface ArtifactState { id: string; @@ -39,6 +41,7 @@ export class WorkbenchStore { unsavedFiles: WritableAtom> = import.meta.hot?.data.unsavedFiles ?? 
atom(new Set()); modifiedFiles = new Set(); artifactIdList: string[] = []; + #boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined; constructor() { if (import.meta.hot) { @@ -76,6 +79,9 @@ export class WorkbenchStore { get showTerminal() { return this.#terminalStore.showTerminal; } + get boltTerminal() { + return this.#terminalStore.boltTerminal; + } toggleTerminal(value?: boolean) { this.#terminalStore.toggleTerminal(value); @@ -84,6 +90,10 @@ export class WorkbenchStore { attachTerminal(terminal: ITerminal) { this.#terminalStore.attachTerminal(terminal); } + attachBoltTerminal(terminal: ITerminal) { + + this.#terminalStore.attachBoltTerminal(terminal); + } onTerminalResize(cols: number, rows: number) { this.#terminalStore.onTerminalResize(cols, rows); @@ -232,7 +242,7 @@ export class WorkbenchStore { id, title, closed: false, - runner: new ActionRunner(webcontainer), + runner: new ActionRunner(webcontainer, () => this.boltTerminal), }); } @@ -258,7 +268,7 @@ export class WorkbenchStore { artifact.runner.addAction(data); } - async runAction(data: ActionCallbackData) { + async runAction(data: ActionCallbackData, isStreaming: boolean = false) { const { messageId } = data; const artifact = this.#getArtifact(messageId); @@ -266,8 +276,29 @@ export class WorkbenchStore { if (!artifact) { unreachable('Artifact not found'); } + if (data.action.type === 'file') { + let wc = await webcontainer + const fullPath = nodePath.join(wc.workdir, data.action.filePath); + if (this.selectedFile.value !== fullPath) { + this.setSelectedFile(fullPath); + } + if (this.currentView.value !== 'code') { + this.currentView.set('code'); + } + const doc = this.#editorStore.documents.get()[fullPath]; + if (!doc) { + await artifact.runner.runAction(data, isStreaming); + } - artifact.runner.runAction(data); + this.#editorStore.updateFile(fullPath, data.action.content); + + if (!isStreaming) { + this.resetCurrentDocument(); + await artifact.runner.runAction(data); + } + } else { + artifact.runner.runAction(data); + } } #getArtifact(id: string) { @@ -336,24 +367,25 @@ export class WorkbenchStore { } async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) { - + try { // Get the GitHub auth token from environment variables const githubToken = ghToken; - + const owner = githubUsername; - + if (!githubToken) { throw new Error('GitHub token is not set in environment variables'); } - + // Initialize Octokit with the auth token const octokit = new Octokit({ auth: githubToken }); - + // Check if the repository already exists before creating it - let repo + let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data'] try { - repo = await octokit.repos.get({ owner: owner, repo: repoName }); + let resp = await octokit.repos.get({ owner: owner, repo: repoName }); + repo = resp.data } catch (error) { if (error instanceof Error && 'status' in error && error.status === 404) { // Repository doesn't exist, so create a new one @@ -368,13 +400,13 @@ export class WorkbenchStore { throw error; // Some other error occurred } } - + // Get all files const files = this.files.get(); if (!files || Object.keys(files).length === 0) { throw new Error('No files found to push'); } - + // Create blobs for each file const blobs = await Promise.all( Object.entries(files).map(async ([filePath, dirent]) => { @@ -389,13 +421,13 @@ export class WorkbenchStore { } }) ); - + const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs - + if (validBlobs.length === 0) { throw new Error('No 
valid files to push'); } - + // Get the latest commit SHA (assuming main branch, update dynamically if needed) const { data: ref } = await octokit.git.getRef({ owner: repo.owner.login, @@ -403,7 +435,7 @@ export class WorkbenchStore { ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch }); const latestCommitSha = ref.object.sha; - + // Create a new tree const { data: newTree } = await octokit.git.createTree({ owner: repo.owner.login, @@ -416,7 +448,7 @@ export class WorkbenchStore { sha: blob!.sha, })), }); - + // Create a new commit const { data: newCommit } = await octokit.git.createCommit({ owner: repo.owner.login, @@ -425,7 +457,7 @@ export class WorkbenchStore { tree: newTree.sha, parents: [latestCommitSha], }); - + // Update the reference await octokit.git.updateRef({ owner: repo.owner.login, @@ -433,7 +465,7 @@ export class WorkbenchStore { ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch sha: newCommit.sha, }); - + alert(`Repository created and code pushed: ${repo.html_url}`); } catch (error) { console.error('Error pushing to GitHub:', error instanceof Error ? error.message : String(error)); diff --git a/app/routes/api.enhancer.ts b/app/routes/api.enhancer.ts index 5c8175ca..7040b890 100644 --- a/app/routes/api.enhancer.ts +++ b/app/routes/api.enhancer.ts @@ -2,6 +2,7 @@ import { type ActionFunctionArgs } from '@remix-run/cloudflare'; import { StreamingTextResponse, parseStreamPart } from 'ai'; import { streamText } from '~/lib/.server/llm/stream-text'; import { stripIndents } from '~/utils/stripIndent'; +import type { StreamingOptions } from '~/lib/.server/llm/stream-text'; const encoder = new TextEncoder(); const decoder = new TextDecoder(); @@ -11,14 +12,34 @@ export async function action(args: ActionFunctionArgs) { } async function enhancerAction({ context, request }: ActionFunctionArgs) { - const { message } = await request.json<{ message: string }>(); + const { message, model, provider, apiKeys } = await request.json<{ + message: string; + model: string; + provider: string; + apiKeys?: Record; + }>(); + + // Validate 'model' and 'provider' fields + if (!model || typeof model !== 'string') { + throw new Response('Invalid or missing model', { + status: 400, + statusText: 'Bad Request' + }); + } + + if (!provider || typeof provider !== 'string') { + throw new Response('Invalid or missing provider', { + status: 400, + statusText: 'Bad Request' + }); + } try { const result = await streamText( [ { role: 'user', - content: stripIndents` + content: `[Model: ${model}]\n\n[Provider: ${provider}]\n\n` + stripIndents` I want you to improve the user prompt that is wrapped in \`\` tags. IMPORTANT: Only respond with the improved prompt and nothing else! 
@@ -30,28 +51,42 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) { }, ], context.cloudflare.env, + undefined, + apiKeys ); const transformStream = new TransformStream({ transform(chunk, controller) { - const processedChunk = decoder - .decode(chunk) - .split('\n') - .filter((line) => line !== '') - .map(parseStreamPart) - .map((part) => part.value) - .join(''); - - controller.enqueue(encoder.encode(processedChunk)); + const text = decoder.decode(chunk); + const lines = text.split('\n').filter(line => line.trim() !== ''); + + for (const line of lines) { + try { + const parsed = parseStreamPart(line); + if (parsed.type === 'text') { + controller.enqueue(encoder.encode(parsed.value)); + } + } catch (e) { + // Skip invalid JSON lines + console.warn('Failed to parse stream part:', line); + } + } }, }); const transformedStream = result.toAIStream().pipeThrough(transformStream); return new StreamingTextResponse(transformedStream); - } catch (error) { + } catch (error: unknown) { console.log(error); + if (error instanceof Error && error.message?.includes('API key')) { + throw new Response('Invalid or missing API key', { + status: 401, + statusText: 'Unauthorized' + }); + } + throw new Response(null, { status: 500, statusText: 'Internal Server Error', diff --git a/app/types/actions.ts b/app/types/actions.ts index b81127aa..08c1f39a 100644 --- a/app/types/actions.ts +++ b/app/types/actions.ts @@ -13,6 +13,10 @@ export interface ShellAction extends BaseAction { type: 'shell'; } -export type BoltAction = FileAction | ShellAction; +export interface StartAction extends BaseAction { + type: 'start'; +} + +export type BoltAction = FileAction | ShellAction | StartAction; export type BoltActionData = BoltAction | BaseAction; diff --git a/app/types/terminal.ts b/app/types/terminal.ts index 75ae3a3a..48e50b47 100644 --- a/app/types/terminal.ts +++ b/app/types/terminal.ts @@ -5,4 +5,5 @@ export interface ITerminal { reset: () => void; write: (data: string) => void; onData: (cb: (data: string) => void) => void; + input: (data: string) => void; } diff --git a/app/utils/constants.ts b/app/utils/constants.ts index 361abf64..8ac4151d 100644 --- a/app/utils/constants.ts +++ b/app/utils/constants.ts @@ -5,11 +5,10 @@ export const WORK_DIR = `/home/${WORK_DIR_NAME}`; export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications'; export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/; export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/; -export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620'; +export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest'; export const DEFAULT_PROVIDER = 'Anthropic'; const staticModels: ModelInfo[] = [ - { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' }, { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' }, { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' }, { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' }, @@ -27,7 +26,10 @@ const staticModels: ModelInfo[] = [ { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' }, { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' }, { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }, - { name: 'claude-3-opus-20240229', label: 'Claude 3 Opus', provider: 'Anthropic' }, + { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' }, + { 
name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' }, + { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' }, + { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' }, { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' }, { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' }, { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' }, @@ -105,10 +107,28 @@ async function getOpenAILikeModels(): Promise { } } + +async function getLMStudioModels(): Promise { + try { + const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || "http://localhost:1234"; + const response = await fetch(`${base_url}/v1/models`); + const data = await response.json() as any; + return data.data.map((model: any) => ({ + name: model.id, + label: model.id, + provider: 'LMStudio', + })); + } catch (e) { + return []; + } +} + + async function initializeModelList(): Promise { const ollamaModels = await getOllamaModels(); const openAiLikeModels = await getOpenAILikeModels(); - MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels]; + const lmstudioModels = await getLMStudioModels(); + MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels,...lmstudioModels,]; } initializeModelList().then(); -export { getOllamaModels, getOpenAILikeModels, initializeModelList }; \ No newline at end of file +export { getOllamaModels,getOpenAILikeModels,getLMStudioModels,initializeModelList }; \ No newline at end of file diff --git a/app/utils/shell.ts b/app/utils/shell.ts index 1c5c834d..d45e8a6b 100644 --- a/app/utils/shell.ts +++ b/app/utils/shell.ts @@ -1,6 +1,7 @@ -import type { WebContainer } from '@webcontainer/api'; +import type { WebContainer, WebContainerProcess } from '@webcontainer/api'; import type { ITerminal } from '~/types/terminal'; import { withResolvers } from './promises'; +import { atom } from 'nanostores'; export async function newShellProcess(webcontainer: WebContainer, terminal: ITerminal) { const args: string[] = []; @@ -19,7 +20,6 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer const jshReady = withResolvers(); let isInteractive = false; - output.pipeTo( new WritableStream({ write(data) { @@ -40,6 +40,8 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer ); terminal.onData((data) => { + // console.log('terminal onData', { data, isInteractive }); + if (isInteractive) { input.write(data); } @@ -49,3 +51,145 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer return process; } + + + +export class BoltShell { + #initialized: (() => void) | undefined + #readyPromise: Promise + #webcontainer: WebContainer | undefined + #terminal: ITerminal | undefined + #process: WebContainerProcess | undefined + executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise } | undefined>() + #outputStream: ReadableStreamDefaultReader | undefined + #shellInputStream: WritableStreamDefaultWriter | undefined + constructor() { + this.#readyPromise = new Promise((resolve) => { + this.#initialized = resolve + }) + } + ready() { + return this.#readyPromise; + } + async init(webcontainer: WebContainer, terminal: ITerminal) { + this.#webcontainer = webcontainer + this.#terminal = terminal + let callback = (data: string) => { + console.log(data) + } + let { process, output } = await this.newBoltShellProcess(webcontainer, terminal) + 
this.#process = process + this.#outputStream = output.getReader() + await this.waitTillOscCode('interactive') + this.#initialized?.() + } + get terminal() { + return this.#terminal + } + get process() { + return this.#process + } + async executeCommand(sessionId: string, command: string) { + if (!this.process || !this.terminal) { + return + } + let state = this.executionState.get() + + //interrupt the current execution + // this.#shellInputStream?.write('\x03'); + this.terminal.input('\x03'); + if (state && state.executionPrms) { + await state.executionPrms + } + //start a new execution + this.terminal.input(command.trim() + '\n'); + + //wait for the execution to finish + let executionPrms = this.getCurrentExecutionResult() + this.executionState.set({ sessionId, active: true, executionPrms }) + + let resp = await executionPrms + this.executionState.set({ sessionId, active: false }) + return resp + + } + async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) { + const args: string[] = []; + + // we spawn a JSH process with a fallback cols and rows in case the process is not attached yet to a visible terminal + const process = await webcontainer.spawn('/bin/jsh', ['--osc', ...args], { + terminal: { + cols: terminal.cols ?? 80, + rows: terminal.rows ?? 15, + }, + }); + + const input = process.input.getWriter(); + this.#shellInputStream = input; + const [internalOutput, terminalOutput] = process.output.tee(); + + const jshReady = withResolvers(); + + let isInteractive = false; + terminalOutput.pipeTo( + new WritableStream({ + write(data) { + if (!isInteractive) { + const [, osc] = data.match(/\x1b\]654;([^\x07]+)\x07/) || []; + + if (osc === 'interactive') { + // wait until we see the interactive OSC + isInteractive = true; + + jshReady.resolve(); + } + } + + terminal.write(data); + }, + }), + ); + + terminal.onData((data) => { + // console.log('terminal onData', { data, isInteractive }); + + if (isInteractive) { + input.write(data); + } + }); + + await jshReady.promise; + + return { process, output: internalOutput }; + } + async getCurrentExecutionResult() { + let { output, exitCode } = await this.waitTillOscCode('exit') + return { output, exitCode }; + } + async waitTillOscCode(waitCode: string) { + let fullOutput = ''; + let exitCode: number = 0; + if (!this.#outputStream) return { output: fullOutput, exitCode }; + let tappedStream = this.#outputStream + + while (true) { + const { value, done } = await tappedStream.read(); + if (done) break; + const text = value || ''; + fullOutput += text; + + // Check if command completion signal with exit code + const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || []; + if (osc === 'exit') { + exitCode = parseInt(code, 10); + } + if (osc === waitCode) { + break; + } + } + return { output: fullOutput, exitCode }; + } +} +export function newBoltShellProcess() { + return new BoltShell(); +} diff --git a/package.json b/package.json index 3cba1cf5..ce8e95d0 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings", "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session", "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai", - "dockerbuild:prod": "docker build -t bolt-ai:production bolt-ai:latest --target bolt-ai-production .", + "dockerbuild:prod": "docker build -t 
bolt-ai:production -t bolt-ai:latest --target bolt-ai-production .", "dockerbuild": "docker build -t bolt-ai:development -t bolt-ai:latest --target bolt-ai-development .", "typecheck": "tsc", "typegen": "wrangler types", @@ -117,5 +117,5 @@ "resolutions": { "@typescript-eslint/utils": "^8.0.0-alpha.30" }, - "packageManager": "pnpm@9.12.2+sha512.22721b3a11f81661ae1ec68ce1a7b879425a1ca5b991c975b074ac220b187ce56c708fe5db69f4c962c989452eee76c82877f4ee80f474cebd61ee13461b6228" + "packageManager": "pnpm@9.4.0" } diff --git a/vite.config.ts b/vite.config.ts index 62539070..9c94ceae 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -27,7 +27,7 @@ export default defineConfig((config) => { chrome129IssuePlugin(), config.mode === 'production' && optimizeCssModules({ apply: 'build' }), ], - envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"], + envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL","LMSTUDIO_API_BASE_URL"], css: { preprocessorOptions: { scss: { diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts index 82961ecd..6db1aa60 100644 --- a/worker-configuration.d.ts +++ b/worker-configuration.d.ts @@ -7,4 +7,5 @@ interface Env { OPENAI_LIKE_API_KEY: string; OPENAI_LIKE_API_BASE_URL: string; DEEPSEEK_API_KEY: string; + LMSTUDIO_API_BASE_URL: string; }