Lint-fix all files in app

Oliver Jägle 2024-11-21 22:05:35 +01:00
parent 424ad1ea18
commit 2327de3810
No known key found for this signature in database
GPG Key ID: 866E2BD1777473E9
21 changed files with 413 additions and 265 deletions

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import type { Message } from 'ai';
import React, { type RefCallback, useEffect } from 'react';
import { ClientOnly } from 'remix-utils/client-only';
@@ -34,6 +36,7 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
value={provider?.name}
onChange={(e) => {
setProvider(providerList.find((p) => p.name === e.target.value));
const firstModel = [...modelList].find((m) => m.provider == e.target.value);
setModel(firstModel ? firstModel.name : '');
}}
@@ -118,14 +121,17 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
// Load API keys from cookies on component mount
try {
const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys);
if (typeof parsedKeys === 'object' && parsedKeys !== null) {
setApiKeys(parsedKeys);
}
}
} catch (error) {
console.error('Error loading API keys from cookies:', error);
// Clear invalid cookie data
Cookies.remove('apiKeys');
}
@@ -139,6 +145,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
try {
const updatedApiKeys = { ...apiKeys, [provider]: key };
setApiKeys(updatedApiKeys);
// Save updated API keys to cookies with 30 day expiry and secure settings
Cookies.set('apiKeys', JSON.stringify(updatedApiKeys), {
expires: 30, // 30 days
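
For reference, the two hunks above form a load/save round trip. A minimal sketch of the same flow, assuming the js-cookie API imported as Cookies; the key value is a placeholder and the secure flag is an assumption based on the comment above:

import Cookies from 'js-cookie';

// Save: serialize the provider->key map and keep it for 30 days (placeholder key value).
const apiKeys = { Anthropic: 'sk-placeholder' };
Cookies.set('apiKeys', JSON.stringify(apiKeys), { expires: 30, secure: true });

// Load: parse defensively and drop the cookie when it holds invalid JSON.
try {
  const stored = Cookies.get('apiKeys');
  const parsed = stored ? JSON.parse(stored) : null;
  if (typeof parsed === 'object' && parsed !== null) {
    console.log('Restored keys for:', Object.keys(parsed));
  }
} catch {
  Cookies.remove('apiKeys');
}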

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { useStore } from '@nanostores/react';
import type { Message } from 'ai';
import { useChat } from 'ai/react';
@@ -81,7 +83,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
});
const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider');
return PROVIDER_LIST.find(p => p.name === savedProvider) || DEFAULT_PROVIDER;
return PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER;
});
const { showChat } = useStore(chatStore);
@@ -93,11 +95,13 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({
api: '/api/chat',
body: {
apiKeys
apiKeys,
},
onError: (error) => {
logger.error('Request failed\n\n', error);
toast.error('There was an error processing your request: ' + (error.message ? error.message : "No details were returned"));
toast.error(
'There was an error processing your request: ' + (error.message ? error.message : 'No details were returned'),
);
},
onFinish: () => {
logger.debug('Finished streaming');
@@ -218,6 +222,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
useEffect(() => {
const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) {
setApiKeys(JSON.parse(storedApiKeys));
}
@@ -271,7 +276,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
},
model,
provider,
apiKeys
apiKeys,
);
}}
/>

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { modificationsRegex } from '~/utils/diff';
import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { Markdown } from './Markdown';
@@ -17,5 +19,9 @@ export function UserMessage({ content }: UserMessageProps) {
}
function sanitizeUserMessage(content: string) {
return content.replace(modificationsRegex, '').replace(MODEL_REGEX, 'Using: $1').replace(PROVIDER_REGEX, ' ($1)\n\n').trim();
return content
.replace(modificationsRegex, '')
.replace(MODEL_REGEX, 'Using: $1')
.replace(PROVIDER_REGEX, ' ($1)\n\n')
.trim();
}

View File

@@ -255,6 +255,7 @@ export const EditorPanel = memo(
</div>
{Array.from({ length: terminalCount + 1 }, (_, index) => {
const isActive = activeTerminal === index;
if (index == 0) {
logger.info('Starting bolt terminal');
@@ -273,6 +274,7 @@ export const EditorPanel = memo(
/>
);
}
return (
<Terminal
key={index}

View File

@@ -111,7 +111,7 @@ export const FileTree = memo(
};
return (
<div className={classNames('text-sm', className ,'overflow-y-auto')}>
<div className={classNames('text-sm', className, 'overflow-y-auto')}>
{filteredFileList.map((fileOrFolder) => {
switch (fileOrFolder.kind) {
case 'file': {

View File

@@ -174,16 +174,21 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
'Please enter a name for your new GitHub repository:',
'bolt-generated-project',
);
if (!repoName) {
alert('Repository name is required. Push to GitHub cancelled.');
return;
}
const githubUsername = prompt('Please enter your GitHub username:');
if (!githubUsername) {
alert('GitHub username is required. Push to GitHub cancelled.');
return;
}
const githubToken = prompt('Please enter your GitHub personal access token:');
if (!githubToken) {
alert('GitHub token is required. Push to GitHub cancelled.');
return;

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { env } from 'node:process';
export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
@@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
case 'Deepseek':
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
case 'Mistral':
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
case "OpenAILike":
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
case 'OpenAILike':
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
case "xAI":
case 'xAI':
return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
case "Cohere":
case 'Cohere':
return env.COHERE_API_KEY;
case 'AzureOpenAI':
return env.AZURE_OPENAI_API_KEY;
default:
return "";
return '';
}
}
@@ -47,14 +51,16 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'LMStudio':
return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
case 'Ollama':
let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434";
if (env.RUNNING_IN_DOCKER === 'true') {
baseUrl = baseUrl.replace("localhost", "host.docker.internal");
}
return baseUrl;
let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
if (env.RUNNING_IN_DOCKER === 'true') {
baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
}
return baseUrl;
default:
return "";
return '';
}
}
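
The Ollama branch above rewrites localhost when running in Docker; in isolation the logic looks like this (hypothetical helper name, env variable names taken from the hunk):

function resolveOllamaBaseUrl(env: Record<string, string | undefined>): string {
  let baseUrl = env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

  // Inside a container, 'localhost' is the container itself, so target the host instead.
  if (env.RUNNING_IN_DOCKER === 'true') {
    baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
  }

  return baseUrl;
}

// resolveOllamaBaseUrl({ RUNNING_IN_DOCKER: 'true' }) -> 'http://host.docker.internal:11434'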

View File

@@ -1,13 +1,15 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createMistral } from '@ai-sdk/mistral';
import { createCohere } from '@ai-sdk/cohere'
import { createCohere } from '@ai-sdk/cohere';
export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
@@ -16,7 +18,7 @@ export function getAnthropicModel(apiKey: string, model: string) {
return anthropic(model);
}
export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
const openai = createOpenAI({
baseURL,
apiKey,
@@ -25,7 +27,7 @@ export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string)
return openai(model);
}
export function getCohereAIModel(apiKey:string, model: string){
export function getCohereAIModel(apiKey: string, model: string) {
const cohere = createCohere({
apiKey,
});
@@ -43,7 +45,7 @@ export function getOpenAIModel(apiKey: string, model: string) {
export function getMistralModel(apiKey: string, model: string) {
const mistral = createMistral({
apiKey
apiKey,
});
return mistral(model);
@@ -76,15 +78,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
}
export function getOllamaModel(baseURL: string, model: string) {
let Ollama = ollama(model, {
const Ollama = ollama(model, {
numCtx: 32768,
});
Ollama.config.baseURL = `${baseURL}/api`;
return Ollama;
}
export function getDeepseekModel(apiKey: string, model: string){
export function getDeepseekModel(apiKey: string, model: string) {
const openai = createOpenAI({
baseURL: 'https://api.deepseek.com/beta',
apiKey,
@@ -95,7 +98,7 @@ export function getDeepseekModel(apiKey: string, model: string){
export function getOpenRouterModel(apiKey: string, model: string) {
const openRouter = createOpenRouter({
apiKey
apiKey,
});
return openRouter.chat(model);
@@ -104,7 +107,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getLMStudioModel(baseURL: string, model: string) {
const lmstudio = createOpenAI({
baseUrl: `${baseURL}/v1`,
apiKey: "",
apiKey: '',
});
return lmstudio(model);
@@ -119,7 +122,6 @@ export function getXAIModel(apiKey: string, model: string) {
return openai(model);
}
export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
const apiKey = getAPIKey(env, provider, apiKeys);
const baseURL = getBaseURL(env, provider);
@@ -138,11 +140,11 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
case 'Google':
return getGoogleModel(apiKey, model);
case 'OpenAILike':
return getOpenAILikeModel(baseURL,apiKey, model);
return getOpenAILikeModel(baseURL, apiKey, model);
case 'Deepseek':
return getDeepseekModel(apiKey, model);
case 'Mistral':
return getMistralModel(apiKey, model);
return getMistralModel(apiKey, model);
case 'LMStudio':
return getLMStudioModel(baseURL, model);
case 'xAI':

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
@@ -34,19 +36,12 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
// Remove model and provider lines from content
const cleanedContent = message.content
.replace(MODEL_REGEX, '')
.replace(PROVIDER_REGEX, '')
.trim();
const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
return { model, provider, content: cleanedContent };
}
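
For context, this helper strips the model/provider markers that the UI prepends to each message. A sketch with hypothetical marker shapes; the real MODEL_REGEX and PROVIDER_REGEX live in ~/utils/constants and may differ:

// Illustrative only; the actual patterns are imported, not defined here.
const MODEL_REGEX = /\[Model: (.*?)\]\n\n/;
const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;

const raw = '[Model: gpt-4o-mini]\n\n[Provider: OpenAI]\n\nBuild a todo app';
const model = raw.match(MODEL_REGEX)?.[1]; // 'gpt-4o-mini'
const provider = raw.match(PROVIDER_REGEX)?.[1]; // 'OpenAI'
const content = raw.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim(); // 'Build a todo app'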
export function streamText(
messages: Messages,
env: Env,
options?: StreamingOptions,
apiKeys?: Record<string, string>
) {
export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER;
@@ -63,17 +58,12 @@ export function streamText(
return { ...message, content };
}
return message;
return message;
});
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
const dynamicMaxTokens =
modelDetails && modelDetails.maxTokenAllowed
? modelDetails.maxTokenAllowed
: MAX_TOKENS;
const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
return _streamText({
model: getModel(currentProvider, currentModel, env, apiKeys),

View File

@@ -161,11 +161,17 @@ async function getUrlIds(db: IDBDatabase): Promise<string[]> {
export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise<string> {
const chat = await getMessages(db, chatId);
if (!chat) throw new Error('Chat not found');
if (!chat) {
throw new Error('Chat not found');
}
// Find the index of the message to fork at
const messageIndex = chat.messages.findIndex(msg => msg.id === messageId);
if (messageIndex === -1) throw new Error('Message not found');
const messageIndex = chat.messages.findIndex((msg) => msg.id === messageId);
if (messageIndex === -1) {
throw new Error('Message not found');
}
// Get messages up to and including the selected message
const messages = chat.messages.slice(0, messageIndex + 1);
@@ -175,19 +181,14 @@ export async function forkChat(db: IDBDatabase, chatId: string, messageId: strin
const urlId = await getUrlId(db, newId);
// Create the forked chat
await setMessages(
db,
newId,
messages,
urlId,
chat.description ? `${chat.description} (fork)` : 'Forked chat'
);
await setMessages(db, newId, messages, urlId, chat.description ? `${chat.description} (fork)` : 'Forked chat');
return urlId;
}
export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> {
const chat = await getMessages(db, id);
if (!chat) {
throw new Error('Chat not found');
}
@@ -200,7 +201,7 @@ export async function duplicateChat(db: IDBDatabase, id: string): Promise<string
newId,
chat.messages,
newUrlId, // Use the new urlId
`${chat.description || 'Chat'} (copy)`
`${chat.description || 'Chat'} (copy)`,
);
return newUrlId; // Return the urlId instead of id for navigation

View File

@@ -99,7 +99,7 @@ export function useChatHistory() {
await setMessages(db, chatId.get() as string, messages, urlId, description.get());
},
duplicateCurrentChat: async (listItemId:string) => {
duplicateCurrentChat: async (listItemId: string) => {
if (!db || (!mixedId && !listItemId)) {
return;
}
@@ -111,7 +111,7 @@ export function useChatHistory() {
} catch (error) {
toast.error('Failed to duplicate chat');
}
}
},
};
}

View File

@@ -45,7 +45,6 @@ export class ActionRunner {
constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
this.#webcontainer = webcontainerPromise;
this.#shellTerminal = getShellTerminal;
}
addAction(data: ActionCallbackData) {
@@ -88,19 +87,20 @@
if (action.executed) {
return;
}
if (isStreaming && action.type !== 'file') {
return;
}
this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
return this.#currentExecutionPromise = this.#currentExecutionPromise
return (this.#currentExecutionPromise = this.#currentExecutionPromise
.then(() => {
return this.#executeAction(actionId, isStreaming);
})
.catch((error) => {
console.error('Action failed:', error);
});
}));
}
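
The reassignment above is a classic promise-chain queue: each action starts only after the previous one settles, and the catch keeps one failure from wedging the chain. A self-contained sketch of the pattern:

let queue: Promise<void> = Promise.resolve();

function enqueue(task: () => Promise<void>): Promise<void> {
  // Chain onto the tail; the catch ensures a failed task does not block later ones.
  queue = queue.then(() => task()).catch((error) => console.error('Action failed:', error));
  return queue;
}

enqueue(async () => console.log('first'));
enqueue(async () => console.log('second')); // runs only after 'first' settles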
async #executeAction(actionId: string, isStreaming: boolean = false) {
@@ -121,17 +121,24 @@
case 'start': {
// making the start app non blocking
this.#runStartAction(action).then(()=>this.#updateAction(actionId, { status: 'complete' }))
.catch(()=>this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }))
// adding a delay to avoid any race condition between 2 start actions
// i am up for a better approach
await new Promise(resolve=>setTimeout(resolve,2000))
return
this.#runStartAction(action)
.then(() => this.#updateAction(actionId, { status: 'complete' }))
.catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }));
/*
* adding a delay to avoid any race condition between 2 start actions
* i am up for a better approach
*/
await new Promise((resolve) => setTimeout(resolve, 2000));
return;
break;
}
}
this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' });
this.#updateAction(actionId, {
status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete',
});
} catch (error) {
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
logger.error(`[${action.type}]:Action failed\n\n`, error);
@@ -145,16 +152,19 @@
if (action.type !== 'shell') {
unreachable('Expected shell action');
}
const shell = this.#shellTerminal()
await shell.ready()
const shell = this.#shellTerminal();
await shell.ready();
if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
if (resp?.exitCode != 0) {
throw new Error("Failed To Execute Shell Command");
const resp = await shell.executeCommand(this.runnerId.get(), action.content);
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) {
throw new Error('Failed To Execute Shell Command');
}
}
@@ -162,21 +172,26 @@
if (action.type !== 'start') {
unreachable('Expected shell action');
}
if (!this.#shellTerminal) {
unreachable('Shell terminal not found');
}
const shell = this.#shellTerminal()
await shell.ready()
const shell = this.#shellTerminal();
await shell.ready();
if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found');
}
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
const resp = await shell.executeCommand(this.runnerId.get(), action.content);
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) {
throw new Error("Failed To Start Application");
throw new Error('Failed To Start Application');
}
return resp
return resp;
}
async #runFileAction(action: ActionState) {

View File

@@ -55,7 +55,7 @@ interface MessageState {
export class StreamingMessageParser {
#messages = new Map<string, MessageState>();
constructor(private _options: StreamingMessageParserOptions = {}) { }
constructor(private _options: StreamingMessageParserOptions = {}) {}
parse(messageId: string, input: string) {
let state = this.#messages.get(messageId);
@@ -120,20 +120,20 @@
i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
} else {
if ('type' in currentAction && currentAction.type === 'file') {
let content = input.slice(i);
const content = input.slice(i);
this._options.callbacks?.onActionStream?.({
artifactId: currentArtifact.id,
messageId,
actionId: String(state.actionId - 1),
action: {
...currentAction as FileAction,
...(currentAction as FileAction),
content,
filePath: currentAction.filePath,
},
});
}
break;
}
} else {
@@ -272,7 +272,7 @@
}
(actionAttributes as FileAction).filePath = filePath;
} else if (!(['shell', 'start'].includes(actionType))) {
} else if (!['shell', 'start'].includes(actionType)) {
logger.warn(`Unknown action type '${actionType}'`);
}
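
For context, onActionStream fires while a file action's closing tag has not arrived yet, handing consumers the partial file content. A consumer sketch; the callback payload shape is taken from the hunk above, the logging is illustrative:

const parser = new StreamingMessageParser({
  callbacks: {
    onActionStream: ({ action }) => {
      // Invoked repeatedly as chunks arrive, with the content streamed so far.
      console.log(`streaming ${action.filePath}: ${action.content.length} chars so far`);
    },
  },
});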

View File

@@ -7,7 +7,7 @@ import { coloredText } from '~/utils/terminal';
export class TerminalStore {
#webcontainer: Promise<WebContainer>;
#terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
#boltTerminal = newBoltShellProcess()
#boltTerminal = newBoltShellProcess();
showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true);
@@ -27,8 +27,8 @@
}
async attachBoltTerminal(terminal: ITerminal) {
try {
let wc = await this.#webcontainer
await this.#boltTerminal.init(wc, terminal)
const wc = await this.#webcontainer;
await this.#boltTerminal.init(wc, terminal);
} catch (error: any) {
terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message);
return;

View File

@@ -11,7 +11,7 @@ import { PreviewsStore } from './previews';
import { TerminalStore } from './terminal';
import JSZip from 'jszip';
import { saveAs } from 'file-saver';
import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
import * as nodePath from 'node:path';
import type { WebContainerProcess } from '@webcontainer/api';
import { extractRelativePath } from '~/utils/diff';
@@ -43,7 +43,7 @@
modifiedFiles = new Set<string>();
artifactIdList: string[] = [];
#boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined;
#globalExecutionQueue=Promise.resolve();
#globalExecutionQueue = Promise.resolve();
constructor() {
if (import.meta.hot) {
import.meta.hot.data.artifacts = this.artifacts;
@@ -54,7 +54,7 @@
}
addToExecutionQueue(callback: () => Promise<void>) {
this.#globalExecutionQueue=this.#globalExecutionQueue.then(()=>callback())
this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback());
}
get previews() {
@@ -96,7 +96,6 @@
this.#terminalStore.attachTerminal(terminal);
}
attachBoltTerminal(terminal: ITerminal) {
this.#terminalStore.attachBoltTerminal(terminal);
}
@@ -261,7 +260,8 @@
this.artifacts.setKey(messageId, { ...artifact, ...state });
}
addAction(data: ActionCallbackData) {
this._addAction(data)
this._addAction(data);
// this.addToExecutionQueue(()=>this._addAction(data))
}
async _addAction(data: ActionCallbackData) {
@@ -277,11 +277,10 @@
}
runAction(data: ActionCallbackData, isStreaming: boolean = false) {
if(isStreaming) {
this._runAction(data, isStreaming)
}
else{
this.addToExecutionQueue(()=>this._runAction(data, isStreaming))
if (isStreaming) {
this._runAction(data, isStreaming);
} else {
this.addToExecutionQueue(() => this._runAction(data, isStreaming));
}
}
async _runAction(data: ActionCallbackData, isStreaming: boolean = false) {
@@ -292,16 +291,21 @@
if (!artifact) {
unreachable('Artifact not found');
}
if (data.action.type === 'file') {
let wc = await webcontainer
const wc = await webcontainer;
const fullPath = nodePath.join(wc.workdir, data.action.filePath);
if (this.selectedFile.value !== fullPath) {
this.setSelectedFile(fullPath);
}
if (this.currentView.value !== 'code') {
this.currentView.set('code');
}
const doc = this.#editorStore.documents.get()[fullPath];
if (!doc) {
await artifact.runner.runAction(data, isStreaming);
}
@@ -382,7 +386,6 @@
}
async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
try {
// Get the GitHub auth token from environment variables
const githubToken = ghToken;
@@ -397,10 +400,11 @@
const octokit = new Octokit({ auth: githubToken });
// Check if the repository already exists before creating it
let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data']
let repo: RestEndpointMethodTypes['repos']['get']['response']['data'];
try {
let resp = await octokit.repos.get({ owner: owner, repo: repoName });
repo = resp.data
const resp = await octokit.repos.get({ owner, repo: repoName });
repo = resp.data;
} catch (error) {
if (error instanceof Error && 'status' in error && error.status === 404) {
// Repository doesn't exist, so create a new one
@@ -418,6 +422,7 @@
// Get all files
const files = this.files.get();
if (!files || Object.keys(files).length === 0) {
throw new Error('No files found to push');
}
@@ -434,7 +439,7 @@
});
return { path: extractRelativePath(filePath), sha: blob.sha };
}
})
}),
);
const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs

View File

@@ -1,5 +1,7 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
/*
* @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
@@ -14,14 +16,15 @@ function parseCookies(cookieHeader) {
const cookies = {};
// Split the cookie string by semicolons and spaces
const items = cookieHeader.split(";").map(cookie => cookie.trim());
const items = cookieHeader.split(';').map((cookie) => cookie.trim());
items.forEach(item => {
const [name, ...rest] = item.split("=");
items.forEach((item) => {
const [name, ...rest] = item.split('=');
if (name && rest) {
// Decode the name and value, and join value parts in case it contains '='
const decodedName = decodeURIComponent(name.trim());
const decodedValue = decodeURIComponent(rest.join("=").trim());
const decodedValue = decodeURIComponent(rest.join('=').trim());
cookies[decodedName] = decodedValue;
}
});
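
A usage sketch for the helper above, with a hypothetical header value:

const header = 'apiKeys=%7B%22OpenAI%22%3A%22sk-test%22%7D; theme=dark';
const cookies = parseCookies(header);
// cookies.apiKeys === '{"OpenAI":"sk-test"}' and cookies.theme === 'dark'
const apiKeys = JSON.parse(cookies.apiKeys || '{}');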
@@ -31,13 +34,13 @@
async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages } = await request.json<{
messages: Messages
messages: Messages;
}>();
const cookieHeader = request.headers.get("Cookie");
const cookieHeader = request.headers.get('Cookie');
// Parse the cookie's value (returns an object or null if no cookie exists)
const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || "{}");
const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');
const stream = new SwitchableStream();
@@ -83,7 +86,7 @@
if (error.message?.includes('API key')) {
throw new Response('Invalid or missing API key', {
status: 401,
statusText: 'Unauthorized'
statusText: 'Unauthorized',
});
}

View File

@@ -1,10 +1,10 @@
import type { ModelInfo } from '~/utils/types';
export type ProviderInfo = {
staticModels: ModelInfo[],
name: string,
getDynamicModels?: () => Promise<ModelInfo[]>,
getApiKeyLink?: string,
labelForGetApiKey?: string,
icon?:string,
staticModels: ModelInfo[];
name: string;
getDynamicModels?: () => Promise<ModelInfo[]>;
getApiKeyLink?: string;
labelForGetApiKey?: string;
icon?: string;
};

View File

@@ -12,26 +12,42 @@ const PROVIDER_LIST: ProviderInfo[] = [
{
name: 'Anthropic',
staticModels: [
{ name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
{
name: 'claude-3-5-sonnet-latest',
label: 'Claude 3.5 Sonnet (new)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{
name: 'claude-3-5-sonnet-20240620',
label: 'Claude 3.5 Sonnet (old)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{
name: 'claude-3-5-haiku-latest',
label: 'Claude 3.5 Haiku (new)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{ name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 }
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
],
getApiKeyLink: "https://console.anthropic.com/settings/keys",
getApiKeyLink: 'https://console.anthropic.com/settings/keys',
},
{
name: 'Ollama',
staticModels: [],
getDynamicModels: getOllamaModels,
getApiKeyLink: "https://ollama.com/download",
labelForGetApiKey: "Download Ollama",
icon: "i-ph:cloud-arrow-down",
}, {
getApiKeyLink: 'https://ollama.com/download',
labelForGetApiKey: 'Download Ollama',
icon: 'i-ph:cloud-arrow-down',
},
{
name: 'OpenAILike',
staticModels: [],
getDynamicModels: getOpenAILikeModels
getDynamicModels: getOpenAILikeModels,
},
{
name: 'Cohere',
@@ -47,7 +63,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
],
getApiKeyLink: 'https://dashboard.cohere.com/api-keys'
getApiKeyLink: 'https://dashboard.cohere.com/api-keys',
},
{
name: 'OpenRouter',
@@ -56,22 +72,52 @@ const PROVIDER_LIST: ProviderInfo[] = [
{
name: 'anthropic/claude-3.5-sonnet',
label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
provider: 'OpenRouter'
, maxTokenAllowed: 8000
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{
name: 'anthropic/claude-3-haiku',
label: 'Anthropic: Claude 3 Haiku (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'deepseek/deepseek-coder',
label: 'Deepseek-Coder V2 236B (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'google/gemini-flash-1.5',
label: 'Google Gemini Flash 1.5 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'google/gemini-pro-1.5',
label: 'Google Gemini Pro 1.5 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{ name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 }
{
name: 'mistralai/mistral-nemo',
label: 'OpenRouter Mistral Nemo (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'qwen/qwen-110b-chat',
label: 'OpenRouter Qwen 110b Chat (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 },
],
getDynamicModels: getOpenRouterModels,
getApiKeyLink: 'https://openrouter.ai/settings/keys',
}, {
},
{
name: 'Google',
staticModels: [
{ name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
@@ -79,29 +125,50 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 }
{ name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 },
],
getApiKeyLink: 'https://aistudio.google.com/app/apikey'
}, {
getApiKeyLink: 'https://aistudio.google.com/app/apikey',
},
{
name: 'Groq',
staticModels: [
{ name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
],
getApiKeyLink: 'https://console.groq.com/keys'
getApiKeyLink: 'https://console.groq.com/keys',
},
{
name: 'HuggingFace',
staticModels: [
{ name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
{ name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }
{
name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: '01-ai/Yi-1.5-34B-Chat',
label: 'Yi-1.5-34B-Chat (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'codellama/CodeLlama-34b-Instruct-hf',
label: 'CodeLlama-34b-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'NousResearch/Hermes-3-Llama-3.1-8B',
label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
],
getApiKeyLink: 'https://huggingface.co/settings/tokens'
getApiKeyLink: 'https://huggingface.co/settings/tokens',
},
{
@@ -110,23 +177,24 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
],
getApiKeyLink: "https://platform.openai.com/api-keys",
}, {
getApiKeyLink: 'https://platform.openai.com/api-keys',
},
{
name: 'xAI',
staticModels: [
{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }
],
getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
}, {
staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }],
getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key',
},
{
name: 'Deepseek',
staticModels: [
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 }
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
],
getApiKeyLink: 'https://platform.deepseek.com/api_keys'
}, {
getApiKeyLink: 'https://platform.deepseek.com/api_keys',
},
{
name: 'Mistral',
staticModels: [
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
@@ -137,27 +205,29 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 }
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
],
getApiKeyLink: 'https://console.mistral.ai/api-keys/'
}, {
getApiKeyLink: 'https://console.mistral.ai/api-keys/',
},
{
name: 'LMStudio',
staticModels: [],
getDynamicModels: getLMStudioModels,
getApiKeyLink: 'https://lmstudio.ai/',
labelForGetApiKey: 'Get LMStudio',
icon: "i-ph:cloud-arrow-down",
}
icon: 'i-ph:cloud-arrow-down',
},
];
export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
const staticModels: ModelInfo[] = PROVIDER_LIST.map(p => p.staticModels).flat();
const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat();
export let MODEL_LIST: ModelInfo[] = [...staticModels];
const getOllamaBaseUrl = () => {
const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
// Check if we're in the browser
if (typeof window !== 'undefined') {
// Frontend always uses localhost
@@ -167,22 +237,20 @@ const getOllamaBaseUrl = () => {
// Backend: Check if we're running in Docker
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
return isDocker
? defaultBaseUrl.replace('localhost', 'host.docker.internal')
: defaultBaseUrl;
return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
};
async function getOllamaModels(): Promise<ModelInfo[]> {
try {
const base_url = getOllamaBaseUrl();
const response = await fetch(`${base_url}/api/tags`);
const data = await response.json() as OllamaApiResponse;
const data = (await response.json()) as OllamaApiResponse;
return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
provider: 'Ollama',
maxTokenAllowed:8000,
maxTokenAllowed: 8000,
}));
} catch (e) {
return [];
@@ -192,20 +260,23 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
async function getOpenAILikeModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
if (!base_url) {
return [];
}
const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
const response = await fetch(`${base_url}/models`, {
headers: {
Authorization: `Bearer ${api_key}`
}
Authorization: `Bearer ${api_key}`,
},
});
const res = await response.json() as any;
const res = (await response.json()) as any;
return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'OpenAILike'
provider: 'OpenAILike',
}));
} catch (e) {
return [];
@@ -220,51 +291,66 @@ type OpenRouterModelsResponse = {
pricing: {
prompt: number;
completion: number;
}
}[]
};
}[];
};
async function getOpenRouterModels(): Promise<ModelInfo[]> {
const data: OpenRouterModelsResponse = await (await fetch('https://openrouter.ai/api/v1/models', {
headers: {
'Content-Type': 'application/json'
}
})).json();
const data: OpenRouterModelsResponse = await (
await fetch('https://openrouter.ai/api/v1/models', {
headers: {
'Content-Type': 'application/json',
},
})
).json();
return data.data.sort((a, b) => a.name.localeCompare(b.name)).map(m => ({
name: m.id,
label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(
m.context_length / 1000)}k`,
provider: 'OpenRouter',
maxTokenAllowed:8000,
}));
return data.data
.sort((a, b) => a.name.localeCompare(b.name))
.map((m) => ({
name: m.id,
label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
2,
)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
provider: 'OpenRouter',
maxTokenAllowed: 8000,
}));
}
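
OpenRouter reports pricing per token, so multiplying by 1_000_000 yields the dollars-per-million figure shown in the label. A worked example with made-up prices:

const pricing = { prompt: 0.000003, completion: 0.000015 }; // $/token
const inPerM = (pricing.prompt * 1_000_000).toFixed(2); // '3.00'
const outPerM = (pricing.completion * 1_000_000).toFixed(2); // '15.00'
// label fragment: 'in:$3.00 out:$15.00'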
async function getLMStudioModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
const response = await fetch(`${base_url}/v1/models`);
const data = await response.json() as any;
const data = (await response.json()) as any;
return data.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'LMStudio'
provider: 'LMStudio',
}));
} catch (e) {
return [];
}
}
async function initializeModelList(): Promise<ModelInfo[]> {
MODEL_LIST = [...(await Promise.all(
PROVIDER_LIST
.filter((p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels)
.map(p => p.getDynamicModels())))
.flat(), ...staticModels];
MODEL_LIST = [
...(
await Promise.all(
PROVIDER_LIST.filter(
(p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
).map((p) => p.getDynamicModels()),
)
).flat(),
...staticModels,
];
return MODEL_LIST;
}
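
A typical call site (a sketch, not from this commit): run it once at startup so MODEL_LIST includes dynamic models before the UI reads it:

initializeModelList()
  .then((models) => console.log(`Model list ready: ${models.length} entries`))
  .catch((error) => console.error('Failed to initialize model list:', error));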
export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, getOpenRouterModels, PROVIDER_LIST };
export {
getOllamaModels,
getOpenAILikeModels,
getLMStudioModels,
initializeModelList,
getOpenRouterModels,
PROVIDER_LIST,
};

View File

@@ -11,7 +11,7 @@ interface Logger {
setLevel: (level: DebugLevel) => void;
}
let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info';
let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';
const isWorker = 'HTMLRewriter' in globalThis;
const supportsColor = !isWorker;
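
The added parentheses do not change behavior: nullish coalescing binds tighter than the conditional operator, so a ?? b ? x : y already parses as (a ?? b) ? x : y, and the lint fix only makes that explicit. A quick check:

const level: string | undefined = undefined;
const dev = true;
const implicit = level ?? dev ? 'debug' : 'info'; // 'debug'
const explicit = (level ?? dev) ? 'debug' : 'info'; // 'debug'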

View File

@@ -52,66 +52,70 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
return process;
}
export class BoltShell {
#initialized: (() => void) | undefined
#readyPromise: Promise<void>
#webcontainer: WebContainer | undefined
#terminal: ITerminal | undefined
#process: WebContainerProcess | undefined
executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise<any> } | undefined>()
#outputStream: ReadableStreamDefaultReader<string> | undefined
#shellInputStream: WritableStreamDefaultWriter<string> | undefined
#initialized: (() => void) | undefined;
#readyPromise: Promise<void>;
#webcontainer: WebContainer | undefined;
#terminal: ITerminal | undefined;
#process: WebContainerProcess | undefined;
executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();
#outputStream: ReadableStreamDefaultReader<string> | undefined;
#shellInputStream: WritableStreamDefaultWriter<string> | undefined;
constructor() {
this.#readyPromise = new Promise((resolve) => {
this.#initialized = resolve
})
this.#initialized = resolve;
});
}
ready() {
return this.#readyPromise;
}
async init(webcontainer: WebContainer, terminal: ITerminal) {
this.#webcontainer = webcontainer
this.#terminal = terminal
let callback = (data: string) => {
console.log(data)
}
let { process, output } = await this.newBoltShellProcess(webcontainer, terminal)
this.#process = process
this.#outputStream = output.getReader()
await this.waitTillOscCode('interactive')
this.#initialized?.()
this.#webcontainer = webcontainer;
this.#terminal = terminal;
const callback = (data: string) => {
console.log(data);
};
const { process, output } = await this.newBoltShellProcess(webcontainer, terminal);
this.#process = process;
this.#outputStream = output.getReader();
await this.waitTillOscCode('interactive');
this.#initialized?.();
}
get terminal() {
return this.#terminal
return this.#terminal;
}
get process() {
return this.#process
return this.#process;
}
async executeCommand(sessionId: string, command: string) {
if (!this.process || !this.terminal) {
return
return;
}
let state = this.executionState.get()
//interrupt the current execution
// this.#shellInputStream?.write('\x03');
const state = this.executionState.get();
/*
* interrupt the current execution
* this.#shellInputStream?.write('\x03');
*/
this.terminal.input('\x03');
if (state && state.executionPrms) {
await state.executionPrms
await state.executionPrms;
}
//start a new execution
this.terminal.input(command.trim() + '\n');
//wait for the execution to finish
let executionPrms = this.getCurrentExecutionResult()
this.executionState.set({ sessionId, active: true, executionPrms })
const executionPrms = this.getCurrentExecutionResult();
this.executionState.set({ sessionId, active: true, executionPrms });
let resp = await executionPrms
this.executionState.set({ sessionId, active: false })
return resp
const resp = await executionPrms;
this.executionState.set({ sessionId, active: false });
return resp;
}
async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
const args: string[] = [];
@@ -126,6 +130,7 @@ export class BoltShell {
const input = process.input.getWriter();
this.#shellInputStream = input;
const [internalOutput, terminalOutput] = process.output.tee();
const jshReady = withResolvers<void>();
@@ -163,30 +168,41 @@
return { process, output: internalOutput };
}
async getCurrentExecutionResult() {
let { output, exitCode } = await this.waitTillOscCode('exit')
const { output, exitCode } = await this.waitTillOscCode('exit');
return { output, exitCode };
}
async waitTillOscCode(waitCode: string) {
let fullOutput = '';
let exitCode: number = 0;
if (!this.#outputStream) return { output: fullOutput, exitCode };
let tappedStream = this.#outputStream
if (!this.#outputStream) {
return { output: fullOutput, exitCode };
}
const tappedStream = this.#outputStream;
while (true) {
const { value, done } = await tappedStream.read();
if (done) break;
if (done) {
break;
}
const text = value || '';
fullOutput += text;
// Check if command completion signal with exit code
const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
if (osc === 'exit') {
exitCode = parseInt(code, 10);
}
if (osc === waitCode) {
break;
}
}
return { output: fullOutput, exitCode };
}
}
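
For context, waitTillOscCode scans the shell output for OSC 654 escape sequences; judging from the regex, the shell emits payloads shaped like 'exit=<pid>:<exitCode>'. A standalone sketch of the match, with a made-up chunk:

const chunk = 'npm install\r\n\x1b]654;exit=42:1\x07';
const [, osc, , pid, code] = chunk.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
// osc === 'exit', pid === '42', code === '1'; parseInt(code, 10) yields the exit status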

View File

@@ -1,4 +1,3 @@
interface OllamaModelDetails {
parent_model: string;
format: string;
@@ -29,10 +28,10 @@ export interface ModelInfo {
}
export interface ProviderInfo {
staticModels: ModelInfo[],
name: string,
getDynamicModels?: () => Promise<ModelInfo[]>,
getApiKeyLink?: string,
labelForGetApiKey?: string,
icon?:string,
};
staticModels: ModelInfo[];
name: string;
getDynamicModels?: () => Promise<ModelInfo[]>;
getApiKeyLink?: string;
labelForGetApiKey?: string;
icon?: string;
}