Merge pull request #367 from mrsimpson/linting

Linting
This commit is contained in:
Eduard Ruzga 2024-11-22 22:32:03 +02:00 committed by GitHub
commit 7fc8e40c03
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
25 changed files with 521 additions and 314 deletions

View File

@ -10,6 +10,7 @@ interface APIKeyManagerProps {
labelForGetApiKey?: string; labelForGetApiKey?: string;
} }
// eslint-disable-next-line @typescript-eslint/naming-convention
export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => { export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
const [isEditing, setIsEditing] = useState(false); const [isEditing, setIsEditing] = useState(false);
const [tempKey, setTempKey] = useState(apiKey); const [tempKey, setTempKey] = useState(apiKey);

View File

@ -1,5 +1,7 @@
// @ts-nocheck /*
// Preventing TS checks with files presented in the video for a better presentation. * @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import type { Message } from 'ai'; import type { Message } from 'ai';
import React, { type RefCallback, useEffect } from 'react'; import React, { type RefCallback, useEffect } from 'react';
import { ClientOnly } from 'remix-utils/client-only'; import { ClientOnly } from 'remix-utils/client-only';
@ -7,7 +9,7 @@ import { Menu } from '~/components/sidebar/Menu.client';
import { IconButton } from '~/components/ui/IconButton'; import { IconButton } from '~/components/ui/IconButton';
import { Workbench } from '~/components/workbench/Workbench.client'; import { Workbench } from '~/components/workbench/Workbench.client';
import { classNames } from '~/utils/classNames'; import { classNames } from '~/utils/classNames';
import { MODEL_LIST, DEFAULT_PROVIDER, PROVIDER_LIST, initializeModelList } from '~/utils/constants'; import { MODEL_LIST, PROVIDER_LIST, initializeModelList } from '~/utils/constants';
import { Messages } from './Messages.client'; import { Messages } from './Messages.client';
import { SendButton } from './SendButton.client'; import { SendButton } from './SendButton.client';
import { useState } from 'react'; import { useState } from 'react';
@ -25,21 +27,25 @@ const EXAMPLE_PROMPTS = [
{ text: 'How do I center a div?' }, { text: 'How do I center a div?' },
]; ];
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const providerList = PROVIDER_LIST; const providerList = PROVIDER_LIST;
// @ts-ignore TODO: Introduce proper types
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => { const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList, apiKeys }) => {
return ( return (
<div className="mb-2 flex gap-2 flex-col sm:flex-row"> <div className="mb-2 flex gap-2 flex-col sm:flex-row">
<select <select
value={provider?.name} value={provider?.name}
onChange={(e) => { onChange={(e) => {
setProvider(providerList.find((p) => p.name === e.target.value)); setProvider(providerList.find((p: ProviderInfo) => p.name === e.target.value));
const firstModel = [...modelList].find((m) => m.provider == e.target.value); const firstModel = [...modelList].find((m) => m.provider == e.target.value);
setModel(firstModel ? firstModel.name : ''); setModel(firstModel ? firstModel.name : '');
}} }}
className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all" className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
> >
{providerList.map((provider) => ( {providerList.map((provider: ProviderInfo) => (
<option key={provider.name} value={provider.name}> <option key={provider.name} value={provider.name}>
{provider.name} {provider.name}
</option> </option>
@ -118,14 +124,17 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
// Load API keys from cookies on component mount // Load API keys from cookies on component mount
try { try {
const storedApiKeys = Cookies.get('apiKeys'); const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) { if (storedApiKeys) {
const parsedKeys = JSON.parse(storedApiKeys); const parsedKeys = JSON.parse(storedApiKeys);
if (typeof parsedKeys === 'object' && parsedKeys !== null) { if (typeof parsedKeys === 'object' && parsedKeys !== null) {
setApiKeys(parsedKeys); setApiKeys(parsedKeys);
} }
} }
} catch (error) { } catch (error) {
console.error('Error loading API keys from cookies:', error); console.error('Error loading API keys from cookies:', error);
// Clear invalid cookie data // Clear invalid cookie data
Cookies.remove('apiKeys'); Cookies.remove('apiKeys');
} }
@ -139,6 +148,7 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
try { try {
const updatedApiKeys = { ...apiKeys, [provider]: key }; const updatedApiKeys = { ...apiKeys, [provider]: key };
setApiKeys(updatedApiKeys); setApiKeys(updatedApiKeys);
// Save updated API keys to cookies with 30 day expiry and secure settings // Save updated API keys to cookies with 30 day expiry and secure settings
Cookies.set('apiKeys', JSON.stringify(updatedApiKeys), { Cookies.set('apiKeys', JSON.stringify(updatedApiKeys), {
expires: 30, // 30 days expires: 30, // 30 days

View File

@ -1,5 +1,7 @@
// @ts-nocheck /*
// Preventing TS checks with files presented in the video for a better presentation. * @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { useStore } from '@nanostores/react'; import { useStore } from '@nanostores/react';
import type { Message } from 'ai'; import type { Message } from 'ai';
import { useChat } from 'ai/react'; import { useChat } from 'ai/react';
@ -81,7 +83,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
}); });
const [provider, setProvider] = useState(() => { const [provider, setProvider] = useState(() => {
const savedProvider = Cookies.get('selectedProvider'); const savedProvider = Cookies.get('selectedProvider');
return PROVIDER_LIST.find(p => p.name === savedProvider) || DEFAULT_PROVIDER; return PROVIDER_LIST.find((p) => p.name === savedProvider) || DEFAULT_PROVIDER;
}); });
const { showChat } = useStore(chatStore); const { showChat } = useStore(chatStore);
@ -93,11 +95,13 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({ const { messages, isLoading, input, handleInputChange, setInput, stop, append } = useChat({
api: '/api/chat', api: '/api/chat',
body: { body: {
apiKeys apiKeys,
}, },
onError: (error) => { onError: (error) => {
logger.error('Request failed\n\n', error); logger.error('Request failed\n\n', error);
toast.error('There was an error processing your request: ' + (error.message ? error.message : "No details were returned")); toast.error(
'There was an error processing your request: ' + (error.message ? error.message : 'No details were returned'),
);
}, },
onFinish: () => { onFinish: () => {
logger.debug('Finished streaming'); logger.debug('Finished streaming');
@ -218,6 +222,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
useEffect(() => { useEffect(() => {
const storedApiKeys = Cookies.get('apiKeys'); const storedApiKeys = Cookies.get('apiKeys');
if (storedApiKeys) { if (storedApiKeys) {
setApiKeys(JSON.parse(storedApiKeys)); setApiKeys(JSON.parse(storedApiKeys));
} }
@ -271,7 +276,7 @@ export const ChatImpl = memo(({ initialMessages, storeMessageHistory }: ChatProp
}, },
model, model,
provider, provider,
apiKeys apiKeys,
); );
}} }}
/> />

View File

@ -1,5 +1,7 @@
// @ts-nocheck /*
// Preventing TS checks with files presented in the video for a better presentation. * @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { modificationsRegex } from '~/utils/diff'; import { modificationsRegex } from '~/utils/diff';
import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants'; import { MODEL_REGEX, PROVIDER_REGEX } from '~/utils/constants';
import { Markdown } from './Markdown'; import { Markdown } from './Markdown';
@ -17,5 +19,9 @@ export function UserMessage({ content }: UserMessageProps) {
} }
function sanitizeUserMessage(content: string) { function sanitizeUserMessage(content: string) {
return content.replace(modificationsRegex, '').replace(MODEL_REGEX, 'Using: $1').replace(PROVIDER_REGEX, ' ($1)\n\n').trim(); return content
.replace(modificationsRegex, '')
.replace(MODEL_REGEX, 'Using: $1')
.replace(PROVIDER_REGEX, ' ($1)\n\n')
.trim();
} }

View File

@ -2,7 +2,6 @@ import { motion, type Variants } from 'framer-motion';
import { useCallback, useEffect, useRef, useState } from 'react'; import { useCallback, useEffect, useRef, useState } from 'react';
import { toast } from 'react-toastify'; import { toast } from 'react-toastify';
import { Dialog, DialogButton, DialogDescription, DialogRoot, DialogTitle } from '~/components/ui/Dialog'; import { Dialog, DialogButton, DialogDescription, DialogRoot, DialogTitle } from '~/components/ui/Dialog';
import { IconButton } from '~/components/ui/IconButton';
import { ThemeSwitch } from '~/components/ui/ThemeSwitch'; import { ThemeSwitch } from '~/components/ui/ThemeSwitch';
import { db, deleteById, getAll, chatId, type ChatHistoryItem, useChatHistory } from '~/lib/persistence'; import { db, deleteById, getAll, chatId, type ChatHistoryItem, useChatHistory } from '~/lib/persistence';
import { cubicEasingFn } from '~/utils/easings'; import { cubicEasingFn } from '~/utils/easings';

View File

@ -255,6 +255,7 @@ export const EditorPanel = memo(
</div> </div>
{Array.from({ length: terminalCount + 1 }, (_, index) => { {Array.from({ length: terminalCount + 1 }, (_, index) => {
const isActive = activeTerminal === index; const isActive = activeTerminal === index;
if (index == 0) { if (index == 0) {
logger.info('Starting bolt terminal'); logger.info('Starting bolt terminal');
@ -273,6 +274,7 @@ export const EditorPanel = memo(
/> />
); );
} }
return ( return (
<Terminal <Terminal
key={index} key={index}

View File

@ -111,7 +111,7 @@ export const FileTree = memo(
}; };
return ( return (
<div className={classNames('text-sm', className ,'overflow-y-auto')}> <div className={classNames('text-sm', className, 'overflow-y-auto')}>
{filteredFileList.map((fileOrFolder) => { {filteredFileList.map((fileOrFolder) => {
switch (fileOrFolder.kind) { switch (fileOrFolder.kind) {
case 'file': { case 'file': {

View File

@ -174,16 +174,21 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
'Please enter a name for your new GitHub repository:', 'Please enter a name for your new GitHub repository:',
'bolt-generated-project', 'bolt-generated-project',
); );
if (!repoName) { if (!repoName) {
alert('Repository name is required. Push to GitHub cancelled.'); alert('Repository name is required. Push to GitHub cancelled.');
return; return;
} }
const githubUsername = prompt('Please enter your GitHub username:'); const githubUsername = prompt('Please enter your GitHub username:');
if (!githubUsername) { if (!githubUsername) {
alert('GitHub username is required. Push to GitHub cancelled.'); alert('GitHub username is required. Push to GitHub cancelled.');
return; return;
} }
const githubToken = prompt('Please enter your GitHub personal access token:'); const githubToken = prompt('Please enter your GitHub personal access token:');
if (!githubToken) { if (!githubToken) {
alert('GitHub token is required. Push to GitHub cancelled.'); alert('GitHub token is required. Push to GitHub cancelled.');
return; return;

View File

@ -1,5 +1,7 @@
// @ts-nocheck /*
// Preventing TS checks with files presented in the video for a better presentation. * @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { env } from 'node:process'; import { env } from 'node:process';
export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) { export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Record<string, string>) {
@ -28,17 +30,19 @@ export function getAPIKey(cloudflareEnv: Env, provider: string, userApiKeys?: Re
case 'OpenRouter': case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY; return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
case 'Deepseek': case 'Deepseek':
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY;
case 'Mistral': case 'Mistral':
return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY; return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
case "OpenAILike": case 'OpenAILike':
return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY; return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
case "xAI": case 'xAI':
return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY; return env.XAI_API_KEY || cloudflareEnv.XAI_API_KEY;
case "Cohere": case 'Cohere':
return env.COHERE_API_KEY; return env.COHERE_API_KEY;
case 'AzureOpenAI':
return env.AZURE_OPENAI_API_KEY;
default: default:
return ""; return '';
} }
} }
@ -47,14 +51,17 @@ export function getBaseURL(cloudflareEnv: Env, provider: string) {
case 'OpenAILike': case 'OpenAILike':
return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL; return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
case 'LMStudio': case 'LMStudio':
return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || "http://localhost:1234"; return env.LMSTUDIO_API_BASE_URL || cloudflareEnv.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
case 'Ollama': case 'Ollama': {
let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || "http://localhost:11434"; let baseUrl = env.OLLAMA_API_BASE_URL || cloudflareEnv.OLLAMA_API_BASE_URL || 'http://localhost:11434';
if (env.RUNNING_IN_DOCKER === 'true') {
baseUrl = baseUrl.replace("localhost", "host.docker.internal"); if (env.RUNNING_IN_DOCKER === 'true') {
} baseUrl = baseUrl.replace('localhost', 'host.docker.internal');
return baseUrl; }
return baseUrl;
}
default: default:
return ""; return '';
} }
} }

View File

@ -1,26 +1,29 @@
// @ts-nocheck /*
// Preventing TS checks with files presented in the video for a better presentation. * @ts-nocheck
* Preventing TS checks with files presented in the video for a better presentation.
*/
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key'; import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic'; import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai'; import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google'; import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider'; import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider"; import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createMistral } from '@ai-sdk/mistral'; import { createMistral } from '@ai-sdk/mistral';
import { createCohere } from '@ai-sdk/cohere' import { createCohere } from '@ai-sdk/cohere';
import type { LanguageModelV1 } from 'ai';
export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
parseInt(process.env.DEFAULT_NUM_CTX, 10) :
32768;
export function getAnthropicModel(apiKey: string, model: string) { type OptionalApiKey = string | undefined;
export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
const anthropic = createAnthropic({ const anthropic = createAnthropic({
apiKey, apiKey,
}); });
return anthropic(model); return anthropic(model);
} }
export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) { export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
baseURL, baseURL,
apiKey, apiKey,
@ -29,7 +32,7 @@ export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string)
return openai(model); return openai(model);
} }
export function getCohereAIModel(apiKey:string, model: string){ export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
const cohere = createCohere({ const cohere = createCohere({
apiKey, apiKey,
}); });
@ -37,7 +40,7 @@ export function getCohereAIModel(apiKey:string, model: string){
return cohere(model); return cohere(model);
} }
export function getOpenAIModel(apiKey: string, model: string) { export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
apiKey, apiKey,
}); });
@ -45,15 +48,15 @@ export function getOpenAIModel(apiKey: string, model: string) {
return openai(model); return openai(model);
} }
export function getMistralModel(apiKey: string, model: string) { export function getMistralModel(apiKey: OptionalApiKey, model: string) {
const mistral = createMistral({ const mistral = createMistral({
apiKey apiKey,
}); });
return mistral(model); return mistral(model);
} }
export function getGoogleModel(apiKey: string, model: string) { export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
const google = createGoogleGenerativeAI({ const google = createGoogleGenerativeAI({
apiKey, apiKey,
}); });
@ -61,7 +64,7 @@ export function getGoogleModel(apiKey: string, model: string) {
return google(model); return google(model);
} }
export function getGroqModel(apiKey: string, model: string) { export function getGroqModel(apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
baseURL: 'https://api.groq.com/openai/v1', baseURL: 'https://api.groq.com/openai/v1',
apiKey, apiKey,
@ -70,7 +73,7 @@ export function getGroqModel(apiKey: string, model: string) {
return openai(model); return openai(model);
} }
export function getHuggingFaceModel(apiKey: string, model: string) { export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
baseURL: 'https://api-inference.huggingface.co/v1/', baseURL: 'https://api-inference.huggingface.co/v1/',
apiKey, apiKey,
@ -80,15 +83,16 @@ export function getHuggingFaceModel(apiKey: string, model: string) {
} }
export function getOllamaModel(baseURL: string, model: string) { export function getOllamaModel(baseURL: string, model: string) {
let Ollama = ollama(model, { const ollamaInstance = ollama(model, {
numCtx: DEFAULT_NUM_CTX, numCtx: DEFAULT_NUM_CTX,
}); }) as LanguageModelV1 & { config: any };
Ollama.config.baseURL = `${baseURL}/api`; ollamaInstance.config.baseURL = `${baseURL}/api`;
return Ollama;
return ollamaInstance;
} }
export function getDeepseekModel(apiKey: string, model: string){ export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
baseURL: 'https://api.deepseek.com/beta', baseURL: 'https://api.deepseek.com/beta',
apiKey, apiKey,
@ -97,9 +101,9 @@ export function getDeepseekModel(apiKey: string, model: string){
return openai(model); return openai(model);
} }
export function getOpenRouterModel(apiKey: string, model: string) { export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
const openRouter = createOpenRouter({ const openRouter = createOpenRouter({
apiKey apiKey,
}); });
return openRouter.chat(model); return openRouter.chat(model);
@ -108,13 +112,13 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getLMStudioModel(baseURL: string, model: string) { export function getLMStudioModel(baseURL: string, model: string) {
const lmstudio = createOpenAI({ const lmstudio = createOpenAI({
baseUrl: `${baseURL}/v1`, baseUrl: `${baseURL}/v1`,
apiKey: "", apiKey: '',
}); });
return lmstudio(model); return lmstudio(model);
} }
export function getXAIModel(apiKey: string, model: string) { export function getXAIModel(apiKey: OptionalApiKey, model: string) {
const openai = createOpenAI({ const openai = createOpenAI({
baseURL: 'https://api.x.ai/v1', baseURL: 'https://api.x.ai/v1',
apiKey, apiKey,
@ -123,7 +127,6 @@ export function getXAIModel(apiKey: string, model: string) {
return openai(model); return openai(model);
} }
export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) { export function getModel(provider: string, model: string, env: Env, apiKeys?: Record<string, string>) {
const apiKey = getAPIKey(env, provider, apiKeys); const apiKey = getAPIKey(env, provider, apiKeys);
const baseURL = getBaseURL(env, provider); const baseURL = getBaseURL(env, provider);
@ -142,11 +145,11 @@ export function getModel(provider: string, model: string, env: Env, apiKeys?: Re
case 'Google': case 'Google':
return getGoogleModel(apiKey, model); return getGoogleModel(apiKey, model);
case 'OpenAILike': case 'OpenAILike':
return getOpenAILikeModel(baseURL,apiKey, model); return getOpenAILikeModel(baseURL, apiKey, model);
case 'Deepseek': case 'Deepseek':
return getDeepseekModel(apiKey, model); return getDeepseekModel(apiKey, model);
case 'Mistral': case 'Mistral':
return getMistralModel(apiKey, model); return getMistralModel(apiKey, model);
case 'LMStudio': case 'LMStudio':
return getLMStudioModel(baseURL, model); return getLMStudioModel(baseURL, model);
case 'xAI': case 'xAI':

View File

@ -1,5 +1,6 @@
// @ts-nocheck // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// Preventing TS checks with files presented in the video for a better presentation. // @ts-nocheck TODO: Provider proper types
import { streamText as _streamText, convertToCoreMessages } from 'ai'; import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getModel } from '~/lib/.server/llm/model'; import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants'; import { MAX_TOKENS } from './constants';
@ -34,19 +35,12 @@ function extractPropertiesFromMessage(message: Message): { model: string; provid
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER; const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER;
// Remove model and provider lines from content // Remove model and provider lines from content
const cleanedContent = message.content const cleanedContent = message.content.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '').trim();
.replace(MODEL_REGEX, '')
.replace(PROVIDER_REGEX, '')
.trim();
return { model, provider, content: cleanedContent }; return { model, provider, content: cleanedContent };
} }
export function streamText(
messages: Messages, export function streamText(messages: Messages, env: Env, options?: StreamingOptions, apiKeys?: Record<string, string>) {
env: Env,
options?: StreamingOptions,
apiKeys?: Record<string, string>
) {
let currentModel = DEFAULT_MODEL; let currentModel = DEFAULT_MODEL;
let currentProvider = DEFAULT_PROVIDER; let currentProvider = DEFAULT_PROVIDER;
@ -63,17 +57,12 @@ export function streamText(
return { ...message, content }; return { ...message, content };
} }
return message; return message;
}); });
const modelDetails = MODEL_LIST.find((m) => m.name === currentModel); const modelDetails = MODEL_LIST.find((m) => m.name === currentModel);
const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
const dynamicMaxTokens =
modelDetails && modelDetails.maxTokenAllowed
? modelDetails.maxTokenAllowed
: MAX_TOKENS;
return _streamText({ return _streamText({
model: getModel(currentProvider, currentModel, env, apiKeys), model: getModel(currentProvider, currentModel, env, apiKeys),

View File

@ -161,11 +161,17 @@ async function getUrlIds(db: IDBDatabase): Promise<string[]> {
export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise<string> { export async function forkChat(db: IDBDatabase, chatId: string, messageId: string): Promise<string> {
const chat = await getMessages(db, chatId); const chat = await getMessages(db, chatId);
if (!chat) throw new Error('Chat not found');
if (!chat) {
throw new Error('Chat not found');
}
// Find the index of the message to fork at // Find the index of the message to fork at
const messageIndex = chat.messages.findIndex(msg => msg.id === messageId); const messageIndex = chat.messages.findIndex((msg) => msg.id === messageId);
if (messageIndex === -1) throw new Error('Message not found');
if (messageIndex === -1) {
throw new Error('Message not found');
}
// Get messages up to and including the selected message // Get messages up to and including the selected message
const messages = chat.messages.slice(0, messageIndex + 1); const messages = chat.messages.slice(0, messageIndex + 1);
@ -175,19 +181,14 @@ export async function forkChat(db: IDBDatabase, chatId: string, messageId: strin
const urlId = await getUrlId(db, newId); const urlId = await getUrlId(db, newId);
// Create the forked chat // Create the forked chat
await setMessages( await setMessages(db, newId, messages, urlId, chat.description ? `${chat.description} (fork)` : 'Forked chat');
db,
newId,
messages,
urlId,
chat.description ? `${chat.description} (fork)` : 'Forked chat'
);
return urlId; return urlId;
} }
export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> { export async function duplicateChat(db: IDBDatabase, id: string): Promise<string> {
const chat = await getMessages(db, id); const chat = await getMessages(db, id);
if (!chat) { if (!chat) {
throw new Error('Chat not found'); throw new Error('Chat not found');
} }
@ -200,7 +201,7 @@ export async function duplicateChat(db: IDBDatabase, id: string): Promise<string
newId, newId,
chat.messages, chat.messages,
newUrlId, // Use the new urlId newUrlId, // Use the new urlId
`${chat.description || 'Chat'} (copy)` `${chat.description || 'Chat'} (copy)`,
); );
return newUrlId; // Return the urlId instead of id for navigation return newUrlId; // Return the urlId instead of id for navigation

View File

@ -99,7 +99,7 @@ export function useChatHistory() {
await setMessages(db, chatId.get() as string, messages, urlId, description.get()); await setMessages(db, chatId.get() as string, messages, urlId, description.get());
}, },
duplicateCurrentChat: async (listItemId:string) => { duplicateCurrentChat: async (listItemId: string) => {
if (!db || (!mixedId && !listItemId)) { if (!db || (!mixedId && !listItemId)) {
return; return;
} }
@ -110,8 +110,9 @@ export function useChatHistory() {
toast.success('Chat duplicated successfully'); toast.success('Chat duplicated successfully');
} catch (error) { } catch (error) {
toast.error('Failed to duplicate chat'); toast.error('Failed to duplicate chat');
console.log(error);
} }
} },
}; };
} }

View File

@ -1,11 +1,10 @@
import { WebContainer, type WebContainerProcess } from '@webcontainer/api'; import { WebContainer } from '@webcontainer/api';
import { atom, map, type MapStore } from 'nanostores'; import { atom, map, type MapStore } from 'nanostores';
import * as nodePath from 'node:path'; import * as nodePath from 'node:path';
import type { BoltAction } from '~/types/actions'; import type { BoltAction } from '~/types/actions';
import { createScopedLogger } from '~/utils/logger'; import { createScopedLogger } from '~/utils/logger';
import { unreachable } from '~/utils/unreachable'; import { unreachable } from '~/utils/unreachable';
import type { ActionCallbackData } from './message-parser'; import type { ActionCallbackData } from './message-parser';
import type { ITerminal } from '~/types/terminal';
import type { BoltShell } from '~/utils/shell'; import type { BoltShell } from '~/utils/shell';
const logger = createScopedLogger('ActionRunner'); const logger = createScopedLogger('ActionRunner');
@ -45,7 +44,6 @@ export class ActionRunner {
constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) { constructor(webcontainerPromise: Promise<WebContainer>, getShellTerminal: () => BoltShell) {
this.#webcontainer = webcontainerPromise; this.#webcontainer = webcontainerPromise;
this.#shellTerminal = getShellTerminal; this.#shellTerminal = getShellTerminal;
} }
addAction(data: ActionCallbackData) { addAction(data: ActionCallbackData) {
@ -88,19 +86,21 @@ export class ActionRunner {
if (action.executed) { if (action.executed) {
return; return;
} }
if (isStreaming && action.type !== 'file') { if (isStreaming && action.type !== 'file') {
return; return;
} }
this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming }); this.#updateAction(actionId, { ...action, ...data.action, executed: !isStreaming });
return this.#currentExecutionPromise = this.#currentExecutionPromise // eslint-disable-next-line consistent-return
return (this.#currentExecutionPromise = this.#currentExecutionPromise
.then(() => { .then(() => {
return this.#executeAction(actionId, isStreaming); this.#executeAction(actionId, isStreaming);
}) })
.catch((error) => { .catch((error) => {
console.error('Action failed:', error); console.error('Action failed:', error);
}); }));
} }
async #executeAction(actionId: string, isStreaming: boolean = false) { async #executeAction(actionId: string, isStreaming: boolean = false) {
@ -121,17 +121,23 @@ export class ActionRunner {
case 'start': { case 'start': {
// making the start app non blocking // making the start app non blocking
this.#runStartAction(action).then(()=>this.#updateAction(actionId, { status: 'complete' })) this.#runStartAction(action)
.catch(()=>this.#updateAction(actionId, { status: 'failed', error: 'Action failed' })) .then(() => this.#updateAction(actionId, { status: 'complete' }))
// adding a delay to avoid any race condition between 2 start actions .catch(() => this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }));
// i am up for a better approch
await new Promise(resolve=>setTimeout(resolve,2000)) /*
return * adding a delay to avoid any race condition between 2 start actions
break; * i am up for a better approach
*/
await new Promise((resolve) => setTimeout(resolve, 2000));
return;
} }
} }
this.#updateAction(actionId, { status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete' }); this.#updateAction(actionId, {
status: isStreaming ? 'running' : action.abortSignal.aborted ? 'aborted' : 'complete',
});
} catch (error) { } catch (error) {
this.#updateAction(actionId, { status: 'failed', error: 'Action failed' }); this.#updateAction(actionId, { status: 'failed', error: 'Action failed' });
logger.error(`[${action.type}]:Action failed\n\n`, error); logger.error(`[${action.type}]:Action failed\n\n`, error);
@ -145,16 +151,19 @@ export class ActionRunner {
if (action.type !== 'shell') { if (action.type !== 'shell') {
unreachable('Expected shell action'); unreachable('Expected shell action');
} }
const shell = this.#shellTerminal()
await shell.ready() const shell = this.#shellTerminal();
await shell.ready();
if (!shell || !shell.terminal || !shell.process) { if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found'); unreachable('Shell terminal not found');
} }
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`)
if (resp?.exitCode != 0) {
throw new Error("Failed To Execute Shell Command");
const resp = await shell.executeCommand(this.runnerId.get(), action.content);
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) {
throw new Error('Failed To Execute Shell Command');
} }
} }
@ -162,21 +171,26 @@ export class ActionRunner {
if (action.type !== 'start') { if (action.type !== 'start') {
unreachable('Expected shell action'); unreachable('Expected shell action');
} }
if (!this.#shellTerminal) { if (!this.#shellTerminal) {
unreachable('Shell terminal not found'); unreachable('Shell terminal not found');
} }
const shell = this.#shellTerminal()
await shell.ready() const shell = this.#shellTerminal();
await shell.ready();
if (!shell || !shell.terminal || !shell.process) { if (!shell || !shell.terminal || !shell.process) {
unreachable('Shell terminal not found'); unreachable('Shell terminal not found');
} }
const resp = await shell.executeCommand(this.runnerId.get(), action.content)
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`) const resp = await shell.executeCommand(this.runnerId.get(), action.content);
logger.debug(`${action.type} Shell Response: [exit code:${resp?.exitCode}]`);
if (resp?.exitCode != 0) { if (resp?.exitCode != 0) {
throw new Error("Failed To Start Application"); throw new Error('Failed To Start Application');
} }
return resp
return resp;
} }
async #runFileAction(action: ActionState) { async #runFileAction(action: ActionState) {

View File

@ -55,7 +55,7 @@ interface MessageState {
export class StreamingMessageParser { export class StreamingMessageParser {
#messages = new Map<string, MessageState>(); #messages = new Map<string, MessageState>();
constructor(private _options: StreamingMessageParserOptions = {}) { } constructor(private _options: StreamingMessageParserOptions = {}) {}
parse(messageId: string, input: string) { parse(messageId: string, input: string) {
let state = this.#messages.get(messageId); let state = this.#messages.get(messageId);
@ -120,20 +120,20 @@ export class StreamingMessageParser {
i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length; i = closeIndex + ARTIFACT_ACTION_TAG_CLOSE.length;
} else { } else {
if ('type' in currentAction && currentAction.type === 'file') { if ('type' in currentAction && currentAction.type === 'file') {
let content = input.slice(i); const content = input.slice(i);
this._options.callbacks?.onActionStream?.({ this._options.callbacks?.onActionStream?.({
artifactId: currentArtifact.id, artifactId: currentArtifact.id,
messageId, messageId,
actionId: String(state.actionId - 1), actionId: String(state.actionId - 1),
action: { action: {
...currentAction as FileAction, ...(currentAction as FileAction),
content, content,
filePath: currentAction.filePath, filePath: currentAction.filePath,
}, },
}); });
} }
break; break;
} }
} else { } else {
@ -272,7 +272,7 @@ export class StreamingMessageParser {
} }
(actionAttributes as FileAction).filePath = filePath; (actionAttributes as FileAction).filePath = filePath;
} else if (!(['shell', 'start'].includes(actionType))) { } else if (!['shell', 'start'].includes(actionType)) {
logger.warn(`Unknown action type '${actionType}'`); logger.warn(`Unknown action type '${actionType}'`);
} }

View File

@ -7,7 +7,7 @@ import { coloredText } from '~/utils/terminal';
export class TerminalStore { export class TerminalStore {
#webcontainer: Promise<WebContainer>; #webcontainer: Promise<WebContainer>;
#terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = []; #terminals: Array<{ terminal: ITerminal; process: WebContainerProcess }> = [];
#boltTerminal = newBoltShellProcess() #boltTerminal = newBoltShellProcess();
showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true); showTerminal: WritableAtom<boolean> = import.meta.hot?.data.showTerminal ?? atom(true);
@ -27,8 +27,8 @@ export class TerminalStore {
} }
async attachBoltTerminal(terminal: ITerminal) { async attachBoltTerminal(terminal: ITerminal) {
try { try {
let wc = await this.#webcontainer const wc = await this.#webcontainer;
await this.#boltTerminal.init(wc, terminal) await this.#boltTerminal.init(wc, terminal);
} catch (error: any) { } catch (error: any) {
terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message); terminal.write(coloredText.red('Failed to spawn bolt shell\n\n') + error.message);
return; return;

View File

@ -11,9 +11,8 @@ import { PreviewsStore } from './previews';
import { TerminalStore } from './terminal'; import { TerminalStore } from './terminal';
import JSZip from 'jszip'; import JSZip from 'jszip';
import { saveAs } from 'file-saver'; import { saveAs } from 'file-saver';
import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest"; import { Octokit, type RestEndpointMethodTypes } from '@octokit/rest';
import * as nodePath from 'node:path'; import * as nodePath from 'node:path';
import type { WebContainerProcess } from '@webcontainer/api';
import { extractRelativePath } from '~/utils/diff'; import { extractRelativePath } from '~/utils/diff';
export interface ArtifactState { export interface ArtifactState {
@ -42,8 +41,7 @@ export class WorkbenchStore {
unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>()); unsavedFiles: WritableAtom<Set<string>> = import.meta.hot?.data.unsavedFiles ?? atom(new Set<string>());
modifiedFiles = new Set<string>(); modifiedFiles = new Set<string>();
artifactIdList: string[] = []; artifactIdList: string[] = [];
#boltTerminal: { terminal: ITerminal; process: WebContainerProcess } | undefined; #globalExecutionQueue = Promise.resolve();
#globalExecutionQueue=Promise.resolve();
constructor() { constructor() {
if (import.meta.hot) { if (import.meta.hot) {
import.meta.hot.data.artifacts = this.artifacts; import.meta.hot.data.artifacts = this.artifacts;
@ -54,7 +52,7 @@ export class WorkbenchStore {
} }
addToExecutionQueue(callback: () => Promise<void>) { addToExecutionQueue(callback: () => Promise<void>) {
this.#globalExecutionQueue=this.#globalExecutionQueue.then(()=>callback()) this.#globalExecutionQueue = this.#globalExecutionQueue.then(() => callback());
} }
get previews() { get previews() {
@ -96,7 +94,6 @@ export class WorkbenchStore {
this.#terminalStore.attachTerminal(terminal); this.#terminalStore.attachTerminal(terminal);
} }
attachBoltTerminal(terminal: ITerminal) { attachBoltTerminal(terminal: ITerminal) {
this.#terminalStore.attachBoltTerminal(terminal); this.#terminalStore.attachBoltTerminal(terminal);
} }
@ -261,7 +258,8 @@ export class WorkbenchStore {
this.artifacts.setKey(messageId, { ...artifact, ...state }); this.artifacts.setKey(messageId, { ...artifact, ...state });
} }
addAction(data: ActionCallbackData) { addAction(data: ActionCallbackData) {
this._addAction(data) this._addAction(data);
// this.addToExecutionQueue(()=>this._addAction(data)) // this.addToExecutionQueue(()=>this._addAction(data))
} }
async _addAction(data: ActionCallbackData) { async _addAction(data: ActionCallbackData) {
@ -277,11 +275,10 @@ export class WorkbenchStore {
} }
runAction(data: ActionCallbackData, isStreaming: boolean = false) { runAction(data: ActionCallbackData, isStreaming: boolean = false) {
if(isStreaming) { if (isStreaming) {
this._runAction(data, isStreaming) this._runAction(data, isStreaming);
} } else {
else{ this.addToExecutionQueue(() => this._runAction(data, isStreaming));
this.addToExecutionQueue(()=>this._runAction(data, isStreaming))
} }
} }
async _runAction(data: ActionCallbackData, isStreaming: boolean = false) { async _runAction(data: ActionCallbackData, isStreaming: boolean = false) {
@ -292,16 +289,21 @@ export class WorkbenchStore {
if (!artifact) { if (!artifact) {
unreachable('Artifact not found'); unreachable('Artifact not found');
} }
if (data.action.type === 'file') { if (data.action.type === 'file') {
let wc = await webcontainer const wc = await webcontainer;
const fullPath = nodePath.join(wc.workdir, data.action.filePath); const fullPath = nodePath.join(wc.workdir, data.action.filePath);
if (this.selectedFile.value !== fullPath) { if (this.selectedFile.value !== fullPath) {
this.setSelectedFile(fullPath); this.setSelectedFile(fullPath);
} }
if (this.currentView.value !== 'code') { if (this.currentView.value !== 'code') {
this.currentView.set('code'); this.currentView.set('code');
} }
const doc = this.#editorStore.documents.get()[fullPath]; const doc = this.#editorStore.documents.get()[fullPath];
if (!doc) { if (!doc) {
await artifact.runner.runAction(data, isStreaming); await artifact.runner.runAction(data, isStreaming);
} }
@ -382,7 +384,6 @@ export class WorkbenchStore {
} }
async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) { async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) {
try { try {
// Get the GitHub auth token from environment variables // Get the GitHub auth token from environment variables
const githubToken = ghToken; const githubToken = ghToken;
@ -397,10 +398,11 @@ export class WorkbenchStore {
const octokit = new Octokit({ auth: githubToken }); const octokit = new Octokit({ auth: githubToken });
// Check if the repository already exists before creating it // Check if the repository already exists before creating it
let repo: RestEndpointMethodTypes["repos"]["get"]["response"]['data'] let repo: RestEndpointMethodTypes['repos']['get']['response']['data'];
try { try {
let resp = await octokit.repos.get({ owner: owner, repo: repoName }); const resp = await octokit.repos.get({ owner, repo: repoName });
repo = resp.data repo = resp.data;
} catch (error) { } catch (error) {
if (error instanceof Error && 'status' in error && error.status === 404) { if (error instanceof Error && 'status' in error && error.status === 404) {
// Repository doesn't exist, so create a new one // Repository doesn't exist, so create a new one
@ -418,6 +420,7 @@ export class WorkbenchStore {
// Get all files // Get all files
const files = this.files.get(); const files = this.files.get();
if (!files || Object.keys(files).length === 0) { if (!files || Object.keys(files).length === 0) {
throw new Error('No files found to push'); throw new Error('No files found to push');
} }
@ -434,7 +437,9 @@ export class WorkbenchStore {
}); });
return { path: extractRelativePath(filePath), sha: blob.sha }; return { path: extractRelativePath(filePath), sha: blob.sha };
} }
})
return null;
}),
); );
const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs

View File

@ -1,5 +1,6 @@
// @ts-nocheck // eslint-disable-next-line @typescript-eslint/ban-ts-comment
// Preventing TS checks with files presented in the video for a better presentation. // @ts-nocheck TODO: Provider proper types
import { type ActionFunctionArgs } from '@remix-run/cloudflare'; import { type ActionFunctionArgs } from '@remix-run/cloudflare';
import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants'; import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts'; import { CONTINUE_PROMPT } from '~/lib/.server/llm/prompts';
@ -14,14 +15,15 @@ function parseCookies(cookieHeader) {
const cookies = {}; const cookies = {};
// Split the cookie string by semicolons and spaces // Split the cookie string by semicolons and spaces
const items = cookieHeader.split(";").map(cookie => cookie.trim()); const items = cookieHeader.split(';').map((cookie) => cookie.trim());
items.forEach((item) => {
const [name, ...rest] = item.split('=');
items.forEach(item => {
const [name, ...rest] = item.split("=");
if (name && rest) { if (name && rest) {
// Decode the name and value, and join value parts in case it contains '=' // Decode the name and value, and join value parts in case it contains '='
const decodedName = decodeURIComponent(name.trim()); const decodedName = decodeURIComponent(name.trim());
const decodedValue = decodeURIComponent(rest.join("=").trim()); const decodedValue = decodeURIComponent(rest.join('=').trim());
cookies[decodedName] = decodedValue; cookies[decodedName] = decodedValue;
} }
}); });
@ -31,13 +33,13 @@ function parseCookies(cookieHeader) {
async function chatAction({ context, request }: ActionFunctionArgs) { async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages } = await request.json<{ const { messages } = await request.json<{
messages: Messages messages: Messages;
}>(); }>();
const cookieHeader = request.headers.get("Cookie"); const cookieHeader = request.headers.get('Cookie');
// Parse the cookie's value (returns an object or null if no cookie exists) // Parse the cookie's value (returns an object or null if no cookie exists)
const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || "{}"); const apiKeys = JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');
const stream = new SwitchableStream(); const stream = new SwitchableStream();
@ -83,7 +85,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
if (error.message?.includes('API key')) { if (error.message?.includes('API key')) {
throw new Response('Invalid or missing API key', { throw new Response('Invalid or missing API key', {
status: 401, status: 401,
statusText: 'Unauthorized' statusText: 'Unauthorized',
}); });
} }

View File

@ -1,10 +1,10 @@
import type { ModelInfo } from '~/utils/types'; import type { ModelInfo } from '~/utils/types';
export type ProviderInfo = { export type ProviderInfo = {
staticModels: ModelInfo[], staticModels: ModelInfo[];
name: string, name: string;
getDynamicModels?: () => Promise<ModelInfo[]>, getDynamicModels?: () => Promise<ModelInfo[]>;
getApiKeyLink?: string, getApiKeyLink?: string;
labelForGetApiKey?: string, labelForGetApiKey?: string;
icon?:string, icon?: string;
}; };

View File

@ -12,26 +12,42 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ {
name: 'Anthropic', name: 'Anthropic',
staticModels: [ staticModels: [
{ name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 }, {
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 }, name: 'claude-3-5-sonnet-latest',
{ name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 }, label: 'Claude 3.5 Sonnet (new)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{
name: 'claude-3-5-sonnet-20240620',
label: 'Claude 3.5 Sonnet (old)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{
name: 'claude-3-5-haiku-latest',
label: 'Claude 3.5 Haiku (new)',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{ name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 }, { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 }, { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
{ name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 } { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
], ],
getApiKeyLink: "https://console.anthropic.com/settings/keys", getApiKeyLink: 'https://console.anthropic.com/settings/keys',
}, },
{ {
name: 'Ollama', name: 'Ollama',
staticModels: [], staticModels: [],
getDynamicModels: getOllamaModels, getDynamicModels: getOllamaModels,
getApiKeyLink: "https://ollama.com/download", getApiKeyLink: 'https://ollama.com/download',
labelForGetApiKey: "Download Ollama", labelForGetApiKey: 'Download Ollama',
icon: "i-ph:cloud-arrow-down", icon: 'i-ph:cloud-arrow-down',
}, { },
{
name: 'OpenAILike', name: 'OpenAILike',
staticModels: [], staticModels: [],
getDynamicModels: getOpenAILikeModels getDynamicModels: getOpenAILikeModels,
}, },
{ {
name: 'Cohere', name: 'Cohere',
@ -47,7 +63,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 }, { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
{ name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 }, { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
], ],
getApiKeyLink: 'https://dashboard.cohere.com/api-keys' getApiKeyLink: 'https://dashboard.cohere.com/api-keys',
}, },
{ {
name: 'OpenRouter', name: 'OpenRouter',
@ -56,22 +72,52 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ {
name: 'anthropic/claude-3.5-sonnet', name: 'anthropic/claude-3.5-sonnet',
label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
provider: 'OpenRouter' provider: 'OpenRouter',
, maxTokenAllowed: 8000 maxTokenAllowed: 8000,
},
{
name: 'anthropic/claude-3-haiku',
label: 'Anthropic: Claude 3 Haiku (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'deepseek/deepseek-coder',
label: 'Deepseek-Coder V2 236B (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'google/gemini-flash-1.5',
label: 'Google Gemini Flash 1.5 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'google/gemini-pro-1.5',
label: 'Google Gemini Pro 1.5 (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
}, },
{ name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
{ name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, {
{ name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 }, name: 'mistralai/mistral-nemo',
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 } label: 'OpenRouter Mistral Nemo (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{
name: 'qwen/qwen-110b-chat',
label: 'OpenRouter Qwen 110b Chat (OpenRouter)',
provider: 'OpenRouter',
maxTokenAllowed: 8000,
},
{ name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 },
], ],
getDynamicModels: getOpenRouterModels, getDynamicModels: getOpenRouterModels,
getApiKeyLink: 'https://openrouter.ai/settings/keys', getApiKeyLink: 'https://openrouter.ai/settings/keys',
},
}, { {
name: 'Google', name: 'Google',
staticModels: [ staticModels: [
{ name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
@ -79,32 +125,92 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 }, { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
{ name: 'gemini-exp-1121', label: 'Gemini exp-1121', provider: 'Google', maxTokenAllowed: 8192 } { name: 'gemini-exp-1121', label: 'Gemini exp-1121', provider: 'Google', maxTokenAllowed: 8192 },
], ],
getApiKeyLink: 'https://aistudio.google.com/app/apikey' getApiKeyLink: 'https://aistudio.google.com/app/apikey',
}, { },
{
name: 'Groq', name: 'Groq',
staticModels: [ staticModels: [
{ name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }, { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
{ name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 } { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
], ],
getApiKeyLink: 'https://console.groq.com/keys' getApiKeyLink: 'https://console.groq.com/keys',
}, },
{ {
name: 'HuggingFace', name: 'HuggingFace',
staticModels: [ staticModels: [
{ name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, {
{ name: 'Qwen/Qwen2.5-72B-Instruct', label: 'Qwen2.5-72B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
{ name: 'meta-llama/Llama-3.1-70B-Instruct', label: 'Llama-3.1-70B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
{ name: 'meta-llama/Llama-3.1-405B', label: 'Llama-3.1-405B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, provider: 'HuggingFace',
{ name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, maxTokenAllowed: 8000,
{ name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }, },
{ name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 } {
name: '01-ai/Yi-1.5-34B-Chat',
label: 'Yi-1.5-34B-Chat (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'codellama/CodeLlama-34b-Instruct-hf',
label: 'CodeLlama-34b-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'NousResearch/Hermes-3-Llama-3.1-8B',
label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'Qwen/Qwen2.5-72B-Instruct',
label: 'Qwen2.5-72B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'meta-llama/Llama-3.1-70B-Instruct',
label: 'Llama-3.1-70B-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'meta-llama/Llama-3.1-405B',
label: 'Llama-3.1-405B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: '01-ai/Yi-1.5-34B-Chat',
label: 'Yi-1.5-34B-Chat (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'codellama/CodeLlama-34b-Instruct-hf',
label: 'CodeLlama-34b-Instruct (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
{
name: 'NousResearch/Hermes-3-Llama-3.1-8B',
label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
provider: 'HuggingFace',
maxTokenAllowed: 8000,
},
], ],
getApiKeyLink: 'https://huggingface.co/settings/tokens' getApiKeyLink: 'https://huggingface.co/settings/tokens',
}, },
{ {
@ -113,23 +219,24 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 }, { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }, { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 }, { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 } { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
], ],
getApiKeyLink: "https://platform.openai.com/api-keys", getApiKeyLink: 'https://platform.openai.com/api-keys',
}, { },
{
name: 'xAI', name: 'xAI',
staticModels: [ staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }],
{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 } getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key',
], },
getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key' {
}, {
name: 'Deepseek', name: 'Deepseek',
staticModels: [ staticModels: [
{ name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 }, { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
{ name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 } { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
], ],
getApiKeyLink: 'https://platform.deepseek.com/api_keys' getApiKeyLink: 'https://platform.deepseek.com/apiKeys',
}, { },
{
name: 'Mistral', name: 'Mistral',
staticModels: [ staticModels: [
{ name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
@ -140,27 +247,29 @@ const PROVIDER_LIST: ProviderInfo[] = [
{ name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 }, { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 } { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
], ],
getApiKeyLink: 'https://console.mistral.ai/api-keys/' getApiKeyLink: 'https://console.mistral.ai/api-keys/',
}, { },
{
name: 'LMStudio', name: 'LMStudio',
staticModels: [], staticModels: [],
getDynamicModels: getLMStudioModels, getDynamicModels: getLMStudioModels,
getApiKeyLink: 'https://lmstudio.ai/', getApiKeyLink: 'https://lmstudio.ai/',
labelForGetApiKey: 'Get LMStudio', labelForGetApiKey: 'Get LMStudio',
icon: "i-ph:cloud-arrow-down", icon: 'i-ph:cloud-arrow-down',
} },
]; ];
export const DEFAULT_PROVIDER = PROVIDER_LIST[0]; export const DEFAULT_PROVIDER = PROVIDER_LIST[0];
const staticModels: ModelInfo[] = PROVIDER_LIST.map(p => p.staticModels).flat(); const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat();
export let MODEL_LIST: ModelInfo[] = [...staticModels]; export let MODEL_LIST: ModelInfo[] = [...staticModels];
const getOllamaBaseUrl = () => { const getOllamaBaseUrl = () => {
const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434'; const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
// Check if we're in the browser // Check if we're in the browser
if (typeof window !== 'undefined') { if (typeof window !== 'undefined') {
// Frontend always uses localhost // Frontend always uses localhost
@ -170,23 +279,22 @@ const getOllamaBaseUrl = () => {
// Backend: Check if we're running in Docker // Backend: Check if we're running in Docker
const isDocker = process.env.RUNNING_IN_DOCKER === 'true'; const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
return isDocker return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
? defaultBaseUrl.replace('localhost', 'host.docker.internal')
: defaultBaseUrl;
}; };
async function getOllamaModels(): Promise<ModelInfo[]> { async function getOllamaModels(): Promise<ModelInfo[]> {
try { try {
const base_url = getOllamaBaseUrl(); const baseUrl = getOllamaBaseUrl();
const response = await fetch(`${base_url}/api/tags`); const response = await fetch(`${baseUrl}/api/tags`);
const data = await response.json() as OllamaApiResponse; const data = (await response.json()) as OllamaApiResponse;
return data.models.map((model: OllamaModel) => ({ return data.models.map((model: OllamaModel) => ({
name: model.name, name: model.name,
label: `${model.name} (${model.details.parameter_size})`, label: `${model.name} (${model.details.parameter_size})`,
provider: 'Ollama', provider: 'Ollama',
maxTokenAllowed:8000, maxTokenAllowed: 8000,
})); }));
// eslint-disable-next-line @typescript-eslint/no-unused-vars
} catch (e) { } catch (e) {
return []; return [];
} }
@ -194,22 +302,26 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
async function getOpenAILikeModels(): Promise<ModelInfo[]> { async function getOpenAILikeModels(): Promise<ModelInfo[]> {
try { try {
const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || ''; const baseUrl = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
if (!base_url) {
if (!baseUrl) {
return []; return [];
} }
const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
const response = await fetch(`${base_url}/models`, { const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
const response = await fetch(`${baseUrl}/models`, {
headers: { headers: {
Authorization: `Bearer ${api_key}` Authorization: `Bearer ${apiKey}`,
} },
}); });
const res = await response.json() as any; const res = (await response.json()) as any;
return res.data.map((model: any) => ({ return res.data.map((model: any) => ({
name: model.id, name: model.id,
label: model.id, label: model.id,
provider: 'OpenAILike' provider: 'OpenAILike',
})); }));
// eslint-disable-next-line @typescript-eslint/no-unused-vars
} catch (e) { } catch (e) {
return []; return [];
} }
@ -223,51 +335,67 @@ type OpenRouterModelsResponse = {
pricing: { pricing: {
prompt: number; prompt: number;
completion: number; completion: number;
} };
}[] }[];
}; };
async function getOpenRouterModels(): Promise<ModelInfo[]> { async function getOpenRouterModels(): Promise<ModelInfo[]> {
const data: OpenRouterModelsResponse = await (await fetch('https://openrouter.ai/api/v1/models', { const data: OpenRouterModelsResponse = await (
headers: { await fetch('https://openrouter.ai/api/v1/models', {
'Content-Type': 'application/json' headers: {
} 'Content-Type': 'application/json',
})).json(); },
})
).json();
return data.data.sort((a, b) => a.name.localeCompare(b.name)).map(m => ({ return data.data
name: m.id, .sort((a, b) => a.name.localeCompare(b.name))
label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed( .map((m) => ({
2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor( name: m.id,
m.context_length / 1000)}k`, label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
provider: 'OpenRouter', 2,
maxTokenAllowed:8000, )} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
})); provider: 'OpenRouter',
maxTokenAllowed: 8000,
}));
} }
async function getLMStudioModels(): Promise<ModelInfo[]> { async function getLMStudioModels(): Promise<ModelInfo[]> {
try { try {
const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234'; const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
const response = await fetch(`${base_url}/v1/models`); const response = await fetch(`${baseUrl}/v1/models`);
const data = await response.json() as any; const data = (await response.json()) as any;
return data.data.map((model: any) => ({ return data.data.map((model: any) => ({
name: model.id, name: model.id,
label: model.id, label: model.id,
provider: 'LMStudio' provider: 'LMStudio',
})); }));
// eslint-disable-next-line @typescript-eslint/no-unused-vars
} catch (e) { } catch (e) {
return []; return [];
} }
} }
async function initializeModelList(): Promise<ModelInfo[]> { async function initializeModelList(): Promise<ModelInfo[]> {
MODEL_LIST = [...(await Promise.all( MODEL_LIST = [
PROVIDER_LIST ...(
.filter((p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels) await Promise.all(
.map(p => p.getDynamicModels()))) PROVIDER_LIST.filter(
.flat(), ...staticModels]; (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
).map((p) => p.getDynamicModels()),
)
).flat(),
...staticModels,
];
return MODEL_LIST; return MODEL_LIST;
} }
export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, getOpenRouterModels, PROVIDER_LIST }; export {
getOllamaModels,
getOpenAILikeModels,
getLMStudioModels,
initializeModelList,
getOpenRouterModels,
PROVIDER_LIST,
};

View File

@ -52,67 +52,77 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITer
return process; return process;
} }
/** Result of one shell command: captured output plus exit code; undefined when the shell is not ready. */
export type ExecutionResult = { output: string; exitCode: number } | undefined;
export class BoltShell { export class BoltShell {
#initialized: (() => void) | undefined #initialized: (() => void) | undefined;
#readyPromise: Promise<void> #readyPromise: Promise<void>;
#webcontainer: WebContainer | undefined #webcontainer: WebContainer | undefined;
#terminal: ITerminal | undefined #terminal: ITerminal | undefined;
#process: WebContainerProcess | undefined #process: WebContainerProcess | undefined;
executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise<any> } | undefined>() executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();
#outputStream: ReadableStreamDefaultReader<string> | undefined #outputStream: ReadableStreamDefaultReader<string> | undefined;
#shellInputStream: WritableStreamDefaultWriter<string> | undefined #shellInputStream: WritableStreamDefaultWriter<string> | undefined;
constructor() { constructor() {
this.#readyPromise = new Promise((resolve) => { this.#readyPromise = new Promise((resolve) => {
this.#initialized = resolve this.#initialized = resolve;
}) });
} }
ready() { ready() {
return this.#readyPromise; return this.#readyPromise;
} }
async init(webcontainer: WebContainer, terminal: ITerminal) {
this.#webcontainer = webcontainer
this.#terminal = terminal
let callback = (data: string) => {
console.log(data)
}
let { process, output } = await this.newBoltShellProcess(webcontainer, terminal)
this.#process = process
this.#outputStream = output.getReader()
await this.waitTillOscCode('interactive')
this.#initialized?.()
}
get terminal() {
return this.#terminal
}
get process() {
return this.#process
}
async executeCommand(sessionId: string, command: string) {
if (!this.process || !this.terminal) {
return
}
let state = this.executionState.get()
//interrupt the current execution async init(webcontainer: WebContainer, terminal: ITerminal) {
// this.#shellInputStream?.write('\x03'); this.#webcontainer = webcontainer;
this.terminal.input('\x03'); this.#terminal = terminal;
if (state && state.executionPrms) {
await state.executionPrms const { process, output } = await this.newBoltShellProcess(webcontainer, terminal);
this.#process = process;
this.#outputStream = output.getReader();
await this.waitTillOscCode('interactive');
this.#initialized?.();
}
get terminal() {
return this.#terminal;
}
get process() {
return this.#process;
}
async executeCommand(sessionId: string, command: string): Promise<ExecutionResult> {
if (!this.process || !this.terminal) {
return undefined;
} }
const state = this.executionState.get();
/*
* interrupt the current execution
* this.#shellInputStream?.write('\x03');
*/
this.terminal.input('\x03');
if (state && state.executionPrms) {
await state.executionPrms;
}
//start a new execution //start a new execution
this.terminal.input(command.trim() + '\n'); this.terminal.input(command.trim() + '\n');
//wait for the execution to finish //wait for the execution to finish
let executionPrms = this.getCurrentExecutionResult() const executionPromise = this.getCurrentExecutionResult();
this.executionState.set({ sessionId, active: true, executionPrms }) this.executionState.set({ sessionId, active: true, executionPrms: executionPromise });
let resp = await executionPrms const resp = await executionPromise;
this.executionState.set({ sessionId, active: false }) this.executionState.set({ sessionId, active: false });
return resp
return resp;
} }
async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) { async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
const args: string[] = []; const args: string[] = [];
@ -126,6 +136,7 @@ export class BoltShell {
const input = process.input.getWriter(); const input = process.input.getWriter();
this.#shellInputStream = input; this.#shellInputStream = input;
const [internalOutput, terminalOutput] = process.output.tee(); const [internalOutput, terminalOutput] = process.output.tee();
const jshReady = withResolvers<void>(); const jshReady = withResolvers<void>();
@ -162,34 +173,48 @@ export class BoltShell {
return { process, output: internalOutput }; return { process, output: internalOutput };
} }
async getCurrentExecutionResult() {
let { output, exitCode } = await this.waitTillOscCode('exit') async getCurrentExecutionResult(): Promise<ExecutionResult> {
const { output, exitCode } = await this.waitTillOscCode('exit');
return { output, exitCode }; return { output, exitCode };
} }
async waitTillOscCode(waitCode: string) { async waitTillOscCode(waitCode: string) {
let fullOutput = ''; let fullOutput = '';
let exitCode: number = 0; let exitCode: number = 0;
if (!this.#outputStream) return { output: fullOutput, exitCode };
let tappedStream = this.#outputStream if (!this.#outputStream) {
return { output: fullOutput, exitCode };
}
const tappedStream = this.#outputStream;
while (true) { while (true) {
const { value, done } = await tappedStream.read(); const { value, done } = await tappedStream.read();
if (done) break;
if (done) {
break;
}
const text = value || ''; const text = value || '';
fullOutput += text; fullOutput += text;
// Check if command completion signal with exit code // Check if command completion signal with exit code
const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || []; const [, osc, , , code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
if (osc === 'exit') { if (osc === 'exit') {
exitCode = parseInt(code, 10); exitCode = parseInt(code, 10);
} }
if (osc === waitCode) { if (osc === waitCode) {
break; break;
} }
} }
return { output: fullOutput, exitCode }; return { output: fullOutput, exitCode };
} }
} }
/** Factory wrapper for call sites that prefer a function over `new BoltShell()`. */
export function newBoltShellProcess() {
  return new BoltShell();
}

View File

@ -1,4 +1,3 @@
interface OllamaModelDetails { interface OllamaModelDetails {
parent_model: string; parent_model: string;
format: string; format: string;
@ -29,10 +28,10 @@ export interface ModelInfo {
} }
export interface ProviderInfo { export interface ProviderInfo {
staticModels: ModelInfo[], staticModels: ModelInfo[];
name: string, name: string;
getDynamicModels?: () => Promise<ModelInfo[]>, getDynamicModels?: () => Promise<ModelInfo[]>;
getApiKeyLink?: string, getApiKeyLink?: string;
labelForGetApiKey?: string, labelForGetApiKey?: string;
icon?:string, icon?: string;
}; }

View File

@ -12,6 +12,8 @@ export default [
'@blitz/catch-error-name': 'off', '@blitz/catch-error-name': 'off',
'@typescript-eslint/no-this-alias': 'off', '@typescript-eslint/no-this-alias': 'off',
'@typescript-eslint/no-empty-object-type': 'off', '@typescript-eslint/no-empty-object-type': 'off',
'@blitz/comment-syntax': 'off',
'@blitz/block-scope-case': 'off',
}, },
}, },
{ {

View File

@ -11,8 +11,8 @@
"dev": "remix vite:dev", "dev": "remix vite:dev",
"test": "vitest --run", "test": "vitest --run",
"test:watch": "vitest", "test:watch": "vitest",
"lint": "eslint --cache --cache-location ./node_modules/.cache/eslint .", "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app",
"lint:fix": "npm run lint -- --fix", "lint:fix": "npm run lint -- --fix && prettier app --write",
"start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings", "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
"dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session", "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
"dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai", "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",

View File

@ -9,4 +9,7 @@ interface Env {
OPENAI_LIKE_API_BASE_URL: string; OPENAI_LIKE_API_BASE_URL: string;
DEEPSEEK_API_KEY: string; DEEPSEEK_API_KEY: string;
LMSTUDIO_API_BASE_URL: string; LMSTUDIO_API_BASE_URL: string;
GOOGLE_GENERATIVE_AI_API_KEY: string;
MISTRAL_API_KEY: string;
XAI_API_KEY: string;
} }