Mirror of https://github.com/stackblitz-labs/bolt.diy, synced 2025-06-26 18:26:38 +00:00.
Hardcode URL for Together AI as fallback if not set in env
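The hunks below register the new Together provider and its static models in PROVIDER_LIST; the hardcoded fallback URL named in the title does not appear in the hunks shown here. A minimal sketch of the pattern the title describes, with the env var name and endpoint as assumptions:

// Hypothetical sketch: TOGETHER_API_BASE_URL and the endpoint URL are
// assumptions, not taken from the hunks below.
function getTogetherBaseUrl(env: { TOGETHER_API_BASE_URL?: string }): string {
  // Prefer an explicitly configured base URL; otherwise fall back to the public API.
  return env.TOGETHER_API_BASE_URL || 'https://api.together.xyz/v1';
}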
@@ -7,31 +7,48 @@ export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
 export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
 export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
 export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
 export const PROMPT_COOKIE_KEY = 'cachedPrompt';

 const PROVIDER_LIST: ProviderInfo[] = [
   {
     name: 'Anthropic',
     staticModels: [
-      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
-      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic', maxTokenAllowed: 8000 },
-      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic', maxTokenAllowed: 8000 },
+      {
+        name: 'claude-3-5-sonnet-latest',
+        label: 'Claude 3.5 Sonnet (new)',
+        provider: 'Anthropic',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'claude-3-5-sonnet-20240620',
+        label: 'Claude 3.5 Sonnet (old)',
+        provider: 'Anthropic',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'claude-3-5-haiku-latest',
+        label: 'Claude 3.5 Haiku (new)',
+        provider: 'Anthropic',
+        maxTokenAllowed: 8000,
+      },
       { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
       { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
-      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 }
+      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: "https://console.anthropic.com/settings/keys",
+    getApiKeyLink: 'https://console.anthropic.com/settings/keys',
   },
   {
     name: 'Ollama',
     staticModels: [],
     getDynamicModels: getOllamaModels,
-    getApiKeyLink: "https://ollama.com/download",
-    labelForGetApiKey: "Download Ollama",
-    icon: "i-ph:cloud-arrow-down",
-  }, {
+    getApiKeyLink: 'https://ollama.com/download',
+    labelForGetApiKey: 'Download Ollama',
+    icon: 'i-ph:cloud-arrow-down',
+  },
+  {
     name: 'OpenAILike',
     staticModels: [],
-    getDynamicModels: getOpenAILikeModels
+    getDynamicModels: getOpenAILikeModels,
   },
   {
     name: 'Cohere',
@@ -47,7 +64,7 @@ const PROVIDER_LIST: ProviderInfo[] = [
       { name: 'c4ai-aya-expanse-8b', label: 'c4AI Aya Expanse 8b', provider: 'Cohere', maxTokenAllowed: 4096 },
       { name: 'c4ai-aya-expanse-32b', label: 'c4AI Aya Expanse 32b', provider: 'Cohere', maxTokenAllowed: 4096 },
     ],
-    getApiKeyLink: 'https://dashboard.cohere.com/api-keys'
+    getApiKeyLink: 'https://dashboard.cohere.com/api-keys',
   },
   {
     name: 'OpenRouter',
@@ -56,22 +73,52 @@ const PROVIDER_LIST: ProviderInfo[] = [
       {
         name: 'anthropic/claude-3.5-sonnet',
         label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
-        provider: 'OpenRouter'
-        , maxTokenAllowed: 8000
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
       },
-      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
+      {
+        name: 'anthropic/claude-3-haiku',
+        label: 'Anthropic: Claude 3 Haiku (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'deepseek/deepseek-coder',
+        label: 'Deepseek-Coder V2 236B (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'google/gemini-flash-1.5',
+        label: 'Google Gemini Flash 1.5 (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'google/gemini-pro-1.5',
+        label: 'Google Gemini Pro 1.5 (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
       { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 8000 },
-      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 }
+      {
+        name: 'mistralai/mistral-nemo',
+        label: 'OpenRouter Mistral Nemo (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'qwen/qwen-110b-chat',
+        label: 'OpenRouter Qwen 110b Chat (OpenRouter)',
+        provider: 'OpenRouter',
+        maxTokenAllowed: 8000,
+      },
+      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter', maxTokenAllowed: 4096 },
     ],
     getDynamicModels: getOpenRouterModels,
     getApiKeyLink: 'https://openrouter.ai/settings/keys',
-
-  }, {
+  },
+  {
     name: 'Google',
     staticModels: [
       { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google', maxTokenAllowed: 8192 },
@@ -79,29 +126,92 @@ const PROVIDER_LIST: ProviderInfo[] = [
       { name: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash-8b', provider: 'Google', maxTokenAllowed: 8192 },
       { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google', maxTokenAllowed: 8192 },
       { name: 'gemini-1.5-pro-002', label: 'Gemini 1.5 Pro-002', provider: 'Google', maxTokenAllowed: 8192 },
-      { name: 'gemini-exp-1114', label: 'Gemini exp-1114', provider: 'Google', maxTokenAllowed: 8192 }
+      { name: 'gemini-exp-1121', label: 'Gemini exp-1121', provider: 'Google', maxTokenAllowed: 8192 },
     ],
-    getApiKeyLink: 'https://aistudio.google.com/app/apikey'
-  }, {
+    getApiKeyLink: 'https://aistudio.google.com/app/apikey',
+  },
+  {
     name: 'Groq',
     staticModels: [
       { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
       { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
-      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 }
+      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: 'https://console.groq.com/keys'
+    getApiKeyLink: 'https://console.groq.com/keys',
   },
   {
     name: 'HuggingFace',
     staticModels: [
-      { name: 'Qwen/Qwen2.5-Coder-32B-Instruct', label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
-      { name: '01-ai/Yi-1.5-34B-Chat', label: 'Yi-1.5-34B-Chat (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
-      { name: 'codellama/CodeLlama-34b-Instruct-hf', label: 'CodeLlama-34b-Instruct (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 },
-      { name: 'NousResearch/Hermes-3-Llama-3.1-8B', label: 'Hermes-3-Llama-3.1-8B (HuggingFace)', provider: 'HuggingFace', maxTokenAllowed: 8000 }
+      {
+        name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+        label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: '01-ai/Yi-1.5-34B-Chat',
+        label: 'Yi-1.5-34B-Chat (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'codellama/CodeLlama-34b-Instruct-hf',
+        label: 'CodeLlama-34b-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'NousResearch/Hermes-3-Llama-3.1-8B',
+        label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+        label: 'Qwen2.5-Coder-32B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'Qwen/Qwen2.5-72B-Instruct',
+        label: 'Qwen2.5-72B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'meta-llama/Llama-3.1-70B-Instruct',
+        label: 'Llama-3.1-70B-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'meta-llama/Llama-3.1-405B',
+        label: 'Llama-3.1-405B (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: '01-ai/Yi-1.5-34B-Chat',
+        label: 'Yi-1.5-34B-Chat (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'codellama/CodeLlama-34b-Instruct-hf',
+        label: 'CodeLlama-34b-Instruct (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'NousResearch/Hermes-3-Llama-3.1-8B',
+        label: 'Hermes-3-Llama-3.1-8B (HuggingFace)',
+        provider: 'HuggingFace',
+        maxTokenAllowed: 8000,
+      },
     ],
-    getApiKeyLink: 'https://huggingface.co/settings/tokens'
+    getApiKeyLink: 'https://huggingface.co/settings/tokens',
   },

   {
@@ -110,23 +220,24 @@ const PROVIDER_LIST: ProviderInfo[] = [
       { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI', maxTokenAllowed: 8000 },
       { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
       { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI', maxTokenAllowed: 8000 },
-      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 }
+      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: "https://platform.openai.com/api-keys",
-  }, {
+    getApiKeyLink: 'https://platform.openai.com/api-keys',
+  },
+  {
     name: 'xAI',
-    staticModels: [
-      { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }
-    ],
-    getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
-  }, {
+    staticModels: [{ name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI', maxTokenAllowed: 8000 }],
+    getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key',
+  },
+  {
     name: 'Deepseek',
     staticModels: [
       { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek', maxTokenAllowed: 8000 },
-      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 }
+      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: 'https://platform.deepseek.com/api_keys'
-  }, {
+    getApiKeyLink: 'https://platform.deepseek.com/apiKeys',
+  },
+  {
     name: 'Mistral',
     staticModels: [
       { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral', maxTokenAllowed: 8000 },
@@ -137,27 +248,54 @@ const PROVIDER_LIST: ProviderInfo[] = [
       { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral', maxTokenAllowed: 8000 },
       { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral', maxTokenAllowed: 8000 },
       { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral', maxTokenAllowed: 8000 },
-      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 }
+      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral', maxTokenAllowed: 8000 },
     ],
-    getApiKeyLink: 'https://console.mistral.ai/api-keys/'
-  }, {
+    getApiKeyLink: 'https://console.mistral.ai/api-keys/',
+  },
+  {
     name: 'LMStudio',
     staticModels: [],
     getDynamicModels: getLMStudioModels,
     getApiKeyLink: 'https://lmstudio.ai/',
     labelForGetApiKey: 'Get LMStudio',
-    icon: "i-ph:cloud-arrow-down",
-  }
+    icon: 'i-ph:cloud-arrow-down',
+  },
+  {
+    name: 'Together',
+    staticModels: [
+      {
+        name: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+        label: 'Qwen/Qwen2.5-Coder-32B-Instruct',
+        provider: 'Together',
+        maxTokenAllowed: 8000,
+      },
+      {
+        name: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+        label: 'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+        provider: 'Together',
+        maxTokenAllowed: 8000,
+      },
+
+      {
+        name: 'mistralai/Mixtral-8x7B-Instruct-v0.1',
+        label: 'Mixtral 8x7B Instruct',
+        provider: 'Together',
+        maxTokenAllowed: 8192,
+      },
+    ],
+    getApiKeyLink: 'https://api.together.xyz/settings/api-keys',
+  },
 ];

 export const DEFAULT_PROVIDER = PROVIDER_LIST[0];

-const staticModels: ModelInfo[] = PROVIDER_LIST.map(p => p.staticModels).flat();
+const staticModels: ModelInfo[] = PROVIDER_LIST.map((p) => p.staticModels).flat();

 export let MODEL_LIST: ModelInfo[] = [...staticModels];

 const getOllamaBaseUrl = () => {
   const defaultBaseUrl = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';

   // Check if we're in the browser
   if (typeof window !== 'undefined') {
     // Frontend always uses localhost
@@ -167,47 +305,56 @@ const getOllamaBaseUrl = () => {
   // Backend: Check if we're running in Docker
   const isDocker = process.env.RUNNING_IN_DOCKER === 'true';

-  return isDocker
-    ? defaultBaseUrl.replace('localhost', 'host.docker.internal')
-    : defaultBaseUrl;
+  return isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
 };

 async function getOllamaModels(): Promise<ModelInfo[]> {
   /*
    * if (typeof window === 'undefined') {
    *   return [];
    * }
    */

   try {
-    const base_url = getOllamaBaseUrl();
-    const response = await fetch(`${base_url}/api/tags`);
-    const data = await response.json() as OllamaApiResponse;
+    const baseUrl = getOllamaBaseUrl();
+    const response = await fetch(`${baseUrl}/api/tags`);
+    const data = (await response.json()) as OllamaApiResponse;

     return data.models.map((model: OllamaModel) => ({
       name: model.name,
       label: `${model.name} (${model.details.parameter_size})`,
       provider: 'Ollama',
-      maxTokenAllowed:8000,
+      maxTokenAllowed: 8000,
     }));
   } catch (e) {
     console.error('Error getting Ollama models:', e);
     return [];
   }
 }

 async function getOpenAILikeModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
-    if (!base_url) {
+    const baseUrl = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
+
+    if (!baseUrl) {
       return [];
     }
-    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
-    const response = await fetch(`${base_url}/models`, {
+
+    const apiKey = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
+    const response = await fetch(`${baseUrl}/models`, {
       headers: {
-        Authorization: `Bearer ${api_key}`
-      }
+        Authorization: `Bearer ${apiKey}`,
+      },
     });
-    const res = await response.json() as any;
+    const res = (await response.json()) as any;

     return res.data.map((model: any) => ({
       name: model.id,
       label: model.id,
-      provider: 'OpenAILike'
+      provider: 'OpenAILike',
     }));
   } catch (e) {
     console.error('Error getting OpenAILike models:', e);
     return [];
   }
 }
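The getOllamaBaseUrl change above only collapses the ternary; the behavior is unchanged: when the server runs inside Docker (RUNNING_IN_DOCKER === 'true'), 'localhost' would point at the container itself, so the default URL is rewritten to the host alias. A minimal illustration:

// Behavior preserved by the collapsed ternary; inside a container,
// 'localhost' is the container, so the host-mapped alias is used instead.
const defaultBaseUrl = 'http://localhost:11434';
const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
const baseUrl = isDocker ? defaultBaseUrl.replace('localhost', 'host.docker.internal') : defaultBaseUrl;
// isDocker === true  -> 'http://host.docker.internal:11434'
// isDocker === false -> 'http://localhost:11434'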
@@ -220,51 +367,71 @@ type OpenRouterModelsResponse = {
     pricing: {
       prompt: number;
       completion: number;
-    }
-  }[]
+    };
+  }[];
 };

 async function getOpenRouterModels(): Promise<ModelInfo[]> {
-  const data: OpenRouterModelsResponse = await (await fetch('https://openrouter.ai/api/v1/models', {
-    headers: {
-      'Content-Type': 'application/json'
-    }
-  })).json();
+  const data: OpenRouterModelsResponse = await (
+    await fetch('https://openrouter.ai/api/v1/models', {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+    })
+  ).json();

-  return data.data.sort((a, b) => a.name.localeCompare(b.name)).map(m => ({
-    name: m.id,
-    label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
-      2)} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(
-      m.context_length / 1000)}k`,
-    provider: 'OpenRouter',
-    maxTokenAllowed:8000,
-  }));
+  return data.data
+    .sort((a, b) => a.name.localeCompare(b.name))
+    .map((m) => ({
+      name: m.id,
+      label: `${m.name} - in:$${(m.pricing.prompt * 1_000_000).toFixed(
+        2,
+      )} out:$${(m.pricing.completion * 1_000_000).toFixed(2)} - context ${Math.floor(m.context_length / 1000)}k`,
+      provider: 'OpenRouter',
+      maxTokenAllowed: 8000,
+    }));
 }

 async function getLMStudioModels(): Promise<ModelInfo[]> {
   if (typeof window === 'undefined') {
     return [];
   }

   try {
-    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
-    const response = await fetch(`${base_url}/v1/models`);
-    const data = await response.json() as any;
+    const baseUrl = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
+    const response = await fetch(`${baseUrl}/v1/models`);
+    const data = (await response.json()) as any;

     return data.data.map((model: any) => ({
       name: model.id,
       label: model.id,
-      provider: 'LMStudio'
+      provider: 'LMStudio',
     }));
   } catch (e) {
     console.error('Error getting LMStudio models:', e);
     return [];
   }
 }

 async function initializeModelList(): Promise<ModelInfo[]> {
-  MODEL_LIST = [...(await Promise.all(
-    PROVIDER_LIST
-      .filter((p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels)
-      .map(p => p.getDynamicModels())))
-    .flat(), ...staticModels];
+  MODEL_LIST = [
+    ...(
+      await Promise.all(
+        PROVIDER_LIST.filter(
+          (p): p is ProviderInfo & { getDynamicModels: () => Promise<ModelInfo[]> } => !!p.getDynamicModels,
+        ).map((p) => p.getDynamicModels()),
+      )
+    ).flat(),
+    ...staticModels,
+  ];
   return MODEL_LIST;
 }

-export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, getOpenRouterModels, PROVIDER_LIST };
+export {
+  getOllamaModels,
+  getOpenAILikeModels,
+  getLMStudioModels,
+  initializeModelList,
+  getOpenRouterModels,
+  PROVIDER_LIST,
+};
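The reshaped initializeModelList still resolves every provider's getDynamicModels() in parallel and places the results ahead of the static models, reassigning the exported MODEL_LIST. A usage sketch (the call site and import path are assumptions, not part of this diff):

import { initializeModelList, MODEL_LIST } from '~/utils/constants'; // path assumed

const models = await initializeModelList();
// models === MODEL_LIST: dynamic models first, then the static ones.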
@@ -11,7 +11,7 @@ interface Logger {
   setLevel: (level: DebugLevel) => void;
 }

-let currentLevel: DebugLevel = import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV ? 'debug' : 'info';
+let currentLevel: DebugLevel = (import.meta.env.VITE_LOG_LEVEL ?? import.meta.env.DEV) ? 'debug' : 'info';

 const isWorker = 'HTMLRewriter' in globalThis;
 const supportsColor = !isWorker;
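The logger change is behavior-preserving: '??' binds tighter than the conditional operator, so the old line already parsed as the parenthesized form; the new parentheses only make that precedence explicit. For contrast (the second reading is a possible intent, not what the code does):

declare const VITE_LOG_LEVEL: string | undefined;
declare const DEV: boolean;

// What both the old and new lines compute: '??' wins over '?:'.
const actual = (VITE_LOG_LEVEL ?? DEV) ? 'debug' : 'info';

// If the intent were "use VITE_LOG_LEVEL when set, else decide by DEV",
// the parentheses would have to wrap the ternary instead:
const alternative = VITE_LOG_LEVEL ?? (DEV ? 'debug' : 'info');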
@@ -52,67 +52,77 @@ export async function newShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
   return process;
 }

+export type ExecutionResult = { output: string; exitCode: number } | undefined;
+
 export class BoltShell {
-  #initialized: (() => void) | undefined
-  #readyPromise: Promise<void>
-  #webcontainer: WebContainer | undefined
-  #terminal: ITerminal | undefined
-  #process: WebContainerProcess | undefined
-  executionState = atom<{ sessionId: string, active: boolean, executionPrms?: Promise<any> } | undefined>()
-  #outputStream: ReadableStreamDefaultReader<string> | undefined
-  #shellInputStream: WritableStreamDefaultWriter<string> | undefined
+  #initialized: (() => void) | undefined;
+  #readyPromise: Promise<void>;
+  #webcontainer: WebContainer | undefined;
+  #terminal: ITerminal | undefined;
+  #process: WebContainerProcess | undefined;
+  executionState = atom<{ sessionId: string; active: boolean; executionPrms?: Promise<any> } | undefined>();
+  #outputStream: ReadableStreamDefaultReader<string> | undefined;
+  #shellInputStream: WritableStreamDefaultWriter<string> | undefined;

   constructor() {
     this.#readyPromise = new Promise((resolve) => {
-      this.#initialized = resolve
-    })
+      this.#initialized = resolve;
+    });
   }

   ready() {
     return this.#readyPromise;
   }

-  async init(webcontainer: WebContainer, terminal: ITerminal) {
-    this.#webcontainer = webcontainer
-    this.#terminal = terminal
-    let callback = (data: string) => {
-      console.log(data)
-    }
-    let { process, output } = await this.newBoltShellProcess(webcontainer, terminal)
-    this.#process = process
-    this.#outputStream = output.getReader()
-    await this.waitTillOscCode('interactive')
-    this.#initialized?.()
-  }
-  get terminal() {
-    return this.#terminal
-  }
-  get process() {
-    return this.#process
-  }
-  async executeCommand(sessionId: string, command: string) {
-    if (!this.process || !this.terminal) {
-      return
-    }
-    let state = this.executionState.get()
-
-    //interrupt the current execution
-    // this.#shellInputStream?.write('\x03');
-    this.terminal.input('\x03');
-    if (state && state.executionPrms) {
-      await state.executionPrms
+  async init(webcontainer: WebContainer, terminal: ITerminal) {
+    this.#webcontainer = webcontainer;
+    this.#terminal = terminal;
+
+    const { process, output } = await this.newBoltShellProcess(webcontainer, terminal);
+    this.#process = process;
+    this.#outputStream = output.getReader();
+    await this.waitTillOscCode('interactive');
+    this.#initialized?.();
+  }
+
+  get terminal() {
+    return this.#terminal;
+  }
+
+  get process() {
+    return this.#process;
+  }
+
+  async executeCommand(sessionId: string, command: string): Promise<ExecutionResult> {
+    if (!this.process || !this.terminal) {
+      return undefined;
+    }
+
+    const state = this.executionState.get();
+
+    /*
+     * interrupt the current execution
+     * this.#shellInputStream?.write('\x03');
+     */
+    this.terminal.input('\x03');
+
+    if (state && state.executionPrms) {
+      await state.executionPrms;
     }

     //start a new execution
     this.terminal.input(command.trim() + '\n');

     //wait for the execution to finish
-    let executionPrms = this.getCurrentExecutionResult()
-    this.executionState.set({ sessionId, active: true, executionPrms })
+    const executionPromise = this.getCurrentExecutionResult();
+    this.executionState.set({ sessionId, active: true, executionPrms: executionPromise });

-    let resp = await executionPrms
-    this.executionState.set({ sessionId, active: false })
-    return resp
+    const resp = await executionPromise;
+    this.executionState.set({ sessionId, active: false });
+
+    return resp;
   }

   async newBoltShellProcess(webcontainer: WebContainer, terminal: ITerminal) {
     const args: string[] = [];
@@ -126,6 +136,7 @@ export class BoltShell {

     const input = process.input.getWriter();
     this.#shellInputStream = input;
+
     const [internalOutput, terminalOutput] = process.output.tee();

     const jshReady = withResolvers<void>();
@@ -162,34 +173,48 @@ export class BoltShell {

     return { process, output: internalOutput };
   }
-  async getCurrentExecutionResult() {
-    let { output, exitCode } = await this.waitTillOscCode('exit')
+
+  async getCurrentExecutionResult(): Promise<ExecutionResult> {
+    const { output, exitCode } = await this.waitTillOscCode('exit');
     return { output, exitCode };
   }

   async waitTillOscCode(waitCode: string) {
     let fullOutput = '';
     let exitCode: number = 0;
-    if (!this.#outputStream) return { output: fullOutput, exitCode };
-    let tappedStream = this.#outputStream
+
+    if (!this.#outputStream) {
+      return { output: fullOutput, exitCode };
+    }
+
+    const tappedStream = this.#outputStream;

     while (true) {
       const { value, done } = await tappedStream.read();
-      if (done) break;
+
+      if (done) {
+        break;
+      }
+
       const text = value || '';
       fullOutput += text;

       // Check if command completion signal with exit code
-      const [, osc, , pid, code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];
+      const [, osc, , , code] = text.match(/\x1b\]654;([^\x07=]+)=?((-?\d+):(\d+))?\x07/) || [];

       if (osc === 'exit') {
         exitCode = parseInt(code, 10);
       }

       if (osc === waitCode) {
         break;
       }
     }

     return { output: fullOutput, exitCode };
   }
 }

 export function newBoltShellProcess() {
   return new BoltShell();
 }
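For orientation, a small usage sketch of the retyped API (it assumes init() has already wired up a WebContainer and terminal elsewhere; the session id is illustrative):

const shell = newBoltShellProcess();

// Elsewhere: await shell.init(webcontainer, terminal);
await shell.ready();

const result = await shell.executeCommand('session-1', 'ls -la');

if (result !== undefined) {
  // ExecutionResult carries the captured output and the OSC-reported exit code.
  console.log(result.exitCode, result.output);
}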
@@ -1,4 +1,3 @@
-
 interface OllamaModelDetails {
   parent_model: string;
   format: string;
@@ -29,10 +28,10 @@ export interface ModelInfo {
 }

 export interface ProviderInfo {
-  staticModels: ModelInfo[],
-  name: string,
-  getDynamicModels?: () => Promise<ModelInfo[]>,
-  getApiKeyLink?: string,
-  labelForGetApiKey?: string,
-  icon?:string,
-};
+  staticModels: ModelInfo[];
+  name: string;
+  getDynamicModels?: () => Promise<ModelInfo[]>;
+  getApiKeyLink?: string;
+  labelForGetApiKey?: string;
+  icon?: string;
+}
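With the comma-to-semicolon cleanup, ProviderInfo entries read as ordinary interface members. For example, the Ollama entry from the constants hunks above satisfies it directly:

// Mirrors the Ollama provider shown earlier in this commit.
const ollama: ProviderInfo = {
  name: 'Ollama',
  staticModels: [],
  getDynamicModels: getOllamaModels,
  getApiKeyLink: 'https://ollama.com/download',
  labelForGetApiKey: 'Download Ollama',
  icon: 'i-ph:cloud-arrow-down',
};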