mirror of
https://github.com/stackblitz/bolt.new
synced 2025-03-12 14:58:30 +00:00
Refactor/standardize model providers; add a "Get API Key" link for providers that offer one, to help first-time users
This commit is contained in:
parent
4b492b9d97
commit
a2cca14174
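At a glance, this refactor replaces per-provider special cases with a single ProviderInfo record per provider; the two link-related fields are what power the new "Get API Key" affordance for first-time users. A condensed view of the type introduced further down in this diff (field comments added here for orientation only):

export type ProviderInfo = {
  name: string;
  staticModels: ModelInfo[];                     // models the provider always offers
  getDynamicModels?: () => Promise<ModelInfo[]>; // e.g. Ollama/LMStudio, discovered at runtime
  getApiKeyLink?: string;                        // where a new user can create a key
  labelForGetApiKey?: string;                    // overrides the default "Get API Key" label
};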
@@ -1,8 +1,9 @@
 import React, { useState } from 'react';
 import { IconButton } from '~/components/ui/IconButton';
+import type { ProviderInfo } from '~/utils/constants';
 
 interface APIKeyManagerProps {
-  provider: string;
+  provider: ProviderInfo;
   apiKey: string;
   setApiKey: (key: string) => void;
 }
@@ -18,7 +19,7 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey,
 
   return (
     <div className="flex items-center gap-2 mt-2 mb-2">
-      <span className="text-sm text-bolt-elements-textSecondary">{provider} API Key:</span>
+      <span className="text-sm text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
       {isEditing ? (
         <>
           <input
@@ -42,6 +43,7 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey,
           <IconButton onClick={() => setIsEditing(true)} title="Edit API Key">
            <div className="i-ph:pencil-simple" />
           </IconButton>
+          {!!provider?.getApiKeyLink ? <a href={provider?.getApiKeyLink}>{provider?.labelForGetApiKey || "Get API Key"}</a> : "" }
         </>
       )}
     </div>

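For illustration, a hedged usage sketch (not part of this commit) of the updated component: the parent now passes a ProviderInfo object instead of a provider-name string. The import paths and the local key state here are assumptions.

import { useState } from 'react';
import { APIKeyManager } from '~/components/chat/APIKeyManager'; // assumed path
import { PROVIDER_LIST } from '~/utils/constants';

function ExampleSettings() {
  // Assumed local state; the real chat UI manages keys elsewhere.
  const [apiKey, setApiKey] = useState('');
  const anthropic = PROVIDER_LIST.find((p) => p.name === 'Anthropic')!;

  // When provider.getApiKeyLink is set, APIKeyManager now also renders a
  // "Get API Key" link (or provider.labelForGetApiKey) next to the edit button.
  return <APIKeyManager provider={anthropic} apiKey={apiKey} setApiKey={setApiKey} />;
}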
@@ -7,7 +7,7 @@ import { Menu } from '~/components/sidebar/Menu.client';
 import { IconButton } from '~/components/ui/IconButton';
 import { Workbench } from '~/components/workbench/Workbench.client';
 import { classNames } from '~/utils/classNames';
-import { MODEL_LIST, DEFAULT_PROVIDER } from '~/utils/constants';
+import { MODEL_LIST, DEFAULT_PROVIDER, PROVIDER_LIST } from '~/utils/constants';
 import { Messages } from './Messages.client';
 import { SendButton } from './SendButton.client';
 import { useState } from 'react';
@@ -24,7 +24,7 @@ const EXAMPLE_PROMPTS = [
   { text: 'How do I center a div?' },
 ];
 
-const providerList = [...new Set(MODEL_LIST.map((model) => model.provider))]
+const providerList = PROVIDER_LIST;
 
 const ModelSelector = ({ model, setModel, provider, setProvider, modelList, providerList }) => {
   return (
@@ -39,19 +39,10 @@ const ModelSelector = ({ model, setModel, provider, setProvider, modelList, prov
       className="flex-1 p-2 rounded-lg border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus transition-all"
     >
       {providerList.map((provider) => (
-        <option key={provider} value={provider}>
-          {provider}
+        <option key={provider.name} value={provider.name}>
+          {provider.name}
         </option>
       ))}
-      <option key="Ollama" value="Ollama">
-        Ollama
-      </option>
-      <option key="OpenAILike" value="OpenAILike">
-        OpenAILike
-      </option>
-      <option key="LMStudio" value="LMStudio">
-        LMStudio
-      </option>
     </select>
     <select
       value={model}

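Since the dropdown's options are now keyed by provider.name while the rest of the app works with ProviderInfo objects, the selected string value has to be mapped back to its provider entry. A minimal sketch of that lookup (the onChange wiring sketched in the comments is an assumption, not shown in this hunk):

import type { ProviderInfo } from '~/utils/constants';

function findProviderByName(providerList: ProviderInfo[], name: string): ProviderInfo | undefined {
  return providerList.find((p) => p.name === name);
}

// Assumed use inside ModelSelector's onChange handler:
//   const selected = findProviderByName(providerList, event.target.value);
//   if (selected) setProvider(selected);
//   // models for the chosen provider can then be narrowed with:
//   //   modelList.filter((m) => m.provider === selected.name)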
@@ -8,48 +8,119 @@ export const PROVIDER_REGEX = /\[Provider: (.*?)\]\n\n/;
 export const DEFAULT_MODEL = 'claude-3-5-sonnet-latest';
 export const DEFAULT_PROVIDER = 'Anthropic';
 
-const staticModels: ModelInfo[] = [
-  { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
-  { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'x-ai/grok-beta', label: "xAI Grok Beta (OpenRouter)", provider: 'OpenRouter' },
-  { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
-  { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
-  { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
-  { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
-  { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
-  { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
-  { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
-  { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' },
-  { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
-  { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
-  { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
-  { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
-  { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
-  { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' },
-  { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
-  { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
-  { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
-  { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
-  { name: 'grok-beta', label: "xAI Grok Beta", provider: 'xAI' },
-  { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'},
-  { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'},
-  { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
-  { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
-  { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
-  { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
-  { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
-  { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
-  { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
-  { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
-  { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' },
+export type ProviderInfo = {
+  staticModels: ModelInfo[],
+  name: string,
+  getDynamicModels?: () => Promise<ModelInfo[]>,
+  getApiKeyLink?: string,
+  labelForGetApiKey?: string,
+};
+
+const PROVIDER_LIST: ProviderInfo[] = [
+  {
+    name: 'Ollama',
+    staticModels: [],
+    getDynamicModels: getOllamaModels
+  }, {
+    name: 'OpenAILike',
+    staticModels: [],
+    getDynamicModels: getOpenAILikeModels
+  },
+  {
+    name: 'OpenRouter',
+    staticModels: [
+      { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
+      {
+        name: 'anthropic/claude-3.5-sonnet',
+        label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)',
+        provider: 'OpenRouter'
+      },
+      { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'x-ai/grok-beta', label: 'xAI Grok Beta (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
+      { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' }
+    ],
+    getApiKeyLink: 'https://openrouter.ai/settings/keys'
+  }, {
+    name: 'Google',
+    staticModels: [
+      { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
+      { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' }
+    ],
+    getApiKeyLink: 'https://aistudio.google.com/app/apikey'
+  }, {
+    name: 'Groq',
+    staticModels: [
+      { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
+      { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
+      { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
+      { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
+      { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' }
+    ],
+    getApiKeyLink: 'https://console.groq.com/keys'
+  }, {
+    name: 'Anthropic',
+    staticModels: [
+      { name: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet (new)', provider: 'Anthropic' },
+      { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet (old)', provider: 'Anthropic' },
+      { name: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku (new)', provider: 'Anthropic' },
+      { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic' },
+      { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
+      { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' }
+    ],
+    getApiKeyLink: "https://console.anthropic.com/settings/keys",
+  }, {
+    name: 'OpenAI',
+    staticModels: [
+      { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
+      { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
+      { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
+      { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' }
+    ],
+    getApiKeyLink: "https://platform.openai.com/api-keys",
+  }, {
+    name: 'xAI',
+    staticModels: [
+      { name: 'grok-beta', label: 'xAI Grok Beta', provider: 'xAI' }
+    ],
+    getApiKeyLink: 'https://docs.x.ai/docs/quickstart#creating-an-api-key'
+  }, {
+    name: 'Deepseek',
+    staticModels: [
+      { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
+      { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' }
+    ],
+    getApiKeyLink: 'https://platform.deepseek.com/api_keys'
+  }, {
+    name: 'Mistral',
+    staticModels: [
+      { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
+      { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
+      { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
+      { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
+      { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
+      { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
+      { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
+      { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
+      { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' }
+    ],
+    getApiKeyLink: 'https://console.mistral.ai/api-keys/'
+  }, {
+    name: 'LMStudio',
+    staticModels: [],
+    getDynamicModels: getLMStudioModels,
+    getApiKeyLink: 'https://lmstudio.ai/',
+    labelForGetApiKey: 'Get LMStudio'
+  }
 ];
 
+
+const staticModels: ModelInfo[] = PROVIDER_LIST.map(p => p.staticModels).flat();
+
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
 const getOllamaBaseUrl = () => {

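With providers standardized this way, registering another one amounts to appending a ProviderInfo entry. A hypothetical example, not part of this commit; the provider name, model, and URL below are illustrative only:

import type { ProviderInfo } from '~/utils/constants';

// Hypothetical entry showing the shape a new provider would take.
const exampleProvider: ProviderInfo = {
  name: 'ExampleCloud',
  staticModels: [
    { name: 'example-large', label: 'Example Large', provider: 'ExampleCloud' },
  ],
  getApiKeyLink: 'https://example.com/account/keys', // drives the "Get API Key" link for first-time users
};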
@@ -64,7 +135,7 @@ const getOllamaBaseUrl = () => {
   const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
 
   return isDocker
-    ? defaultBaseUrl.replace("localhost", "host.docker.internal")
+    ? defaultBaseUrl.replace('localhost', 'host.docker.internal')
     : defaultBaseUrl;
 };
 
@@ -77,7 +148,7 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
     return data.models.map((model: OllamaModel) => ({
       name: model.name,
       label: `${model.name} (${model.details.parameter_size})`,
-      provider: 'Ollama',
+      provider: 'Ollama'
     }));
   } catch (e) {
     return [];
@@ -86,37 +157,37 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
 
 async function getOpenAILikeModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
+    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';
     if (!base_url) {
       return [];
     }
-    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
+    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
     const response = await fetch(`${base_url}/models`, {
       headers: {
-        Authorization: `Bearer ${api_key}`,
+        Authorization: `Bearer ${api_key}`
       }
     });
     const res = await response.json() as any;
     return res.data.map((model: any) => ({
       name: model.id,
       label: model.id,
-      provider: 'OpenAILike',
+      provider: 'OpenAILike'
     }));
   } catch (e) {
-    return []
+    return [];
   }
 
 }
 
 async function getLMStudioModels(): Promise<ModelInfo[]> {
   try {
-    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || "http://localhost:1234";
+    const base_url = import.meta.env.LMSTUDIO_API_BASE_URL || 'http://localhost:1234';
     const response = await fetch(`${base_url}/v1/models`);
     const data = await response.json() as any;
     return data.data.map((model: any) => ({
       name: model.id,
       label: model.id,
-      provider: 'LMStudio',
+      provider: 'LMStudio'
     }));
   } catch (e) {
     return [];
@@ -125,10 +196,9 @@ async function getLMStudioModels(): Promise<ModelInfo[]> {
 
 
 async function initializeModelList(): Promise<void> {
-  const ollamaModels = await getOllamaModels();
-  const openAiLikeModels = await getOpenAILikeModels();
-  const lmstudioModels = await getLMStudioModels();
-  MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels,...lmstudioModels,];
+  MODEL_LIST = [...(await Promise.all(
+    PROVIDER_LIST.filter(p => !!p.getDynamicModels).map(p => p.getDynamicModels()))).flat(), ...staticModels];
 }
 
 initializeModelList().then();
-export { getOllamaModels,getOpenAILikeModels,getLMStudioModels,initializeModelList };
+export { getOllamaModels, getOpenAILikeModels, getLMStudioModels, initializeModelList, PROVIDER_LIST };
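The refactored initializeModelList now treats dynamic providers generically: every ProviderInfo with a getDynamicModels hook is queried in parallel via Promise.all, and the flattened results are merged ahead of the static models. A usage sketch under stated assumptions (the awaiting consumer below is hypothetical; the module itself only fires initializeModelList().then()):

import { initializeModelList, MODEL_LIST } from '~/utils/constants';

// Assumed consumer: wait for dynamic providers (Ollama, OpenAILike, LMStudio)
// to be merged in, then read the aggregated list for one provider.
async function listModelsForProvider(providerName: string) {
  await initializeModelList(); // re-runs the Promise.all over the getDynamicModels hooks
  return MODEL_LIST.filter((model) => model.provider === providerName);
}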