mirror of https://github.com/coleam00/bolt.new-any-llm, synced 2024-12-27 22:33:03 +00:00

add mistral models

This commit is contained in:
parent dd4dd2c21a
commit 5e8a0b077a
@@ -29,5 +29,10 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# Get your Mistral API Key by following these instructions -
+# https://console.mistral.ai/api-keys/
+# You only need this environment variable set if you want to use Mistral models
+MISTRAL_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
@@ -19,6 +19,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+    case 'Mistral':
+      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
     default:
       return "";
   }
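For context, a minimal sketch of the lookup the new case performs, assuming (as the hunk suggests) that `env` is the local process environment and `cloudflareEnv` is the Cloudflare binding; the helper name below is hypothetical:

interface Env {
  MISTRAL_API_KEY?: string;
}

// Hypothetical standalone helper mirroring the fallback order in the hunk above:
// prefer a locally set MISTRAL_API_KEY, then the Cloudflare-bound one, else an empty string.
function getMistralAPIKey(env: Record<string, string | undefined>, cloudflareEnv: Env): string {
  return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY || '';
}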
@@ -6,6 +6,8 @@ import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { mistral } from '@ai-sdk/mistral';
+import { createMistral } from '@ai-sdk/mistral';
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -23,6 +25,14 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
+export function getMistralModel(apiKey: string, model: string) {
+  const mistral = createMistral({
+    apiKey
+  });
+
+  return mistral(model);
+}
+
 export function getGoogleModel(apiKey: string, model: string) {
   const google = createGoogleGenerativeAI(
     apiKey,
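A minimal usage sketch of the same pattern as the new getMistralModel helper, assuming the surrounding app already depends on the `ai` package (the prompt and the process.env lookup are illustrative; 'open-mistral-7b' matches the model entry added further below):

import { createMistral } from '@ai-sdk/mistral';
import { generateText } from 'ai';

async function main() {
  // Same construction as getMistralModel: bind the API key, then select a model id.
  const mistral = createMistral({ apiKey: process.env.MISTRAL_API_KEY });

  const { text } = await generateText({
    model: mistral('open-mistral-7b'),
    prompt: 'Say hello in one short sentence.',
  });

  console.log(text);
}

main().catch(console.error);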
@@ -67,6 +77,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'Mistral':
+      return getMistralModel(apiKey, model);
     default:
       return getOllamaModel(model);
   }
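A brief sketch of how the dispatch behaves after this change; the call sites below are guesses at usage, and only the 'Mistral' branch itself is confirmed by the hunk:

// 'Mistral' now routes to the dedicated helper; unrecognized providers still fall back to Ollama.
const mistralModel = getModel('Mistral', 'open-mistral-7b', env);
const fallbackModel = getModel('SomeOtherProvider', 'llama3', env); // -> getOllamaModel('llama3')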
@@ -30,6 +30,8 @@ const staticModels: ModelInfo[] = [
   { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
   { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
   { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
+  { name: 'open-mistral-7b', label: 'Mistral', provider: 'Mistral' },
+
 ];
 
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
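The new entry follows the ModelInfo shape used by the rest of the list; a hypothetical sketch of how further Mistral variants could be registered later (the extra model ids below are illustrative and not part of this commit):

interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}

// Illustrative additions only; this commit registers just 'open-mistral-7b'.
const moreMistralModels: ModelInfo[] = [
  { name: 'mistral-large-latest', label: 'Mistral Large', provider: 'Mistral' },
  { name: 'open-mixtral-8x7b', label: 'Mixtral 8x7B', provider: 'Mistral' },
];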
@@ -26,6 +26,7 @@
     "@ai-sdk/anthropic": "^0.0.39",
     "@ai-sdk/google": "^0.0.52",
     "@ai-sdk/openai": "^0.0.66",
+    "@ai-sdk/mistral": "^0.0.43",
     "@codemirror/autocomplete": "^6.17.0",
     "@codemirror/commands": "^6.6.0",
     "@codemirror/lang-cpp": "^6.0.2",