diff --git a/.env.example b/.env.example
index 968e9348..15978eeb 100644
--- a/.env.example
+++ b/.env.example
@@ -29,5 +29,10 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# Get your Mistral API Key by following these instructions -
+# https://console.mistral.ai/api-keys/
+# You only need this environment variable set if you want to use Mistral models
+MISTRAL_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index b1a47f42..17eeeef2 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -19,6 +19,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+    case 'Mistral':
+      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
     default:
       return "";
   }
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 68983756..e07fdc44 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -6,6 +6,8 @@ import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { mistral } from '@ai-sdk/mistral';
+import { createMistral } from '@ai-sdk/mistral';
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -23,6 +25,14 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
+export function getMistralModel(apiKey: string, model: string) {
+  const mistral = createMistral({
+    apiKey
+  });
+
+  return mistral(model);
+}
+
 export function getGoogleModel(apiKey: string, model: string) {
   const google = createGoogleGenerativeAI(
     apiKey,
@@ -67,6 +77,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'Mistral':
+      return getMistralModel(apiKey, model);
     default:
       return getOllamaModel(model);
   }
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index 6db860c5..50956636 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -30,6 +30,8 @@ const staticModels: ModelInfo[] = [
   { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
   { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
   { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
+  { name: 'open-mistral-7b', label: 'Mistral', provider: 'Mistral' },
+
 ];
 
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
diff --git a/package.json b/package.json
index 737ca05c..a24fb033 100644
--- a/package.json
+++ b/package.json
@@ -26,6 +26,7 @@
     "@ai-sdk/anthropic": "^0.0.39",
     "@ai-sdk/google": "^0.0.52",
     "@ai-sdk/openai": "^0.0.66",
+    "@ai-sdk/mistral": "^0.0.43",
     "@codemirror/autocomplete": "^6.17.0",
     "@codemirror/commands": "^6.6.0",
     "@codemirror/lang-cpp": "^6.0.2",
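
For reviewers, the sketch below shows how the pieces above are intended to fit together: the new `'Mistral'` case in `getAPIKey` resolves `MISTRAL_API_KEY`, `getModel` builds the model via `createMistral`, and the result can be handed to the app's existing streaming call. The helper name, the `~/` import paths, and the direct `streamText` call are illustrative assumptions, not part of this change.

```ts
// Illustrative only — not part of the diff. A minimal sketch of how the new
// Mistral provider path would be exercised, assuming the app's '~/' import
// alias and the `streamText` helper from the 'ai' package.
import { streamText } from 'ai';

import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getModel } from '~/lib/.server/llm/model';

// Hypothetical helper, named here only for demonstration.
export async function demoMistralCompletion(env: Env) {
  // The new 'Mistral' case resolves MISTRAL_API_KEY from the local env or the
  // Cloudflare binding; getModel performs the same lookup internally.
  if (!getAPIKey(env, 'Mistral')) {
    throw new Error('MISTRAL_API_KEY is not set');
  }

  // 'open-mistral-7b' matches the entry added to staticModels in constants.ts.
  const model = getModel('Mistral', 'open-mistral-7b', env);

  return streamText({
    model,
    messages: [{ role: 'user', content: 'Say hello from Mistral.' }],
  });
}
```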