diff --git a/.env.example b/.env.example
index 968e9348..15978eeb 100644
--- a/.env.example
+++ b/.env.example
@@ -29,5 +29,10 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# Get your Mistral API Key by following these instructions -
+# https://console.mistral.ai/api-keys/
+# You only need this environment variable set if you want to use Mistral models
+MISTRAL_API_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index b1a47f42..17eeeef2 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -19,6 +19,8 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+    case 'Mistral':
+      return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
     default:
       return "";
   }
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 68983756..e07fdc44 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -6,6 +6,7 @@ import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createMistral } from '@ai-sdk/mistral';
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -23,6 +24,14 @@ export function getOpenAIModel(apiKey: string, model: string) {
   return openai(model);
 }
 
+export function getMistralModel(apiKey: string, model: string) {
+  const mistral = createMistral({
+    apiKey
+  });
+
+  return mistral(model);
+}
+
 export function getGoogleModel(apiKey: string, model: string) {
   const google = createGoogleGenerativeAI(
     apiKey,
@@ -67,6 +76,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'Mistral':
+      return getMistralModel(apiKey, model);
     default:
       return getOllamaModel(model);
   }
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index eb7a6eb2..4908735b 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -32,6 +32,15 @@ const staticModels: ModelInfo[] = [
   { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
   { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
   { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
+  { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
+  { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
+  { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
+  { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
+  { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
+  { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
+  { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
+  { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
+  { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' },
 ];
 
 export let MODEL_LIST: ModelInfo[] = [...staticModels];
diff --git a/package.json b/package.json
index 066738c4..edb2b8da 100644
--- a/package.json
+++ b/package.json
@@ -25,6 +25,7 @@
     "@ai-sdk/anthropic": "^0.0.39",
     "@ai-sdk/google": "^0.0.52",
+    "@ai-sdk/mistral": "^0.0.43",
     "@ai-sdk/openai": "^0.0.66",
     "@codemirror/autocomplete": "^6.17.0",
     "@codemirror/commands": "^6.6.0",
     "@codemirror/lang-cpp": "^6.0.2",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 21dd5a1a..f3a84246 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -17,6 +17,9 @@ importers:
       '@ai-sdk/google':
         specifier: ^0.0.52
         version: 0.0.52(zod@3.23.8)
+      '@ai-sdk/mistral':
+        specifier: ^0.0.43
+        version: 0.0.43(zod@3.23.8)
       '@ai-sdk/openai':
         specifier: ^0.0.66
         version: 0.0.66(zod@3.23.8)
@@ -270,6 +273,12 @@ packages:
     peerDependencies:
       zod: ^3.0.0
 
+  '@ai-sdk/mistral@0.0.43':
+    resolution: {integrity: sha512-YcneVvO57bbmseUmnvQaj6OolMj7/q1W/oeiFj1h+CJZsXIOX8P9i2Cmo2B7HMBbt73NIcvtyPze3GjaczZRqw==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@ai-sdk/openai@0.0.66':
     resolution: {integrity: sha512-V4XeDnlNl5/AY3GB3ozJUjqnBLU5pK3DacKTbCNH3zH8/MggJoH6B8wRGdLUPVFMcsMz60mtvh4DC9JsIVFrKw==}
     engines: {node: '>=18'}
@@ -5448,6 +5457,12 @@ snapshots:
       json-schema: 0.4.0
       zod: 3.23.8
 
+  '@ai-sdk/mistral@0.0.43(zod@3.23.8)':
+    dependencies:
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      zod: 3.23.8
+
   '@ai-sdk/openai@0.0.66(zod@3.23.8)':
     dependencies:
       '@ai-sdk/provider': 0.0.24