From e5d16dffd885ed9ba413c3423dff1cf4dbb53636 Mon Sep 17 00:00:00 2001
From: Maki
Date: Sun, 20 Oct 2024 20:06:46 +0900
Subject: [PATCH] =?UTF-8?q?=F0=9F=A4=96=20[feat]=20Implement=20Gemini=20mo?=
 =?UTF-8?q?del=20integration?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Import Google Generative AI SDK
- Add getGeminiModel function to create Gemini model instances
- Update getModel function to support Gemini provider
---
 app/lib/.server/llm/model.ts | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 7479592..fd91cc4 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -6,6 +6,7 @@ import { createOpenAI } from '@ai-sdk/openai';
 import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createGoogleGenerativeAI } from '@ai-sdk/google';
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -54,6 +55,14 @@ export function getBedrockModel(modelId: string, credentials: any) {
   return bedrock(modelId);
 }
 
+export function getGeminiModel(apiKey: string, model: string = 'gemini-1.5-pro-latest') {
+  const google = createGoogleGenerativeAI({
+    apiKey,
+  });
+
+  return google(model);
+}
+
 export function getModel(provider: string, model: string, env: Env) {
   if (provider === 'Bedrock') {
     const credentials = getAWSCredentials(env);
@@ -71,6 +80,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getGroqModel(apiKey, model);
     case 'OpenRouter':
       return getOpenRouterModel(apiKey, model);
+    case 'Gemini':
+      return getGeminiModel(apiKey, model);
     default:
       return getOllamaModel(model);
   }
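
Usage sketch (not part of the patch): a minimal TypeScript example of how the new 'Gemini' branch could be exercised through getModel, assuming the models from this module are consumed via the Vercel AI SDK's generateText helper, as the other @ai-sdk providers here suggest. The import path and the demo function name are illustrative only. Note also that the new case reuses the same apiKey variable as the other hosted providers, so whatever resolves that key upstream of the switch needs to supply a Google Generative AI key when the provider is 'Gemini'.

// demo-gemini.ts -- illustrative sketch only, not part of the patch.
import { generateText } from 'ai';

// Import path assumes the project's usual alias for app/; adjust as needed.
import { getModel } from '~/lib/.server/llm/model';

// Env is the same bindings type getModel already takes in the patched file.
export async function demoGemini(env: Env): Promise<string> {
  // 'Gemini' routes through the case added above; if getGeminiModel were
  // called without a model id it would default to 'gemini-1.5-pro-latest'.
  const model = getModel('Gemini', 'gemini-1.5-pro-latest', env);

  // generateText accepts the language model returned by the @ai-sdk/google
  // provider, i.e. the value produced by createGoogleGenerativeAI(...)(model).
  const { text } = await generateText({
    model,
    prompt: 'Reply with a one-line greeting.',
  });

  return text;
}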