🤖 [feat] Implement Gemini model integration

- Import Google Generative AI SDK
- Add getGeminiModel function to create Gemini model instances
- Update getModel function to support Gemini provider
Author: Maki
Date:   2024-10-20 20:06:46 +09:00
Parent: 7e0287f7f6
Commit: e5d16dffd8


@@ -6,6 +6,7 @@ import { createOpenAI } from '@ai-sdk/openai';
 import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
 import { ollama } from 'ollama-ai-provider';
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createGoogleGenerativeAI } from '@ai-sdk/google';
 
 export function getAnthropicModel(apiKey: string, model: string) {
   const anthropic = createAnthropic({
@@ -54,6 +55,14 @@ export function getBedrockModel(modelId: string, credentials: any) {
   return bedrock(modelId);
 }
 
+export function getGeminiModel(apiKey: string, model: string = 'gemini-1.5-pro-latest') {
+  const google = createGoogleGenerativeAI({
+    apiKey,
+  });
+
+  return google(model);
+}
+
 export function getModel(provider: string, model: string, env: Env) {
   if (provider === 'Bedrock') {
     const credentials = getAWSCredentials(env);
@@ -71,6 +80,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getGroqModel(apiKey, model);
     case 'OpenRouter':
       return getOpenRouterModel(apiKey, model);
+    case 'Gemini':
+      return getGeminiModel(apiKey, model);
     default:
       return getOllamaModel(model);
   }
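
For context, a minimal usage sketch (not part of the commit) of how the new Gemini branch might be exercised through getModel together with generateText from the Vercel AI SDK; the './model' import path, the env wiring, and the prompt are assumptions for illustration only:

import { generateText } from 'ai';
import { getModel } from './model'; // hypothetical path to the file changed above

declare const env: Env; // assumed to expose the Google API key that getModel resolves internally

// Selecting the 'Gemini' provider routes through the new getGeminiModel branch.
const model = getModel('Gemini', 'gemini-1.5-pro-latest', env);

const { text } = await generateText({
  model,
  prompt: 'Say hello from the Gemini provider.',
});

console.log(text);

Because getGeminiModel returns a standard AI SDK language model instance, the rest of the call sites that already consume getModel should work unchanged.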