From 56157c15ba206ec10d2ea79d5493a82e17bfa8d0 Mon Sep 17 00:00:00 2001 From: Rushabh Agarwal Date: Wed, 30 Apr 2025 15:07:21 +0530 Subject: [PATCH] added support for both openai and anthropic --- app/lib/.server/llm/api-key.ts | 13 +++++++++++-- app/lib/.server/llm/constants.ts | 2 ++ app/lib/.server/llm/model.ts | 19 +++++++++++-------- app/lib/.server/llm/stream-text.ts | 11 ++++++++--- 4 files changed, 32 insertions(+), 13 deletions(-) diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts index a34a762..6dd5181 100644 --- a/app/lib/.server/llm/api-key.ts +++ b/app/lib/.server/llm/api-key.ts @@ -1,5 +1,14 @@ import { env } from 'node:process'; -export function getAPIKey(cloudflareEnv: Env) { - return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY; // Update to use OpenAI API key +import type { Provider } from './constants'; + + +export function getAPIKey(cloudflareEnv: Env, provider: Provider) { + if (provider === 'openai') { + return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY; + } else if (provider === 'anthropic') { + return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY; + } else { + throw new Error(`Unknown provider: ${provider}`); + } } diff --git a/app/lib/.server/llm/constants.ts b/app/lib/.server/llm/constants.ts index b24acdf..38a17e3 100644 --- a/app/lib/.server/llm/constants.ts +++ b/app/lib/.server/llm/constants.ts @@ -3,3 +3,5 @@ export const MAX_TOKENS = 8192; // limits the number of model responses that can be returned in a single request export const MAX_RESPONSE_SEGMENTS = 2; + +export type Provider = 'openai' | 'anthropic'; \ No newline at end of file diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts index eaa32f5..dfb7cba 100644 --- a/app/lib/.server/llm/model.ts +++ b/app/lib/.server/llm/model.ts @@ -1,10 +1,13 @@ -// Import the OpenAI SDK -import { createOpenAI } from '@ai-sdk/openai'; // Replace with the actual OpenAI SDK import +import { createOpenAI } from 
'@ai-sdk/openai';
+import { createAnthropic } from '@ai-sdk/anthropic';
 
-export function getOpenAIModel(apiKey: string) {
-  const openai = createOpenAI({
-    apiKey,
-  });
-
-  return openai('gpt-4o'); // Adjust the model identifier as needed
+export function getModel(provider: 'openai' | 'anthropic', apiKey: string) {
+  if (provider === 'openai') {
+    const openai = createOpenAI({ apiKey });
+    return openai('gpt-4o'); // e.g., 'gpt-4o'
+  } else if (provider === 'anthropic') {
+    const anthropic = createAnthropic({ apiKey });
+    return anthropic('claude-3-5-sonnet-20240620'); // e.g., 'claude-3-opus-20240229'
+  }
+  throw new Error('Unsupported provider');
 }
\ No newline at end of file
diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts
index 48d1022..81e5b2c 100644
--- a/app/lib/.server/llm/stream-text.ts
+++ b/app/lib/.server/llm/stream-text.ts
@@ -1,6 +1,6 @@
 import { streamText as _streamText, convertToCoreMessages } from 'ai';
 import { getAPIKey } from '~/lib/.server/llm/api-key';
-import { getOpenAIModel } from '~/lib/.server/llm/model';
+import { getModel } from '~/lib/.server/llm/model';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
 
@@ -21,9 +21,14 @@ export type Messages = Message[];
 
 export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
 
-export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
+export function streamText(
+  messages: Messages,
+  env: Env,
+  options?: StreamingOptions,
+  provider: 'openai' | 'anthropic' = 'openai',
+) {
   return _streamText({
-    model: getOpenAIModel(getAPIKey(env)),
+    model: getModel(provider, getAPIKey(env, provider)),
     system: getSystemPrompt(),
     maxTokens: MAX_TOKENS,
     headers: {