Added support for both OpenAI and Anthropic providers

This commit is contained in:
Rushabh Agarwal 2025-04-30 15:07:21 +05:30
parent cbfef91646
commit 56157c15ba
4 changed files with 32 additions and 13 deletions

View File

@ -1,5 +1,14 @@
import { env } from 'node:process';
export function getAPIKey(cloudflareEnv: Env) {
return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY; // Update to use OpenAI API key
import type { Provider } from './constants';
/**
 * Resolve the API key for the requested LLM provider.
 *
 * Prefers the key from the local process environment and falls back to the
 * Cloudflare environment binding when the local one is unset/empty.
 * Throws for any provider this application does not recognize.
 */
export function getAPIKey(cloudflareEnv: Env, provider: Provider) {
  switch (provider) {
    case 'openai':
      return env.OPENAI_API_KEY || cloudflareEnv.OPENAI_API_KEY;
    case 'anthropic':
      return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
    default:
      throw new Error(`Unknown provider: ${provider}`);
  }
}

View File

@ -3,3 +3,5 @@ export const MAX_TOKENS = 8192;
// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;
// Union of LLM backends this app supports; consumed by getAPIKey/getModel.
export type Provider = 'openai' | 'anthropic';

View File

@ -1,10 +1,13 @@
// Import the OpenAI SDK
import { createOpenAI } from '@ai-sdk/openai'; // Replace with the actual OpenAI SDK import
import { createOpenAI } from '@ai-sdk/openai';
import { createAnthropic } from '@ai-sdk/anthropic';
export function getOpenAIModel(apiKey: string) {
const openai = createOpenAI({
apiKey,
});
return openai('gpt-4o'); // Adjust the model identifier as needed
/**
 * Build a language-model handle for the requested provider.
 *
 * @param provider - which backend to use ('openai' or 'anthropic')
 * @param apiKey - credential forwarded to the provider SDK
 * @returns a model instance consumable by the `ai` streaming helpers
 * @throws Error when the provider string is not recognized at runtime
 */
export function getModel(provider: 'openai' | 'anthropic', apiKey: string) {
  switch (provider) {
    case 'openai':
      return createOpenAI({ apiKey })('gpt-4o');
    case 'anthropic':
      return createAnthropic({ apiKey })('claude-3-5-sonnet-20240620');
    default:
      // Unreachable under the declared union, but guards untyped callers.
      throw new Error('Unsupported provider');
  }
}

View File

@ -1,6 +1,6 @@
import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getOpenAIModel } from '~/lib/.server/llm/model';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { getSystemPrompt } from './prompts';
@ -21,9 +21,14 @@ export type Messages = Message[];
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
/**
 * Stream a chat completion from the configured LLM provider.
 *
 * @param messages - conversation history forwarded to the model
 * @param env - Cloudflare environment carrying provider API keys
 * @param options - passthrough streaming options (everything except `model`)
 * @param provider - backend to use; defaults to 'openai' for backward compatibility
 */
export function streamText(
  messages: Messages,
  env: Env,
  options?: StreamingOptions,
  provider: 'openai' | 'anthropic' = 'openai',
) {
  return _streamText({
    // Fixed: was `getModel(provider=provider, …)` — in TS that is a no-op
    // self-assignment expression, not a named argument; pass positionally.
    model: getModel(provider, getAPIKey(env, provider)),
    system: getSystemPrompt(),
    maxTokens: MAX_TOKENS,
    headers: {