diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 9b99e85..b00466d 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -1,4 +1,6 @@
 import { createAnthropic } from '@ai-sdk/anthropic';
+import { createOpenAI } from '@ai-sdk/openai';
+
 export function getAnthropicModel(apiKey: string, baseURL: string) {
   console.log('baseURL', baseURL);
   const anthropic = createAnthropic({
@@ -8,3 +10,14 @@ export function getAnthropicModel(apiKey: string, baseURL: string) {

   return anthropic('claude-3-5-sonnet-20240620');
 }
+
+export function getOpenAIModel(apiKey: string, baseURL: string) {
+  console.log('OpenAI baseURL', baseURL);
+  const openai = createOpenAI({
+    apiKey,
+    baseURL,
+  });
+
+  // return openai('gpt-4-turbo-preview');
+  return openai('claude-3-5-sonnet-20240620');
+}
diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts
index 9b7776a..575f84c 100644
--- a/app/lib/.server/llm/stream-text.ts
+++ b/app/lib/.server/llm/stream-text.ts
@@ -1,6 +1,6 @@
 import { streamText as _streamText, convertToCoreMessages } from 'ai';
 import { getAPIKey } from '~/lib/.server/llm/api-key';
-import { getAnthropicModel } from '~/lib/.server/llm/model';
+import { getAnthropicModel, getOpenAIModel } from '~/lib/.server/llm/model';
 import { getBaseURL } from '~/lib/.server/llm/base-url';
 import { MAX_TOKENS } from './constants';
 import { getSystemPrompt } from './prompts';
@@ -24,7 +24,8 @@ export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;

 export function streamText(messages: Messages, env: Env, options?: StreamingOptions) {
   return _streamText({
-    model: getAnthropicModel(getAPIKey(env), getBaseURL(env)),
+    // model: getAnthropicModel(getAPIKey(env), getBaseURL(env)),
+    model: getOpenAIModel(getAPIKey(env), getBaseURL(env)),
     system: getSystemPrompt(),
     maxTokens: MAX_TOKENS,
     headers: {
diff --git a/package.json b/package.json
index 6ece036..5bf188c 100644
--- a/package.json
+++ b/package.json
@@ -24,6 +24,7 @@
   },
   "dependencies": {
     "@ai-sdk/anthropic": "^0.0.39",
+    "@ai-sdk/openai": "^0.0.66",
     "@codemirror/autocomplete": "^6.17.0",
     "@codemirror/commands": "^6.6.0",
     "@codemirror/lang-cpp": "^6.0.2",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 22b1b93..7d12b1a 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -11,6 +11,9 @@ dependencies:
   '@ai-sdk/anthropic':
     specifier: ^0.0.39
     version: 0.0.39(zod@3.23.8)
+  '@ai-sdk/openai':
+    specifier: ^0.0.66
+    version: 0.0.66(zod@3.23.8)
   '@codemirror/autocomplete':
     specifier: ^6.17.0
     version: 6.18.1(@codemirror/language@6.10.3)(@codemirror/state@6.4.1)(@codemirror/view@6.34.1)(@lezer/common@1.2.2)
@@ -243,6 +246,17 @@ packages:
       zod: 3.23.8
     dev: false

+  /@ai-sdk/openai@0.0.66(zod@3.23.8):
+    resolution: {integrity: sha512-V4XeDnlNl5/AY3GB3ozJUjqnBLU5pK3DacKTbCNH3zH8/MggJoH6B8wRGdLUPVFMcsMz60mtvh4DC9JsIVFrKw==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+    dependencies:
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      zod: 3.23.8
+    dev: false
+
  /@ai-sdk/provider-utils@1.0.20(zod@3.23.8):
    resolution: {integrity: sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==}
    engines: {node: '>=18'}
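
Note: this diff switches providers by commenting out the Anthropic call in stream-text.ts, and getOpenAIModel still passes the Anthropic model id 'claude-3-5-sonnet-20240620', which only works if baseURL points at an OpenAI-compatible proxy that routes to that model. A minimal sketch of how the provider choice could instead be driven by configuration is below; LLM_PROVIDER and the getModel helper are illustrative assumptions, not part of this diff.

// Sketch only: a single selection point instead of commented-out calls.
// `LLM_PROVIDER` is an assumed environment variable and `getModel` an
// assumed helper name; neither exists in the diff above.
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getBaseURL } from '~/lib/.server/llm/base-url';
import { getAnthropicModel, getOpenAIModel } from '~/lib/.server/llm/model';

export function getModel(env: Env) {
  const apiKey = getAPIKey(env);
  const baseURL = getBaseURL(env);

  // Default to Anthropic; opt into the OpenAI provider via the assumed flag.
  return (env as { LLM_PROVIDER?: string }).LLM_PROVIDER === 'openai'
    ? getOpenAIModel(apiKey, baseURL)
    : getAnthropicModel(apiKey, baseURL);
}

With a helper like this, stream-text.ts would call model: getModel(env) and neither file would need to be edited to change providers.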