diff --git a/app/lib/modules/llm/providers/cloudflare.ts b/app/lib/modules/llm/providers/cloudflare.ts
new file mode 100644
index 00000000..b2b0a907
--- /dev/null
+++ b/app/lib/modules/llm/providers/cloudflare.ts
@@ -0,0 +1,64 @@
+import { BaseProvider } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import type { LanguageModelV1 } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';
+
+/**
+ * Provider for Cloudflare Workers AI models, accessed through Cloudflare's
+ * OpenAI-compatible REST endpoint. Requires both CLOUDFLARE_API_TOKEN and
+ * CLOUDFLARE_ACCOUNT_ID to be configured (env or per-provider settings).
+ */
+export default class CloudflareProvider extends BaseProvider {
+  name = 'Cloudflare';
+  getApiKeyLink = 'https://dash.cloudflare.com/profile/api-tokens';
+
+  config = {
+    apiTokenKey: 'CLOUDFLARE_API_TOKEN',
+    accountIdKey: 'CLOUDFLARE_ACCOUNT_ID',
+  };
+
+  staticModels: ModelInfo[] = [
+    { name: '@cf/meta/llama-2-7b-chat-int8', label: 'Llama-2-7b-chat-int8', provider: 'Cloudflare', maxTokenAllowed: 4096 },
+    { name: '@cf/meta/llama-2-7b-chat-fp16', label: 'Llama-2-7b-chat-fp16', provider: 'Cloudflare', maxTokenAllowed: 4096 },
+    { name: '@cf/mistral/mistral-7b-instruct-v0.1', label: 'Mistral-7b-instruct', provider: 'Cloudflare', maxTokenAllowed: 4096 },
+  ];
+
+  getModelInstance(options: {
+    model: string;
+    serverEnv: Env;
+    apiKeys?: Record<string, string>;
+    providerSettings?: Record<string, IProviderSetting>;
+  }): LanguageModelV1 {
+    const { model, serverEnv, apiKeys, providerSettings } = options;
+
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: providerSettings?.[this.name],
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'CLOUDFLARE_API_TOKEN',
+    });
+
+    // The account ID is part of the base URL, not a credential handled by
+    // getProviderBaseUrlAndKey, so resolve it separately.
+    const accountId = serverEnv?.CLOUDFLARE_ACCOUNT_ID || process?.env?.CLOUDFLARE_ACCOUNT_ID;
+
+    if (!apiKey) {
+      throw new Error(`Missing API token for ${this.name} provider`);
+    }
+
+    if (!accountId) {
+      throw new Error(`Missing Account ID for ${this.name} provider`);
+    }
+
+    // Cloudflare's /ai/v1 endpoint is OpenAI-compatible, so reuse the
+    // OpenAI SDK client pointed at the Cloudflare base URL.
+    const openai = createOpenAI({
+      baseURL: `https://api.cloudflare.com/client/v4/accounts/${accountId}/ai/v1`,
+      apiKey,
+    });
+
+    return openai(model);
+  }
+}
diff --git a/app/lib/modules/llm/registry.ts b/app/lib/modules/llm/registry.ts
index 6edba6d8..496688eb 100644
--- a/app/lib/modules/llm/registry.ts
+++ b/app/lib/modules/llm/registry.ts
@@ -1,5 +1,6 @@
 import AnthropicProvider from './providers/anthropic';
 import CohereProvider from './providers/cohere';
+import CloudflareProvider from './providers/cloudflare';
 import DeepseekProvider from './providers/deepseek';
 import GoogleProvider from './providers/google';
 import GroqProvider from './providers/groq';
@@ -19,6 +20,7 @@ import GithubProvider from './providers/github';
 
 export {
   AnthropicProvider,
+  CloudflareProvider,
   CohereProvider,
   DeepseekProvider,
   GoogleProvider,
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index b2ae1ce7..c61a48c4 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -18,4 +18,6 @@ interface Env {
   XAI_API_KEY: string;
   PERPLEXITY_API_KEY: string;
   AWS_BEDROCK_CONFIG: string;
+  CLOUDFLARE_API_TOKEN: string;
+  CLOUDFLARE_ACCOUNT_ID: string;
 }