From c7738243cab29f2f3924c01030ccb9d6c469b253 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ng=C3=B4=20T=E1=BA=A5n=20T=C3=A0i?=
Date: Fri, 17 Jan 2025 14:52:51 +0700
Subject: [PATCH] feat: added Github provider (#1109)

---
 app/lib/modules/llm/providers/github.ts | 52 +++++++++++++++++++++++++
 app/lib/modules/llm/registry.ts         |  2 +
 2 files changed, 54 insertions(+)
 create mode 100644 app/lib/modules/llm/providers/github.ts

diff --git a/app/lib/modules/llm/providers/github.ts b/app/lib/modules/llm/providers/github.ts
new file mode 100644
index 00000000..f346cdd1
--- /dev/null
+++ b/app/lib/modules/llm/providers/github.ts
@@ -0,0 +1,52 @@
+import { BaseProvider } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import type { LanguageModelV1 } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';
+
+export default class GithubProvider extends BaseProvider {
+  name = 'Github';
+  getApiKeyLink = 'https://github.com/settings/personal-access-tokens';
+
+  config = {
+    apiTokenKey: 'GITHUB_API_KEY',
+  };
+  // find more in https://github.com/marketplace?type=models
+  staticModels: ModelInfo[] = [
+    { name: 'gpt-4o', label: 'GPT-4o', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'o1', label: 'o1-preview', provider: 'Github', maxTokenAllowed: 100000 },
+    { name: 'o1-mini', label: 'o1-mini', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4', label: 'GPT-4', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'Github', maxTokenAllowed: 8000 },
+  ];
+
+  getModelInstance(options: {
+    model: string;
+    serverEnv: Env;
+    apiKeys?: Record<string, string>;
+    providerSettings?: Record<string, IProviderSetting>;
+  }): LanguageModelV1 {
+    const { model, serverEnv, apiKeys, providerSettings } = options;
+
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: providerSettings?.[this.name],
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'GITHUB_API_KEY',
+    });
+
+    if (!apiKey) {
+      throw new Error(`Missing API key for ${this.name} provider`);
+    }
+
+    const openai = createOpenAI({
+      baseURL: 'https://models.inference.ai.azure.com',
+      apiKey,
+    });
+
+    return openai(model);
+  }
+}
diff --git a/app/lib/modules/llm/registry.ts b/app/lib/modules/llm/registry.ts
index dbbe27ea..6edba6d8 100644
--- a/app/lib/modules/llm/registry.ts
+++ b/app/lib/modules/llm/registry.ts
@@ -15,6 +15,7 @@ import TogetherProvider from './providers/together';
 import XAIProvider from './providers/xai';
 import HyperbolicProvider from './providers/hyperbolic';
 import AmazonBedrockProvider from './providers/amazon-bedrock';
+import GithubProvider from './providers/github';
 
 export {
   AnthropicProvider,
@@ -34,4 +35,5 @@ export {
   TogetherProvider,
   LMStudioProvider,
   AmazonBedrockProvider,
+  GithubProvider,
 };
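
Usage note (not part of the patch): the sketch below shows one way the new provider could be exercised end to end. It assumes the options shape used by the other providers in this registry and the ambient Env type from the app's worker configuration; the inline token value and the generateText call are illustrative only, not part of this change.

// Minimal usage sketch, assuming the same calling pattern as the other providers.
import { generateText } from 'ai';
import GithubProvider from '~/lib/modules/llm/providers/github';

const provider = new GithubProvider();

// A GitHub fine-grained personal access token is expected under GITHUB_API_KEY
// (in the real app it comes from .env.local or the runtime bindings, not a literal).
const model = provider.getModelInstance({
  model: 'gpt-4o-mini',
  serverEnv: { GITHUB_API_KEY: 'github_pat_...' } as unknown as Env,
  apiKeys: {},
  providerSettings: {},
});

// The returned LanguageModelV1 plugs straight into the ai SDK.
const { text } = await generateText({ model, prompt: 'Say hello from GitHub Models.' });
console.log(text);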