bolt.diy/app/lib/.server/llm/model.ts

/*
 * @ts-nocheck
 * Preventing TS checks for files presented in the video, for a better presentation.
 */
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import { createMistral } from '@ai-sdk/mistral';
import { createCohere } from '@ai-sdk/cohere';
import type { LanguageModelV1 } from 'ai';
import type { IProviderSetting } from '~/types/model';

// Context window size passed to Ollama models; override via the DEFAULT_NUM_CTX environment variable.
export const DEFAULT_NUM_CTX = process.env.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;

type OptionalApiKey = string | undefined;

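/*
 * Each helper below wraps the matching AI SDK provider factory and returns a
 * model instance for the given model id. Several hosted providers are reached
 * through createOpenAI because they expose OpenAI-compatible APIs.
 */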
export function getAnthropicModel(apiKey: OptionalApiKey, model: string) {
  const anthropic = createAnthropic({
    apiKey,
  });

  return anthropic(model);
}

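// Generic OpenAI-compatible provider with a caller-supplied base URL (used for the OpenAILike and Together providers in getModel).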
export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL,
    apiKey,
  });

  return openai(model);
}

export function getCohereAIModel(apiKey: OptionalApiKey, model: string) {
  const cohere = createCohere({
    apiKey,
  });

  return cohere(model);
}

export function getOpenAIModel(apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    apiKey,
  });

  return openai(model);
}

export function getMistralModel(apiKey: OptionalApiKey, model: string) {
  const mistral = createMistral({
    apiKey,
  });

  return mistral(model);
}

export function getGoogleModel(apiKey: OptionalApiKey, model: string) {
  const google = createGoogleGenerativeAI({
    apiKey,
  });

  return google(model);
}

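// Groq exposes an OpenAI-compatible API, so the OpenAI provider is pointed at Groq's endpoint.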
export function getGroqModel(apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL: 'https://api.groq.com/openai/v1',
    apiKey,
  });

  return openai(model);
}

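// Hugging Face models are reached through the Inference API's OpenAI-compatible route.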
export function getHuggingFaceModel(apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL: 'https://api-inference.huggingface.co/v1/',
    apiKey,
  });

  return openai(model);
}

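/*
 * Ollama is typically self-hosted: the provider instance is created first and its
 * baseURL is then overridden so requests go to the configured Ollama host, while
 * numCtx sets the context window (see DEFAULT_NUM_CTX above).
 */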
export function getOllamaModel(baseURL: string, model: string) {
  const ollamaInstance = ollama(model, {
    numCtx: DEFAULT_NUM_CTX,
  }) as LanguageModelV1 & { config: any };

  ollamaInstance.config.baseURL = `${baseURL}/api`;

  return ollamaInstance;
}

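// DeepSeek's OpenAI-compatible beta endpoint.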
export function getDeepseekModel(apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL: 'https://api.deepseek.com/beta',
    apiKey,
  });

  return openai(model);
}

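// OpenRouter aggregates many upstream models behind a single API; the provider's chat variant is used here.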
export function getOpenRouterModel(apiKey: OptionalApiKey, model: string) {
  const openRouter = createOpenRouter({
    apiKey,
  });

  return openRouter.chat(model);
}

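// LM Studio serves an OpenAI-compatible API locally and does not require an API key.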
export function getLMStudioModel(baseURL: string, model: string) {
  const lmstudio = createOpenAI({
    baseURL: `${baseURL}/v1`,
    apiKey: '',
  });

  return lmstudio(model);
}

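// xAI models, reached through its OpenAI-compatible endpoint.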
export function getXAIModel(apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL: 'https://api.x.ai/v1',
    apiKey,
  });

  return openai(model);
}

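// Perplexity models, reached through its OpenAI-compatible endpoint.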
export function getPerplexityModel(apiKey: OptionalApiKey, model: string) {
  const perplexity = createOpenAI({
    baseURL: 'https://api.perplexity.ai/',
    apiKey,
  });

  return perplexity(model);
}

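/*
 * Resolves the API key and base URL for the requested provider (from the server
 * environment, per-request API keys, and provider settings), then dispatches to
 * the matching helper above. Providers without an explicit case fall back to Ollama.
 */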
export function getModel(
  provider: string,
  model: string,
  serverEnv: Env,
  apiKeys?: Record<string, string>,
  providerSettings?: Record<string, IProviderSetting>,
) {
  const apiKey = getAPIKey(serverEnv, provider, apiKeys);
  const baseURL = getBaseURL(serverEnv, provider, providerSettings);

  switch (provider) {
    case 'Anthropic':
      return getAnthropicModel(apiKey, model);
    case 'OpenAI':
      return getOpenAIModel(apiKey, model);
    case 'Groq':
      return getGroqModel(apiKey, model);
    case 'HuggingFace':
      return getHuggingFaceModel(apiKey, model);
    case 'OpenRouter':
      return getOpenRouterModel(apiKey, model);
    case 'Google':
      return getGoogleModel(apiKey, model);
    case 'OpenAILike':
    case 'Together':
      return getOpenAILikeModel(baseURL, apiKey, model);
    case 'Deepseek':
      return getDeepseekModel(apiKey, model);
    case 'Mistral':
      return getMistralModel(apiKey, model);
    case 'LMStudio':
      return getLMStudioModel(baseURL, model);
    case 'xAI':
      return getXAIModel(apiKey, model);
    case 'Cohere':
      return getCohereAIModel(apiKey, model);
    case 'Perplexity':
      return getPerplexityModel(apiKey, model);
    default:
      // Any provider without an explicit case (e.g. Ollama) falls through to the Ollama helper.
      return getOllamaModel(baseURL, model);
  }
}
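
/*
 * Rough usage sketch (not part of this module): how a caller such as an API route
 * might obtain a model instance. The `env` object, API key value, and model id
 * below are illustrative assumptions, not values defined in this file.
 *
 *   const model = getModel('Anthropic', 'claude-3-5-sonnet-20241022', env, { Anthropic: 'sk-ant-...' });
 *
 * The returned LanguageModelV1 can then be passed as the `model` option to the
 * AI SDK's generateText/streamText calls.
 */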