mirror of
https://github.com/stackblitz-labs/bolt.diy
synced 2025-01-24 03:37:29 +00:00
6494f5ac2e
* fix: updated logger and model caching * usage token stream issue fix * minor changes * updated starter template change to fix the app title * starter template bugfix * fixed hydration errors and raw logs * removed raw log * made auto select template false by default * cleaner logs and updated logic to call dynamicModels only if not found in static models * updated starter template instructions * browser console log improved for Firefox * provider icons fix
69 lines
1.9 KiB
TypeScript
import { BaseProvider } from '~/lib/modules/llm/base-provider';
|
|
import type { ModelInfo } from '~/lib/modules/llm/types';
|
|
import type { IProviderSetting } from '~/types/model';
|
|
import { createOpenAI } from '@ai-sdk/openai';
|
|
import type { LanguageModelV1 } from 'ai';
|
|
|
|
export default class LMStudioProvider extends BaseProvider {
|
|
name = 'LMStudio';
|
|
getApiKeyLink = 'https://lmstudio.ai/';
|
|
labelForGetApiKey = 'Get LMStudio';
|
|
icon = 'i-ph:cloud-arrow-down';
|
|
|
|
config = {
|
|
baseUrlKey: 'LMSTUDIO_API_BASE_URL',
|
|
baseUrl: 'http://localhost:1234/',
|
|
};
|
|
|
|
staticModels: ModelInfo[] = [];
|
|
|
|
async getDynamicModels(
|
|
apiKeys?: Record<string, string>,
|
|
settings?: IProviderSetting,
|
|
serverEnv: Record<string, string> = {},
|
|
): Promise<ModelInfo[]> {
|
|
const { baseUrl } = this.getProviderBaseUrlAndKey({
|
|
apiKeys,
|
|
providerSettings: settings,
|
|
serverEnv,
|
|
defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
|
|
defaultApiTokenKey: '',
|
|
});
|
|
|
|
if (!baseUrl) {
|
|
return [];
|
|
}
|
|
|
|
const response = await fetch(`${baseUrl}/v1/models`);
|
|
const data = (await response.json()) as { data: Array<{ id: string }> };
|
|
|
|
return data.data.map((model) => ({
|
|
name: model.id,
|
|
label: model.id,
|
|
provider: this.name,
|
|
maxTokenAllowed: 8000,
|
|
}));
|
|
}
|
|
getModelInstance: (options: {
|
|
model: string;
|
|
serverEnv: Env;
|
|
apiKeys?: Record<string, string>;
|
|
providerSettings?: Record<string, IProviderSetting>;
|
|
}) => LanguageModelV1 = (options) => {
|
|
const { apiKeys, providerSettings, serverEnv, model } = options;
|
|
const { baseUrl } = this.getProviderBaseUrlAndKey({
|
|
apiKeys,
|
|
providerSettings,
|
|
serverEnv: serverEnv as any,
|
|
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
|
|
defaultApiTokenKey: '',
|
|
});
|
|
const lmstudio = createOpenAI({
|
|
baseUrl: `${baseUrl}/v1`,
|
|
apiKey: '',
|
|
});
|
|
|
|
return lmstudio(model);
|
|
};
|
|
}
|