mirror of
https://github.com/stackblitz-labs/bolt.diy
synced 2025-06-23 02:16:08 +00:00
Merge b4788775ba
into d0d9818964
This commit is contained in:
commit
e6b36131ac
@ -51,6 +51,10 @@ OPENAI_LIKE_API_KEY=
|
|||||||
# Get your Together API Key
|
# Get your Together API Key
|
||||||
TOGETHER_API_KEY=
|
TOGETHER_API_KEY=
|
||||||
|
|
||||||
|
#Glhf.Chat
|
||||||
|
GLHF_API_KEY=your glhf.chat API key
|
||||||
|
GLHF_API_BASE_URL=https://glhf.chat/api/openai/v1
|
||||||
|
|
||||||
# You only need this environment variable set if you want to use Hyperbolic models
|
# You only need this environment variable set if you want to use Hyperbolic models
|
||||||
# Get your Hyperbolic API key at https://app.hyperbolic.xyz/settings
|
# Get your Hyperbolic API key at https://app.hyperbolic.xyz/settings
|
||||||
#baseURL="https://api.hyperbolic.xyz/v1/chat/completions"
|
#baseURL="https://api.hyperbolic.xyz/v1/chat/completions"
|
||||||
|
119
app/lib/modules/llm/providers/glhf.chat.ts
Normal file
119
app/lib/modules/llm/providers/glhf.chat.ts
Normal file
@ -0,0 +1,119 @@
|
|||||||
|
import { BaseProvider, getOpenAILikeModel } from '~/lib/modules/llm/base-provider';
|
||||||
|
import type { ModelInfo } from '~/lib/modules/llm/types';
|
||||||
|
import type { IProviderSetting } from '~/types/model';
|
||||||
|
import type { LanguageModelV1 } from 'ai';
|
||||||
|
|
||||||
|
export default class GLHFProvider extends BaseProvider {
|
||||||
|
name = 'glhf.chat';
|
||||||
|
getApiKeyLink = 'https://glhf.chat/users/settings/api';
|
||||||
|
|
||||||
|
config = {
|
||||||
|
baseUrlKey: 'GLHF_API_BASE_URL',
|
||||||
|
apiTokenKey: 'GLHF_API_KEY',
|
||||||
|
};
|
||||||
|
|
||||||
|
defaultBaseUrl = 'https://glhf.chat/api/openai/v1';
|
||||||
|
|
||||||
|
get staticModels(): ModelInfo[] {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
name: 'hf:mistralai/Mistral-7B-Instruct-v0.3',
|
||||||
|
label: 'Mistral-7B-Instruct',
|
||||||
|
provider: this.name,
|
||||||
|
maxTokenAllowed: 8000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'hf:qwen/Qwen2.5-Coder-32B-Instruct',
|
||||||
|
label: 'Qwen2.5-Coder-32B',
|
||||||
|
provider: this.name,
|
||||||
|
maxTokenAllowed: 8000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'hf:deepseek-ai/DeepSeek-Coder-V2-Lite-Instruct',
|
||||||
|
label: 'DeepSeek-Coder-V2-Lite',
|
||||||
|
provider: this.name,
|
||||||
|
maxTokenAllowed: 8000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'hf:nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
|
||||||
|
label: 'Llama-3.1-Nemotron-70B',
|
||||||
|
provider: this.name,
|
||||||
|
maxTokenAllowed: 8000,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'hf:google/codegemma-7b-it',
|
||||||
|
label: 'CodeGemma-7B',
|
||||||
|
provider: this.name,
|
||||||
|
maxTokenAllowed: 8000,
|
||||||
|
}
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDynamicModels(
|
||||||
|
apiKeys?: Record<string, string>,
|
||||||
|
settings?: IProviderSetting,
|
||||||
|
serverEnv: Record<string, string> = {},
|
||||||
|
): Promise<ModelInfo[]> {
|
||||||
|
// Retornamos apenas os modelos estáticos, evitando duplicação
|
||||||
|
return this.staticModels;
|
||||||
|
}
|
||||||
|
|
||||||
|
getModelInstance(options: {
|
||||||
|
model?: string;
|
||||||
|
serverEnv: Record<string, string>;
|
||||||
|
apiKeys?: Record<string, string>;
|
||||||
|
providerSettings?: Record<string, IProviderSetting>;
|
||||||
|
}): LanguageModelV1 {
|
||||||
|
const { model, serverEnv, apiKeys, providerSettings } = options;
|
||||||
|
|
||||||
|
const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
|
||||||
|
apiKeys,
|
||||||
|
providerSettings: providerSettings?.[this.name],
|
||||||
|
serverEnv,
|
||||||
|
defaultBaseUrlKey: this.config.baseUrlKey,
|
||||||
|
defaultApiTokenKey: this.config.apiTokenKey,
|
||||||
|
});
|
||||||
|
|
||||||
|
const effectiveBaseUrl = baseUrl || this.defaultBaseUrl;
|
||||||
|
|
||||||
|
if (!apiKey) {
|
||||||
|
throw new Error(`Missing API key for ${this.name} provider`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Usa o primeiro modelo como padrão se nenhum for especificado
|
||||||
|
const modelToUse = model || this.staticModels[0].name;
|
||||||
|
return getOpenAILikeModel(effectiveBaseUrl, apiKey, modelToUse);
|
||||||
|
}
|
||||||
|
|
||||||
|
async testApiConnection(): Promise<void> {
|
||||||
|
const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
|
||||||
|
serverEnv: process.env as any,
|
||||||
|
defaultBaseUrlKey: this.config.baseUrlKey,
|
||||||
|
defaultApiTokenKey: this.config.apiTokenKey,
|
||||||
|
});
|
||||||
|
|
||||||
|
const effectiveBaseUrl = baseUrl || this.defaultBaseUrl;
|
||||||
|
|
||||||
|
if (!apiKey) {
|
||||||
|
throw new Error('Missing API key for GLHF provider during connection test.');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${effectiveBaseUrl}/models`, {
|
||||||
|
headers: {
|
||||||
|
'Authorization': `Bearer ${apiKey}`,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`GLHF API connection failed: ${response.status} ${response.statusText}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('GLHF API connection successful.');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error during GLHF API connection test:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -9,6 +9,7 @@ import MistralProvider from './providers/mistral';
|
|||||||
import OllamaProvider from './providers/ollama';
|
import OllamaProvider from './providers/ollama';
|
||||||
import OpenRouterProvider from './providers/open-router';
|
import OpenRouterProvider from './providers/open-router';
|
||||||
import OpenAILikeProvider from './providers/openai-like';
|
import OpenAILikeProvider from './providers/openai-like';
|
||||||
|
import glhfchatProvider from './providers/glhf.chat';
|
||||||
import OpenAIProvider from './providers/openai';
|
import OpenAIProvider from './providers/openai';
|
||||||
import PerplexityProvider from './providers/perplexity';
|
import PerplexityProvider from './providers/perplexity';
|
||||||
import TogetherProvider from './providers/together';
|
import TogetherProvider from './providers/together';
|
||||||
@ -28,6 +29,7 @@ export {
|
|||||||
MistralProvider,
|
MistralProvider,
|
||||||
OllamaProvider,
|
OllamaProvider,
|
||||||
OpenAIProvider,
|
OpenAIProvider,
|
||||||
|
glhfchatProvider,
|
||||||
OpenRouterProvider,
|
OpenRouterProvider,
|
||||||
OpenAILikeProvider,
|
OpenAILikeProvider,
|
||||||
PerplexityProvider,
|
PerplexityProvider,
|
||||||
|
Loading…
Reference in New Issue
Block a user