mirror of https://github.com/stackblitz-labs/bolt.diy (synced 2025-01-22 19:06:12 +00:00)

chore: Add environment variables for OpenAI API Like integration

parent dd4dd2c21a, commit 30dfa4fda4

@@ -29,5 +29,13 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# You only need this environment variable set if you want to use OpenAI API Like models
+# EXAMPLE https://xxx.xxxx.com/v1
+OPENAI_API_LIKE_BASE_URL=
+
+# Get your OpenAI API Like Key by following these instructions -
+# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
+OPENAI_API_LIKE_KEY=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
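
An "OpenAI API Like" endpoint here means any server that exposes the OpenAI REST surface under the configured base URL (LM Studio, vLLM, LocalAI, and similar). A minimal smoke-test sketch for the two new variables, not part of the commit, using placeholder values and Node 18+ fetch:

// Hypothetical smoke test: point the variables at any OpenAI-compatible server.
const baseURL = process.env.OPENAI_API_LIKE_BASE_URL ?? 'https://xxx.xxxx.com/v1';
const apiKey = process.env.OPENAI_API_LIKE_KEY ?? '';

const response = await fetch(`${baseURL}/models`, {
  headers: { Authorization: `Bearer ${apiKey}` },
});
console.log(await response.json()); // should list the models the endpoint serves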

@@ -45,6 +45,9 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
         <option key="Ollama" value="Ollama">
           Ollama
         </option>
+        <option key="OpenAILike" value="OpenAILike">
+          Ollama
+        </option>
       </select>
       <select
         value={model}

@@ -19,6 +19,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
     case 'OpenRouter':
       return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
+    case "OpenAILike":
+      return env.OPENAI_API_LIKE_KEY || cloudflareEnv.OPENAI_API_LIKE_KEY;
+    default:
+      return "";
+  }
+}
+
+export function getBaseURL(cloudflareEnv: Env, provider: string) {
+  switch (provider) {
+    case 'OpenAILike':
+      return env.OPENAI_API_LIKE_BASE_URL || cloudflareEnv.OPENAI_API_LIKE_BASE_URL;
     default:
       return "";
   }
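
A hedged usage sketch of the two helpers above; resolveOpenAILike is a hypothetical caller (not repository code), and Env is the Cloudflare binding interface extended in worker-configuration.d.ts at the end of this commit:

import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';

// Hypothetical helper: both lookups prefer the local env value and fall back
// to the Cloudflare binding (env.X || cloudflareEnv.X).
export function resolveOpenAILike(cloudflareEnv: Env) {
  return {
    apiKey: getAPIKey(cloudflareEnv, 'OpenAILike'),   // OPENAI_API_LIKE_KEY
    baseURL: getBaseURL(cloudflareEnv, 'OpenAILike'), // OPENAI_API_LIKE_BASE_URL
  };
}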

@@ -1,6 +1,6 @@
 // @ts-nocheck
 // Preventing TS checks with files presented in the video for a better presentation.
-import { getAPIKey } from '~/lib/.server/llm/api-key';
+import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';

@@ -14,7 +14,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
 
   return anthropic(model);
 }
+export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL,
+    apiKey,
+  });
 
+  return openai(model);
+}
 export function getOpenAIModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     apiKey,
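
The new getOpenAILikeModel works because createOpenAI from @ai-sdk/openai accepts a custom baseURL, so the same client can talk to any OpenAI-compatible server. A standalone sketch of what the helper builds, with placeholder URL, key, and model id:

import { createOpenAI } from '@ai-sdk/openai';

// Placeholder values; any OpenAI-compatible endpoint works here.
const openai = createOpenAI({
  baseURL: 'https://xxx.xxxx.com/v1',
  apiKey: 'sk-placeholder',
});

const model = openai('my-local-model'); // language model usable with the AI SDK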

@@ -54,7 +61,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 
 export function getModel(provider: string, model: string, env: Env) {
   const apiKey = getAPIKey(env, provider);
+  const baseURL = getBaseURL(env, provider);
 
   switch (provider) {
     case 'Anthropic':

@@ -67,6 +74,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'OpenAILike':
+      return getOpenAILikeModel(baseURL,apiKey, model);
     default:
       return getOllamaModel(model);
   }
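
Putting the pieces together, a hedged end-to-end sketch: the import path and model id are assumptions (the path mirrors the '~/lib/.server/llm/api-key' import above), and generateText from the AI SDK stands in for whatever the chat route actually calls:

import { generateText } from 'ai';
import { getModel } from '~/lib/.server/llm/model'; // assumed path

// getModel('OpenAILike', ...) resolves OPENAI_API_LIKE_BASE_URL / OPENAI_API_LIKE_KEY
// via getBaseURL / getAPIKey and dispatches to getOpenAILikeModel.
export async function askOpenAILike(env: Env, prompt: string) {
  const { text } = await generateText({
    model: getModel('OpenAILike', 'my-local-model', env), // placeholder model id
    prompt,
  });
  return text;
}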

@@ -36,7 +36,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const response = await fetch(`http://localhost:11434/api/tags`);
+    const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const url = new URL(base_url).toString();
+    const response = await fetch(`${url}/api/tags`);
     const data = await response.json();
 
     return data.models.map((model: any) => ({
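
One side effect worth noting: new URL(base_url).toString() re-serialises the origin with a trailing slash, so the interpolated path contains a doubled slash. A quick sketch of the intermediate values, assuming the default base URL:

const base_url = 'http://localhost:11434';  // default when OLLAMA_API_BASE_URL is unset
const url = new URL(base_url).toString();   // "http://localhost:11434/"
const endpoint = `${url}/api/tags`;         // "http://localhost:11434//api/tags"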

@@ -49,9 +51,36 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
   }
 }
 
+async function getOpenAILikeModels(): Promise<ModelInfo[]> {
+
+  try {
+    const base_url =import.meta.env.OPENAI_API_LIKE_BASE_URL || "";
+    if (!base_url) {
+      return [];
+    }
+    const url = new URL(base_url).toString();
+    const api_key = import.meta.env.OPENAI_API_LIKE_KEY ?? "";
+    const response = await fetch(`${url}/models`, {
+      headers: {
+        Authorization: `Bearer ${api_key}`,
+      }
+    });
+    const res = await response.json();
+    return res.data.map((model: any) => ({
+      name: model.id,
+      label: model.id,
+      provider: 'OpenAILike',
+    }));
+  }catch (e) {
+    return []
+  }
+
+}
 async function initializeModelList(): Promise<void> {
   const ollamaModels = await getOllamaModels();
-  MODEL_LIST = [...ollamaModels, ...staticModels];
+  const openAiLikeModels = await getOpenAILikeModels();
+  console.log(openAiLikeModels);
+  MODEL_LIST = [...ollamaModels,...openAiLikeModels, ...staticModels];
 }
 initializeModelList().then();
 export { getOllamaModels, initializeModelList };
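
For reference, getOpenAILikeModels() expects the standard OpenAI "list models" response shape and keeps only the model ids. A sketch of the mapping with a made-up payload:

// Hypothetical /models payload from an OpenAI-compatible server.
const res = {
  data: [{ id: 'my-local-model', object: 'model', owned_by: 'me' }],
};

const models = res.data.map((model) => ({
  name: model.id,
  label: model.id,
  provider: 'OpenAILike',
}));
// => [{ name: 'my-local-model', label: 'my-local-model', provider: 'OpenAILike' }]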

@@ -27,6 +27,7 @@ export default defineConfig((config) => {
       chrome129IssuePlugin(),
       config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
     ],
+    envPrefix:["VITE_","OPENAI_API_LIKE_","OLLAMA_API_BASE_URL"],
   };
 });
 
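
With envPrefix extended, Vite exposes any environment variable matching one of those prefixes on import.meta.env, which is what lets the model-list code above read OLLAMA_API_BASE_URL and OPENAI_API_LIKE_*. A sketch of what becomes visible (note that matched variables are inlined wherever import.meta.env is used):

// Only variables matching the configured prefixes are exposed by Vite.
const ollamaBase = import.meta.env.OLLAMA_API_BASE_URL;          // full name used as its own prefix
const openAiLikeBase = import.meta.env.OPENAI_API_LIKE_BASE_URL; // matches "OPENAI_API_LIKE_"
const openAiLikeKey = import.meta.env.OPENAI_API_LIKE_KEY;       // matches "OPENAI_API_LIKE_"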

worker-configuration.d.ts (vendored)
@@ -4,4 +4,6 @@ interface Env {
   GROQ_API_KEY: string;
   OPEN_ROUTER_API_KEY: string;
   OLLAMA_API_BASE_URL: string;
+  OPENAI_API_LIKE_KEY: string;
+  OPENAI_API_LIKE_BASE_URL: string;
 }