mirror of
https://github.com/stackblitz/bolt.new
synced 2025-02-06 04:48:04 +00:00
Merge pull request #60 from ZerxZ/main
chore: Add environment variables for OpenAI Like integration
Commit 5f6a595eaa
@@ -29,6 +29,12 @@ GOOGLE_GENERATIVE_AI_API_KEY=
 # EXAMPLE http://localhost:11434
 OLLAMA_API_BASE_URL=
 
+# You only need this environment variable set if you want to use OpenAI Like models
+OPENAI_LIKE_API_BASE_URL=
+
+# Get your OpenAI Like API Key
+OPENAI_LIKE_API_KEY=
+
 # Get your Mistral API Key by following these instructions -
 # https://console.mistral.ai/api-keys/
 # You only need this environment variable set if you want to use Mistral models
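For context, a hypothetical filled-in configuration for the two new variables, pointing at a locally hosted OpenAI-compatible server. The URL and key below are illustrative placeholders, not values from this commit:

# Example only: any OpenAI-compatible endpoint (LM Studio, vLLM, LiteLLM, ...) can go here
OPENAI_LIKE_API_BASE_URL=http://localhost:1234/v1
# Many local servers accept an arbitrary non-empty key; this value is a placeholder
OPENAI_LIKE_API_KEY=sk-placeholder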
@@ -45,6 +45,9 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
       <option key="Ollama" value="Ollama">
         Ollama
       </option>
+      <option key="OpenAILike" value="OpenAILike">
+        OpenAILike
+      </option>
     </select>
     <select
       value={model}
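This hunk (presumably the chat UI's model selector component) only adds the provider entry; the model dropdown is then expected to list whatever getOpenAILikeModels() contributes to MODEL_LIST. A minimal sketch of that filtering, written under that assumption with names that are not part of this diff:

// Sketch: the second <select> is assumed to be filled from MODEL_LIST filtered
// by the provider chosen in the first <select>.
interface ModelInfo {
  name: string;
  label: string;
  provider: string;
}

function modelsForProvider(modelList: ModelInfo[], provider: string): ModelInfo[] {
  // e.g. modelsForProvider(MODEL_LIST, 'OpenAILike') returns the entries produced by getOpenAILikeModels()
  return modelList.filter((m) => m.provider === provider);
}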
@@ -23,6 +23,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
       return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
     case 'Mistral':
       return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
+    case "OpenAILike":
+      return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+    default:
+      return "";
+  }
+}
+
+export function getBaseURL(cloudflareEnv: Env, provider: string) {
+  switch (provider) {
+    case 'OpenAILike':
+      return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
     default:
       return "";
   }
 }
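The new getBaseURL mirrors getAPIKey in the same file (~/lib/.server/llm/api-key, per the import shown further down): it checks the process-level env first and falls back to the Cloudflare Env binding. A hedged usage sketch with placeholder values:

// Illustrative only: resolving the OpenAI-Like settings from a Cloudflare-style Env.
// The values are placeholders; a process-level variable of the same name would take
// precedence because of the `env.X || cloudflareEnv.X` pattern above.
const sampleEnv = {
  OPENAI_LIKE_API_KEY: 'sk-placeholder',
  OPENAI_LIKE_API_BASE_URL: 'http://localhost:1234/v1',
} as unknown as Env;

const apiKey = getAPIKey(sampleEnv, 'OpenAILike');   // 'sk-placeholder'
const baseURL = getBaseURL(sampleEnv, 'OpenAILike'); // 'http://localhost:1234/v1'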
@@ -1,6 +1,6 @@
 // @ts-nocheck
 // Preventing TS checks with files presented in the video for a better presentation.
-import { getAPIKey } from '~/lib/.server/llm/api-key';
+import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
 import { createAnthropic } from '@ai-sdk/anthropic';
 import { createOpenAI } from '@ai-sdk/openai';
 import { createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -16,7 +16,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
 
   return anthropic(model);
 }
+export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+  const openai = createOpenAI({
+    baseURL,
+    apiKey,
+  });
 
+  return openai(model);
+}
 export function getOpenAIModel(apiKey: string, model: string) {
   const openai = createOpenAI({
     apiKey,
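getOpenAILikeModel reuses createOpenAI from @ai-sdk/openai with a caller-supplied baseURL, so any server that speaks the OpenAI chat API can back it. A hedged usage sketch; the URL, key, and model id are placeholders, and generateText is assumed to be available from the `ai` package that these @ai-sdk providers plug into:

import { generateText } from 'ai';

// Placeholder endpoint, key, and model id - not values from this commit.
const model = getOpenAILikeModel('http://localhost:1234/v1', 'sk-placeholder', 'my-local-model');

const { text } = await generateText({
  model,
  prompt: 'Say hello from an OpenAI-compatible endpoint.',
});
console.log(text);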
@@ -72,7 +79,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
 
 export function getModel(provider: string, model: string, env: Env) {
   const apiKey = getAPIKey(env, provider);
+  const baseURL = getBaseURL(env, provider);
 
   switch (provider) {
     case 'Anthropic':
@@ -85,6 +92,8 @@ export function getModel(provider: string, model: string, env: Env) {
       return getOpenRouterModel(apiKey, model);
     case 'Google':
       return getGoogleModel(apiKey, model)
+    case 'OpenAILike':
+      return getOpenAILikeModel(baseURL, apiKey, model);
     case 'Deepseek':
       return getDeepseekModel(apiKey, model)
     case 'Mistral':
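With baseURL threaded through getModel, the OpenAILike branch can be exercised like any other provider. A sketch of a call site; the model id is a placeholder and `env` stands for whatever Env the calling route receives:

// Illustrative call: 'OpenAILike' must match the provider string on the ModelInfo
// entries, and the model id is whatever the configured /models endpoint reported.
const llm = getModel('OpenAILike', 'my-local-model', env);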
@@ -49,7 +49,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
 
 async function getOllamaModels(): Promise<ModelInfo[]> {
   try {
-    const response = await fetch(`http://localhost:11434/api/tags`);
+    const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const url = new URL(base_url).toString();
+    const response = await fetch(`${url}/api/tags`);
     const data = await response.json() as OllamaApiResponse;
 
     return data.models.map((model: OllamaModel) => ({
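One side effect of the new URL handling worth noting: `new URL(base).toString()` serializes an origin-only URL with a trailing slash, so the fetched path ends up containing a double slash. A small illustration (not part of the diff):

// Illustration of the URL-building behaviour above.
const base = new URL('http://localhost:11434').toString(); // 'http://localhost:11434/'
const tagsUrl = `${base}/api/tags`;                        // 'http://localhost:11434//api/tags'
// Whether the double slash matters depends on the server; apart from that extra
// slash, the default value preserves the previous behaviour.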
@@ -62,9 +64,36 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
   }
 }
 
+async function getOpenAILikeModels(): Promise<ModelInfo[]> {
+
+  try {
+    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
+    if (!base_url) {
+      return [];
+    }
+    const url = new URL(base_url).toString();
+    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
+    const response = await fetch(`${url}/models`, {
+      headers: {
+        Authorization: `Bearer ${api_key}`,
+      }
+    });
+    const res = await response.json();
+    return res.data.map((model: any) => ({
+      name: model.id,
+      label: model.id,
+      provider: 'OpenAILike',
+    }));
+  } catch (e) {
+    return []
+  }
+
+}
 async function initializeModelList(): Promise<void> {
   const ollamaModels = await getOllamaModels();
-  MODEL_LIST = [...ollamaModels, ...staticModels];
+  const openAiLikeModels = await getOpenAILikeModels();
+  console.log(openAiLikeModels);
+  MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
 }
 initializeModelList().then();
 export { getOllamaModels, initializeModelList };
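getOpenAILikeModels assumes the standard OpenAI "list models" response shape: an object with a `data` array of `{ id, ... }` entries. A hypothetical response and the ModelInfo entries it would contribute to MODEL_LIST; the ids are placeholders:

// Hypothetical GET ${OPENAI_LIKE_API_BASE_URL}/models response body:
const sampleResponse = {
  object: 'list',
  data: [
    { id: 'my-local-model', object: 'model' },
    { id: 'another-model', object: 'model' },
  ],
};

// After the res.data.map(...) above, MODEL_LIST gains entries shaped like:
const mapped = sampleResponse.data.map((model) => ({
  name: model.id,
  label: model.id,
  provider: 'OpenAILike',
}));
// [{ name: 'my-local-model', label: 'my-local-model', provider: 'OpenAILike' }, ...]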
@@ -27,6 +27,7 @@ export default defineConfig((config) => {
     chrome129IssuePlugin(),
     config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
   ],
+  envPrefix: ["VITE_", "OPENAI_LIKE_API_", "OLLAMA_API_BASE_URL"],
   css: {
     preprocessorOptions: {
       scss: {
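The envPrefix entry is what lets the import.meta.env reads in getOllamaModels and getOpenAILikeModels see these variables: Vite only exposes env vars whose names match one of the listed prefixes (plus the default VITE_), and exposed values are baked into the bundle. A short illustration:

// With the envPrefix above, Vite exposes matching variables on import.meta.env.
const ollamaBase = import.meta.env.OLLAMA_API_BASE_URL;    // exposed (exact name listed as a prefix)
const openAiLikeKey = import.meta.env.OPENAI_LIKE_API_KEY; // exposed (matches the "OPENAI_LIKE_API_" prefix)
const groqKey = import.meta.env.GROQ_API_KEY;              // undefined here - no matching prefix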
worker-configuration.d.ts (vendored, 2 changed lines)
@@ -4,5 +4,7 @@ interface Env {
   GROQ_API_KEY: string;
   OPEN_ROUTER_API_KEY: string;
   OLLAMA_API_BASE_URL: string;
+  OPENAI_LIKE_API_KEY: string;
+  OPENAI_LIKE_API_BASE_URL: string;
   DEEPSEEK_API_KEY: string;
 }
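Declaring the two fields on Env keeps the cloudflareEnv lookups in getAPIKey/getBaseURL type-safe; the values themselves come from the Cloudflare project's variables or secrets. A small sketch of consuming the typed bindings (the helper below is illustrative, not part of the repo):

// Illustrative helper showing the typed access the declarations above enable.
function describeOpenAILike(env: Env): string {
  const base = env.OPENAI_LIKE_API_BASE_URL || '(not configured)';
  const hasKey = Boolean(env.OPENAI_LIKE_API_KEY);
  return `OpenAI-Like endpoint: ${base}, API key present: ${hasKey}`;
}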