chore: Add environment variables for OpenAI API Like integration

Gaoyao Massimo Hu 2024-10-23 16:36:12 +01:00
parent dd4dd2c21a
commit 30dfa4fda4
No known key found for this signature in database
GPG Key ID: E3A9EE580E0CB24C
7 changed files with 72 additions and 9 deletions

View File

@@ -29,5 +29,13 @@ GOOGLE_GENERATIVE_AI_API_KEY=
# EXAMPLE http://localhost:11434
OLLAMA_API_BASE_URL=
# You only need this environment variable set if you want to use OpenAI-compatible (OpenAILike) models
# EXAMPLE https://xxx.xxxx.com/v1
OPENAI_API_LIKE_BASE_URL=
# Get the API key from your OpenAI-compatible provider; for OpenAI itself, see
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
OPENAI_API_LIKE_KEY=
# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug
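
For context, these two variables are the standard pair an OpenAI-compatible endpoint expects: a base URL (typically ending in /v1) and a bearer key. A minimal sketch of exercising such an endpoint directly, outside this codebase; the model id and response handling are illustrative assumptions, not taken from this commit:

// Minimal sketch (not part of this commit): call an OpenAI-compatible
// chat completions endpoint using the two new variables.
// 'my-model' is a placeholder id, not a real identifier.
const baseURL = process.env.OPENAI_API_LIKE_BASE_URL; // e.g. https://xxx.xxxx.com/v1
const apiKey = process.env.OPENAI_API_LIKE_KEY;

const response = await fetch(`${baseURL}/chat/completions`, {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${apiKey}`,
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    model: 'my-model',
    messages: [{ role: 'user', content: 'Hello' }],
  }),
});
const data = await response.json();
console.log(data.choices?.[0]?.message?.content);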

View File

@@ -28,7 +28,7 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
const [provider, setProvider] = useState(DEFAULT_PROVIDER);
return (
<div className="mb-2">
<select
value={provider}
onChange={(e) => {
setProvider(e.target.value);
@@ -42,9 +42,12 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
{provider}
</option>
))}
<option key="Ollama" value="Ollama">
Ollama
</option>
<option key="Ollama" value="Ollama">
Ollama
</option>
<option key="OpenAILike" value="OpenAILike">
Ollama
</option>
</select>
<select
value={model}
@@ -263,4 +266,4 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
</div>
);
},
);

View File

@@ -19,6 +19,17 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
case "OpenAILike":
return env.OPENAI_API_LIKE_KEY || cloudflareEnv.OPENAI_API_LIKE_KEY;
default:
return "";
}
}
export function getBaseURL(cloudflareEnv: Env, provider: string) {
switch (provider) {
case 'OpenAILike':
return env.OPENAI_API_LIKE_BASE_URL || cloudflareEnv.OPENAI_API_LIKE_BASE_URL;
default:
return "";
}

View File

@@ -1,6 +1,6 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -14,7 +14,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
return anthropic(model);
}
export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
const openai = createOpenAI({
baseURL,
apiKey,
});
return openai(model);
}
export function getOpenAIModel(apiKey: string, model: string) {
const openai = createOpenAI({
apiKey,
@@ -54,7 +61,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getModel(provider: string, model: string, env: Env) {
const apiKey = getAPIKey(env, provider);
const baseURL = getBaseURL(env, provider);
switch (provider) {
case 'Anthropic':
@@ -67,6 +74,8 @@ export function getModel(provider: string, model: string, env: Env) {
return getOpenRouterModel(apiKey, model);
case 'Google':
return getGoogleModel(apiKey, model)
case 'OpenAILike':
return getOpenAILikeModel(baseURL, apiKey, model);
default:
return getOllamaModel(model);
}
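
With getBaseURL wired in, an OpenAILike model resolves through getModel just like the other providers and yields an AI SDK language model. A rough usage sketch, assuming the Vercel AI SDK's generateText (the @ai-sdk/* imports above suggest it is available), that the module lives at ~/lib/.server/llm/model next to api-key, and a placeholder model id:

import { generateText } from 'ai';
import { getModel } from '~/lib/.server/llm/model'; // assumed path, alongside api-key

// Illustrative only: resolve an OpenAI-compatible model by provider name
// and run a one-off completion. 'my-model' is a placeholder id.
export async function demoOpenAILike(env: Env) {
  const model = getModel('OpenAILike', 'my-model', env);
  const { text } = await generateText({
    model,
    prompt: 'Say hello',
  });
  return text;
}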

View File

@@ -36,7 +36,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
async function getOllamaModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
const url = new URL('/api/tags', base_url).toString();
const response = await fetch(url);
const data = await response.json();
return data.models.map((model: any) => ({
@@ -49,9 +51,36 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
}
}
async function getOpenAILikeModels(): Promise<ModelInfo[]> {
try {
const base_url = import.meta.env.OPENAI_API_LIKE_BASE_URL || '';
if (!base_url) {
return [];
}
const url = new URL(base_url).toString();
const api_key = import.meta.env.OPENAI_API_LIKE_KEY ?? '';
const response = await fetch(`${url}/models`, {
headers: {
Authorization: `Bearer ${api_key}`,
},
});
const res = await response.json();
return res.data.map((model: any) => ({
name: model.id,
label: model.id,
provider: 'OpenAILike',
}));
} catch (e) {
return [];
}
}
async function initializeModelList(): Promise<void> {
const ollamaModels = await getOllamaModels();
const openAiLikeModels = await getOpenAILikeModels();
MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
}
initializeModelList().then();
export { getOllamaModels, initializeModelList };
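
getOpenAILikeModels assumes the provider implements the OpenAI-style GET /models endpoint, which nests the model list under a data array. A sketch of the shape being mapped; the ids are made up:

// Illustrative response from GET `${OPENAI_API_LIKE_BASE_URL}/models`,
// following the OpenAI convention. Ids are hypothetical.
const exampleResponse = {
  object: 'list',
  data: [
    { id: 'my-model-small', object: 'model', owned_by: 'example' },
    { id: 'my-model-large', object: 'model', owned_by: 'example' },
  ],
};

// Each entry becomes a ModelInfo record for MODEL_LIST:
const mapped = exampleResponse.data.map((model) => ({
  name: model.id,
  label: model.id,
  provider: 'OpenAILike',
}));
// => [{ name: 'my-model-small', label: 'my-model-small', provider: 'OpenAILike' }, ...]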

View File

@@ -27,6 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
envPrefix: ['VITE_', 'OPENAI_API_LIKE_', 'OLLAMA_API_BASE_URL'],
};
});
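
envPrefix controls which variables Vite exposes on import.meta.env in bundled code, which is what lets the model-list code above read OLLAMA_API_BASE_URL and the OPENAI_API_LIKE_ values. A small sketch of the effect, assuming the variables are defined at build time:

// With the envPrefix entries above, these reads resolve in import.meta.env;
// variables without a matching prefix stay undefined in the bundle.
const ollamaBase = import.meta.env.OLLAMA_API_BASE_URL;          // matches 'OLLAMA_API_BASE_URL'
const openAiLikeBase = import.meta.env.OPENAI_API_LIKE_BASE_URL; // matches 'OPENAI_API_LIKE_'
const openAiLikeKey = import.meta.env.OPENAI_API_LIKE_KEY;       // matches 'OPENAI_API_LIKE_'

Worth noting: anything matching envPrefix can be statically inlined into client bundles, so exposing OPENAI_API_LIKE_KEY this way trades convenience for potential key exposure.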

View File

@@ -4,4 +4,6 @@ interface Env {
GROQ_API_KEY: string;
OPEN_ROUTER_API_KEY: string;
OLLAMA_API_BASE_URL: string;
OPENAI_API_LIKE_KEY: string;
OPENAI_API_LIKE_BASE_URL: string;
}