Mirror of https://github.com/open-webui/open-webui (synced 2025-01-29 13:58:09 +00:00)
refac

commit cdbabdfa5a
parent 6e19e46b07
@@ -21,6 +21,7 @@ from utils.utils import (
 )
 from config import (
     SRC_LOG_LEVELS,
+    ENABLE_OPENAI_API,
     OPENAI_API_BASE_URLS,
     OPENAI_API_KEYS,
     CACHE_DIR,
@@ -51,6 +52,8 @@ app.state.config = AppConfig()
 app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
 app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST
 
+
+app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
 app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
 app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
 
@@ -68,6 +71,21 @@ async def check_url(request: Request, call_next):
     return response
 
 
+@app.get("/config")
+async def get_config(user=Depends(get_admin_user)):
+    return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
+
+
+class OpenAIConfigForm(BaseModel):
+    enable_openai_api: Optional[bool] = None
+
+
+@app.post("/config/update")
+async def update_config(form_data: OpenAIConfigForm, user=Depends(get_admin_user)):
+    app.state.config.ENABLE_OPENAI_API = form_data.enable_openai_api
+    return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
+
+
 class UrlsUpdateForm(BaseModel):
     urls: List[str]
 
@@ -165,10 +183,13 @@ async def speech(request: Request, user=Depends(get_verified_user)):
 
 async def fetch_url(url, key):
     try:
-        headers = {"Authorization": f"Bearer {key}"}
-        async with aiohttp.ClientSession() as session:
-            async with session.get(url, headers=headers) as response:
-                return await response.json()
+        if key != "":
+            headers = {"Authorization": f"Bearer {key}"}
+            async with aiohttp.ClientSession() as session:
+                async with session.get(url, headers=headers) as response:
+                    return await response.json()
+        else:
+            return None
     except Exception as e:
         # Handle connection error here
         log.error(f"Connection error: {e}")
@@ -200,7 +221,7 @@ async def get_all_models():
     if (
         len(app.state.config.OPENAI_API_KEYS) == 1
         and app.state.config.OPENAI_API_KEYS[0] == ""
-    ):
+    ) or not app.state.config.ENABLE_OPENAI_API:
         models = {"data": []}
     else:
         tasks = [
@@ -417,6 +417,14 @@ OLLAMA_BASE_URLS = PersistentConfig(
 # OPENAI_API
 ####################################
 
+
+ENABLE_OPENAI_API = PersistentConfig(
+    "ENABLE_OPENAI_API",
+    "openai.enable",
+    os.environ.get("ENABLE_OPENAI_API", "True").lower() == "true",
+)
+
+
 OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
 OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "")
 
@@ -1,6 +1,73 @@
 import { OPENAI_API_BASE_URL } from '$lib/constants';
 import { promptTemplate } from '$lib/utils';
 
+export const getOpenAIConfig = async (token: string = '') => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/config`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
+export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
+	let error = null;
+
+	const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
+		method: 'POST',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		},
+		body: JSON.stringify({
+			enable_openai_api: enable_openai_api
+		})
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.catch((err) => {
+			console.log(err);
+			if ('detail' in err) {
+				error = err.detail;
+			} else {
+				error = 'Server connection failed';
+			}
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
+
 export const getOpenAIUrls = async (token: string = '') => {
 	let error = null;
 
@@ -584,7 +584,7 @@
 				}}
 			/>
 			<form
-				class=" flex flex-col relative w-full rounded-3xl px-1.5 border border-gray-100 dark:border-gray-850 bg-white dark:bg-gray-900 dark:text-gray-100"
+				class=" flex flex-col relative w-full rounded-3xl px-1.5 bg-gray-50 dark:bg-gray-850 dark:text-gray-100"
 				on:submit|preventDefault={() => {
 					submitPrompt(prompt, user);
 				}}
@@ -754,7 +754,7 @@
 					<textarea
 						id="chat-textarea"
 						bind:this={chatTextAreaElement}
-						class="scrollbar-hidden dark:bg-gray-900 dark:text-gray-100 outline-none w-full py-3 px-3 {fileUploadEnabled
+						class="scrollbar-hidden bg-gray-50 dark:bg-gray-850 dark:text-gray-100 outline-none w-full py-3 px-3 {fileUploadEnabled
 							? ''
 							: ' pl-4'} rounded-xl resize-none h-[48px]"
 						placeholder={chatInputPlaceholder !== ''
@@ -308,7 +308,10 @@ __builtins__.input = input`);
 				class="language-{lang} rounded-t-none whitespace-pre">{@html highlightedCode || code}</code
 			></pre>
 
-		<div id="plt-canvas-{id}" class="bg-[#202123] text-white" />
+		<div
+			id="plt-canvas-{id}"
+			class="bg-[#202123] text-white max-w-full overflow-x-auto scrollbar-hidden"
+		/>
 
 		{#if executing}
 			<div class="bg-[#202123] text-white px-4 py-4 rounded-b-lg">
@@ -5,28 +5,27 @@
 
 	import { getOllamaUrls, getOllamaVersion, updateOllamaUrls } from '$lib/apis/ollama';
 	import {
+		getOpenAIConfig,
 		getOpenAIKeys,
 		getOpenAIUrls,
+		updateOpenAIConfig,
 		updateOpenAIKeys,
 		updateOpenAIUrls
 	} from '$lib/apis/openai';
 	import { toast } from 'svelte-sonner';
+	import Switch from '$lib/components/common/Switch.svelte';
 
 	const i18n = getContext('i18n');
 
 	export let getModels: Function;
 
 	// External
-	let OLLAMA_BASE_URL = '';
 	let OLLAMA_BASE_URLS = [''];
 
-	let OPENAI_API_KEY = '';
-	let OPENAI_API_BASE_URL = '';
-
 	let OPENAI_API_KEYS = [''];
 	let OPENAI_API_BASE_URLS = [''];
 
-	let showOpenAI = false;
+	let ENABLE_OPENAI_API = false;
 
 	const updateOpenAIHandler = async () => {
 		OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
@@ -52,6 +51,10 @@
 	onMount(async () => {
 		if ($user.role === 'admin') {
 			OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
+
+			const config = await getOpenAIConfig(localStorage.token);
+			ENABLE_OPENAI_API = config.ENABLE_OPENAI_API;
+
 			OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
 			OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
 		}
@@ -70,16 +73,18 @@
 			<div class="mt-2 space-y-2 pr-1.5">
 				<div class="flex justify-between items-center text-sm">
 					<div class=" font-medium">{$i18n.t('OpenAI API')}</div>
-					<button
-						class=" text-xs font-medium text-gray-500"
-						type="button"
-						on:click={() => {
-							showOpenAI = !showOpenAI;
-						}}>{showOpenAI ? $i18n.t('Hide') : $i18n.t('Show')}</button
-					>
+
+					<div class="mt-1">
+						<Switch
+							bind:state={ENABLE_OPENAI_API}
+							on:change={async () => {
+								updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API);
+							}}
+						/>
+					</div>
 				</div>
 
-			{#if showOpenAI}
+			{#if ENABLE_OPENAI_API}
 				<div class="flex flex-col gap-1">
 					{#each OPENAI_API_BASE_URLS as url, idx}
 						<div class="flex w-full gap-2">
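
Taken together, the new GET /config and POST /config/update routes on the backend and the getOpenAIConfig / updateOpenAIConfig helpers added above could be exercised roughly as follows. This is a minimal sketch, not part of the commit: it assumes an admin session token is available as localStorage.token (as elsewhere in this codebase), and toggleOpenAIAPI is a hypothetical helper name.

// Minimal usage sketch (assumption: runs inside the SvelteKit frontend where
// localStorage.token holds an admin token).
import { getOpenAIConfig, updateOpenAIConfig } from '$lib/apis/openai';

export const toggleOpenAIAPI = async () => {
	// GET /config returns { ENABLE_OPENAI_API: boolean } per the new backend endpoint.
	const config = await getOpenAIConfig(localStorage.token);

	// POST /config/update persists the flipped flag and echoes the new value back.
	const updated = await updateOpenAIConfig(localStorage.token, !config.ENABLE_OPENAI_API);
	return updated.ENABLE_OPENAI_API;
};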