Mirror of https://github.com/open-webui/open-webui (synced 2025-01-18 00:30:51 +00:00)

refac: openai connections

Commit 0809eb79b8 (parent b82e25cac8)
@@ -46,7 +46,11 @@ log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["OLLAMA"])


-app = FastAPI(docs_url="/docs" if ENV == "dev" else None, openapi_url="/openapi.json" if ENV == "dev" else None, redoc_url=None)
+app = FastAPI(
+    docs_url="/docs" if ENV == "dev" else None,
+    openapi_url="/openapi.json" if ENV == "dev" else None,
+    redoc_url=None,
+)


 app.add_middleware(
     CORSMiddleware,
@@ -90,34 +94,26 @@ async def get_status():


 @app.get("/config")
 async def get_config(user=Depends(get_admin_user)):
-    return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API}
+    return {
+        "ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API,
+        "OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS,
+    }


 class OllamaConfigForm(BaseModel):
-    enable_ollama_api: Optional[bool] = None
+    ENABLE_OLLAMA_API: Optional[bool] = None
+    OLLAMA_BASE_URLS: list[str]


 @app.post("/config/update")
 async def update_config(form_data: OllamaConfigForm, user=Depends(get_admin_user)):
-    app.state.config.ENABLE_OLLAMA_API = form_data.enable_ollama_api
-    return {"ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API}
+    app.state.config.ENABLE_OLLAMA_API = form_data.ENABLE_OLLAMA_API
+    app.state.config.OLLAMA_BASE_URLS = form_data.OLLAMA_BASE_URLS


-@app.get("/urls")
-async def get_ollama_api_urls(user=Depends(get_admin_user)):
-    return {"OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS}
-
-
-class UrlUpdateForm(BaseModel):
-    urls: list[str]
-
-
-@app.post("/urls/update")
-async def update_ollama_api_url(form_data: UrlUpdateForm, user=Depends(get_admin_user)):
-    app.state.config.OLLAMA_BASE_URLS = form_data.urls
-
     log.info(f"app.state.config.OLLAMA_BASE_URLS: {app.state.config.OLLAMA_BASE_URLS}")
-    return {"OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS}
+    return {
+        "ENABLE_OLLAMA_API": app.state.config.ENABLE_OLLAMA_API,
+        "OLLAMA_BASE_URLS": app.state.config.OLLAMA_BASE_URLS,
+    }


 async def fetch_url(url):
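With this change the Ollama router reads and writes its base URLs through the /config pair; the old /urls and /urls/update routes (and UrlUpdateForm) are gone. A minimal sketch of an admin client against the new endpoints, assuming the router is mounted at /ollama on a default local deployment and that the token below is a valid admin JWT (both are assumptions, not part of this diff):

    import requests

    ADMIN_TOKEN = "..."  # placeholder admin JWT
    BASE = "http://localhost:8080/ollama"  # assumed default mount point and port
    HEADERS = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

    # GET /config now returns both fields in one payload.
    config = requests.get(f"{BASE}/config", headers=HEADERS).json()
    print(config["ENABLE_OLLAMA_API"], config["OLLAMA_BASE_URLS"])

    # POST /config/update takes the same two fields and echoes the saved values.
    saved = requests.post(
        f"{BASE}/config/update",
        headers=HEADERS,
        json={
            "ENABLE_OLLAMA_API": True,
            "OLLAMA_BASE_URLS": ["http://localhost:11434"],
        },
    ).json()
    print(saved)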
@@ -16,6 +16,7 @@ from open_webui.config import (
     MODEL_FILTER_LIST,
     OPENAI_API_BASE_URLS,
     OPENAI_API_KEYS,
+    OPENAI_API_CONFIGS,
     AppConfig,
 )
 from open_webui.env import (
@@ -43,7 +44,11 @@ log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["OPENAI"])


-app = FastAPI(docs_url="/docs" if ENV == "dev" else None, openapi_url="/openapi.json" if ENV == "dev" else None, redoc_url=None)
+app = FastAPI(
+    docs_url="/docs" if ENV == "dev" else None,
+    openapi_url="/openapi.json" if ENV == "dev" else None,
+    redoc_url=None,
+)


 app.add_middleware(
@@ -62,6 +67,7 @@ app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST
 app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
 app.state.config.OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS
 app.state.config.OPENAI_API_KEYS = OPENAI_API_KEYS
+app.state.config.OPENAI_API_CONFIGS = OPENAI_API_CONFIGS

 app.state.MODELS = {}
@@ -77,48 +83,58 @@ async def check_url(request: Request, call_next):

 @app.get("/config")
 async def get_config(user=Depends(get_admin_user)):
-    return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
+    return {
+        "ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API,
+        "OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS,
+        "OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS,
+        "OPENAI_API_CONFIGS": app.state.config.OPENAI_API_CONFIGS,
+    }


 class OpenAIConfigForm(BaseModel):
-    enable_openai_api: Optional[bool] = None
+    ENABLE_OPENAI_API: Optional[bool] = None
+    OPENAI_API_BASE_URLS: list[str]
+    OPENAI_API_KEYS: list[str]
+    OPENAI_API_CONFIGS: dict


 @app.post("/config/update")
 async def update_config(form_data: OpenAIConfigForm, user=Depends(get_admin_user)):
-    app.state.config.ENABLE_OPENAI_API = form_data.enable_openai_api
-    return {"ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API}
+    app.state.config.ENABLE_OPENAI_API = form_data.ENABLE_OPENAI_API

+    app.state.config.OPENAI_API_BASE_URLS = form_data.OPENAI_API_BASE_URLS
+    app.state.config.OPENAI_API_KEYS = form_data.OPENAI_API_KEYS

-class UrlsUpdateForm(BaseModel):
-    urls: list[str]
+    # Check if API KEYS length is same than API URLS length
+    if len(app.state.config.OPENAI_API_KEYS) != len(
+        app.state.config.OPENAI_API_BASE_URLS
+    ):
+        if len(app.state.config.OPENAI_API_KEYS) > len(
+            app.state.config.OPENAI_API_BASE_URLS
+        ):
+            app.state.config.OPENAI_API_KEYS = app.state.config.OPENAI_API_KEYS[
+                : len(app.state.config.OPENAI_API_BASE_URLS)
+            ]
+        else:
+            app.state.config.OPENAI_API_KEYS += [""] * (
+                len(app.state.config.OPENAI_API_BASE_URLS)
+                - len(app.state.config.OPENAI_API_KEYS)
+            )

+    app.state.config.OPENAI_API_CONFIGS = form_data.OPENAI_API_CONFIGS

-class KeysUpdateForm(BaseModel):
-    keys: list[str]
+    # Remove any extra configs
+    config_urls = app.state.config.OPENAI_API_CONFIGS.keys()
+    for idx, url in enumerate(app.state.config.OPENAI_API_BASE_URLS):
+        if url not in config_urls:
+            app.state.config.OPENAI_API_CONFIGS.pop(url, None)


-@app.get("/urls")
-async def get_openai_urls(user=Depends(get_admin_user)):
-    return {"OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS}
-
-
-@app.post("/urls/update")
-async def update_openai_urls(form_data: UrlsUpdateForm, user=Depends(get_admin_user)):
-    await get_all_models()
-    app.state.config.OPENAI_API_BASE_URLS = form_data.urls
-    return {"OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS}
-
-
-@app.get("/keys")
-async def get_openai_keys(user=Depends(get_admin_user)):
-    return {"OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS}
-
-
-@app.post("/keys/update")
-async def update_openai_key(form_data: KeysUpdateForm, user=Depends(get_admin_user)):
-    app.state.config.OPENAI_API_KEYS = form_data.keys
-    return {"OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS}
+    return {
+        "ENABLE_OPENAI_API": app.state.config.ENABLE_OPENAI_API,
+        "OPENAI_API_BASE_URLS": app.state.config.OPENAI_API_BASE_URLS,
+        "OPENAI_API_KEYS": app.state.config.OPENAI_API_KEYS,
+        "OPENAI_API_CONFIGS": app.state.config.OPENAI_API_CONFIGS,
+    }


 @app.post("/audio/speech")
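Elsewhere in this commit the per-connection settings read back from OPENAI_API_CONFIGS are enabled, prefix_id, and model_ids, keyed by base URL. A sketch of the shape the /config and /config/update handlers above pass through (the URLs, prefix, and model IDs are illustrative, not taken from the diff):

    OPENAI_API_CONFIGS = {
        "https://api.openai.com/v1": {
            "enabled": True,   # connection participates in model listing and completions
            "prefix_id": "",   # optional namespace; models are exposed as "<prefix_id>.<id>"
            "model_ids": [],   # empty list means: fetch the full {url}/models list
        },
        "http://localhost:4000/v1": {
            "enabled": True,
            "prefix_id": "lab",             # illustrative
            "model_ids": ["llama-3.1-8b"],  # only these IDs are exposed, no /models call
        },
    }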
@@ -190,7 +206,7 @@ async def speech(request: Request, user=Depends(get_verified_user)):
     raise HTTPException(status_code=401, detail=ERROR_MESSAGES.OPENAI_NOT_FOUND)


-async def fetch_url(url, key):
+async def aiohttp_get(url, key):
     timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
     try:
         headers = {"Authorization": f"Bearer {key}"}
@@ -248,12 +264,8 @@ def merge_models_lists(model_lists):
     return merged_list


-def is_openai_api_disabled():
-    return not app.state.config.ENABLE_OPENAI_API
-
-
 async def get_all_models_raw() -> list:
-    if is_openai_api_disabled():
+    if not app.state.config.ENABLE_OPENAI_API:
         return []

     # Check if API KEYS length is same than API URLS length
@@ -269,12 +281,55 @@ async def get_all_models_raw() -> list:
     else:
         app.state.config.OPENAI_API_KEYS += [""] * (num_urls - num_keys)

-    tasks = [
-        fetch_url(f"{url}/models", app.state.config.OPENAI_API_KEYS[idx])
-        for idx, url in enumerate(app.state.config.OPENAI_API_BASE_URLS)
-    ]
+    tasks = []
+    for idx, url in enumerate(app.state.config.OPENAI_API_BASE_URLS):
+        if url not in app.state.config.OPENAI_API_CONFIGS:
+            tasks.append(
+                aiohttp_get(f"{url}/models", app.state.config.OPENAI_API_KEYS[idx])
+            )
+        else:
+            api_config = app.state.config.OPENAI_API_CONFIGS[url]
+
+            enabled = api_config.get("enabled", True)
+            model_ids = api_config.get("model_ids", [])
+
+            if enabled:
+                if len(model_ids) == 0:
+                    tasks.append(
+                        aiohttp_get(
+                            f"{url}/models", app.state.config.OPENAI_API_KEYS[idx]
+                        )
+                    )
+                else:
+                    model_list = {
+                        "object": "list",
+                        "data": [
+                            {
+                                "id": model_id,
+                                "name": model_id,
+                                "owned_by": "openai",
+                                "openai": {"id": model_id},
+                                "urlIdx": idx,
+                            }
+                            for model_id in model_ids
+                        ],
+                    }
+
+                    tasks.append(asyncio.ensure_future(asyncio.sleep(0, model_list)))

     responses = await asyncio.gather(*tasks)

+    for idx, response in enumerate(responses):
+        if response:
+            url = app.state.config.OPENAI_API_BASE_URLS[idx]
+            api_config = app.state.config.OPENAI_API_CONFIGS[url]
+
+            prefix_id = api_config.get("prefix_id", None)
+
+            if prefix_id:
+                for model in response["data"]:
+                    model["id"] = f"{prefix_id}.{model['id']}"
+
     log.debug(f"get_all_models:responses() {responses}")

     return responses
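The static model_ids branch above leans on the fact that asyncio.sleep(delay, result) resolves to result; with delay=0 a pre-built model list completes immediately and can be gathered alongside the real aiohttp_get() coroutines. A self-contained sketch of that pattern:

    import asyncio


    async def real_fetch():
        await asyncio.sleep(0.01)  # stand-in for an actual aiohttp request
        return {"object": "list", "data": [{"id": "remote-model"}]}


    async def main():
        static_list = {"object": "list", "data": [{"id": "static-model"}]}
        responses = await asyncio.gather(
            asyncio.ensure_future(asyncio.sleep(0, static_list)),  # resolves to static_list
            real_fetch(),
        )
        print([r["data"][0]["id"] for r in responses])  # ['static-model', 'remote-model']


    asyncio.run(main())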
@@ -290,7 +345,7 @@ async def get_all_models(raw: Literal[False] = False) -> dict[str, list]: ...


 async def get_all_models(raw=False) -> dict[str, list] | list:
     log.info("get_all_models()")
-    if is_openai_api_disabled():
+    if not app.state.config.ENABLE_OPENAI_API:
         return [] if raw else {"data": []}

     responses = await get_all_models_raw()
@@ -342,7 +397,6 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_us

         r = None

-
         timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
         async with aiohttp.ClientSession(timeout=timeout) as session:
             try:
@@ -361,7 +415,8 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_us
                 if "api.openai.com" in url:
                     # Filter models according to the specified conditions
                     response_data["data"] = [
-                        model for model in response_data.get("data", [])
+                        model
+                        for model in response_data.get("data", [])
                         if not any(
                             name in model["id"]
                             for name in [
@@ -381,7 +436,9 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_us
             # ClientError covers all aiohttp requests issues
             log.exception(f"Client error: {str(e)}")
             # Handle aiohttp-specific connection issues, timeout etc.
-            raise HTTPException(status_code=500, detail="Open WebUI: Server Connection Error")
+            raise HTTPException(
+                status_code=500, detail="Open WebUI: Server Connection Error"
+            )
         except Exception as e:
             log.exception(f"Unexpected error: {e}")
             # Generic error handler in case parsing JSON or other steps fail
@@ -389,6 +446,49 @@ async def get_models(url_idx: Optional[int] = None, user=Depends(get_verified_us
             raise HTTPException(status_code=500, detail=error_detail)


+class ConnectionVerificationForm(BaseModel):
+    url: str
+    key: str
+
+
+@app.post("/verify")
+async def verify_connection(
+    form_data: ConnectionVerificationForm, user=Depends(get_admin_user)
+):
+    url = form_data.url
+    key = form_data.key
+
+    headers = {}
+    headers["Authorization"] = f"Bearer {key}"
+    headers["Content-Type"] = "application/json"
+
+    timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
+    async with aiohttp.ClientSession(timeout=timeout) as session:
+        try:
+            async with session.get(f"{url}/models", headers=headers) as r:
+                if r.status != 200:
+                    # Extract response error details if available
+                    error_detail = f"HTTP Error: {r.status}"
+                    res = await r.json()
+                    if "error" in res:
+                        error_detail = f"External Error: {res['error']}"
+                    raise Exception(error_detail)
+
+                response_data = await r.json()
+                return response_data
+
+        except aiohttp.ClientError as e:
+            # ClientError covers all aiohttp requests issues
+            log.exception(f"Client error: {str(e)}")
+            # Handle aiohttp-specific connection issues, timeout etc.
+            raise HTTPException(
+                status_code=500, detail="Open WebUI: Server Connection Error"
+            )
+        except Exception as e:
+            log.exception(f"Unexpected error: {e}")
+            # Generic error handler in case parsing JSON or other steps fail
+            error_detail = f"Unexpected error: {str(e)}"
+            raise HTTPException(status_code=500, detail=error_detail)
+
+
 @app.post("/chat/completions")
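The new /verify route simply forwards a GET to {url}/models with the submitted key and returns the upstream response, so an admin can test a connection before saving it. A sketch of calling it directly, assuming the OpenAI router is mounted at /openai on a default local deployment and that the token is a valid admin JWT (assumptions, not part of the diff):

    import requests

    ADMIN_TOKEN = "..."  # placeholder admin JWT
    resp = requests.post(
        "http://localhost:8080/openai/verify",  # assumed default mount point and port
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        json={"url": "https://api.openai.com/v1", "key": "sk-..."},  # placeholder key
    )
    resp.raise_for_status()
    print(len(resp.json().get("data", [])), "models reachable through this connection")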
@@ -418,6 +518,14 @@ async def generate_chat_completion(
     model = app.state.MODELS[payload.get("model")]
     idx = model["urlIdx"]

+    api_config = app.state.config.OPENAI_API_CONFIGS.get(
+        app.state.config.OPENAI_API_BASE_URLS[idx], {}
+    )
+    prefix_id = api_config.get("prefix_id", None)
+
+    if prefix_id:
+        payload["model"] = payload["model"].replace(f"{prefix_id}.", "")
+
     if "pipeline" in model and model.get("pipeline"):
         payload["user"] = {
             "name": user.name,
@@ -607,6 +607,12 @@ OLLAMA_BASE_URLS = PersistentConfig(
     "OLLAMA_BASE_URLS", "ollama.base_urls", OLLAMA_BASE_URLS
 )

+OLLAMA_API_CONFIGS = PersistentConfig(
+    "OLLAMA_API_CONFIGS",
+    "ollama.api_configs",
+    {},
+)
+
 ####################################
 # OPENAI_API
 ####################################
@@ -647,15 +653,20 @@ OPENAI_API_BASE_URLS = PersistentConfig(
     "OPENAI_API_BASE_URLS", "openai.api_base_urls", OPENAI_API_BASE_URLS
 )

-OPENAI_API_KEY = ""
+OPENAI_API_CONFIGS = PersistentConfig(
+    "OPENAI_API_CONFIGS",
+    "openai.api_configs",
+    {},
+)

+# Get the actual OpenAI API key based on the base URL
+OPENAI_API_KEY = ""
 try:
     OPENAI_API_KEY = OPENAI_API_KEYS.value[
         OPENAI_API_BASE_URLS.value.index("https://api.openai.com/v1")
     ]
 except Exception:
     pass

 OPENAI_API_BASE_URL = "https://api.openai.com/v1"

 ####################################
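The key-selection block above keeps the legacy OPENAI_API_KEY in sync with the persisted lists: it takes the key stored at the same index as the official endpoint in OPENAI_API_BASE_URLS and silently falls back to an empty string when that URL is not configured. The same lookup on plain lists (example values, not from the diff):

    OPENAI_API_BASE_URLS = ["http://localhost:4000/v1", "https://api.openai.com/v1"]
    OPENAI_API_KEYS = ["", "sk-..."]  # placeholder key

    OPENAI_API_KEY = ""
    try:
        OPENAI_API_KEY = OPENAI_API_KEYS[
            OPENAI_API_BASE_URLS.index("https://api.openai.com/v1")
        ]
    except Exception:
        pass

    print(OPENAI_API_KEY)  # "sk-..." here; stays "" when the URL is absent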
@@ -32,7 +32,13 @@ export const getOllamaConfig = async (token: string = '') => {
     return res;
 };

-export const updateOllamaConfig = async (token: string = '', enable_ollama_api: boolean) => {
+type OllamaConfig = {
+    ENABLE_OLLAMA_API: boolean,
+    OLLAMA_BASE_URLS: string[],
+    OLLAMA_API_CONFIGS: object
+}
+
+export const updateOllamaConfig = async (token: string = '', config: OllamaConfig) => {
     let error = null;

     const res = await fetch(`${OLLAMA_API_BASE_URL}/config/update`, {
@@ -43,7 +49,7 @@ export const updateOllamaConfig = async (token: string = '', enable_ollama_api:
             ...(token && { authorization: `Bearer ${token}` })
         },
         body: JSON.stringify({
-            enable_ollama_api: enable_ollama_api
+            ...config
         })
     })
         .then(async (res) => {
@@ -32,7 +32,17 @@ export const getOpenAIConfig = async (token: string = '') => {
     return res;
 };

-export const updateOpenAIConfig = async (token: string = '', enable_openai_api: boolean) => {
+type OpenAIConfig = {
+    ENABLE_OPENAI_API: boolean;
+    OPENAI_API_BASE_URLS: string[];
+    OPENAI_API_KEYS: string[];
+    OPENAI_API_CONFIGS: object;
+}
+
+export const updateOpenAIConfig = async (token: string = '', config: OpenAIConfig) => {
     let error = null;

     const res = await fetch(`${OPENAI_API_BASE_URL}/config/update`, {
@@ -43,7 +53,7 @@ export const updateOpenAIConfig = async (token: string = '', enable_openai_api:
             ...(token && { authorization: `Bearer ${token}` })
         },
         body: JSON.stringify({
-            enable_openai_api: enable_openai_api
+            ...config
         })
     })
         .then(async (res) => {
@@ -99,6 +109,7 @@ export const getOpenAIUrls = async (token: string = '') => {
     return res.OPENAI_API_BASE_URLS;
 };

+
 export const updateOpenAIUrls = async (token: string = '', urls: string[]) => {
     let error = null;

@@ -231,41 +242,43 @@ export const getOpenAIModels = async (token: string, urlIdx?: number) => {
     return res;
 };

-export const getOpenAIModelsDirect = async (
-    base_url: string = 'https://api.openai.com/v1',
-    api_key: string = ''
+export const verifyOpenAIConnection = async (
+    token: string = '',
+    url: string = 'https://api.openai.com/v1',
+    key: string = ''
 ) => {
     let error = null;

-    const res = await fetch(`${base_url}/models`, {
-        method: 'GET',
-        headers: {
-            'Content-Type': 'application/json',
-            Authorization: `Bearer ${api_key}`
+    const res = await fetch(
+        `${OPENAI_API_BASE_URL}/verify`,
+        {
+            method: 'POST',
+            headers: {
+                Accept: 'application/json',
+                Authorization: `Bearer ${token}`,
+                'Content-Type': 'application/json'
+            },
+            body: JSON.stringify({
+                url,
+                key
+            })
+        }
-    })
+    )
         .then(async (res) => {
             if (!res.ok) throw await res.json();
             return res.json();
         })
         .catch((err) => {
             console.log(err);
             error = `OpenAI: ${err?.error?.message ?? 'Network Problem'}`;
-            return null;
+            return [];
         });

     if (error) {
         throw error;
     }

-    const models = Array.isArray(res) ? res : (res?.data ?? null);
-
-    return models
-        .map((model) => ({ id: model.id, name: model.name ?? model.id, external: true }))
-        .filter((model) => (base_url.includes('openai') ? model.name.includes('gpt') : true))
-        .sort((a, b) => {
-            return a.name.localeCompare(b.name);
-        });
+    return res;
 };

 export const generateOpenAIChatCompletion = async (
@@ -39,7 +39,7 @@
|
||||
});
|
||||
</script>
|
||||
|
||||
<div class="flex flex-col lg:flex-row w-full h-full py-2 lg:space-x-4">
|
||||
<div class="flex flex-col lg:flex-row w-full h-full pt-1 pb-2 lg:space-x-4">
|
||||
<div
|
||||
id="admin-settings-tabs-container"
|
||||
class="tabs flex flex-row overflow-x-auto space-x-1 max-w-full lg:space-x-0 lg:space-y-1 lg:flex-col lg:flex-none lg:w-44 dark:text-gray-200 text-xs text-left scrollbar-none"
|
||||
|
@@ -1,33 +1,22 @@
|
||||
<script lang="ts">
|
||||
import { models, user } from '$lib/stores';
|
||||
import { toast } from 'svelte-sonner';
|
||||
import { createEventDispatcher, onMount, getContext, tick } from 'svelte';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
import {
|
||||
getOllamaConfig,
|
||||
getOllamaUrls,
|
||||
getOllamaVersion,
|
||||
updateOllamaConfig,
|
||||
updateOllamaUrls
|
||||
} from '$lib/apis/ollama';
|
||||
import {
|
||||
getOpenAIConfig,
|
||||
getOpenAIKeys,
|
||||
getOpenAIModels,
|
||||
getOpenAIUrls,
|
||||
updateOpenAIConfig,
|
||||
updateOpenAIKeys,
|
||||
updateOpenAIUrls
|
||||
} from '$lib/apis/openai';
|
||||
import { getOllamaConfig, updateOllamaConfig } from '$lib/apis/ollama';
|
||||
import { getOpenAIConfig, updateOpenAIConfig, getOpenAIModels } from '$lib/apis/openai';
|
||||
import { getModels as _getModels } from '$lib/apis';
|
||||
|
||||
import { models, user } from '$lib/stores';
|
||||
|
||||
import { toast } from 'svelte-sonner';
|
||||
import Switch from '$lib/components/common/Switch.svelte';
|
||||
import Spinner from '$lib/components/common/Spinner.svelte';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import { getModels as _getModels } from '$lib/apis';
|
||||
import SensitiveInput from '$lib/components/common/SensitiveInput.svelte';
|
||||
import Cog6 from '$lib/components/icons/Cog6.svelte';
|
||||
|
||||
import OpenAIConnection from './Connections/OpenAIConnection.svelte';
|
||||
import OpenAIConnectionModal from './Connections/OpenAIConnectionModal.svelte';
|
||||
import Plus from '$lib/components/icons/Plus.svelte';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
@@ -38,126 +27,113 @@
|
||||
|
||||
// External
|
||||
let OLLAMA_BASE_URLS = [''];
|
||||
let OLLAMA_API_CONFIGS = {};
|
||||
|
||||
let OPENAI_API_KEYS = [''];
|
||||
let OPENAI_API_BASE_URLS = [''];
|
||||
let OPENAI_API_CONFIGS = {};
|
||||
|
||||
let ENABLE_OPENAI_API: null | boolean = null;
|
||||
let ENABLE_OLLAMA_API: null | boolean = null;
|
||||
|
||||
let pipelineUrls = {};
|
||||
|
||||
let ENABLE_OPENAI_API = null;
|
||||
let ENABLE_OLLAMA_API = null;
|
||||
|
||||
const verifyOpenAIHandler = async (idx) => {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.map((url) => url.replace(/\/$/, ''));
|
||||
|
||||
OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
|
||||
OPENAI_API_KEYS = await updateOpenAIKeys(localStorage.token, OPENAI_API_KEYS);
|
||||
|
||||
const res = await getOpenAIModels(localStorage.token, idx).catch((error) => {
|
||||
toast.error(error);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res) {
|
||||
toast.success($i18n.t('Server connection verified'));
|
||||
if (res.pipelines) {
|
||||
pipelineUrls[OPENAI_API_BASE_URLS[idx]] = true;
|
||||
}
|
||||
}
|
||||
|
||||
await models.set(await getModels());
|
||||
};
|
||||
|
||||
const verifyOllamaHandler = async (idx) => {
|
||||
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url) => url !== '').map((url) =>
|
||||
url.replace(/\/$/, '')
|
||||
);
|
||||
|
||||
OLLAMA_BASE_URLS = await updateOllamaUrls(localStorage.token, OLLAMA_BASE_URLS);
|
||||
|
||||
const res = await getOllamaVersion(localStorage.token, idx).catch((error) => {
|
||||
toast.error(error);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res) {
|
||||
toast.success($i18n.t('Server connection verified'));
|
||||
}
|
||||
|
||||
await models.set(await getModels());
|
||||
};
|
||||
let showAddOpenAIConnectionModal = false;
|
||||
|
||||
const updateOpenAIHandler = async () => {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.map((url) => url.replace(/\/$/, ''));
|
||||
if (ENABLE_OPENAI_API !== null) {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.map((url) => url.replace(/\/$/, ''));
|
||||
// Check if API KEYS length is same than API URLS length
|
||||
if (OPENAI_API_KEYS.length !== OPENAI_API_BASE_URLS.length) {
|
||||
// if there are more keys than urls, remove the extra keys
|
||||
if (OPENAI_API_KEYS.length > OPENAI_API_BASE_URLS.length) {
|
||||
OPENAI_API_KEYS = OPENAI_API_KEYS.slice(0, OPENAI_API_BASE_URLS.length);
|
||||
}
|
||||
|
||||
// Check if API KEYS length is same than API URLS length
|
||||
if (OPENAI_API_KEYS.length !== OPENAI_API_BASE_URLS.length) {
|
||||
// if there are more keys than urls, remove the extra keys
|
||||
if (OPENAI_API_KEYS.length > OPENAI_API_BASE_URLS.length) {
|
||||
OPENAI_API_KEYS = OPENAI_API_KEYS.slice(0, OPENAI_API_BASE_URLS.length);
|
||||
}
|
||||
|
||||
// if there are more urls than keys, add empty keys
|
||||
if (OPENAI_API_KEYS.length < OPENAI_API_BASE_URLS.length) {
|
||||
const diff = OPENAI_API_BASE_URLS.length - OPENAI_API_KEYS.length;
|
||||
for (let i = 0; i < diff; i++) {
|
||||
OPENAI_API_KEYS.push('');
|
||||
// if there are more urls than keys, add empty keys
|
||||
if (OPENAI_API_KEYS.length < OPENAI_API_BASE_URLS.length) {
|
||||
const diff = OPENAI_API_BASE_URLS.length - OPENAI_API_KEYS.length;
|
||||
for (let i = 0; i < diff; i++) {
|
||||
OPENAI_API_KEYS.push('');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
OPENAI_API_BASE_URLS = await updateOpenAIUrls(localStorage.token, OPENAI_API_BASE_URLS);
|
||||
OPENAI_API_KEYS = await updateOpenAIKeys(localStorage.token, OPENAI_API_KEYS);
|
||||
await models.set(await getModels());
|
||||
};
|
||||
|
||||
const updateOllamaUrlsHandler = async () => {
|
||||
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url) => url !== '').map((url) =>
|
||||
url.replace(/\/$/, '')
|
||||
);
|
||||
|
||||
console.log(OLLAMA_BASE_URLS);
|
||||
|
||||
if (OLLAMA_BASE_URLS.length === 0) {
|
||||
ENABLE_OLLAMA_API = false;
|
||||
await updateOllamaConfig(localStorage.token, ENABLE_OLLAMA_API);
|
||||
|
||||
toast.info($i18n.t('Ollama API disabled'));
|
||||
} else {
|
||||
OLLAMA_BASE_URLS = await updateOllamaUrls(localStorage.token, OLLAMA_BASE_URLS);
|
||||
|
||||
const ollamaVersion = await getOllamaVersion(localStorage.token).catch((error) => {
|
||||
const res = await updateOpenAIConfig(localStorage.token, {
|
||||
ENABLE_OPENAI_API: ENABLE_OPENAI_API,
|
||||
OPENAI_API_BASE_URLS: OPENAI_API_BASE_URLS,
|
||||
OPENAI_API_KEYS: OPENAI_API_KEYS,
|
||||
OPENAI_API_CONFIGS: OPENAI_API_CONFIGS
|
||||
}).catch((error) => {
|
||||
toast.error(error);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (ollamaVersion) {
|
||||
toast.success($i18n.t('Server connection verified'));
|
||||
if (res) {
|
||||
toast.success($i18n.t('OpenAI API settings updated'));
|
||||
await models.set(await getModels());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const updateOllamaHandler = async () => {
|
||||
if (ENABLE_OLLAMA_API !== null) {
|
||||
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter((url) => url !== '').map((url) =>
|
||||
url.replace(/\/$/, '')
|
||||
);
|
||||
|
||||
console.log(OLLAMA_BASE_URLS);
|
||||
|
||||
if (OLLAMA_BASE_URLS.length === 0) {
|
||||
ENABLE_OLLAMA_API = false;
|
||||
toast.info($i18n.t('Ollama API disabled'));
|
||||
}
|
||||
|
||||
const res = await updateOllamaConfig(localStorage.token, {
|
||||
ENABLE_OLLAMA_API: ENABLE_OLLAMA_API,
|
||||
OLLAMA_BASE_URLS: OLLAMA_BASE_URLS,
|
||||
OLLAMA_API_CONFIGS: OLLAMA_API_CONFIGS
|
||||
}).catch((error) => {
|
||||
toast.error(error);
|
||||
});
|
||||
|
||||
if (res) {
|
||||
toast.success($i18n.t('Ollama API settings updated'));
|
||||
await models.set(await getModels());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const addOpenAIConnectionHandler = async (connection) => {
|
||||
OPENAI_API_BASE_URLS = [...OPENAI_API_BASE_URLS, connection.url];
|
||||
OPENAI_API_KEYS = [...OPENAI_API_KEYS, connection.key];
|
||||
OPENAI_API_CONFIGS[connection.url] = connection.config;
|
||||
|
||||
await updateOpenAIHandler();
|
||||
};
|
||||
|
||||
onMount(async () => {
|
||||
if ($user.role === 'admin') {
|
||||
let ollamaConfig = {};
|
||||
let openaiConfig = {};
|
||||
|
||||
await Promise.all([
|
||||
(async () => {
|
||||
OLLAMA_BASE_URLS = await getOllamaUrls(localStorage.token);
|
||||
ollamaConfig = await getOllamaConfig(localStorage.token);
|
||||
})(),
|
||||
(async () => {
|
||||
OPENAI_API_BASE_URLS = await getOpenAIUrls(localStorage.token);
|
||||
})(),
|
||||
(async () => {
|
||||
OPENAI_API_KEYS = await getOpenAIKeys(localStorage.token);
|
||||
openaiConfig = await getOpenAIConfig(localStorage.token);
|
||||
})()
|
||||
]);
|
||||
|
||||
const ollamaConfig = await getOllamaConfig(localStorage.token);
|
||||
const openaiConfig = await getOpenAIConfig(localStorage.token);
|
||||
|
||||
ENABLE_OPENAI_API = openaiConfig.ENABLE_OPENAI_API;
|
||||
ENABLE_OLLAMA_API = ollamaConfig.ENABLE_OLLAMA_API;
|
||||
|
||||
OPENAI_API_BASE_URLS = openaiConfig.OPENAI_API_BASE_URLS;
|
||||
OPENAI_API_KEYS = openaiConfig.OPENAI_API_KEYS;
|
||||
OPENAI_API_CONFIGS = openaiConfig.OPENAI_API_CONFIGS;
|
||||
|
||||
OLLAMA_BASE_URLS = ollamaConfig.OLLAMA_BASE_URLS;
|
||||
OLLAMA_API_CONFIGS = ollamaConfig.OLLAMA_API_CONFIGS;
|
||||
|
||||
if (ENABLE_OPENAI_API) {
|
||||
OPENAI_API_BASE_URLS.forEach(async (url, idx) => {
|
||||
const res = await getOpenAIModels(localStorage.token, idx);
|
||||
@@ -165,16 +141,35 @@
|
||||
pipelineUrls[url] = true;
|
||||
}
|
||||
});
|
||||
|
||||
for (const url of OPENAI_API_BASE_URLS) {
|
||||
if (!OPENAI_API_CONFIGS[url]) {
|
||||
OPENAI_API_CONFIGS[url] = {};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (ENABLE_OLLAMA_API) {
|
||||
for (const url of OLLAMA_BASE_URLS) {
|
||||
if (!OLLAMA_API_CONFIGS[url]) {
|
||||
OLLAMA_API_CONFIGS[url] = {};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<OpenAIConnectionModal
|
||||
bind:show={showAddOpenAIConnectionModal}
|
||||
onSubmit={addOpenAIConnectionHandler}
|
||||
/>
|
||||
|
||||
<form
|
||||
class="flex flex-col h-full justify-between text-sm"
|
||||
on:submit|preventDefault={() => {
|
||||
updateOpenAIHandler();
|
||||
updateOllamaUrlsHandler();
|
||||
updateOllamaHandler();
|
||||
|
||||
dispatch('save');
|
||||
}}
|
||||
@@ -191,7 +186,7 @@
|
||||
<Switch
|
||||
bind:state={ENABLE_OPENAI_API}
|
||||
on:change={async () => {
|
||||
updateOpenAIConfig(localStorage.token, ENABLE_OPENAI_API);
|
||||
updateOpenAIHandler();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
@@ -202,149 +197,39 @@
|
||||
<hr class=" border-gray-50 dark:border-gray-850" />
|
||||
|
||||
<div class="">
|
||||
<div class="flex justify-between items-center mb-1.5">
|
||||
<div class="flex justify-between items-center">
|
||||
<div class="font-medium">{$i18n.t('Manage OpenAI API Connections')}</div>
|
||||
|
||||
<button
|
||||
class="px-1"
|
||||
on:click={() => {
|
||||
OPENAI_API_BASE_URLS = [...OPENAI_API_BASE_URLS, ''];
|
||||
OPENAI_API_KEYS = [...OPENAI_API_KEYS, ''];
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
<Tooltip content={$i18n.t(`Add Connection`)}>
|
||||
<button
|
||||
class="px-1"
|
||||
on:click={() => {
|
||||
showAddOpenAIConnectionModal = true;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<path
|
||||
d="M8.75 3.75a.75.75 0 0 0-1.5 0v3.5h-3.5a.75.75 0 0 0 0 1.5h3.5v3.5a.75.75 0 0 0 1.5 0v-3.5h3.5a.75.75 0 0 0 0-1.5h-3.5v-3.5Z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
<Plus />
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col gap-1.5">
|
||||
<div class="flex flex-col gap-1.5 mt-1.5">
|
||||
{#each OPENAI_API_BASE_URLS as url, idx}
|
||||
<div class="flex w-full gap-2 items-center">
|
||||
<Tooltip
|
||||
className="w-full"
|
||||
content={$i18n.t(`WebUI will make requests to "{{url}}/chat/completions"`, {
|
||||
url
|
||||
})}
|
||||
placement="top-start"
|
||||
>
|
||||
<div class="flex w-full">
|
||||
<div class="flex-1 relative">
|
||||
<input
|
||||
class=" outline-none w-full bg-transparent {pipelineUrls[url]
|
||||
? 'pr-8'
|
||||
: ''}"
|
||||
placeholder={$i18n.t('API Base URL')}
|
||||
bind:value={url}
|
||||
autocomplete="off"
|
||||
/>
|
||||
|
||||
{#if pipelineUrls[url]}
|
||||
<div class=" absolute top-2.5 right-2.5">
|
||||
<Tooltip content="Pipelines">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
class="size-4"
|
||||
>
|
||||
<path
|
||||
d="M11.644 1.59a.75.75 0 0 1 .712 0l9.75 5.25a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.712 0l-9.75-5.25a.75.75 0 0 1 0-1.32l9.75-5.25Z"
|
||||
/>
|
||||
<path
|
||||
d="m3.265 10.602 7.668 4.129a2.25 2.25 0 0 0 2.134 0l7.668-4.13 1.37.739a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.71 0l-9.75-5.25a.75.75 0 0 1 0-1.32l1.37-.738Z"
|
||||
/>
|
||||
<path
|
||||
d="m10.933 19.231-7.668-4.13-1.37.739a.75.75 0 0 0 0 1.32l9.75 5.25c.221.12.489.12.71 0l9.75-5.25a.75.75 0 0 0 0-1.32l-1.37-.738-7.668 4.13a2.25 2.25 0 0 1-2.134-.001Z"
|
||||
/>
|
||||
</svg>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<SensitiveInput
|
||||
inputClassName=" outline-none bg-transparent w-full"
|
||||
placeholder={$i18n.t('API Key')}
|
||||
bind:value={OPENAI_API_KEYS[idx]}
|
||||
/>
|
||||
</div>
|
||||
</Tooltip>
|
||||
|
||||
<div class="flex gap-1">
|
||||
<Tooltip content="Verify" className="self-start">
|
||||
<button
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
verifyOpenAIHandler(idx);
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<!-- <Tooltip content={$i18n.t('Configure')} className="self-start">
|
||||
<button
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
|
||||
(url, urlIdx) => idx !== urlIdx
|
||||
);
|
||||
OPENAI_API_KEYS = OPENAI_API_KEYS.filter(
|
||||
(key, keyIdx) => idx !== keyIdx
|
||||
);
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<Cog6 />
|
||||
</button>
|
||||
</Tooltip> -->
|
||||
|
||||
<Tooltip content={$i18n.t('Remove')} className="self-start">
|
||||
<button
|
||||
class="self-center p-1 bg-gray-200 hover:bg-gray-300 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
|
||||
(url, urlIdx) => idx !== urlIdx
|
||||
);
|
||||
OPENAI_API_KEYS = OPENAI_API_KEYS.filter(
|
||||
(key, keyIdx) => idx !== keyIdx
|
||||
);
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path d="M3.75 7.25a.75.75 0 0 0 0 1.5h8.5a.75.75 0 0 0 0-1.5h-8.5Z" />
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
<OpenAIConnection
|
||||
pipeline={pipelineUrls[url] ? true : false}
|
||||
bind:url
|
||||
bind:key={OPENAI_API_KEYS[idx]}
|
||||
bind:config={OPENAI_API_CONFIGS[OPENAI_API_BASE_URLS[idx]]}
|
||||
onSubmit={() => {
|
||||
updateOpenAIHandler();
|
||||
}}
|
||||
onDelete={() => {
|
||||
OPENAI_API_BASE_URLS = OPENAI_API_BASE_URLS.filter(
|
||||
(url, urlIdx) => idx !== urlIdx
|
||||
);
|
||||
OPENAI_API_KEYS = OPENAI_API_KEYS.filter((key, keyIdx) => idx !== keyIdx);
|
||||
}}
|
||||
/>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
@@ -435,7 +320,7 @@
|
||||
|
||||
<Tooltip content={$i18n.t('Remove')} className="self-start">
|
||||
<button
|
||||
class="self-center p-1 bg-gray-200 hover:bg-gray-300 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
OLLAMA_BASE_URLS = OLLAMA_BASE_URLS.filter(
|
||||
(url, urlIdx) => idx !== urlIdx
|
||||
|
@@ -0,0 +1,107 @@
|
||||
<script lang="ts">
|
||||
import { getContext, tick } from 'svelte';
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import SensitiveInput from '$lib/components/common/SensitiveInput.svelte';
|
||||
import Cog6 from '$lib/components/icons/Cog6.svelte';
|
||||
import OpenAIConnectionModal from './OpenAIConnectionModal.svelte';
|
||||
import { connect } from 'socket.io-client';
|
||||
|
||||
export let onDelete = () => {};
|
||||
export let onSubmit = () => {};
|
||||
|
||||
export let pipeline = false;
|
||||
|
||||
export let url = '';
|
||||
export let key = '';
|
||||
export let config = {};
|
||||
|
||||
let showConfigModal = false;
|
||||
</script>
|
||||
|
||||
<OpenAIConnectionModal
|
||||
edit
|
||||
bind:show={showConfigModal}
|
||||
connection={{
|
||||
url,
|
||||
key,
|
||||
config
|
||||
}}
|
||||
{onDelete}
|
||||
onSubmit={(connection) => {
|
||||
url = connection.url;
|
||||
key = connection.key;
|
||||
config = connection.config;
|
||||
onSubmit(connection);
|
||||
}}
|
||||
/>
|
||||
|
||||
<div class="flex w-full gap-2 items-center">
|
||||
<Tooltip
|
||||
className="w-full relative"
|
||||
content={$i18n.t(`WebUI will make requests to "{{url}}/chat/completions"`, {
|
||||
url
|
||||
})}
|
||||
placement="top-start"
|
||||
>
|
||||
{#if !(config?.enabled ?? true)}
|
||||
<div
|
||||
class="absolute top-0 bottom-0 left-0 right-0 opacity-60 bg-white dark:bg-gray-900 z-10"
|
||||
></div>
|
||||
{/if}
|
||||
<div class="flex w-full">
|
||||
<div class="flex-1 relative">
|
||||
<input
|
||||
class=" outline-none w-full bg-transparent {pipeline ? 'pr-8' : ''}"
|
||||
placeholder={$i18n.t('API Base URL')}
|
||||
bind:value={url}
|
||||
autocomplete="off"
|
||||
/>
|
||||
|
||||
{#if pipeline}
|
||||
<div class=" absolute top-2.5 right-2.5">
|
||||
<Tooltip content="Pipelines">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
class="size-4"
|
||||
>
|
||||
<path
|
||||
d="M11.644 1.59a.75.75 0 0 1 .712 0l9.75 5.25a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.712 0l-9.75-5.25a.75.75 0 0 1 0-1.32l9.75-5.25Z"
|
||||
/>
|
||||
<path
|
||||
d="m3.265 10.602 7.668 4.129a2.25 2.25 0 0 0 2.134 0l7.668-4.13 1.37.739a.75.75 0 0 1 0 1.32l-9.75 5.25a.75.75 0 0 1-.71 0l-9.75-5.25a.75.75 0 0 1 0-1.32l1.37-.738Z"
|
||||
/>
|
||||
<path
|
||||
d="m10.933 19.231-7.668-4.13-1.37.739a.75.75 0 0 0 0 1.32l9.75 5.25c.221.12.489.12.71 0l9.75-5.25a.75.75 0 0 0 0-1.32l-1.37-.738-7.668 4.13a2.25 2.25 0 0 1-2.134-.001Z"
|
||||
/>
|
||||
</svg>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<SensitiveInput
|
||||
inputClassName=" outline-none bg-transparent w-full"
|
||||
placeholder={$i18n.t('API Key')}
|
||||
bind:value={key}
|
||||
/>
|
||||
</div>
|
||||
</Tooltip>
|
||||
|
||||
<div class="flex gap-1">
|
||||
<Tooltip content={$i18n.t('Configure')} className="self-start">
|
||||
<button
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
showConfigModal = true;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<Cog6 />
|
||||
</button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
@@ -0,0 +1,339 @@
|
||||
<script lang="ts">
|
||||
import { toast } from 'svelte-sonner';
|
||||
import { getContext, onMount } from 'svelte';
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
import { models } from '$lib/stores';
|
||||
import { verifyOpenAIConnection } from '$lib/apis/openai';
|
||||
|
||||
import Modal from '$lib/components/common/Modal.svelte';
|
||||
import Plus from '$lib/components/icons/Plus.svelte';
|
||||
import Minus from '$lib/components/icons/Minus.svelte';
|
||||
import PencilSolid from '$lib/components/icons/PencilSolid.svelte';
|
||||
import SensitiveInput from '$lib/components/common/SensitiveInput.svelte';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import Switch from '$lib/components/common/Switch.svelte';
|
||||
|
||||
export let onSubmit: Function = () => {};
|
||||
export let onDelete: Function = () => {};
|
||||
|
||||
export let show = false;
|
||||
export let edit = false;
|
||||
|
||||
export let connection = null;
|
||||
|
||||
let url = '';
|
||||
let key = '';
|
||||
|
||||
let prefixId = '';
|
||||
let enabled = true;
|
||||
|
||||
let modelId = '';
|
||||
let modelIds = [];
|
||||
|
||||
let loading = false;
|
||||
|
||||
const verifyOpenAIHandler = async () => {
|
||||
const res = await verifyOpenAIConnection(localStorage.token, url, key).catch((error) => {
|
||||
toast.error(error);
|
||||
});
|
||||
|
||||
if (res) {
|
||||
toast.success($i18n.t('Server connection verified'));
|
||||
}
|
||||
};
|
||||
|
||||
const addModelHandler = () => {
|
||||
if (modelId) {
|
||||
modelIds = [...modelIds, modelId];
|
||||
modelId = '';
|
||||
}
|
||||
};
|
||||
|
||||
const submitHandler = async () => {
|
||||
loading = true;
|
||||
|
||||
if (!url || !key) {
|
||||
loading = false;
|
||||
toast.error('URL and Key are required');
|
||||
return;
|
||||
}
|
||||
|
||||
const connection = {
|
||||
url,
|
||||
key,
|
||||
config: {
|
||||
enabled: enabled,
|
||||
prefix_id: prefixId,
|
||||
model_ids: modelIds
|
||||
}
|
||||
};
|
||||
|
||||
await onSubmit(connection);
|
||||
|
||||
loading = false;
|
||||
show = false;
|
||||
|
||||
url = '';
|
||||
key = '';
|
||||
prefixId = '';
|
||||
modelIds = [];
|
||||
};
|
||||
|
||||
const init = () => {
|
||||
if (connection) {
|
||||
url = connection.url;
|
||||
key = connection.key;
|
||||
|
||||
enabled = connection.config?.enabled ?? true;
|
||||
prefixId = connection.config?.prefix_id ?? '';
|
||||
modelIds = connection.config?.model_ids ?? [];
|
||||
}
|
||||
};
|
||||
|
||||
$: if (show) {
|
||||
init();
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
init();
|
||||
});
|
||||
</script>
|
||||
|
||||
<Modal size="sm" bind:show>
|
||||
<div>
|
||||
<div class=" flex justify-between dark:text-gray-100 px-5 pt-4 pb-2">
|
||||
<div class=" text-lg font-medium self-center font-primary">
|
||||
{#if edit}
|
||||
{$i18n.t('Edit Connection')}
|
||||
{:else}
|
||||
{$i18n.t('Add Connection')}
|
||||
{/if}
|
||||
</div>
|
||||
<button
|
||||
class="self-center"
|
||||
on:click={() => {
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-5 h-5"
|
||||
>
|
||||
<path
|
||||
d="M6.28 5.22a.75.75 0 00-1.06 1.06L8.94 10l-3.72 3.72a.75.75 0 101.06 1.06L10 11.06l3.72 3.72a.75.75 0 101.06-1.06L11.06 10l3.72-3.72a.75.75 0 00-1.06-1.06L10 8.94 6.28 5.22z"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col md:flex-row w-full px-4 pb-4 md:space-x-4 dark:text-gray-200">
|
||||
<div class=" flex flex-col w-full sm:flex-row sm:justify-center sm:space-x-6">
|
||||
<form
|
||||
class="flex flex-col w-full"
|
||||
on:submit={(e) => {
|
||||
e.preventDefault();
|
||||
submitHandler();
|
||||
}}
|
||||
>
|
||||
<div class="px-1">
|
||||
<div class="flex gap-2">
|
||||
<div class="flex flex-col w-full">
|
||||
<div class=" mb-0.5 text-xs text-gray-500">{$i18n.t('URL')}</div>
|
||||
|
||||
<div class="flex-1">
|
||||
<input
|
||||
class="w-full text-sm bg-transparent placeholder:text-gray-300 dark:placeholder:text-gray-700 outline-none"
|
||||
type="text"
|
||||
bind:value={url}
|
||||
placeholder={$i18n.t('API Base URL')}
|
||||
autocomplete="off"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Tooltip content="Verify Connection" className="self-end -mb-1">
|
||||
<button
|
||||
class="self-center p-1 bg-transparent hover:bg-gray-100 dark:bg-gray-900 dark:hover:bg-gray-850 rounded-lg transition"
|
||||
on:click={() => {
|
||||
verifyOpenAIHandler();
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M15.312 11.424a5.5 5.5 0 01-9.201 2.466l-.312-.311h2.433a.75.75 0 000-1.5H3.989a.75.75 0 00-.75.75v4.242a.75.75 0 001.5 0v-2.43l.31.31a7 7 0 0011.712-3.138.75.75 0 00-1.449-.39zm1.23-3.723a.75.75 0 00.219-.53V2.929a.75.75 0 00-1.5 0V5.36l-.31-.31A7 7 0 003.239 8.188a.75.75 0 101.448.389A5.5 5.5 0 0113.89 6.11l.311.31h-2.432a.75.75 0 000 1.5h4.243a.75.75 0 00.53-.219z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</Tooltip>
|
||||
|
||||
<div class="flex flex-col flex-shrink-0 self-end">
|
||||
<Tooltip content={enabled ? $i18n.t('Enabled') : $i18n.t('Disabled')}>
|
||||
<Switch bind:state={enabled} />
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-2 mt-2">
|
||||
<div class="flex flex-col w-full">
|
||||
<div class=" mb-0.5 text-xs text-gray-500">{$i18n.t('Key')}</div>
|
||||
|
||||
<div class="flex-1">
|
||||
<SensitiveInput
|
||||
className="w-full text-sm bg-transparent placeholder:text-gray-300 dark:placeholder:text-gray-700 outline-none"
|
||||
bind:value={key}
|
||||
placeholder={$i18n.t('API Key')}
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col w-full">
|
||||
<div class=" mb-1 text-xs text-gray-500">{$i18n.t('Prefix ID')}</div>
|
||||
|
||||
<div class="flex-1">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
'Prefix ID is used to avoid conflicts with other connections by adding a prefix to the model IDs - leave empty to disable'
|
||||
)}
|
||||
>
|
||||
<input
|
||||
class="w-full text-sm bg-transparent placeholder:text-gray-300 dark:placeholder:text-gray-700 outline-none"
|
||||
type="text"
|
||||
bind:value={prefixId}
|
||||
placeholder={$i18n.t('Prefix ID')}
|
||||
autocomplete="off"
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" border-gray-100 dark:border-gray-700/10 my-2.5 w-full" />
|
||||
|
||||
<div class="flex flex-col w-full">
|
||||
<div class="mb-1 flex justify-between">
|
||||
<div class="text-xs text-gray-500">{$i18n.t('Model IDs')}</div>
|
||||
</div>
|
||||
|
||||
{#if modelIds.length > 0}
|
||||
<div class="flex flex-col">
|
||||
{#each modelIds as modelId, modelIdx}
|
||||
<div class=" flex gap-2 w-full justify-between items-center">
|
||||
<div class=" text-sm flex-1 py-1 rounded-lg">
|
||||
{modelId}
|
||||
</div>
|
||||
<div class="flex-shrink-0">
|
||||
<button
|
||||
type="button"
|
||||
on:click={() => {
|
||||
modelIds = modelIds.filter((_, idx) => idx !== modelIdx);
|
||||
}}
|
||||
>
|
||||
<Minus strokeWidth="2" className="size-3.5" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{:else}
|
||||
<div class="text-gray-500 text-xs text-center py-2 px-10">
|
||||
{$i18n.t('Leave empty to include all models from "{{URL}}/models" endpoint', {
|
||||
URL: url
|
||||
})}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<hr class=" border-gray-100 dark:border-gray-700/10 my-2.5 w-full" />
|
||||
|
||||
<div class="flex items-center">
|
||||
<input
|
||||
class="w-full py-1 text-sm rounded-lg bg-transparent {modelId
|
||||
? ''
|
||||
: 'text-gray-500'} placeholder:text-gray-300 dark:placeholder:text-gray-700 outline-none"
|
||||
bind:value={modelId}
|
||||
placeholder={$i18n.t('Add a model ID')}
|
||||
/>
|
||||
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
on:click={() => {
|
||||
addModelHandler();
|
||||
}}
|
||||
>
|
||||
<Plus className="size-3.5" strokeWidth="2" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex justify-end pt-3 text-sm font-medium gap-1.5">
|
||||
{#if edit}
|
||||
<button
|
||||
class="px-3.5 py-1.5 text-sm font-medium dark:bg-black dark:hover:bg-gray-900 dark:text-white bg-white text-black hover:bg-gray-100 transition rounded-full flex flex-row space-x-1 items-center"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
onDelete();
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
{$i18n.t('Delete')}
|
||||
</button>
|
||||
{/if}
|
||||
|
||||
<button
|
||||
class="px-3.5 py-1.5 text-sm font-medium bg-black hover:bg-gray-900 text-white dark:bg-white dark:text-black dark:hover:bg-gray-100 transition rounded-full flex flex-row space-x-1 items-center {loading
|
||||
? ' cursor-not-allowed'
|
||||
: ''}"
|
||||
type="submit"
|
||||
disabled={loading}
|
||||
>
|
||||
{$i18n.t('Save')}
|
||||
|
||||
{#if loading}
|
||||
<div class="ml-2 self-center">
|
||||
<svg
|
||||
class=" w-4 h-4"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
><style>
|
||||
.spinner_ajPY {
|
||||
transform-origin: center;
|
||||
animation: spinner_AtaB 0.75s infinite linear;
|
||||
}
|
||||
@keyframes spinner_AtaB {
|
||||
100% {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
</style><path
|
||||
d="M12,1A11,11,0,1,0,23,12,11,11,0,0,0,12,1Zm0,19a8,8,0,1,1,8-8A8,8,0,0,1,12,20Z"
|
||||
opacity=".25"
|
||||
/><path
|
||||
d="M10.14,1.16a11,11,0,0,0-9,8.92A1.59,1.59,0,0,0,2.46,12,1.52,1.52,0,0,0,4.11,10.7a8,8,0,0,1,6.66-6.61A1.42,1.42,0,0,0,12,2.69h0A1.57,1.57,0,0,0,10.14,1.16Z"
|
||||
class="spinner_ajPY"
|
||||
/></svg
|
||||
>
|
||||
</div>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
@@ -181,37 +181,6 @@
|
||||
</div>
|
||||
</button>
|
||||
{/if}
|
||||
|
||||
<hr class=" dark:border-gray-850 my-1" />
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class=" flex rounded-md py-2 px-3 w-full hover:bg-gray-200 dark:hover:bg-gray-800 transition"
|
||||
on:click={() => {
|
||||
downloadLiteLLMConfig(localStorage.token).catch((error) => {
|
||||
toast.error(error);
|
||||
});
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-3">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path d="M2 3a1 1 0 0 1 1-1h10a1 1 0 0 1 1 1v1a1 1 0 0 1-1 1H3a1 1 0 0 1-1-1V3Z" />
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M13 6H3v6a2 2 0 0 0 2 2h6a2 2 0 0 0 2-2V6ZM8.75 7.75a.75.75 0 0 0-1.5 0v2.69L6.03 9.22a.75.75 0 0 0-1.06 1.06l2.5 2.5a.75.75 0 0 0 1.06 0l2.5-2.5a.75.75 0 1 0-1.06-1.06l-1.22 1.22V7.75Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class=" self-center text-sm font-medium">
|
||||
{$i18n.t('Export LiteLLM config.yaml')}
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
@@ -69,7 +69,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-850 my-2" />
|
||||
<hr class=" border-gray-50 dark:border-gray-850 my-2" />
|
||||
|
||||
<div class="my-3 flex w-full items-center justify-between pr-2">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
@@ -91,7 +91,7 @@
|
||||
<Switch bind:state={adminConfig.ENABLE_MESSAGE_RATING} />
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-850 my-2" />
|
||||
<hr class=" border-gray-50 dark:border-gray-850 my-2" />
|
||||
|
||||
<div class=" w-full justify-between">
|
||||
<div class="flex w-full justify-between">
|
||||
@@ -115,7 +115,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-850 my-2" />
|
||||
<hr class=" border-gray-50 dark:border-gray-850 my-2" />
|
||||
|
||||
<div class=" w-full justify-between">
|
||||
<div class="flex w-full justify-between">
|
||||
|
@@ -133,7 +133,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-850" />
|
||||
<hr class=" border-gray-50 dark:border-gray-850" />
|
||||
|
||||
<div class=" space-y-3">
|
||||
<div class="mt-2 space-y-2 pr-1.5">
|
||||
@@ -323,7 +323,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr class=" dark:border-gray-850 my-2" />
|
||||
<hr class=" border-gray-50 dark:border-gray-850 my-2" />
|
||||
|
||||
<div class="mt-2 space-y-3">
|
||||
<div>
|
||||
|
@@ -4,7 +4,8 @@
|
||||
export let required = true;
|
||||
export let readOnly = false;
|
||||
export let outerClassName = 'flex flex-1 bg-transparent';
|
||||
export let inputClassName = 'w-full text-sm py-0.5 bg-transparent outline-none';
|
||||
export let inputClassName =
|
||||
'w-full text-sm py-0.5 placeholder:text-gray-300 dark:placeholder:text-gray-700 bg-transparent outline-none';
|
||||
export let showButtonClassName = 'pl-1.5 transition bg-transparent';
|
||||
|
||||
let show = false;
|
||||
|