open-webui/backend/main.py

2131 lines
67 KiB
Python
Raw Normal View History

import base64
import uuid
from contextlib import asynccontextmanager
from authlib.integrations.starlette_client import OAuth
from authlib.oidc.core import UserInfo
2024-02-23 08:30:26 +00:00
from bs4 import BeautifulSoup
import json
import markdown
2024-01-07 10:48:21 +00:00
import time
2024-02-24 08:21:53 +00:00
import os
import sys
import logging
2024-04-10 06:03:05 +00:00
import aiohttp
2024-02-25 19:26:58 +00:00
import requests
import mimetypes
2024-06-05 20:57:48 +00:00
import shutil
import os
2024-06-18 18:36:55 +00:00
import uuid
2024-06-11 17:19:59 +00:00
import inspect
2024-06-05 20:57:48 +00:00
import asyncio
2024-02-23 08:30:26 +00:00
2024-06-20 11:38:59 +00:00
from fastapi.concurrency import run_in_threadpool
2024-06-05 20:57:48 +00:00
from fastapi import FastAPI, Request, Depends, status, UploadFile, File, Form
2023-11-15 00:28:51 +00:00
from fastapi.staticfiles import StaticFiles
2024-05-28 16:50:17 +00:00
from fastapi.responses import JSONResponse
2023-11-15 00:28:51 +00:00
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware
2023-11-19 00:47:12 +00:00
from starlette.exceptions import HTTPException as StarletteHTTPException
2024-03-09 06:34:47 +00:00
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.responses import StreamingResponse, Response, RedirectResponse
2024-01-07 06:07:20 +00:00
2024-06-04 06:39:52 +00:00
from apps.socket.main import app as socket_app
from apps.ollama.main import (
app as ollama_app,
OpenAIChatCompletionForm,
get_all_models as get_ollama_models,
generate_openai_chat_completion as generate_ollama_chat_completion,
)
from apps.openai.main import (
app as openai_app,
get_all_models as get_openai_models,
generate_chat_completion as generate_openai_chat_completion,
)
2024-04-10 06:03:05 +00:00
2024-02-11 08:17:50 +00:00
from apps.audio.main import app as audio_app
2024-02-22 02:12:01 +00:00
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
2024-06-24 18:17:18 +00:00
from apps.webui.main import (
app as webui_app,
get_pipe_models,
generate_function_chat_completion,
)
2024-01-07 06:07:20 +00:00
2024-06-05 20:57:48 +00:00
2024-03-10 05:19:20 +00:00
from pydantic import BaseModel
2024-06-20 11:38:59 +00:00
from typing import List, Optional, Iterator, Generator, Union
2024-02-24 06:44:56 +00:00
from apps.webui.models.auths import Auths
2024-05-26 08:15:48 +00:00
from apps.webui.models.models import Models, ModelModel
2024-06-11 06:40:27 +00:00
from apps.webui.models.tools import Tools
2024-06-20 09:30:00 +00:00
from apps.webui.models.functions import Functions
from apps.webui.models.users import Users
2024-06-20 09:30:00 +00:00
from apps.webui.utils import load_toolkit_module_by_id, load_function_module_by_id
2024-06-11 06:40:27 +00:00
2024-05-28 02:16:07 +00:00
from utils.utils import (
get_admin_user,
get_verified_user,
get_current_user,
get_http_authorization_cred,
get_password_hash,
create_token,
2024-05-28 02:16:07 +00:00
)
2024-06-11 06:40:27 +00:00
from utils.task import (
title_generation_template,
search_query_generation_template,
tools_function_calling_generation_template,
)
2024-06-20 11:38:59 +00:00
from utils.misc import (
get_last_user_message,
add_or_update_system_message,
stream_message_template,
parse_duration,
2024-06-20 11:38:59 +00:00
)
2024-06-09 21:25:31 +00:00
2024-06-11 08:10:24 +00:00
from apps.rag.utils import get_rag_context, rag_template
2024-03-09 06:34:47 +00:00
2024-03-10 05:47:01 +00:00
from config import (
2024-03-24 03:16:18 +00:00
CONFIG_DATA,
2024-03-10 05:47:01 +00:00
WEBUI_NAME,
2024-05-07 00:29:16 +00:00
WEBUI_URL,
2024-05-08 15:40:18 +00:00
WEBUI_AUTH,
2024-03-10 05:47:01 +00:00
ENV,
VERSION,
CHANGELOG,
FRONTEND_BUILD_DIR,
2024-06-18 18:36:55 +00:00
UPLOAD_DIR,
CACHE_DIR,
STATIC_DIR,
2024-06-30 21:48:05 +00:00
DEFAULT_LOCALE,
2024-05-24 08:40:48 +00:00
ENABLE_OPENAI_API,
ENABLE_OLLAMA_API,
ENABLE_MODEL_FILTER,
2024-03-10 05:47:01 +00:00
MODEL_FILTER_LIST,
GLOBAL_LOG_LEVEL,
SRC_LOG_LEVELS,
2024-03-21 01:35:02 +00:00
WEBHOOK_URL,
ENABLE_ADMIN_EXPORT,
2024-05-26 07:49:30 +00:00
WEBUI_BUILD_HASH,
2024-06-09 21:53:10 +00:00
TASK_MODEL,
TASK_MODEL_EXTERNAL,
2024-06-09 21:25:31 +00:00
TITLE_GENERATION_PROMPT_TEMPLATE,
2024-06-09 21:53:10 +00:00
SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
2024-06-09 22:19:36 +00:00
SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
2024-06-11 06:40:27 +00:00
TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
2024-06-24 02:28:33 +00:00
SAFE_MODE,
OAUTH_PROVIDERS,
ENABLE_OAUTH_SIGNUP,
OAUTH_MERGE_ACCOUNTS_BY_EMAIL,
WEBUI_SECRET_KEY,
WEBUI_SESSION_COOKIE_SAME_SITE,
2024-06-07 08:13:42 +00:00
WEBUI_SESSION_COOKIE_SECURE,
2024-06-09 21:25:31 +00:00
AppConfig,
2024-03-10 05:47:01 +00:00
)
from constants import ERROR_MESSAGES, WEBHOOK_MESSAGES
from utils.webhook import post_webhook
2024-02-25 19:26:58 +00:00
2024-06-24 02:28:33 +00:00
# Safe mode disables all user-installed functions at startup so a broken
# function cannot prevent the server from booting.
if SAFE_MODE:
    print("SAFE MODE ENABLED")
    Functions.deactivate_all_functions()

# Root logging goes to stdout; this module's logger level comes from config.
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])
2023-11-15 00:28:51 +00:00
2024-03-28 09:45:56 +00:00
2023-11-15 00:28:51 +00:00
class SPAStaticFiles(StaticFiles):
    """Static-file app for a single-page frontend.

    Any path that would 404 is answered with ``index.html`` instead, so the
    client-side router can resolve the route.
    """

    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            if ex.status_code != 404:
                raise
            # Unknown path: fall back to the SPA entry point.
            return await super().get_response("index.html", scope)
2024-04-02 10:03:55 +00:00
# Startup banner. NOTE: the ASCII art's leading whitespace was lost in a
# whitespace-mangled copy of this file; reconstructed to the conventional
# layout (content of the printed text otherwise unchanged).
print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|


v{VERSION} - building the best open-source AI user interface.
{f"Commit: {WEBUI_BUILD_HASH}" if WEBUI_BUILD_HASH != "dev-build" else ""}
https://github.com/open-webui/open-webui
"""
)
2023-11-15 00:28:51 +00:00
@asynccontextmanager
async def lifespan(app: FastAPI):
    # No startup/shutdown work yet; placeholder lifespan hook.
    yield


app = FastAPI(
    # Interactive API docs are only exposed in development builds.
    docs_url="/docs" if ENV == "dev" else None, redoc_url=None, lifespan=lifespan
)
2023-11-15 00:28:51 +00:00
# Seed the persistent application configuration with values from config.py.
app.state.config = AppConfig()

app.state.config.ENABLE_OPENAI_API = ENABLE_OPENAI_API
app.state.config.ENABLE_OLLAMA_API = ENABLE_OLLAMA_API

app.state.config.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.config.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.config.WEBHOOK_URL = WEBHOOK_URL

# Which model handles background tasks (title/search-query generation):
# TASK_MODEL for local (ollama) chats, TASK_MODEL_EXTERNAL otherwise.
app.state.config.TASK_MODEL = TASK_MODEL
app.state.config.TASK_MODEL_EXTERNAL = TASK_MODEL_EXTERNAL

app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
)
app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
)
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
)

# In-memory model registry, populated lazily by get_all_models().
app.state.MODELS = {}

origins = ["*"]
2024-05-17 03:49:28 +00:00
2024-06-20 08:51:39 +00:00
##################################
#
# ChatCompletion Middleware
#
##################################
2024-06-18 23:08:42 +00:00
async def get_function_call_response(
    messages, files, tool_id, template, task_model_id, user, model
):
    """Ask the task model which tool function to call, then invoke it.

    The tool's JSON specs are rendered into a function-calling prompt, sent
    to the task model, and the model's JSON answer (``{"name", "parameters"}``)
    is dispatched to the matching function in the toolkit module.

    Returns a ``(result, citation, file_handler)`` tuple: ``result`` is the
    tool's output (or ``None`` if nothing ran), ``citation`` an optional
    citation dict, and ``file_handler`` True when the toolkit declares it
    handles files itself.
    """
    tool = Tools.get_tool_by_id(tool_id)
    tools_specs = json.dumps(tool.specs, indent=2)
    content = tools_function_calling_generation_template(template, tools_specs)

    user_message = get_last_user_message(messages)
    prompt = (
        "History:\n"
        + "\n".join(
            [
                f"{message['role'].upper()}: \"\"\"{message['content']}\"\"\""
                for message in messages[::-1][:4]
            ]
        )
        + f"\nQuery: {user_message}"
    )

    print(prompt)

    payload = {
        "model": task_model_id,
        "messages": [
            {"role": "system", "content": content},
            {"role": "user", "content": f"Query: {prompt}"},
        ],
        "stream": False,
    }

    # Let filter_pipeline errors propagate to the caller (the previous
    # try/except here only re-raised and added nothing).
    payload = filter_pipeline(payload, user)

    # BUGFIX: this function used to do `model = app.state.MODELS[task_model_id]`
    # here, clobbering the caller-supplied chat model — tools receiving
    # `__model__` then saw the task model instead of the actual chat model.
    # The lookup result was never used otherwise, so it is simply removed.

    response = None
    try:
        response = await generate_chat_completions(form_data=payload, user=user)
        content = None

        if hasattr(response, "body_iterator"):
            # Streaming response: drain it to recover the full message.
            async for chunk in response.body_iterator:
                data = json.loads(chunk.decode("utf-8"))
                content = data["choices"][0]["message"]["content"]

            # Cleanup any remaining background tasks if necessary
            if response.background is not None:
                await response.background()
        else:
            content = response["choices"][0]["message"]["content"]

        # Parse the function-call JSON the task model produced.
        if content is not None:
            print(f"content: {content}")
            result = json.loads(content)
            print(result)

            citation = None

            # Call the function
            if "name" in result:
                if tool_id in webui_app.state.TOOLS:
                    toolkit_module = webui_app.state.TOOLS[tool_id]
                else:
                    toolkit_module, frontmatter = load_toolkit_module_by_id(tool_id)
                    webui_app.state.TOOLS[tool_id] = toolkit_module

                file_handler = False
                # check if toolkit_module has file_handler self variable
                if hasattr(toolkit_module, "file_handler"):
                    file_handler = True
                    print("file_handler: ", file_handler)

                # Hydrate admin-configured valves, if the toolkit declares them.
                if hasattr(toolkit_module, "valves") and hasattr(
                    toolkit_module, "Valves"
                ):
                    valves = Tools.get_tool_valves_by_id(tool_id)
                    toolkit_module.valves = toolkit_module.Valves(
                        **(valves if valves else {})
                    )

                function = getattr(toolkit_module, result["name"])
                function_result = None
                try:
                    # Inspect the signature so the optional dunder context
                    # parameters are only injected when the tool declares them.
                    sig = inspect.signature(function)
                    params = result["parameters"]

                    if "__user__" in sig.parameters:
                        # Call the function with the '__user__' parameter included
                        __user__ = {
                            "id": user.id,
                            "email": user.email,
                            "name": user.name,
                            "role": user.role,
                        }

                        try:
                            if hasattr(toolkit_module, "UserValves"):
                                __user__["valves"] = toolkit_module.UserValves(
                                    **Tools.get_user_valves_by_id_and_user_id(
                                        tool_id, user.id
                                    )
                                )
                        except Exception as e:
                            print(e)

                        params = {**params, "__user__": __user__}

                    if "__messages__" in sig.parameters:
                        params = {**params, "__messages__": messages}

                    if "__files__" in sig.parameters:
                        params = {**params, "__files__": files}

                    if "__model__" in sig.parameters:
                        params = {**params, "__model__": model}

                    if "__id__" in sig.parameters:
                        params = {**params, "__id__": tool_id}

                    if inspect.iscoroutinefunction(function):
                        function_result = await function(**params)
                    else:
                        function_result = function(**params)

                    if hasattr(toolkit_module, "citation") and toolkit_module.citation:
                        citation = {
                            "source": {"name": f"TOOL:{tool.name}/{result['name']}"},
                            "document": [function_result],
                            "metadata": [{"source": result["name"]}],
                        }
                except Exception as e:
                    print(e)

                # Add the function result to the system prompt
                if function_result is not None:
                    return function_result, citation, file_handler
    except Exception as e:
        print(f"Error: {e}")

    return None, None, False
2024-06-11 06:40:27 +00:00
2024-07-02 02:33:58 +00:00
def get_task_model_id(default_model_id):
    """Pick the model that should run background tasks.

    Defaults to the chat's own model; if an admin configured a dedicated
    task model (TASK_MODEL for ollama-owned models, TASK_MODEL_EXTERNAL
    otherwise) and that model is registered, it wins.
    """
    task_model_id = default_model_id

    if app.state.MODELS[task_model_id]["owned_by"] == "ollama":
        candidate = app.state.config.TASK_MODEL
    else:
        candidate = app.state.config.TASK_MODEL_EXTERNAL

    if candidate and candidate in app.state.MODELS:
        task_model_id = candidate

    return task_model_id
2024-06-20 21:14:12 +00:00
2024-03-09 06:52:42 +00:00
2024-07-02 02:33:58 +00:00
def get_filter_function_ids(model):
    """Collect the ids of enabled filter functions that apply to *model*.

    Global filters plus the model's own ``filterIds`` are merged, restricted
    to currently-active filter functions, and sorted by their valve-declared
    priority (default 0).
    """

    def get_priority(function_id):
        function = Functions.get_function_by_id(function_id)
        if function is not None and hasattr(function, "valves"):
            return (function.valves if function.valves else {}).get("priority", 0)
        return 0

    filter_ids = [f.id for f in Functions.get_global_filter_functions()]
    if "info" in model and "meta" in model["info"]:
        filter_ids.extend(model["info"]["meta"].get("filterIds", []))

    # Deduplicate, then keep only filters that are currently active.
    enabled = {
        f.id for f in Functions.get_functions_by_type("filter", active_only=True)
    }
    filter_ids = [fid for fid in set(filter_ids) if fid in enabled]

    filter_ids.sort(key=get_priority)
    return filter_ids
async def chat_completion_functions_handler(body, model, user):
    """Run every applicable filter function's ``inlet`` hook over *body*.

    Returns the (possibly rewritten) body and an empty flags dict, mirroring
    the return shape of the other chat-completion handlers. A filter that
    sets ``file_handler`` causes attached files to be stripped from the body.
    """
    skip_files = None

    for filter_id in get_filter_function_ids(model):
        filter = Functions.get_function_by_id(filter_id)
        if not filter:
            continue

        # Load the filter module once and cache it on the webui app.
        if filter_id in webui_app.state.FUNCTIONS:
            function_module = webui_app.state.FUNCTIONS[filter_id]
        else:
            function_module, function_type, frontmatter = load_function_module_by_id(
                filter_id
            )
            webui_app.state.FUNCTIONS[filter_id] = function_module

        # A filter that declares file_handler takes over file processing.
        if hasattr(function_module, "file_handler"):
            skip_files = function_module.file_handler

        # Hydrate admin-configured valves, if the module declares them.
        if hasattr(function_module, "valves") and hasattr(function_module, "Valves"):
            valves = Functions.get_function_valves_by_id(filter_id)
            function_module.valves = function_module.Valves(
                **(valves if valves else {})
            )

        if not hasattr(function_module, "inlet"):
            continue

        try:
            inlet = function_module.inlet
            signature = inspect.signature(inlet)
            params = {"body": body}

            if "__user__" in signature.parameters:
                __user__ = {
                    "id": user.id,
                    "email": user.email,
                    "name": user.name,
                    "role": user.role,
                }
                try:
                    if hasattr(function_module, "UserValves"):
                        __user__["valves"] = function_module.UserValves(
                            **Functions.get_user_valves_by_id_and_user_id(
                                filter_id, user.id
                            )
                        )
                except Exception as e:
                    print(e)
                params["__user__"] = __user__

            if "__id__" in signature.parameters:
                params["__id__"] = filter_id

            if "__model__" in signature.parameters:
                params["__model__"] = model

            if inspect.iscoroutinefunction(inlet):
                body = await inlet(**params)
            else:
                body = inlet(**params)
        except Exception as e:
            print(f"Error: {e}")
            raise e

    if skip_files and "files" in body:
        del body["files"]

    return body, {}
2024-06-11 08:10:24 +00:00
2024-06-20 09:06:10 +00:00
2024-07-02 02:33:58 +00:00
async def chat_completion_tools_handler(body, model, user):
    """Invoke each tool requested via ``body["tool_ids"]`` and gather output.

    Returns the body (``tool_ids`` removed; ``files`` removed when a tool
    handles them itself) and a flags dict carrying collected contexts and
    citations.
    """
    skip_files = None
    contexts = []
    citations = None

    task_model_id = get_task_model_id(body["model"])

    if "tool_ids" in body:
        print(body["tool_ids"])
        for tool_id in body["tool_ids"]:
            print(tool_id)
            try:
                response, citation, file_handler = await get_function_call_response(
                    messages=body["messages"],
                    files=body.get("files", []),
                    tool_id=tool_id,
                    template=app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
                    task_model_id=task_model_id,
                    user=user,
                    model=model,
                )

                print(file_handler)

                if isinstance(response, str):
                    contexts.append(response)

                if citation:
                    citations = [citation] if citations is None else citations + [citation]

                if file_handler:
                    skip_files = True
            except Exception as e:
                print(f"Error: {e}")

        del body["tool_ids"]
        print(f"tool_contexts: {contexts}")

    if skip_files and "files" in body:
        del body["files"]

    return body, {
        **({"contexts": contexts} if contexts is not None else {}),
        **({"citations": citations} if citations is not None else {}),
    }
async def chat_completion_files_handler(body):
    """Resolve RAG context for any files attached to the request body.

    Files are consumed (removed from the body); retrieved contexts and
    citations are returned in the flags dict.
    """
    contexts = []
    citations = None

    if "files" in body:
        attached = body.pop("files")

        contexts, citations = get_rag_context(
            files=attached,
            messages=body["messages"],
            embedding_function=rag_app.state.EMBEDDING_FUNCTION,
            k=rag_app.state.config.TOP_K,
            reranking_function=rag_app.state.sentence_transformer_rf,
            r=rag_app.state.config.RELEVANCE_THRESHOLD,
            hybrid_search=rag_app.state.config.ENABLE_RAG_HYBRID_SEARCH,
        )

        log.debug(f"rag_contexts: {contexts}, citations: {citations}")

    return body, {
        **({"contexts": contexts} if contexts is not None else {}),
        **({"citations": citations} if citations is not None else {}),
    }
async def get_body_and_model_and_user(request):
    """Parse the request body JSON and resolve its model and the caller.

    Raises if the payload names an unknown model or the Authorization header
    does not resolve to a user.
    """
    # Read the original request body
    body = await request.body()
    body_str = body.decode("utf-8")
    body = json.loads(body_str) if body_str else {}

    model_id = body["model"]
    if model_id not in app.state.MODELS:
        # BUGFIX: was `raise "Model not found"` — raising a plain string is a
        # TypeError in Python 3; callers catch Exception and str() it.
        raise Exception("Model not found")
    model = app.state.MODELS[model_id]

    user = get_current_user(
        request,
        get_http_authorization_cred(request.headers.get("Authorization")),
    )

    return body, model, user
class ChatCompletionMiddleware(BaseHTTPMiddleware):
    """Pre-processes chat-completion POSTs before they reach a model backend.

    Runs filter-function inlets, tool calls, and RAG file retrieval over the
    request body, rewrites the request in place, and prepends gathered
    citations to streaming responses.
    """

    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and any(
            endpoint in request.url.path
            for endpoint in ["/ollama/api/chat", "/chat/completions"]
        ):
            log.debug(f"request.url.path: {request.url.path}")

            try:
                body, model, user = await get_body_and_model_and_user(request)
            except Exception as e:
                return JSONResponse(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    content={"detail": str(e)},
                )

            # chat_id / message id are client-side bookkeeping; strip them
            # before the payload is forwarded to the model backend.
            chat_id = None
            if "chat_id" in body:
                chat_id = body["chat_id"]
                del body["chat_id"]

            message_id = None
            if "id" in body:
                message_id = body["id"]
                del body["id"]

            # Extra payloads (e.g. citations) to emit ahead of the model stream.
            data_items = []

            contexts = []
            citations = []

            print(body)

            try:
                body, flags = await chat_completion_functions_handler(
                    body, model, user
                )
            except Exception as e:
                return JSONResponse(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    content={"detail": str(e)},
                )

            # Tool and file failures are best-effort: log and continue.
            try:
                body, flags = await chat_completion_tools_handler(body, model, user)
                contexts.extend(flags.get("contexts", []))
                citations.extend(flags.get("citations", []))
            except Exception as e:
                print(e)

            try:
                body, flags = await chat_completion_files_handler(body)
                contexts.extend(flags.get("contexts", []))
                citations.extend(flags.get("citations", []))
            except Exception as e:
                print(e)

            # If any context was gathered, inject it via the RAG template.
            if len(contexts) > 0:
                # BUGFIX: was '"/n".join(contexts)' — that joined contexts
                # with the literal characters '/n' instead of newlines.
                context_string = "\n".join(contexts).strip()
                prompt = get_last_user_message(body["messages"])
                body["messages"] = add_or_update_system_message(
                    rag_template(
                        rag_app.state.config.RAG_TEMPLATE, context_string, prompt
                    ),
                    body["messages"],
                )

            # If there are citations, add them to the data_items
            if len(citations) > 0:
                data_items.append({"citations": citations})

            modified_body_bytes = json.dumps(body).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

            response = await call_next(request)
            if isinstance(response, StreamingResponse):
                # Inject data_items as SSE events or NDJSON lines ahead of the
                # model's own stream.
                # BUGFIX: a missing Content-Type header returned None, and
                # `"..." in None` raised TypeError; default to "".
                content_type = response.headers.get("Content-Type") or ""
                if "text/event-stream" in content_type:
                    return StreamingResponse(
                        self.openai_stream_wrapper(response.body_iterator, data_items),
                    )
                if "application/x-ndjson" in content_type:
                    return StreamingResponse(
                        self.ollama_stream_wrapper(response.body_iterator, data_items),
                    )
            return response

        # If it's not a chat completion request, just pass it through
        return await call_next(request)

    async def _receive(self, body: bytes):
        # Replacement `receive` callable paired with the rewritten body.
        return {"type": "http.request", "body": body, "more_body": False}

    async def openai_stream_wrapper(self, original_generator, data_items):
        # Emit data items first as SSE events, then relay the model stream.
        for item in data_items:
            yield f"data: {json.dumps(item)}\n\n"

        async for data in original_generator:
            yield data

    async def ollama_stream_wrapper(self, original_generator, data_items):
        # Emit data items first as NDJSON lines, then relay the model stream.
        for item in data_items:
            yield f"{json.dumps(item)}\n"

        async for data in original_generator:
            yield data
2024-03-09 06:34:47 +00:00
2024-06-11 06:40:27 +00:00
# Register the request pre-processing (functions/tools/RAG) middleware.
app.add_middleware(ChatCompletionMiddleware)
2024-03-09 06:34:47 +00:00
2024-06-20 08:51:39 +00:00
##################################
#
# Pipeline Middleware
#
##################################
2024-03-09 06:34:47 +00:00
2024-06-09 21:25:31 +00:00
def filter_pipeline(payload, user):
    """Run *payload* through every matching pipeline "filter" inlet.

    Each filter is an OpenAI-compatible endpoint; the payload is POSTed to
    ``{url}/{id}/filter/inlet`` and replaced by the response. Raises
    ``Exception(status_code, detail)`` when a filter rejects the request
    with a JSON error body.
    """
    user = {"id": user.id, "email": user.email, "name": user.name, "role": user.role}
    model_id = payload["model"]

    # Filters targeting this model (or all models via "*").
    filters = [
        model
        for model in app.state.MODELS.values()
        if "pipeline" in model
        and "type" in model["pipeline"]
        and model["pipeline"]["type"] == "filter"
        and (
            model["pipeline"]["pipelines"] == ["*"]
            or any(
                model_id == target_model_id
                for target_model_id in model["pipeline"]["pipelines"]
            )
        )
    ]
    sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])

    # If the target model is itself a pipeline, its own inlet runs last.
    model = app.state.MODELS[model_id]
    if "pipeline" in model:
        sorted_filters.append(model)

    for filter in sorted_filters:
        r = None
        try:
            urlIdx = filter["urlIdx"]

            url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
            key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]

            if key != "":
                headers = {"Authorization": f"Bearer {key}"}
                r = requests.post(
                    f"{url}/{filter['id']}/filter/inlet",
                    headers=headers,
                    json={
                        "user": user,
                        "body": payload,
                    },
                )

                r.raise_for_status()
                payload = r.json()
        except Exception as e:
            # Handle connection error here
            print(f"Connection error: {e}")

            if r is not None:
                # BUGFIX: `res` was only bound if r.json() succeeded, so a
                # non-JSON error body raised NameError on the check below.
                res = None
                try:
                    res = r.json()
                except Exception:
                    pass

                if res is not None and "detail" in res:
                    raise Exception(r.status_code, res["detail"])

    # Non-pipeline models don't understand these task hints; strip them.
    if "pipeline" not in app.state.MODELS[model_id]:
        if "title" in payload:
            del payload["title"]

        if "task" in payload:
            del payload["task"]

    return payload
2024-05-28 02:03:26 +00:00
class PipelineMiddleware(BaseHTTPMiddleware):
    """Runs chat-completion request bodies through pipeline filter inlets
    (see filter_pipeline) and rewrites the request with the filtered body."""

    async def dispatch(self, request: Request, call_next):
        if request.method == "POST" and (
            "/ollama/api/chat" in request.url.path
            or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            user = get_current_user(
                request,
                get_http_authorization_cred(request.headers.get("Authorization")),
            )

            try:
                data = filter_pipeline(data, user)
            except Exception as e:
                # filter_pipeline raises Exception(status_code, detail) for
                # filter rejections. BUGFIX: any other exception shape used to
                # crash here (e.args[0] is not an HTTP status); fall back to 400.
                if len(e.args) == 2 and isinstance(e.args[0], int):
                    return JSONResponse(
                        status_code=e.args[0],
                        content={"detail": e.args[1]},
                    )
                return JSONResponse(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    content={"detail": str(e)},
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")
            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        # Replacement `receive` callable paired with the rewritten body.
        return {"type": "http.request", "body": body, "more_body": False}
# Register the pipeline-filter middleware.
app.add_middleware(PipelineMiddleware)
2024-05-28 16:50:17 +00:00
# CORS: origins is ["*"]; tighten in deployments that need it.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
2023-11-15 00:28:51 +00:00
@app.middleware("http")
async def check_url(request: Request, call_next):
    # Lazily populate the model registry on the first request.
    if not app.state.MODELS:
        await get_all_models()

    start_time = int(time.time())
    response = await call_next(request)
    # Whole-second processing time, exposed for debugging.
    response.headers["X-Process-Time"] = str(int(time.time()) - start_time)

    return response
2024-05-19 15:00:07 +00:00
@app.middleware("http")
async def update_embedding_function(request: Request, call_next):
    response = await call_next(request)
    # After a RAG embedding-config update, re-sync the webui sub-app's
    # embedding function with the RAG app's fresh one.
    if "/embedding/update" in request.url.path:
        webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
    return response
2024-02-22 11:22:23 +00:00
2024-05-19 15:00:07 +00:00
2024-06-04 06:39:52 +00:00
# Mount the sub-applications under their route prefixes.
app.mount("/ws", socket_app)

app.mount("/ollama", ollama_app)
app.mount("/openai", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)

app.mount("/api/v1", webui_app)

# Share the RAG embedding function with the webui sub-app.
webui_app.state.EMBEDDING_FUNCTION = rag_app.state.EMBEDDING_FUNCTION
2024-03-31 20:59:39 +00:00
2024-05-25 01:26:36 +00:00
async def get_all_models():
    """Aggregate models from pipes, OpenAI endpoints, and Ollama.

    Also overlays database-defined custom models: a custom model without a
    base_model_id decorates an existing model in place; one with a
    base_model_id is appended as a preset inheriting the base's owner.
    Updates app.state.MODELS / webui_app.state.MODELS and returns the list.
    """
    pipe_models = await get_pipe_models()

    openai_models = []
    if app.state.config.ENABLE_OPENAI_API:
        openai_models = await get_openai_models()
        openai_models = openai_models["data"]

    ollama_models = []
    if app.state.config.ENABLE_OLLAMA_API:
        ollama_models = await get_ollama_models()
        # Normalize ollama entries to the OpenAI-style model record.
        ollama_models = [
            {
                "id": model["model"],
                "name": model["name"],
                "object": "model",
                "created": int(time.time()),
                "owned_by": "ollama",
                "ollama": model,
            }
            for model in ollama_models["models"]
        ]

    models = pipe_models + openai_models + ollama_models

    custom_models = Models.get_all_models()
    for custom_model in custom_models:
        # Idiom fix: `is None` instead of `== None`.
        if custom_model.base_model_id is None:
            # Decorate the matching live model with the custom name/info.
            for model in models:
                if (
                    custom_model.id == model["id"]
                    or custom_model.id == model["id"].split(":")[0]
                ):
                    model["name"] = custom_model.name
                    model["info"] = custom_model.model_dump()
        else:
            # Preset model: inherit the owner of its base model when found.
            owned_by = "openai"
            for model in models:
                if (
                    custom_model.base_model_id == model["id"]
                    or custom_model.base_model_id == model["id"].split(":")[0]
                ):
                    owned_by = model["owned_by"]
                    break

            models.append(
                {
                    "id": custom_model.id,
                    "name": custom_model.name,
                    "object": "model",
                    "created": custom_model.created_at,
                    "owned_by": owned_by,
                    "info": custom_model.model_dump(),
                    "preset": True,
                }
            )

    app.state.MODELS = {model["id"]: model for model in models}
    webui_app.state.MODELS = app.state.MODELS

    return models
@app.get("/api/models")
async def get_models(user=Depends(get_verified_user)):
    """List models visible to the caller, excluding filter pipelines."""
    models = await get_all_models()

    # Filter pipelines are middleware, not selectable chat models.
    models = [
        m for m in models if "pipeline" not in m or m["pipeline"].get("type") != "filter"
    ]

    # Regular users may be restricted to an admin-curated allowlist.
    if app.state.config.ENABLE_MODEL_FILTER and user.role == "user":
        models = [m for m in models if m["id"] in app.state.config.MODEL_FILTER_LIST]

    return {"data": models}
2024-06-20 08:51:39 +00:00
@app.post("/api/chat/completions")
async def generate_chat_completions(form_data: dict, user=Depends(get_verified_user)):
    """Dispatch a chat completion to the backend that owns the model:
    function pipe, Ollama, or an OpenAI-compatible endpoint."""
    model_id = form_data["model"]

    if model_id not in app.state.MODELS:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Model not found",
        )

    model = app.state.MODELS[model_id]

    if model.get("pipe"):
        return await generate_function_chat_completion(form_data, user=user)

    if model["owned_by"] == "ollama":
        return await generate_ollama_chat_completion(form_data, user=user)

    return await generate_openai_chat_completion(form_data, user=user)
@app.post("/api/chat/completed")
async def chat_completed(form_data: dict, user=Depends(get_verified_user)):
data = form_data
model_id = data["model"]
2024-06-20 10:23:50 +00:00
if model_id not in app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Model not found",
)
model = app.state.MODELS[model_id]
2024-06-20 08:51:39 +00:00
filters = [
model
for model in app.state.MODELS.values()
if "pipeline" in model
and "type" in model["pipeline"]
and model["pipeline"]["type"] == "filter"
and (
model["pipeline"]["pipelines"] == ["*"]
or any(
model_id == target_model_id
for target_model_id in model["pipeline"]["pipelines"]
)
)
]
2024-06-20 10:23:50 +00:00
sorted_filters = sorted(filters, key=lambda x: x["pipeline"]["priority"])
if "pipeline" in model:
sorted_filters = [model] + sorted_filters
2024-06-20 08:51:39 +00:00
for filter in sorted_filters:
r = None
try:
urlIdx = filter["urlIdx"]
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
if key != "":
headers = {"Authorization": f"Bearer {key}"}
r = requests.post(
f"{url}/{filter['id']}/filter/outlet",
headers=headers,
json={
2024-06-22 19:14:12 +00:00
"user": {
"id": user.id,
"name": user.name,
"email": user.email,
"role": user.role,
},
2024-06-20 08:51:39 +00:00
"body": data,
},
)
r.raise_for_status()
data = r.json()
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
if r is not None:
try:
res = r.json()
if "detail" in res:
return JSONResponse(
status_code=r.status_code,
content=res,
)
except:
pass
else:
pass
def get_priority(function_id):
function = Functions.get_function_by_id(function_id)
if function is not None and hasattr(function, "valves"):
return (function.valves if function.valves else {}).get("priority", 0)
return 0
2024-06-27 20:04:12 +00:00
filter_ids = [function.id for function in Functions.get_global_filter_functions()]
2024-06-20 11:38:59 +00:00
if "info" in model and "meta" in model["info"]:
2024-06-27 20:04:12 +00:00
filter_ids.extend(model["info"]["meta"].get("filterIds", []))
filter_ids = list(set(filter_ids))
2024-06-20 10:23:50 +00:00
2024-06-27 20:04:12 +00:00
enabled_filter_ids = [
function.id
for function in Functions.get_functions_by_type("filter", active_only=True)
]
filter_ids = [
filter_id for filter_id in filter_ids if filter_id in enabled_filter_ids
]
# Sort filter_ids by priority, using the get_priority function
filter_ids.sort(key=get_priority)
2024-06-24 02:18:13 +00:00
for filter_id in filter_ids:
filter = Functions.get_function_by_id(filter_id)
if filter:
if filter_id in webui_app.state.FUNCTIONS:
function_module = webui_app.state.FUNCTIONS[filter_id]
else:
2024-06-24 03:31:40 +00:00
function_module, function_type, frontmatter = (
load_function_module_by_id(filter_id)
)
webui_app.state.FUNCTIONS[filter_id] = function_module
2024-06-22 08:39:53 +00:00
if hasattr(function_module, "valves") and hasattr(
function_module, "Valves"
):
valves = Functions.get_function_valves_by_id(filter_id)
function_module.valves = function_module.Valves(
**(valves if valves else {})
)
try:
if hasattr(function_module, "outlet"):
outlet = function_module.outlet
# Get the signature of the function
sig = inspect.signature(outlet)
params = {"body": data}
if "__user__" in sig.parameters:
__user__ = {
"id": user.id,
"email": user.email,
"name": user.name,
"role": user.role,
}
try:
if hasattr(function_module, "UserValves"):
__user__["valves"] = function_module.UserValves(
**Functions.get_user_valves_by_id_and_user_id(
filter_id, user.id
2024-06-22 21:06:19 +00:00
)
)
except Exception as e:
print(e)
2024-06-22 21:06:19 +00:00
params = {**params, "__user__": __user__}
2024-06-22 21:06:19 +00:00
if "__id__" in sig.parameters:
params = {
**params,
"__id__": filter_id,
}
2024-06-22 08:39:53 +00:00
2024-07-01 22:43:19 +00:00
if "__model__" in sig.parameters:
params = {
**params,
"__model__": model,
}
if inspect.iscoroutinefunction(outlet):
data = await outlet(**params)
else:
data = outlet(**params)
2024-06-21 03:26:28 +00:00
except Exception as e:
print(f"Error: {e}")
return JSONResponse(
status_code=status.HTTP_400_BAD_REQUEST,
content={"detail": str(e)},
)
2024-06-20 10:23:50 +00:00
2024-06-20 08:51:39 +00:00
return data
##################################
#
# Task Endpoints
#
##################################
# TODO: Refactor task API endpoints below into a separate file
2024-06-09 21:53:10 +00:00
@app.get("/api/task/config")
async def get_task_config(user=Depends(get_verified_user)):
return {
"TASK_MODEL": app.state.config.TASK_MODEL,
"TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL,
"TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE,
"SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
2024-06-09 22:29:55 +00:00
"SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
2024-06-11 06:40:27 +00:00
"TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
2024-06-09 21:53:10 +00:00
}
class TaskConfigForm(BaseModel):
    """Request body for /api/task/config/update."""

    # Optional[...] with no default: the fields are required but may be null.
    TASK_MODEL: Optional[str]
    TASK_MODEL_EXTERNAL: Optional[str]
    TITLE_GENERATION_PROMPT_TEMPLATE: str
    SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: str
    SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: int
    TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE: str
2024-06-09 21:53:10 +00:00
@app.post("/api/task/config/update")
async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_user)):
app.state.config.TASK_MODEL = form_data.TASK_MODEL
app.state.config.TASK_MODEL_EXTERNAL = form_data.TASK_MODEL_EXTERNAL
app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = (
form_data.TITLE_GENERATION_PROMPT_TEMPLATE
)
app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = (
form_data.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
)
2024-06-09 22:29:55 +00:00
app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = (
form_data.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD
)
2024-06-11 06:40:27 +00:00
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
form_data.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
)
2024-06-09 21:53:10 +00:00
return {
"TASK_MODEL": app.state.config.TASK_MODEL,
"TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL,
"TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE,
"SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE,
2024-06-09 22:29:55 +00:00
"SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD,
2024-06-11 06:40:27 +00:00
"TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
2024-06-09 21:53:10 +00:00
}
2024-06-09 21:26:49 +00:00
@app.post("/api/task/title/completions")
2024-06-09 21:25:31 +00:00
async def generate_title(form_data: dict, user=Depends(get_verified_user)):
print("generate_title")
2024-06-09 21:53:10 +00:00
2024-06-09 21:25:31 +00:00
model_id = form_data["model"]
if model_id not in app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Model not found",
)
2024-06-09 21:53:10 +00:00
# Check if the user has a custom task model
# If the user has a custom task model, use that model
if app.state.MODELS[model_id]["owned_by"] == "ollama":
if app.state.config.TASK_MODEL:
task_model_id = app.state.config.TASK_MODEL
if task_model_id in app.state.MODELS:
model_id = task_model_id
else:
if app.state.config.TASK_MODEL_EXTERNAL:
task_model_id = app.state.config.TASK_MODEL_EXTERNAL
if task_model_id in app.state.MODELS:
model_id = task_model_id
print(model_id)
2024-06-09 21:25:31 +00:00
model = app.state.MODELS[model_id]
template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE
content = title_generation_template(
2024-06-16 22:32:26 +00:00
template,
form_data["prompt"],
{
"name": user.name,
"location": user.info.get("location") if user.info else None,
},
2024-06-09 21:25:31 +00:00
)
payload = {
"model": model_id,
"messages": [{"role": "user", "content": content}],
"stream": False,
"max_tokens": 50,
"chat_id": form_data.get("chat_id", None),
"title": True,
}
log.debug(payload)
2024-06-12 20:34:34 +00:00
try:
payload = filter_pipeline(payload, user)
except Exception as e:
return JSONResponse(
status_code=e.args[0],
content={"detail": e.args[1]},
)
2024-06-09 21:25:31 +00:00
2024-07-02 02:33:58 +00:00
if "chat_id" in payload:
del payload["chat_id"]
2024-06-24 18:17:18 +00:00
return await generate_chat_completions(form_data=payload, user=user)
2024-06-09 21:25:31 +00:00
2024-06-09 21:53:10 +00:00
@app.post("/api/task/query/completions")
async def generate_search_query(form_data: dict, user=Depends(get_verified_user)):
print("generate_search_query")
2024-06-09 22:19:36 +00:00
if len(form_data["prompt"]) < app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Skip search query generation for short prompts (< {app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD} characters)",
)
2024-06-09 21:53:10 +00:00
model_id = form_data["model"]
if model_id not in app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Model not found",
)
# Check if the user has a custom task model
# If the user has a custom task model, use that model
if app.state.MODELS[model_id]["owned_by"] == "ollama":
if app.state.config.TASK_MODEL:
task_model_id = app.state.config.TASK_MODEL
if task_model_id in app.state.MODELS:
model_id = task_model_id
else:
if app.state.config.TASK_MODEL_EXTERNAL:
task_model_id = app.state.config.TASK_MODEL_EXTERNAL
if task_model_id in app.state.MODELS:
model_id = task_model_id
print(model_id)
model = app.state.MODELS[model_id]
template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE
content = search_query_generation_template(
2024-06-16 22:32:26 +00:00
template, form_data["prompt"], {"name": user.name}
2024-06-09 21:53:10 +00:00
)
payload = {
"model": model_id,
"messages": [{"role": "user", "content": content}],
"stream": False,
"max_tokens": 30,
2024-06-13 04:18:53 +00:00
"task": True,
}
print(payload)
try:
payload = filter_pipeline(payload, user)
except Exception as e:
return JSONResponse(
status_code=e.args[0],
content={"detail": e.args[1]},
)
2024-07-02 02:33:58 +00:00
if "chat_id" in payload:
del payload["chat_id"]
2024-06-24 18:17:18 +00:00
return await generate_chat_completions(form_data=payload, user=user)
2024-06-13 04:18:53 +00:00
@app.post("/api/task/emoji/completions")
async def generate_emoji(form_data: dict, user=Depends(get_verified_user)):
print("generate_emoji")
model_id = form_data["model"]
if model_id not in app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Model not found",
)
# Check if the user has a custom task model
# If the user has a custom task model, use that model
if app.state.MODELS[model_id]["owned_by"] == "ollama":
if app.state.config.TASK_MODEL:
task_model_id = app.state.config.TASK_MODEL
if task_model_id in app.state.MODELS:
model_id = task_model_id
else:
if app.state.config.TASK_MODEL_EXTERNAL:
task_model_id = app.state.config.TASK_MODEL_EXTERNAL
if task_model_id in app.state.MODELS:
model_id = task_model_id
print(model_id)
model = app.state.MODELS[model_id]
template = '''
2024-06-13 09:29:56 +00:00
Your task is to reflect the speaker's likely facial expression through a fitting emoji. Interpret emotions from the message and reflect their facial expression using fitting, diverse emojis (e.g., 😊, 😢, 😡, 😱).
2024-06-13 04:18:53 +00:00
Message: """{{prompt}}"""
'''
content = title_generation_template(
2024-06-16 22:32:26 +00:00
template,
form_data["prompt"],
{
"name": user.name,
"location": user.info.get("location") if user.info else None,
},
2024-06-13 04:18:53 +00:00
)
payload = {
"model": model_id,
"messages": [{"role": "user", "content": content}],
"stream": False,
"max_tokens": 4,
"chat_id": form_data.get("chat_id", None),
"task": True,
2024-06-09 21:53:10 +00:00
}
log.debug(payload)
2024-06-12 20:34:34 +00:00
try:
payload = filter_pipeline(payload, user)
except Exception as e:
return JSONResponse(
status_code=e.args[0],
content={"detail": e.args[1]},
)
2024-06-09 21:53:10 +00:00
2024-07-02 02:33:58 +00:00
if "chat_id" in payload:
del payload["chat_id"]
2024-06-24 18:17:18 +00:00
return await generate_chat_completions(form_data=payload, user=user)
2024-06-09 21:53:10 +00:00
2024-06-11 06:40:27 +00:00
@app.post("/api/task/tools/completions")
async def get_tools_function_calling(form_data: dict, user=Depends(get_verified_user)):
print("get_tools_function_calling")
model_id = form_data["model"]
if model_id not in app.state.MODELS:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail="Model not found",
)
# Check if the user has a custom task model
# If the user has a custom task model, use that model
if app.state.MODELS[model_id]["owned_by"] == "ollama":
if app.state.config.TASK_MODEL:
task_model_id = app.state.config.TASK_MODEL
if task_model_id in app.state.MODELS:
model_id = task_model_id
else:
if app.state.config.TASK_MODEL_EXTERNAL:
task_model_id = app.state.config.TASK_MODEL_EXTERNAL
if task_model_id in app.state.MODELS:
model_id = task_model_id
print(model_id)
template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
2024-06-12 20:34:34 +00:00
try:
2024-06-20 21:14:12 +00:00
context, citation, file_handler = await get_function_call_response(
2024-06-18 23:08:42 +00:00
form_data["messages"],
form_data.get("files", []),
form_data["tool_id"],
template,
model_id,
user,
2024-06-12 20:34:34 +00:00
)
return context
except Exception as e:
return JSONResponse(
status_code=e.args[0],
content={"detail": e.args[1]},
)
2024-06-11 06:40:27 +00:00
2024-06-20 08:51:39 +00:00
##################################
#
# Pipelines Endpoints
#
##################################
2024-05-30 09:04:29 +00:00
2024-06-20 08:51:39 +00:00
# TODO: Refactor pipelines API endpoints below into a separate file
2024-05-30 09:04:29 +00:00
@app.get("/api/pipelines/list")
async def get_pipelines_list(user=Depends(get_admin_user)):
2024-05-30 05:41:51 +00:00
responses = await get_openai_models(raw=True)
print(responses)
2024-06-02 23:46:33 +00:00
urlIdxs = [
idx
for idx, response in enumerate(responses)
if response != None and "pipelines" in response
]
return {
"data": [
{
"url": openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx],
"idx": urlIdx,
}
for urlIdx in urlIdxs
]
}
2024-06-05 20:57:48 +00:00
@app.post("/api/pipelines/upload")
async def upload_pipeline(
urlIdx: int = Form(...), file: UploadFile = File(...), user=Depends(get_admin_user)
):
print("upload_pipeline", urlIdx, file.filename)
# Check if the uploaded file is a python file
if not file.filename.endswith(".py"):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Only Python (.py) files are allowed.",
)
upload_folder = f"{CACHE_DIR}/pipelines"
os.makedirs(upload_folder, exist_ok=True)
file_path = os.path.join(upload_folder, file.filename)
try:
# Save the uploaded file
with open(file_path, "wb") as buffer:
shutil.copyfileobj(file.file, buffer)
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
headers = {"Authorization": f"Bearer {key}"}
with open(file_path, "rb") as f:
files = {"file": f}
r = requests.post(f"{url}/pipelines/upload", headers=headers, files=files)
r.raise_for_status()
data = r.json()
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
detail = "Pipeline not found"
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
raise HTTPException(
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
)
finally:
# Ensure the file is deleted after the upload is completed or on failure
if os.path.exists(file_path):
os.remove(file_path)
2024-05-30 05:03:22 +00:00
class AddPipelineForm(BaseModel):
    """Request body for /api/pipelines/add."""

    url: str  # source URL of the pipeline to install
    urlIdx: int  # index into OPENAI_API_BASE_URLS identifying the pipelines server
@app.post("/api/pipelines/add")
async def add_pipeline(form_data: AddPipelineForm, user=Depends(get_admin_user)):
r = None
try:
urlIdx = form_data.urlIdx
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
headers = {"Authorization": f"Bearer {key}"}
r = requests.post(
f"{url}/pipelines/add", headers=headers, json={"url": form_data.url}
)
r.raise_for_status()
data = r.json()
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
detail = "Pipeline not found"
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
raise HTTPException(
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
)
class DeletePipelineForm(BaseModel):
    """Request body for /api/pipelines/delete."""

    id: str  # id of the pipeline to remove
    urlIdx: int  # index into OPENAI_API_BASE_URLS identifying the pipelines server
@app.delete("/api/pipelines/delete")
async def delete_pipeline(form_data: DeletePipelineForm, user=Depends(get_admin_user)):
r = None
try:
urlIdx = form_data.urlIdx
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
headers = {"Authorization": f"Bearer {key}"}
r = requests.delete(
f"{url}/pipelines/delete", headers=headers, json={"id": form_data.id}
)
r.raise_for_status()
data = r.json()
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
detail = "Pipeline not found"
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
raise HTTPException(
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
)
2024-05-28 19:04:19 +00:00
@app.get("/api/pipelines")
async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_user)):
2024-05-30 05:18:27 +00:00
r = None
try:
urlIdx
2024-05-30 05:18:27 +00:00
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
2024-05-30 05:18:27 +00:00
headers = {"Authorization": f"Bearer {key}"}
r = requests.get(f"{url}/pipelines", headers=headers)
2024-05-30 05:18:27 +00:00
r.raise_for_status()
data = r.json()
2024-05-28 19:04:19 +00:00
2024-05-30 05:18:27 +00:00
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
2024-05-28 19:04:19 +00:00
2024-05-30 05:18:27 +00:00
detail = "Pipeline not found"
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
raise HTTPException(
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
)
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
@app.get("/api/pipelines/{pipeline_id}/valves")
async def get_pipeline_valves(
urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
models = await get_all_models()
r = None
try:
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
headers = {"Authorization": f"Bearer {key}"}
r = requests.get(f"{url}/{pipeline_id}/valves", headers=headers)
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
r.raise_for_status()
data = r.json()
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
2024-05-28 20:05:31 +00:00
2024-05-30 05:18:27 +00:00
detail = "Pipeline not found"
2024-05-28 20:05:31 +00:00
2024-05-30 05:18:27 +00:00
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
2024-05-28 19:32:49 +00:00
raise HTTPException(
2024-05-30 05:18:27 +00:00
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
2024-05-28 19:32:49 +00:00
)
@app.get("/api/pipelines/{pipeline_id}/valves/spec")
2024-05-30 05:18:27 +00:00
async def get_pipeline_valves_spec(
urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user)
):
2024-05-28 19:32:49 +00:00
models = await get_all_models()
2024-05-30 05:18:27 +00:00
r = None
try:
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
headers = {"Authorization": f"Bearer {key}"}
r = requests.get(f"{url}/{pipeline_id}/valves/spec", headers=headers)
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
r.raise_for_status()
data = r.json()
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
detail = "Pipeline not found"
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
2024-05-28 20:05:31 +00:00
2024-05-28 19:32:49 +00:00
raise HTTPException(
2024-05-30 05:18:27 +00:00
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
detail=detail,
2024-05-28 19:32:49 +00:00
)
@app.post("/api/pipelines/{pipeline_id}/valves/update")
async def update_pipeline_valves(
2024-05-30 05:18:27 +00:00
urlIdx: Optional[int],
pipeline_id: str,
form_data: dict,
user=Depends(get_admin_user),
2024-05-28 19:32:49 +00:00
):
models = await get_all_models()
2024-05-30 05:18:27 +00:00
r = None
try:
url = openai_app.state.config.OPENAI_API_BASE_URLS[urlIdx]
key = openai_app.state.config.OPENAI_API_KEYS[urlIdx]
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
headers = {"Authorization": f"Bearer {key}"}
r = requests.post(
f"{url}/{pipeline_id}/valves/update",
headers=headers,
json={**form_data},
)
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
r.raise_for_status()
data = r.json()
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
return {**data}
except Exception as e:
# Handle connection error here
print(f"Connection error: {e}")
2024-05-28 19:32:49 +00:00
2024-05-30 05:18:27 +00:00
detail = "Pipeline not found"
2024-05-28 20:05:31 +00:00
2024-05-30 05:18:27 +00:00
if r is not None:
try:
res = r.json()
if "detail" in res:
detail = res["detail"]
except:
pass
2024-05-28 20:05:31 +00:00
2024-05-28 19:32:49 +00:00
raise HTTPException(
2024-05-30 05:41:51 +00:00
status_code=(r.status_code if r is not None else status.HTTP_404_NOT_FOUND),
2024-05-30 05:18:27 +00:00
detail=detail,
2024-05-28 19:32:49 +00:00
)
2024-06-20 08:51:39 +00:00
##################################
#
# Config Endpoints
#
##################################
2024-02-22 02:12:01 +00:00
@app.get("/api/config")
async def get_app_config():
return {
"status": True,
2024-02-24 01:12:19 +00:00
"name": WEBUI_NAME,
2024-02-23 08:30:26 +00:00
"version": VERSION,
2024-06-30 21:48:05 +00:00
"default_locale": str(DEFAULT_LOCALE),
"default_models": webui_app.state.config.DEFAULT_MODELS,
"default_prompt_suggestions": webui_app.state.config.DEFAULT_PROMPT_SUGGESTIONS,
2024-05-26 20:02:40 +00:00
"features": {
"auth": WEBUI_AUTH,
"auth_trusted_header": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
2024-05-26 20:02:40 +00:00
"enable_signup": webui_app.state.config.ENABLE_SIGNUP,
2024-06-02 02:03:56 +00:00
"enable_web_search": rag_app.state.config.ENABLE_RAG_WEB_SEARCH,
"enable_image_generation": images_app.state.config.ENABLED,
2024-05-26 16:10:25 +00:00
"enable_community_sharing": webui_app.state.config.ENABLE_COMMUNITY_SHARING,
"enable_admin_export": ENABLE_ADMIN_EXPORT,
},
2024-06-08 03:18:48 +00:00
"audio": {
"tts": {
"engine": audio_app.state.config.TTS_ENGINE,
"voice": audio_app.state.config.TTS_VOICE,
},
"stt": {
"engine": audio_app.state.config.STT_ENGINE,
},
},
"oauth": {
"providers": {
name: config.get("name", name)
for name, config in OAUTH_PROVIDERS.items()
}
},
2024-02-22 02:12:01 +00:00
}
2024-03-10 05:19:20 +00:00
@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
2024-03-10 05:47:01 +00:00
return {
"enabled": app.state.config.ENABLE_MODEL_FILTER,
"models": app.state.config.MODEL_FILTER_LIST,
2024-03-10 05:47:01 +00:00
}
2024-03-10 05:19:20 +00:00
class ModelFilterConfigForm(BaseModel):
    """Request body for /api/config/model/filter."""

    enabled: bool  # whether the allow-list filter is active
    models: List[str]  # model ids allowed through the filter
@app.post("/api/config/model/filter")
2024-03-21 01:35:02 +00:00
async def update_model_filter_config(
2024-03-10 05:19:20 +00:00
form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
2024-05-17 17:35:33 +00:00
app.state.config.ENABLE_MODEL_FILTER = form_data.enabled
app.state.config.MODEL_FILTER_LIST = form_data.models
2024-03-10 05:19:20 +00:00
2024-03-10 05:47:01 +00:00
return {
"enabled": app.state.config.ENABLE_MODEL_FILTER,
"models": app.state.config.MODEL_FILTER_LIST,
2024-03-10 05:47:01 +00:00
}
2024-03-10 05:19:20 +00:00
2024-06-20 08:51:39 +00:00
# TODO: webhook endpoint should be under config endpoints
2024-03-21 01:35:02 +00:00
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
return {
"url": app.state.config.WEBHOOK_URL,
2024-03-21 01:35:02 +00:00
}
class UrlForm(BaseModel):
    """Request body for /api/webhook."""

    url: str  # webhook endpoint to notify on events (e.g. user signup)
@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
app.state.config.WEBHOOK_URL = form_data.url
webui_app.state.WEBHOOK_URL = app.state.config.WEBHOOK_URL
return {"url": app.state.config.WEBHOOK_URL}
2024-03-05 08:59:35 +00:00
@app.get("/api/version")
async def get_app_config():
return {
"version": VERSION,
}
2024-02-23 08:30:26 +00:00
@app.get("/api/changelog")
async def get_app_changelog():
2024-03-31 08:10:57 +00:00
return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}
2024-02-23 08:30:26 +00:00
2024-02-25 19:26:58 +00:00
@app.get("/api/version/updates")
async def get_app_latest_release_version():
try:
async with aiohttp.ClientSession(trust_env=True) as session:
2024-04-10 06:03:05 +00:00
async with session.get(
"https://api.github.com/repos/open-webui/open-webui/releases/latest"
) as response:
response.raise_for_status()
data = await response.json()
latest_version = data["tag_name"]
return {"current": VERSION, "latest": latest_version[1:]}
except aiohttp.ClientError as e:
2024-02-25 19:26:58 +00:00
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
2024-02-25 19:55:15 +00:00
detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
2024-02-25 19:26:58 +00:00
)
2024-04-10 08:27:19 +00:00
############################
# OAuth Login & Callback
############################

# Register every configured OAuth/OIDC provider with authlib.
oauth = OAuth()

for provider_name, provider_config in OAUTH_PROVIDERS.items():
    oauth.register(
        name=provider_name,
        client_id=provider_config["client_id"],
        client_secret=provider_config["client_secret"],
        server_metadata_url=provider_config["server_metadata_url"],
        client_kwargs={
            "scope": provider_config["scope"],
        },
    )

# SessionMiddleware is used by authlib for oauth; only installed when at
# least one provider is configured.
if len(OAUTH_PROVIDERS) > 0:
    app.add_middleware(
        SessionMiddleware,
        secret_key=WEBUI_SECRET_KEY,
        session_cookie="oui-session",
        same_site=WEBUI_SESSION_COOKIE_SAME_SITE,
        https_only=WEBUI_SESSION_COOKIE_SECURE,
    )
@app.get("/oauth/{provider}/login")
async def oauth_login(provider: str, request: Request):
if provider not in OAUTH_PROVIDERS:
raise HTTPException(404)
redirect_uri = request.url_for("oauth_callback", provider=provider)
return await oauth.create_client(provider).authorize_redirect(request, redirect_uri)
# OAuth login logic is as follows:
# 1. Attempt to find a user with matching subject ID, tied to the provider
# 2. If OAUTH_MERGE_ACCOUNTS_BY_EMAIL is true, find a user with the email address provided via OAuth
#    - This is considered insecure in general, as OAuth providers do not always verify email addresses
# 3. If there is no user, and ENABLE_OAUTH_SIGNUP is true, create a user
#    - Email addresses are considered unique, so we fail registration if the email address is already taken
@app.get("/oauth/{provider}/callback")
async def oauth_callback(provider: str, request: Request, response: Response):
    """Complete the OAuth flow: resolve or create the local user, then
    redirect to the frontend with a JWT in both a cookie and the URL fragment.

    Raises 400 on any provider/credential problem and 403 when the user is
    unknown and OAuth signups are disabled.
    """
    if provider not in OAUTH_PROVIDERS:
        raise HTTPException(404)
    client = oauth.create_client(provider)
    try:
        token = await client.authorize_access_token(request)
    except Exception as e:
        log.warning(f"OAuth callback error: {e}")
        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
    user_data: UserInfo = token["userinfo"]

    sub = user_data.get("sub")
    if not sub:
        log.warning(f"OAuth callback failed, sub is missing: {user_data}")
        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)
    # Subject ids are namespaced per provider so they can't collide.
    provider_sub = f"{provider}@{sub}"
    email = user_data.get("email", "").lower()
    # We currently mandate that email addresses are provided
    if not email:
        log.warning(f"OAuth callback failed, email is missing: {user_data}")
        raise HTTPException(400, detail=ERROR_MESSAGES.INVALID_CRED)

    # Check if the user exists
    user = Users.get_user_by_oauth_sub(provider_sub)

    if not user:
        # If the user does not exist, check if merging is enabled
        if OAUTH_MERGE_ACCOUNTS_BY_EMAIL.value:
            # Check if the user exists by email
            user = Users.get_user_by_email(email)
            if user:
                # Update the user with the new oauth sub
                Users.update_user_oauth_sub_by_id(user.id, provider_sub)

    if not user:
        # If the user does not exist, check if signups are enabled
        if ENABLE_OAUTH_SIGNUP.value:
            # Check if an existing user with the same email already exists
            existing_user = Users.get_user_by_email(user_data.get("email", "").lower())
            if existing_user:
                raise HTTPException(400, detail=ERROR_MESSAGES.EMAIL_TAKEN)

            picture_url = user_data.get("picture", "")
            if picture_url:
                # Download the profile image into a base64 string
                try:
                    async with aiohttp.ClientSession() as session:
                        async with session.get(picture_url) as resp:
                            picture = await resp.read()
                            base64_encoded_picture = base64.b64encode(picture).decode(
                                "utf-8"
                            )
                            guessed_mime_type = mimetypes.guess_type(picture_url)[0]
                            if guessed_mime_type is None:
                                # assume JPG, browsers are tolerant enough of image formats
                                guessed_mime_type = "image/jpeg"
                            picture_url = f"data:{guessed_mime_type};base64,{base64_encoded_picture}"
                except Exception as e:
                    log.error(f"Error downloading profile image '{picture_url}': {e}")
                    picture_url = ""
            if not picture_url:
                picture_url = "/user.png"
            # The very first account ever created becomes an admin.
            role = (
                "admin"
                if Users.get_num_users() == 0
                else webui_app.state.config.DEFAULT_USER_ROLE
            )
            user = Auths.insert_new_auth(
                email=email,
                password=get_password_hash(
                    str(uuid.uuid4())
                ),  # Random password, not used
                name=user_data.get("name", "User"),
                profile_image_url=picture_url,
                role=role,
                oauth_sub=provider_sub,
            )

            if webui_app.state.config.WEBHOOK_URL:
                post_webhook(
                    webui_app.state.config.WEBHOOK_URL,
                    WEBHOOK_MESSAGES.USER_SIGNUP(user.name),
                    {
                        "action": "signup",
                        "message": WEBHOOK_MESSAGES.USER_SIGNUP(user.name),
                        "user": user.model_dump_json(exclude_none=True),
                    },
                )
        else:
            raise HTTPException(
                status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.ACCESS_PROHIBITED
            )

    jwt_token = create_token(
        data={"id": user.id},
        expires_delta=parse_duration(webui_app.state.config.JWT_EXPIRES_IN),
    )

    # Set the cookie token
    response.set_cookie(
        key="token",
        value=jwt_token,
        httponly=True,  # Ensures the cookie is not accessible via JavaScript
    )

    # Redirect back to the frontend with the JWT token
    redirect_url = f"{request.base_url}auth#token={jwt_token}"
    return RedirectResponse(url=redirect_url)
@app.get("/manifest.json")
async def get_manifest_json():
return {
2024-04-04 03:43:55 +00:00
"name": WEBUI_NAME,
"short_name": WEBUI_NAME,
"start_url": "/",
"display": "standalone",
"background_color": "#343541",
"orientation": "portrait-primary",
2024-05-02 02:32:36 +00:00
"icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
}
2024-04-10 08:27:19 +00:00
2024-05-07 00:29:16 +00:00
@app.get("/opensearch.xml")
async def get_opensearch_xml():
xml_content = rf"""
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
<ShortName>{WEBUI_NAME}</ShortName>
<Description>Search {WEBUI_NAME}</Description>
<InputEncoding>UTF-8</InputEncoding>
<Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
<Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
<moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
</OpenSearchDescription>
"""
return Response(content=xml_content, media_type="application/xml")
2024-05-15 18:17:18 +00:00
@app.get("/health")
async def healthcheck():
return {"status": True}
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")
2024-02-24 01:12:19 +00:00
if os.path.exists(FRONTEND_BUILD_DIR):
2024-05-22 04:38:58 +00:00
mimetypes.add_type("text/javascript", ".js")
app.mount(
"/",
SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
name="spa-static-files",
)
else:
log.warning(
f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
)