# open-webui/backend/main.py

from bs4 import BeautifulSoup
import json
import markdown
import time
import os
import sys
import logging
import aiohttp
import requests

from fastapi import FastAPI, Request, Depends, status
from fastapi.staticfiles import StaticFiles
from fastapi import HTTPException
from fastapi.middleware.wsgi import WSGIMiddleware
from fastapi.middleware.cors import CORSMiddleware

from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import StreamingResponse, Response

from apps.ollama.main import app as ollama_app
from apps.openai.main import app as openai_app

from apps.litellm.main import (
    app as litellm_app,
    start_litellm_background,
    shutdown_litellm_background,
)

from apps.audio.main import app as audio_app
from apps.images.main import app as images_app
from apps.rag.main import app as rag_app
from apps.web.main import app as webui_app

import asyncio

from pydantic import BaseModel
from typing import List

from utils.utils import get_admin_user
from apps.rag.utils import rag_messages

from config import (
    CONFIG_DATA,
    WEBUI_NAME,
    WEBUI_URL,
    ENV,
    VERSION,
    CHANGELOG,
    FRONTEND_BUILD_DIR,
    CACHE_DIR,
    STATIC_DIR,
    ENABLE_LITELLM,
    ENABLE_MODEL_FILTER,
    MODEL_FILTER_LIST,
    GLOBAL_LOG_LEVEL,
    SRC_LOG_LEVELS,
    WEBHOOK_URL,
    ENABLE_ADMIN_EXPORT,
)
from constants import ERROR_MESSAGES

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


class SPAStaticFiles(StaticFiles):
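    """Static file handler that falls back to index.html on 404 so client-side SPA routes resolve."""
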
async def get_response(self, path: str, scope):
try:
return await super().get_response(path, scope)
except (HTTPException, StarletteHTTPException) as ex:
if ex.status_code == 404:
return await super().get_response("index.html", scope)
else:
raise ex


print(
    rf"""
  ___                    __        __   _     _   _ ___
 / _ \ _ __   ___ _ __   \ \      / /__| |__ | | | |_ _|
| | | | '_ \ / _ \ '_ \   \ \ /\ / / _ \ '_ \| | | || |
| |_| | |_) |  __/ | | |   \ V  V /  __/ |_) | |_| || |
 \___/| .__/ \___|_| |_|    \_/\_/ \___|_.__/ \___/|___|
      |_|


v{VERSION} - building the best open-source AI user interface.
https://github.com/open-webui/open-webui
"""
)

app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

app.state.ENABLE_MODEL_FILTER = ENABLE_MODEL_FILTER
app.state.MODEL_FILTER_LIST = MODEL_FILTER_LIST

app.state.WEBHOOK_URL = WEBHOOK_URL

origins = ["*"]


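# RAGMiddleware rewrites chat completion requests before they reach the model backends:
# when a POST to /api/chat or /chat/completions carries a "docs" field, the referenced
# documents are retrieved via rag_messages() and injected into the message list, and the
# resulting citations are prepended to the streamed response when "citations" is requested.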
class RAGMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        return_citations = False
        citations = []  # ensure this is defined even when no docs are attached

        if request.method == "POST" and (
            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
        ):
            log.debug(f"request.url.path: {request.url.path}")

            # Read the original request body
            body = await request.body()
            # Decode body to string
            body_str = body.decode("utf-8")
            # Parse string to JSON
            data = json.loads(body_str) if body_str else {}

            return_citations = data.get("citations", False)
            if "citations" in data:
                del data["citations"]

            # Example: Add a new key-value pair or modify existing ones
            # data["modified"] = True  # Example modification
            if "docs" in data:
                data = {**data}
                data["messages"], citations = rag_messages(
                    docs=data["docs"],
                    messages=data["messages"],
                    template=rag_app.state.RAG_TEMPLATE,
                    embedding_function=rag_app.state.EMBEDDING_FUNCTION,
                    k=rag_app.state.TOP_K,
                    reranking_function=rag_app.state.sentence_transformer_rf,
                    r=rag_app.state.RELEVANCE_THRESHOLD,
                    hybrid_search=rag_app.state.ENABLE_RAG_HYBRID_SEARCH,
                )
                del data["docs"]

                log.debug(
                    f"data['messages']: {data['messages']}, citations: {citations}"
                )

            modified_body_bytes = json.dumps(data).encode("utf-8")

            # Replace the request body with the modified one
            request._body = modified_body_bytes
            # Set custom header to ensure content-length matches new body length
            request.headers.__dict__["_list"] = [
                (b"content-length", str(len(modified_body_bytes)).encode("utf-8")),
                *[
                    (k, v)
                    for k, v in request.headers.raw
                    if k.lower() != b"content-length"
                ],
            ]

        response = await call_next(request)

        if return_citations:
            # Inject the citations into the response
            if isinstance(response, StreamingResponse):
                # If it's a streaming response, inject it as an SSE event or NDJSON line
                content_type = response.headers.get("Content-Type", "")
                if "text/event-stream" in content_type:
                    return StreamingResponse(
                        self.openai_stream_wrapper(response.body_iterator, citations),
                    )
                if "application/x-ndjson" in content_type:
                    return StreamingResponse(
                        self.ollama_stream_wrapper(response.body_iterator, citations),
                    )

        return response

    async def _receive(self, body: bytes):
        return {"type": "http.request", "body": body, "more_body": False}

    async def openai_stream_wrapper(self, original_generator, citations):
        yield f"data: {json.dumps({'citations': citations})}\n\n"
        async for data in original_generator:
            yield data

    async def ollama_stream_wrapper(self, original_generator, citations):
        yield f"{json.dumps({'citations': citations})}\n"
        async for data in original_generator:
            yield data


app.add_middleware(RAGMiddleware)
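
# When citations are requested, clients receive them as the first streamed chunk before
# any model output: an SSE event of the form `data: {"citations": [...]}` for OpenAI-style
# streams, or an NDJSON line `{"citations": [...]}` for Ollama-style streams.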

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.middleware("http")
async def check_url(request: Request, call_next):
    start_time = int(time.time())
    response = await call_next(request)
    process_time = int(time.time()) - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response


@app.on_event("startup")
async def on_startup():
    if ENABLE_LITELLM:
        asyncio.create_task(start_litellm_background())


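# Sub-applications (web UI API, LiteLLM, Ollama and OpenAI proxies, images, audio, RAG)
# are mounted under their own URL prefixes; each manages its own routes and state.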
app.mount("/api/v1", webui_app)
app.mount("/litellm/api", litellm_app)

app.mount("/ollama", ollama_app)
app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)
app.mount("/audio/api/v1", audio_app)
app.mount("/rag/api/v1", rag_app)


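# Application config exposed to the frontend: name, version, default locale,
# default models/prompt suggestions and feature flags.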
@app.get("/api/config")
async def get_app_config():
    # Handle the absence of a 'ui' section in CONFIG_DATA gracefully
    default_locale = "en-US"
    if "ui" in CONFIG_DATA:
        default_locale = CONFIG_DATA["ui"].get("default_locale", "en-US")

    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "default_locale": default_locale,
        "images": images_app.state.ENABLED,
        "default_models": webui_app.state.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
        "trusted_header_auth": bool(webui_app.state.AUTH_TRUSTED_EMAIL_HEADER),
        "admin_export_enabled": ENABLE_ADMIN_EXPORT,
    }


@app.get("/api/config/model/filter")
async def get_model_filter_config(user=Depends(get_admin_user)):
    return {
        "enabled": app.state.ENABLE_MODEL_FILTER,
        "models": app.state.MODEL_FILTER_LIST,
    }


class ModelFilterConfigForm(BaseModel):
    enabled: bool
    models: List[str]


@app.post("/api/config/model/filter")
async def update_model_filter_config(
    form_data: ModelFilterConfigForm, user=Depends(get_admin_user)
):
    app.state.ENABLE_MODEL_FILTER = form_data.enabled
    app.state.MODEL_FILTER_LIST = form_data.models

    ollama_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    ollama_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    openai_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    litellm_app.state.ENABLE_MODEL_FILTER = app.state.ENABLE_MODEL_FILTER
    litellm_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST

    return {
        "enabled": app.state.ENABLE_MODEL_FILTER,
        "models": app.state.MODEL_FILTER_LIST,
    }


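# Illustrative admin usage of the model filter endpoints above (not executed here;
# host, port, token and model names are placeholders):
#
#   import requests
#   requests.post(
#       "http://localhost:8080/api/config/model/filter",
#       headers={"Authorization": f"Bearer {admin_token}"},
#       json={"enabled": True, "models": ["llama3:latest"]},
#   )
#
# The same settings are pushed to the ollama, openai and litellm sub-apps so their
# model listings respect the filter.
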
@app.get("/api/webhook")
async def get_webhook_url(user=Depends(get_admin_user)):
    return {
        "url": app.state.WEBHOOK_URL,
    }


class UrlForm(BaseModel):
    url: str


@app.post("/api/webhook")
async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)):
    app.state.WEBHOOK_URL = form_data.url
    webui_app.state.WEBHOOK_URL = app.state.WEBHOOK_URL

    return {
        "url": app.state.WEBHOOK_URL,
    }


@app.get("/api/version")
async def get_app_version():
    return {
        "version": VERSION,
    }


@app.get("/api/changelog")
async def get_app_changelog():
    # Return only the first five changelog entries
    return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5}


@app.get("/api/version/updates")
async def get_app_latest_release_version():
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://api.github.com/repos/open-webui/open-webui/releases/latest"
            ) as response:
                response.raise_for_status()
                data = await response.json()
                # Release tags carry a leading "v" (e.g. v0.1.0); strip it before returning
                latest_version = data["tag_name"]

                return {"current": VERSION, "latest": latest_version[1:]}
    except aiohttp.ClientError as e:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        )


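# The PWA manifest and OpenSearch descriptor are generated server-side so installed-app
# metadata and browser search integration reflect this deployment's WEBUI_NAME and WEBUI_URL.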
@app.get("/manifest.json")
async def get_manifest_json():
    return {
        "name": WEBUI_NAME,
        "short_name": WEBUI_NAME,
        "start_url": "/",
        "display": "standalone",
        "background_color": "#343541",
        "theme_color": "#343541",
        "orientation": "portrait-primary",
        "icons": [{"src": "/static/logo.png", "type": "image/png", "sizes": "500x500"}],
    }


@app.get("/opensearch.xml")
async def get_opensearch_xml():
    xml_content = rf"""
    <OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/" xmlns:moz="http://www.mozilla.org/2006/browser/search/">
    <ShortName>{WEBUI_NAME}</ShortName>
    <Description>Search {WEBUI_NAME}</Description>
    <InputEncoding>UTF-8</InputEncoding>
    <Image width="16" height="16" type="image/x-icon">{WEBUI_URL}/favicon.png</Image>
    <Url type="text/html" method="get" template="{WEBUI_URL}/?q={"{searchTerms}"}"/>
    <moz:SearchForm>{WEBUI_URL}</moz:SearchForm>
    </OpenSearchDescription>
    """
    return Response(content=xml_content, media_type="application/xml")


app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")


if os.path.exists(FRONTEND_BUILD_DIR):
    app.mount(
        "/",
        SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
        name="spa-static-files",
    )
else:
    log.warning(
        f"Frontend build directory not found at '{FRONTEND_BUILD_DIR}'. Serving API only."
    )


@app.on_event("shutdown")
async def shutdown_event():
    if ENABLE_LITELLM:
        await shutdown_litellm_background()