2024-02-23 08:30:26 +00:00
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
import json
|
|
|
|
import markdown
|
2024-01-07 10:48:21 +00:00
|
|
|
import time
|
2024-02-24 08:21:53 +00:00
|
|
|
import os
|
|
|
|
import sys
|
2024-02-25 19:26:58 +00:00
|
|
|
import requests
|
2024-02-23 08:30:26 +00:00
|
|
|
|
2024-02-25 19:26:58 +00:00
|
|
|
from fastapi import FastAPI, Request, Depends, status
|
2023-11-15 00:28:51 +00:00
|
|
|
from fastapi.staticfiles import StaticFiles
|
|
|
|
from fastapi import HTTPException
|
|
|
|
from fastapi.middleware.wsgi import WSGIMiddleware
|
|
|
|
from fastapi.middleware.cors import CORSMiddleware
|
2023-11-19 00:47:12 +00:00
|
|
|
from starlette.exceptions import HTTPException as StarletteHTTPException
|
2024-03-09 06:34:47 +00:00
|
|
|
from starlette.middleware.base import BaseHTTPMiddleware
|
2023-11-15 00:28:51 +00:00
|
|
|
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2023-11-15 00:28:51 +00:00
|
|
|
from apps.ollama.main import app as ollama_app
|
2024-01-05 02:38:03 +00:00
|
|
|
from apps.openai.main import app as openai_app
|
2024-03-08 21:33:56 +00:00
|
|
|
from apps.litellm.main import app as litellm_app, startup as litellm_app_startup
|
2024-02-11 08:17:50 +00:00
|
|
|
from apps.audio.main import app as audio_app
|
2024-02-22 02:12:01 +00:00
|
|
|
from apps.images.main import app as images_app
|
|
|
|
from apps.rag.main import app as rag_app
|
2023-11-19 00:47:12 +00:00
|
|
|
from apps.web.main import app as webui_app
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2024-02-24 06:44:56 +00:00
|
|
|
|
2024-03-09 06:34:47 +00:00
|
|
|
from apps.rag.utils import query_doc, query_collection, rag_template
|
|
|
|
|
2024-02-24 01:12:19 +00:00
|
|
|
from config import WEBUI_NAME, ENV, VERSION, CHANGELOG, FRONTEND_BUILD_DIR
|
2024-02-25 19:26:58 +00:00
|
|
|
from constants import ERROR_MESSAGES
|
|
|
|
|
2023-11-15 00:28:51 +00:00
|
|
|
|
|
|
|
class SPAStaticFiles(StaticFiles):
    """Static file handler for a single-page app: unknown paths fall back to index.html."""

    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as err:
            # A 404 means the path is a client-side route, not a file on disk;
            # serve the SPA shell and let the frontend router resolve it.
            # Any other HTTP error propagates unchanged.
            if err.status_code != 404:
                raise err
            return await super().get_response("index.html", scope)
|
2024-01-07 10:48:21 +00:00
|
|
|
# Main FastAPI application; interactive API docs are exposed only in the dev environment.
app = FastAPI(docs_url="/docs" if ENV == "dev" else None, redoc_url=None)

# CORS: allow requests from any origin, with credentials, all methods and headers.
origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
2024-02-24 08:21:53 +00:00
|
|
|
@app.on_event("startup")
async def on_startup():
    # Run the LiteLLM sub-app's startup routine when the main app starts.
    await litellm_app_startup()
|
2024-03-09 06:34:47 +00:00
|
|
|
class RAGMiddleware(BaseHTTPMiddleware):
    """Intercept chat-completion POST requests and inject retrieved document
    context (RAG) into the most recent user message before forwarding.

    Requests that do not target a chat endpoint, or whose body has no
    ``"docs"`` key, pass through untouched.
    """

    async def dispatch(self, request: Request, call_next):
        # Only chat-completion POSTs can carry a "docs" payload.
        if request.method == "POST" and (
            "/api/chat" in request.url.path or "/chat/completions" in request.url.path
        ):
            # Read and parse the original JSON request body.
            body = await request.body()
            body_str = body.decode("utf-8")
            data = json.loads(body_str) if body_str else {}

            if "docs" in data:
                docs = data["docs"]

                # Locate the most recent user message; its text is the
                # retrieval query.
                last_user_message_idx = None
                for i in range(len(data["messages"]) - 1, -1, -1):
                    if data["messages"][i]["role"] == "user":
                        last_user_message_idx = i
                        break

                # FIX: previously a request carrying "docs" but no user
                # message crashed with TypeError on data["messages"][None].
                # Now retrieval is simply skipped in that case.
                if last_user_message_idx is not None:
                    user_message = data["messages"][last_user_message_idx]

                    # Extract the query text; message content is either a
                    # plain string or a list of typed content items.
                    if isinstance(user_message["content"], list):
                        content_type = "list"
                        query = ""
                        for content_item in user_message["content"]:
                            if content_item["type"] == "text":
                                query = content_item["text"]
                                break
                    elif isinstance(user_message["content"], str):
                        content_type = "text"
                        query = user_message["content"]
                    else:
                        # Fallback when content does not match expected types.
                        content_type = None
                        query = ""

                    # Query each referenced document or collection for
                    # relevant context.
                    relevant_contexts = []
                    for doc in docs:
                        context = None
                        if doc["type"] == "collection":
                            context = query_collection(
                                collection_names=doc["collection_names"],
                                query=query,
                                k=rag_app.state.TOP_K,
                                embedding_function=rag_app.state.sentence_transformer_ef,
                            )
                        else:
                            context = query_doc(
                                collection_name=doc["collection_name"],
                                query=query,
                                k=rag_app.state.TOP_K,
                                embedding_function=rag_app.state.sentence_transformer_ef,
                            )
                        relevant_contexts.append(context)

                    # Flatten every retrieved result into one context string.
                    context_string = ""
                    for context in relevant_contexts:
                        if context:
                            context_string += " ".join(context["documents"][0]) + "\n"

                    ra_content = rag_template(
                        template=rag_app.state.RAG_TEMPLATE,
                        context=context_string,
                        query=query,
                    )

                    # Rewrite the user message with the augmented content,
                    # preserving non-text items for list-style content.
                    if content_type == "list":
                        new_content = []
                        for content_item in user_message["content"]:
                            if content_item["type"] == "text":
                                # Replace the text item's content with ra_content.
                                new_content.append({"type": "text", "text": ra_content})
                            else:
                                # Keep other types of content as they are.
                                new_content.append(content_item)
                        new_user_message = {**user_message, "content": new_content}
                    else:
                        new_user_message = {
                            **user_message,
                            "content": ra_content,
                        }

                    data["messages"][last_user_message_idx] = new_user_message

                # Downstream apps do not understand "docs"; always strip it.
                del data["docs"]

                modified_body_bytes = json.dumps(data).encode("utf-8")

                # Replace the request with one whose receive channel replays
                # the modified body.
                scope = request.scope
                scope["body"] = modified_body_bytes
                request = Request(scope, receive=lambda: self._receive(modified_body_bytes))

        response = await call_next(request)
        return response

    async def _receive(self, body: bytes):
        # Minimal ASGI "receive" event carrying the (modified) request body.
        return {"type": "http.request", "body": body, "more_body": False}
|
|
|
|
|
|
|
# Apply RAG context injection to every incoming request.
app.add_middleware(RAGMiddleware)
|
|
|
|
2023-11-15 00:28:51 +00:00
|
|
|
@app.middleware("http")
async def check_url(request: Request, call_next):
    """Attach an X-Process-Time header (whole seconds) to every response."""
    started = int(time.time())
    response = await call_next(request)
    response.headers["X-Process-Time"] = str(int(time.time()) - started)
    return response
|
|
|
|
2023-11-19 00:47:12 +00:00
|
|
|
# Mount each sub-application under its API prefix.
app.mount("/api/v1", webui_app)

app.mount("/litellm/api", litellm_app)

app.mount("/ollama", ollama_app)

app.mount("/openai/api", openai_app)

app.mount("/images/api/v1", images_app)

app.mount("/audio/api/v1", audio_app)

app.mount("/rag/api/v1", rag_app)
|
2024-01-04 21:06:31 +00:00
|
|
|
|
2024-02-22 02:12:01 +00:00
|
|
|
@app.get("/api/config")
async def get_app_config():
    """Return frontend-facing application configuration and feature flags."""
    return {
        "status": True,
        "name": WEBUI_NAME,
        "version": VERSION,
        "images": images_app.state.ENABLED,
        "default_models": webui_app.state.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
    }
|
|
|
|
2024-03-05 08:59:35 +00:00
|
|
|
@app.get("/api/version")
async def get_app_version():
    """Return the backend version.

    Renamed from ``get_app_config``: the original name duplicated the
    ``/api/config`` handler defined earlier in this module, shadowing it at
    module level (only the route decorator consumes the name, so the rename
    is invisible to API callers).
    """
    return {
        "version": VERSION,
    }
|
|
|
|
2024-02-23 08:30:26 +00:00
|
|
|
@app.get("/api/changelog")
async def get_app_changelog():
    """Return the parsed CHANGELOG document."""
    return CHANGELOG
|
|
|
|
2024-02-25 19:26:58 +00:00
|
|
|
@app.get("/api/version/updates")
async def get_app_latest_release_version():
    """Check GitHub for the latest released version.

    Returns the current version and the latest release tag with its leading
    "v" stripped. Raises HTTP 503 when the GitHub API cannot be reached or
    answers with an error (commonly rate limiting — hence the detail message).
    """
    try:
        response = requests.get(
            "https://api.github.com/repos/open-webui/open-webui/releases/latest",
            # FIX: a missing timeout let this endpoint hang indefinitely when
            # the GitHub API was slow or unreachable.
            timeout=10,
        )
        response.raise_for_status()
        latest_version = response.json()["tag_name"]

        return {"current": VERSION, "latest": latest_version[1:]}
    except Exception as e:
        # Chain the original error so the failure cause is visible in logs.
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
        ) from e
|
|
|
|
2024-02-24 01:12:19 +00:00
|
|
|
# Static assets and cached files served directly from disk.
app.mount("/static", StaticFiles(directory="static"), name="static")

app.mount("/cache", StaticFiles(directory="data/cache"), name="cache")

# The built frontend is mounted last, at the root, so the API routes above
# keep precedence; SPAStaticFiles falls back to index.html for client-side
# routes (see the class definition earlier in this module).
app.mount(
    "/",
    SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True),
    name="spa-static-files",
)
|