2024-02-23 08:30:26 +00:00
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
import json
|
|
|
|
import markdown
|
2024-01-07 10:48:21 +00:00
|
|
|
import time
|
|
|
|
|
2024-02-23 08:30:26 +00:00
|
|
|
|
2023-11-15 00:28:51 +00:00
|
|
|
from fastapi import FastAPI, Request
|
|
|
|
from fastapi.staticfiles import StaticFiles
|
|
|
|
from fastapi import HTTPException
|
|
|
|
from fastapi.middleware.wsgi import WSGIMiddleware
|
|
|
|
from fastapi.middleware.cors import CORSMiddleware
|
2023-11-19 00:47:12 +00:00
|
|
|
from starlette.exceptions import HTTPException as StarletteHTTPException
|
2023-11-15 00:28:51 +00:00
|
|
|
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2023-11-15 00:28:51 +00:00
|
|
|
from apps.ollama.main import app as ollama_app
|
2024-01-05 02:38:03 +00:00
|
|
|
from apps.openai.main import app as openai_app
|
2024-02-11 08:17:50 +00:00
|
|
|
from apps.audio.main import app as audio_app
|
2024-02-22 02:12:01 +00:00
|
|
|
from apps.images.main import app as images_app
|
|
|
|
from apps.rag.main import app as rag_app
|
2024-01-05 02:38:03 +00:00
|
|
|
|
2023-11-19 00:47:12 +00:00
|
|
|
from apps.web.main import app as webui_app
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2024-02-23 08:30:26 +00:00
|
|
|
from config import ENV, VERSION, FRONTEND_BUILD_DIR
|
2023-11-15 00:28:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
class SPAStaticFiles(StaticFiles):
    """StaticFiles variant for single-page apps.

    Any path that is not a real static file (404) is answered with
    index.html so the client-side router can take over.
    """

    async def get_response(self, path: str, scope):
        try:
            return await super().get_response(path, scope)
        except (HTTPException, StarletteHTTPException) as ex:
            # Anything other than "not found" is a genuine error.
            if ex.status_code != 404:
                raise ex
            # Unknown path: serve the SPA shell instead.
            return await super().get_response("index.html", scope)
|
|
|
|
|
|
|
|
|
2024-01-07 10:48:21 +00:00
|
|
|
# Main application object. The interactive /docs UI is only exposed in
# dev builds; ReDoc is disabled entirely.
app = FastAPI(
    docs_url="/docs" if ENV == "dev" else None,
    redoc_url=None,
)

# CORS policy: any origin may call the API, with credentials, using any
# method and any header.
origins = ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
|
|
|
|
|
|
|
@app.middleware("http")
async def check_url(request: Request, call_next):
    """Time each request and report it in an X-Process-Time header.

    The header value is the elapsed wall-clock time in seconds.
    """
    # Bug fix: the previous int(time.time()) truncated to whole seconds,
    # so the header read "0" for virtually every request. Keep float
    # resolution so the measurement is actually useful.
    start_time = time.time()
    response = await call_next(request)
    process_time = time.time() - start_time
    response.headers["X-Process-Time"] = str(process_time)

    return response
|
|
|
|
|
|
|
|
|
2023-11-19 00:47:12 +00:00
|
|
|
# Mount every sub-application under its URL prefix. Order matters only
# in that these must all be mounted before the catch-all SPA mount.
for _prefix, _sub_app in (
    ("/api/v1", webui_app),
    ("/ollama/api", ollama_app),
    ("/openai/api", openai_app),
    ("/images/api/v1", images_app),
    ("/audio/api/v1", audio_app),
    ("/rag/api/v1", rag_app),
):
    app.mount(_prefix, _sub_app)
|
|
|
|
|
2024-01-04 21:06:31 +00:00
|
|
|
|
2024-02-22 02:12:01 +00:00
|
|
|
@app.get("/api/config")
async def get_app_config():
    """Return frontend-facing configuration and feature flags."""
    config = {
        "status": True,
        "version": VERSION,
        "images": images_app.state.ENABLED,
        "default_models": webui_app.state.DEFAULT_MODELS,
        "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS,
    }
    return config
|
|
|
|
|
|
|
|
|
2024-02-23 08:30:26 +00:00
|
|
|
# Function to parse each section
def parse_section(section):
    """Parse one changelog section (<ul> element) into a list of items.

    Each <li> becomes {"title": ..., "content": ..., "raw": ...} where
    title/content come from splitting the text on the first ": " and
    "raw" keeps the original HTML for the frontend.

    Returns an empty list when *section* is None — the caller passes
    ``current.find_next_sibling("ul")`` which is None whenever an <h3>
    has no following <ul>, and that previously raised AttributeError.
    """
    items = []
    if section is None:
        return items

    for li in section.find_all("li"):
        # Extract raw HTML string
        raw_html = str(li)

        # Extract text without HTML tags
        text = li.get_text(separator=" ", strip=True)

        # Split into "Title: content"; items without a colon are all content.
        parts = text.split(": ", 1)
        title = parts[0].strip() if len(parts) > 1 else ""
        content = parts[1].strip() if len(parts) > 1 else text

        items.append({"title": title, "content": content, "raw": raw_html})
    return items
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/api/changelog")
async def get_app_changelog():
    """Parse ../CHANGELOG.md into JSON.

    Shape: {version_number: {"date": ..., "<section>": [items...]}},
    where sections are the lower-cased <h3> titles ("added", "fixed", ...)
    and items come from parse_section().

    On failure returns {"error": ...} instead of raising.
    """
    try:
        with open("../CHANGELOG.md", "r") as file:
            changelog_content = file.read()

        # Convert markdown content to HTML, then parse it so we can walk
        # the heading structure.
        html_content = markdown.markdown(changelog_content)
        soup = BeautifulSoup(html_content, "html.parser")

        # Initialize JSON structure
        changelog_json = {}

        # Each <h2> heading looks like "[x.y.z] - YYYY-MM-DD".
        for version in soup.find_all("h2"):
            heading_parts = version.get_text().strip().split(" - ")
            version_number = heading_parts[0][1:-1]  # Remove brackets
            # Guard: a heading without " - <date>" previously raised
            # IndexError, which the broad except turned into an error
            # response for the whole changelog.
            date = heading_parts[1] if len(heading_parts) > 1 else ""

            version_data = {"date": date}

            # Walk siblings until the next version heading; each <h3> is
            # a section title (e.g. "Added", "Fixed") whose items live in
            # the following <ul>.
            current = version.find_next_sibling()
            while current and current.name != "h2":
                if current.name == "h3":
                    section_title = current.get_text().lower()
                    section_items = parse_section(current.find_next_sibling("ul"))
                    version_data[section_title] = section_items

                # Move to the next element
                current = current.find_next_sibling()

            changelog_json[version_number] = version_data

        # Return content as JSON-serializable dict
        return changelog_json

    except FileNotFoundError:
        # Bug fix: the message previously said "readme.md not found"
        # even though the file opened above is CHANGELOG.md.
        return {"error": "CHANGELOG.md not found"}
    except Exception as e:
        return {"error": f"An error occurred: {e}"}
|
|
|
|
|
|
|
|
|
2024-01-22 09:47:07 +00:00
|
|
|
# Catch-all SPA mount — must come after the API mounts above so they
# take precedence; unknown paths fall back to index.html.
app.mount("/", SPAStaticFiles(directory=FRONTEND_BUILD_DIR, html=True), name="spa-static-files")
|