2024-01-07 07:40:51 +00:00
|
|
|
from fastapi import (
|
|
|
|
FastAPI,
|
|
|
|
Depends,
|
|
|
|
HTTPException,
|
|
|
|
status,
|
|
|
|
UploadFile,
|
|
|
|
File,
|
|
|
|
Form,
|
|
|
|
)
|
2024-01-07 06:07:20 +00:00
|
|
|
from fastapi.middleware.cors import CORSMiddleware
|
2024-04-03 15:19:18 +00:00
|
|
|
import os, shutil, logging, re
|
2024-02-18 05:06:08 +00:00
|
|
|
|
|
|
|
from pathlib import Path
|
2024-02-01 21:35:41 +00:00
|
|
|
from typing import List
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2024-04-09 14:38:40 +00:00
|
|
|
from chromadb.utils.batch_utils import create_batches
|
2024-01-07 06:07:20 +00:00
|
|
|
|
2024-01-07 17:05:52 +00:00
|
|
|
from langchain_community.document_loaders import (
|
|
|
|
WebBaseLoader,
|
|
|
|
TextLoader,
|
|
|
|
PyPDFLoader,
|
|
|
|
CSVLoader,
|
2024-03-25 10:26:18 +00:00
|
|
|
BSHTMLLoader,
|
2024-01-07 21:56:01 +00:00
|
|
|
Docx2txtLoader,
|
2024-01-13 13:46:56 +00:00
|
|
|
UnstructuredEPubLoader,
|
2024-01-09 23:24:53 +00:00
|
|
|
UnstructuredWordDocumentLoader,
|
|
|
|
UnstructuredMarkdownLoader,
|
feat: Add RAG support for various programming languages
Enables RAG for golang, python, java, sh, bat, powershell, cmd, js, css, c/c++/c#, sql, logs, ini, perl, r, dart, docker, env, php, haskell, lua, conf, plsql, ruby, db2, scalla, bash, swift, vue, html, xml, and other arbitrary text files.
2024-01-17 07:09:47 +00:00
|
|
|
UnstructuredXMLLoader,
|
2024-01-19 17:48:04 +00:00
|
|
|
UnstructuredRSTLoader,
|
2024-01-23 21:03:22 +00:00
|
|
|
UnstructuredExcelLoader,
|
2024-05-02 00:17:00 +00:00
|
|
|
YoutubeLoader,
|
2024-01-07 17:05:52 +00:00
|
|
|
)
|
2024-01-07 06:59:22 +00:00
|
|
|
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
|
|
|
|
2024-04-29 19:55:17 +00:00
|
|
|
import validators
|
|
|
|
import urllib.parse
|
|
|
|
import socket
|
|
|
|
|
|
|
|
|
2024-01-07 06:59:22 +00:00
|
|
|
from pydantic import BaseModel
|
|
|
|
from typing import Optional
|
2024-02-18 05:06:08 +00:00
|
|
|
import mimetypes
|
2024-01-07 06:59:22 +00:00
|
|
|
import uuid
|
2024-02-19 19:05:45 +00:00
|
|
|
import json
|
|
|
|
|
2024-04-22 18:27:43 +00:00
|
|
|
import sentence_transformers
|
2024-01-07 06:59:22 +00:00
|
|
|
|
2024-02-18 05:06:08 +00:00
|
|
|
from apps.web.models.documents import (
|
|
|
|
Documents,
|
|
|
|
DocumentForm,
|
|
|
|
DocumentResponse,
|
|
|
|
)
|
2024-02-18 08:17:43 +00:00
|
|
|
|
2024-04-14 21:55:00 +00:00
|
|
|
from apps.rag.utils import (
|
2024-04-25 12:49:59 +00:00
|
|
|
get_model_path,
|
2024-04-27 19:38:50 +00:00
|
|
|
get_embedding_function,
|
|
|
|
query_doc,
|
|
|
|
query_doc_with_hybrid_search,
|
|
|
|
query_collection,
|
|
|
|
query_collection_with_hybrid_search,
|
2024-04-14 21:55:00 +00:00
|
|
|
)
|
2024-03-09 03:26:39 +00:00
|
|
|
|
2024-02-18 05:06:08 +00:00
|
|
|
from utils.misc import (
|
|
|
|
calculate_sha256,
|
|
|
|
calculate_sha256_string,
|
|
|
|
sanitize_filename,
|
|
|
|
extract_folders_after_data_docs,
|
|
|
|
)
|
2024-02-09 00:05:01 +00:00
|
|
|
from utils.utils import get_current_user, get_admin_user
|
2024-04-25 12:49:59 +00:00
|
|
|
|
2024-02-18 05:06:08 +00:00
|
|
|
from config import (
|
2024-03-20 23:11:36 +00:00
|
|
|
SRC_LOG_LEVELS,
|
2024-02-18 05:06:08 +00:00
|
|
|
UPLOAD_DIR,
|
|
|
|
DOCS_DIR,
|
2024-04-22 23:36:46 +00:00
|
|
|
RAG_TOP_K,
|
|
|
|
RAG_RELEVANCE_THRESHOLD,
|
2024-04-14 21:55:00 +00:00
|
|
|
RAG_EMBEDDING_ENGINE,
|
2024-02-18 19:16:10 +00:00
|
|
|
RAG_EMBEDDING_MODEL,
|
2024-04-25 12:49:59 +00:00
|
|
|
RAG_EMBEDDING_MODEL_AUTO_UPDATE,
|
2024-04-22 18:27:43 +00:00
|
|
|
RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
|
2024-04-26 18:41:39 +00:00
|
|
|
ENABLE_RAG_HYBRID_SEARCH,
|
2024-05-06 20:12:08 +00:00
|
|
|
ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
|
2024-04-22 20:49:58 +00:00
|
|
|
RAG_RERANKING_MODEL,
|
2024-04-27 22:54:26 +00:00
|
|
|
PDF_EXTRACT_IMAGES,
|
2024-04-25 12:49:59 +00:00
|
|
|
RAG_RERANKING_MODEL_AUTO_UPDATE,
|
2024-04-22 20:49:58 +00:00
|
|
|
RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
|
2024-04-20 20:15:59 +00:00
|
|
|
RAG_OPENAI_API_BASE_URL,
|
|
|
|
RAG_OPENAI_API_KEY,
|
2024-03-20 07:44:09 +00:00
|
|
|
DEVICE_TYPE,
|
2024-02-18 05:06:08 +00:00
|
|
|
CHROMA_CLIENT,
|
|
|
|
CHUNK_SIZE,
|
|
|
|
CHUNK_OVERLAP,
|
2024-02-18 06:41:03 +00:00
|
|
|
RAG_TEMPLATE,
|
2024-05-06 20:12:08 +00:00
|
|
|
ENABLE_RAG_LOCAL_WEB_FETCH,
|
2024-02-18 05:06:08 +00:00
|
|
|
)
|
2024-02-18 08:20:54 +00:00
|
|
|
|
2024-01-07 06:59:22 +00:00
|
|
|
from constants import ERROR_MESSAGES
|
|
|
|
|
2024-03-20 23:11:36 +00:00
|
|
|
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

# FastAPI sub-application for the RAG endpoints (presumably mounted by the
# main server — confirm against the top-level app).
app = FastAPI()

# Seed mutable runtime state from the static config module; the endpoints
# below read and update these values at runtime (e.g. /config/update).
app.state.TOP_K = RAG_TOP_K
app.state.RELEVANCE_THRESHOLD = RAG_RELEVANCE_THRESHOLD

app.state.ENABLE_RAG_HYBRID_SEARCH = ENABLE_RAG_HYBRID_SEARCH
app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
    ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
)

# Text-splitter parameters used when ingesting documents.
app.state.CHUNK_SIZE = CHUNK_SIZE
app.state.CHUNK_OVERLAP = CHUNK_OVERLAP

# Embedding / reranking model selection and the RAG prompt template.
app.state.RAG_EMBEDDING_ENGINE = RAG_EMBEDDING_ENGINE
app.state.RAG_EMBEDDING_MODEL = RAG_EMBEDDING_MODEL
app.state.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
app.state.RAG_TEMPLATE = RAG_TEMPLATE

# OpenAI-compatible endpoint credentials used by remote embedding engines.
app.state.OPENAI_API_BASE_URL = RAG_OPENAI_API_BASE_URL
app.state.OPENAI_API_KEY = RAG_OPENAI_API_KEY

app.state.PDF_EXTRACT_IMAGES = PDF_EXTRACT_IMAGES
|
2024-04-14 21:55:00 +00:00
|
|
|
|
2024-04-22 20:49:58 +00:00
|
|
|
|
2024-04-25 12:49:59 +00:00
|
|
|
def update_embedding_model(
    embedding_model: str,
    update_model: bool = False,
):
    """(Re)load the local SentenceTransformer embedding model into app state.

    A model is loaded only when a model name is given AND the embedding
    engine is the local default (empty string); in every other case the
    cached model is cleared so remote engines are used instead.
    """
    use_local_model = bool(embedding_model) and app.state.RAG_EMBEDDING_ENGINE == ""
    if not use_local_model:
        app.state.sentence_transformer_ef = None
        return

    app.state.sentence_transformer_ef = sentence_transformers.SentenceTransformer(
        get_model_path(embedding_model, update_model),
        device=DEVICE_TYPE,
        trust_remote_code=RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def update_reranking_model(
    reranking_model: str,
    update_model: bool = False,
):
    """(Re)load the CrossEncoder reranking model into app state, or clear it.

    An empty/falsy model name disables reranking by clearing the cached model.
    """
    if not reranking_model:
        app.state.sentence_transformer_rf = None
        return

    app.state.sentence_transformer_rf = sentence_transformers.CrossEncoder(
        get_model_path(reranking_model, update_model),
        device=DEVICE_TYPE,
        trust_remote_code=RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
    )
|
|
|
|
|
|
|
|
|
|
|
|
# Load (or clear) the local embedding / reranking models at import time,
# then build the embedding function from the resulting runtime state.
update_embedding_model(
    app.state.RAG_EMBEDDING_MODEL,
    RAG_EMBEDDING_MODEL_AUTO_UPDATE,
)

update_reranking_model(
    app.state.RAG_RERANKING_MODEL,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
)

app.state.EMBEDDING_FUNCTION = get_embedding_function(
    app.state.RAG_EMBEDDING_ENGINE,
    app.state.RAG_EMBEDDING_MODEL,
    app.state.sentence_transformer_ef,
    app.state.OPENAI_API_KEY,
    app.state.OPENAI_API_BASE_URL,
)
|
|
|
|
|
2024-01-07 06:07:20 +00:00
|
|
|
origins = ["*"]

# NOTE(review): wildcard origins combined with allow_credentials=True is a
# very permissive CORS policy — confirm this is intended for deployments.
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
|
|
|
|
2024-01-07 07:40:51 +00:00
|
|
|
class CollectionNameForm(BaseModel):
    """Base request body carrying a target vector-DB collection name."""

    # NOTE(review): the default "test" looks like a leftover placeholder —
    # callers appear to pass an explicit name or "" (to derive one); confirm.
    collection_name: Optional[str] = "test"
|
|
|
|
|
|
|
|
|
2024-05-02 00:17:00 +00:00
|
|
|
class UrlForm(CollectionNameForm):
    """Request body for URL-based ingestion endpoints (/web, /youtube)."""

    url: str
|
|
|
|
|
2024-03-26 06:47:08 +00:00
|
|
|
|
2024-01-07 06:07:20 +00:00
|
|
|
@app.get("/")
async def get_status():
    """Report service liveness plus the active chunking/embedding settings."""
    state = app.state
    return {
        "status": True,
        "chunk_size": state.CHUNK_SIZE,
        "chunk_overlap": state.CHUNK_OVERLAP,
        "template": state.RAG_TEMPLATE,
        "embedding_engine": state.RAG_EMBEDDING_ENGINE,
        "embedding_model": state.RAG_EMBEDDING_MODEL,
        "reranking_model": state.RAG_RERANKING_MODEL,
    }
|
|
|
|
|
|
|
|
|
2024-04-14 22:31:40 +00:00
|
|
|
@app.get("/embedding")
async def get_embedding_config(user=Depends(get_admin_user)):
    """Return the embedding engine/model and the OpenAI-compatible endpoint config (admin-only)."""
    openai_config = {
        "url": app.state.OPENAI_API_BASE_URL,
        "key": app.state.OPENAI_API_KEY,
    }
    return {
        "status": True,
        "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
        "embedding_model": app.state.RAG_EMBEDDING_MODEL,
        "openai_config": openai_config,
    }
|
|
|
|
|
|
|
|
|
2024-04-22 20:49:58 +00:00
|
|
|
@app.get("/reranking")
async def get_reraanking_config(user=Depends(get_admin_user)):
    """Return the currently configured reranking model (admin-only).

    NOTE(review): the function name carries a historical typo ("reraanking");
    it is kept unchanged so references to this operation stay stable.
    """
    return {
        "status": True,
        "reranking_model": app.state.RAG_RERANKING_MODEL,
    }
|
|
|
|
|
|
|
|
|
2024-04-14 23:15:39 +00:00
|
|
|
class OpenAIConfigForm(BaseModel):
    """OpenAI-compatible endpoint credentials used by remote embedding engines."""

    url: str
    key: str
|
|
|
|
|
|
|
|
|
2024-02-19 19:05:45 +00:00
|
|
|
class EmbeddingModelUpdateForm(BaseModel):
    """Payload for /embedding/update."""

    # Only consulted when embedding_engine is "ollama" or "openai".
    openai_config: Optional[OpenAIConfigForm] = None
    embedding_engine: str
    embedding_model: str
|
|
|
|
|
|
|
|
|
2024-04-14 22:31:40 +00:00
|
|
|
@app.post("/embedding/update")
async def update_embedding_config(
    form_data: EmbeddingModelUpdateForm, user=Depends(get_admin_user)
):
    """Switch the embedding engine/model at runtime and rebuild the embedding function.

    Admin-only. Returns the resulting embedding configuration; raises a 500
    HTTPException when the model reload fails.
    """
    log.info(
        f"Updating embedding model: {app.state.RAG_EMBEDDING_MODEL} to {form_data.embedding_model}"
    )
    try:
        app.state.RAG_EMBEDDING_ENGINE = form_data.embedding_engine
        app.state.RAG_EMBEDDING_MODEL = form_data.embedding_model

        if app.state.RAG_EMBEDDING_ENGINE in ["ollama", "openai"]:
            # `is not None` is the idiomatic identity test (PEP 8) for an
            # omitted optional field; behavior matches the original `!= None`.
            if form_data.openai_config is not None:
                app.state.OPENAI_API_BASE_URL = form_data.openai_config.url
                app.state.OPENAI_API_KEY = form_data.openai_config.key

        # Force update_model=True so a newly selected local model is fetched.
        update_embedding_model(app.state.RAG_EMBEDDING_MODEL, True)

        app.state.EMBEDDING_FUNCTION = get_embedding_function(
            app.state.RAG_EMBEDDING_ENGINE,
            app.state.RAG_EMBEDDING_MODEL,
            app.state.sentence_transformer_ef,
            app.state.OPENAI_API_KEY,
            app.state.OPENAI_API_BASE_URL,
        )

        return {
            "status": True,
            "embedding_engine": app.state.RAG_EMBEDDING_ENGINE,
            "embedding_model": app.state.RAG_EMBEDDING_MODEL,
            "openai_config": {
                "url": app.state.OPENAI_API_BASE_URL,
                "key": app.state.OPENAI_API_KEY,
            },
        }
    except Exception as e:
        log.exception(f"Problem updating embedding model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
2024-02-18 06:29:52 +00:00
|
|
|
|
|
|
|
|
2024-04-22 20:49:58 +00:00
|
|
|
class RerankingModelUpdateForm(BaseModel):
    """Payload for /reranking/update."""

    reranking_model: str
|
2024-04-22 23:36:46 +00:00
|
|
|
|
2024-04-22 20:49:58 +00:00
|
|
|
|
|
|
|
@app.post("/reranking/update")
async def update_reranking_config(
    form_data: RerankingModelUpdateForm, user=Depends(get_admin_user)
):
    """Swap the reranking model at runtime (admin-only); 500 on load failure."""
    log.info(
        f"Updating reranking model: {app.state.RAG_RERANKING_MODEL} to {form_data.reranking_model}"
    )
    try:
        app.state.RAG_RERANKING_MODEL = form_data.reranking_model
        # Force update_model=True so the newly selected model is fetched if missing.
        update_reranking_model(app.state.RAG_RERANKING_MODEL, True)

        response = {
            "status": True,
            "reranking_model": app.state.RAG_RERANKING_MODEL,
        }
        return response
    except Exception as e:
        log.exception(f"Problem updating reranking model: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
|
|
|
|
|
|
|
|
2024-03-10 20:32:34 +00:00
|
|
|
@app.get("/config")
async def get_rag_config(user=Depends(get_admin_user)):
    """Return PDF image extraction, chunking, and web-loader SSL settings (admin-only)."""
    chunk_settings = {
        "chunk_size": app.state.CHUNK_SIZE,
        "chunk_overlap": app.state.CHUNK_OVERLAP,
    }
    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": chunk_settings,
        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    }
|
|
|
|
|
|
|
|
|
|
|
|
class ChunkParamUpdateForm(BaseModel):
    """Text-splitter chunking parameters."""

    chunk_size: int
    chunk_overlap: int
|
|
|
|
|
|
|
|
|
2024-03-10 20:32:34 +00:00
|
|
|
class ConfigUpdateForm(BaseModel):
    """Partial RAG config update; None fields are left unchanged by the handler."""

    pdf_extract_images: Optional[bool] = None
    chunk: Optional[ChunkParamUpdateForm] = None
    web_loader_ssl_verification: Optional[bool] = None
|
2024-03-10 20:32:34 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.post("/config/update")
async def update_rag_config(form_data: ConfigUpdateForm, user=Depends(get_admin_user)):
    """Apply the RAG config fields that were provided; leave the rest unchanged.

    Admin-only. Returns the resulting configuration.
    """
    # `is not None` (idiomatic identity test, PEP 8) distinguishes "field
    # omitted" from an explicit False/0; behavior matches the original's
    # `!= None` conditional expressions, expressed as plain guard `if`s.
    if form_data.pdf_extract_images is not None:
        app.state.PDF_EXTRACT_IMAGES = form_data.pdf_extract_images

    if form_data.chunk is not None:
        app.state.CHUNK_SIZE = form_data.chunk.chunk_size
        app.state.CHUNK_OVERLAP = form_data.chunk.chunk_overlap

    if form_data.web_loader_ssl_verification is not None:
        app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION = (
            form_data.web_loader_ssl_verification
        )

    return {
        "status": True,
        "pdf_extract_images": app.state.PDF_EXTRACT_IMAGES,
        "chunk": {
            "chunk_size": app.state.CHUNK_SIZE,
            "chunk_overlap": app.state.CHUNK_OVERLAP,
        },
        "web_loader_ssl_verification": app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION,
    }
|
2024-01-07 06:59:22 +00:00
|
|
|
|
|
|
|
|
2024-02-18 06:41:03 +00:00
|
|
|
@app.get("/template")
async def get_rag_template(user=Depends(get_current_user)):
    """Return the prompt template used for RAG context injection."""
    return {"status": True, "template": app.state.RAG_TEMPLATE}
|
|
|
|
|
|
|
|
|
2024-03-03 02:56:57 +00:00
|
|
|
@app.get("/query/settings")
async def get_query_settings(user=Depends(get_admin_user)):
    """Return retrieval settings: template, top-k, relevance threshold, hybrid flag."""
    state = app.state
    return {
        "status": True,
        "template": state.RAG_TEMPLATE,
        "k": state.TOP_K,
        "r": state.RELEVANCE_THRESHOLD,
        "hybrid": state.ENABLE_RAG_HYBRID_SEARCH,
    }
|
2024-02-18 06:41:03 +00:00
|
|
|
|
|
|
|
|
2024-03-03 02:56:57 +00:00
|
|
|
class QuerySettingsForm(BaseModel):
    """Payload for /query/settings/update; falsy/omitted fields reset to defaults."""

    k: Optional[int] = None
    r: Optional[float] = None
    template: Optional[str] = None
    hybrid: Optional[bool] = None
|
2024-03-03 02:56:57 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.post("/query/settings/update")
async def update_query_settings(
    form_data: QuerySettingsForm, user=Depends(get_admin_user)
):
    """Update retrieval settings; falsy/omitted fields reset to their defaults.

    NOTE: fields are tested for truthiness (matching the original endpoint's
    semantics), so e.g. template="" also resets to the default.
    """
    app.state.RAG_TEMPLATE = form_data.template if form_data.template else RAG_TEMPLATE
    # CONSISTENCY FIX: reset to the configured default (RAG_TOP_K) instead of
    # a hard-coded 4, matching the startup value `app.state.TOP_K = RAG_TOP_K`.
    app.state.TOP_K = form_data.k if form_data.k else RAG_TOP_K
    app.state.RELEVANCE_THRESHOLD = form_data.r if form_data.r else 0.0
    app.state.ENABLE_RAG_HYBRID_SEARCH = form_data.hybrid if form_data.hybrid else False
    return {
        "status": True,
        "template": app.state.RAG_TEMPLATE,
        "k": app.state.TOP_K,
        "r": app.state.RELEVANCE_THRESHOLD,
        "hybrid": app.state.ENABLE_RAG_HYBRID_SEARCH,
    }
|
2024-01-07 06:59:22 +00:00
|
|
|
|
|
|
|
|
2024-02-03 23:57:06 +00:00
|
|
|
class QueryDocForm(BaseModel):
    """Payload for /query/doc."""

    collection_name: str
    query: str
    # None falls back to the app-level defaults (TOP_K / RELEVANCE_THRESHOLD).
    k: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None
|
2024-02-01 21:35:41 +00:00
|
|
|
|
|
|
|
|
2024-02-03 23:57:06 +00:00
|
|
|
@app.post("/query/doc")
def query_doc_handler(
    form_data: QueryDocForm,
    user=Depends(get_current_user),
):
    """Query a single collection; uses hybrid search with reranking when enabled.

    Maps any retrieval failure to a 400 HTTPException.
    """
    try:
        top_k = form_data.k if form_data.k else app.state.TOP_K

        if not app.state.ENABLE_RAG_HYBRID_SEARCH:
            return query_doc(
                collection_name=form_data.collection_name,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=top_k,
            )

        threshold = form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD
        return query_doc_with_hybrid_search(
            collection_name=form_data.collection_name,
            query=form_data.query,
            embedding_function=app.state.EMBEDDING_FUNCTION,
            k=top_k,
            reranking_function=app.state.sentence_transformer_rf,
            r=threshold,
        )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
2024-01-07 06:59:22 +00:00
|
|
|
|
|
|
|
|
2024-02-01 21:35:41 +00:00
|
|
|
class QueryCollectionsForm(BaseModel):
    """Payload for /query/collection (multi-collection query)."""

    collection_names: List[str]
    query: str
    # None falls back to the app-level defaults (TOP_K / RELEVANCE_THRESHOLD).
    k: Optional[int] = None
    r: Optional[float] = None
    hybrid: Optional[bool] = None
|
2024-02-01 21:35:41 +00:00
|
|
|
|
|
|
|
|
2024-02-03 23:57:06 +00:00
|
|
|
@app.post("/query/collection")
def query_collection_handler(
    form_data: QueryCollectionsForm,
    user=Depends(get_current_user),
):
    """Query several collections at once; hybrid search with reranking when enabled.

    Maps any retrieval failure to a 400 HTTPException.
    """
    try:
        top_k = form_data.k if form_data.k else app.state.TOP_K

        if not app.state.ENABLE_RAG_HYBRID_SEARCH:
            return query_collection(
                collection_names=form_data.collection_names,
                query=form_data.query,
                embedding_function=app.state.EMBEDDING_FUNCTION,
                k=top_k,
            )

        threshold = form_data.r if form_data.r else app.state.RELEVANCE_THRESHOLD
        return query_collection_with_hybrid_search(
            collection_names=form_data.collection_names,
            query=form_data.query,
            embedding_function=app.state.EMBEDDING_FUNCTION,
            k=top_k,
            reranking_function=app.state.sentence_transformer_rf,
            r=threshold,
        )
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
2024-02-01 21:35:41 +00:00
|
|
|
|
|
|
|
|
2024-05-02 00:17:00 +00:00
|
|
|
@app.post("/youtube")
def store_youtube_video(form_data: UrlForm, user=Depends(get_current_user)):
    """Fetch a YouTube video's transcript and store it in the vector DB.

    Maps any failure to a 400 HTTPException.
    """
    try:
        yt_loader = YoutubeLoader.from_youtube_url(form_data.url, add_video_info=False)
        docs = yt_loader.load()

        target_collection = form_data.collection_name
        if target_collection == "":
            # Derive a deterministic name from the URL; 63 chars is presumably
            # the vector DB's collection-name limit — confirm.
            target_collection = calculate_sha256_string(form_data.url)[:63]

        store_data_in_vector_db(docs, target_collection, overwrite=True)

        return {
            "status": True,
            "collection_name": target_collection,
            "filename": form_data.url,
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
|
|
|
|
|
|
|
|
2024-01-07 06:59:22 +00:00
|
|
|
@app.post("/web")
def store_web(form_data: UrlForm, user=Depends(get_current_user)):
    """Fetch a web page and store its content in the vector DB.

    Example target: "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm".
    Maps any failure to a 400 HTTPException.
    """
    try:
        web_loader = get_web_loader(
            form_data.url, verify_ssl=app.state.ENABLE_RAG_WEB_LOADER_SSL_VERIFICATION
        )
        docs = web_loader.load()

        target_collection = form_data.collection_name
        if target_collection == "":
            # Derive a deterministic name from the URL; 63 chars is presumably
            # the vector DB's collection-name limit — confirm.
            target_collection = calculate_sha256_string(form_data.url)[:63]

        store_data_in_vector_db(docs, target_collection, overwrite=True)

        return {
            "status": True,
            "collection_name": target_collection,
            "filename": form_data.url,
        }
    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.DEFAULT(e),
        )
|
|
|
|
|
|
|
|
|
2024-05-06 21:50:55 +00:00
|
|
|
def get_web_loader(url: str, verify_ssl: bool = True):
    """Build a WebBaseLoader for *url* after validity and SSRF checks.

    Raises ValueError(ERROR_MESSAGES.INVALID_URL) for malformed URLs and,
    when local web fetch is disabled, for URLs resolving to private addresses.
    """
    # Check if the URL is valid
    if isinstance(validators.url(url), validators.ValidationError):
        raise ValueError(ERROR_MESSAGES.INVALID_URL)
    if not ENABLE_RAG_LOCAL_WEB_FETCH:
        # Local web fetch is disabled, filter out any URLs that resolve to private IP addresses
        parsed_url = urllib.parse.urlparse(url)
        # Get IPv4 and IPv6 addresses
        ipv4_addresses, ipv6_addresses = resolve_hostname(parsed_url.hostname)
        # Check if any of the resolved addresses are private
        # This is technically still vulnerable to DNS rebinding attacks, as we don't control WebBaseLoader
        for ip in ipv4_addresses:
            if validators.ipv4(ip, private=True):
                raise ValueError(ERROR_MESSAGES.INVALID_URL)
        for ip in ipv6_addresses:
            if validators.ipv6(ip, private=True):
                raise ValueError(ERROR_MESSAGES.INVALID_URL)
    return WebBaseLoader(url, verify_ssl=verify_ssl)
|
2024-04-29 19:55:17 +00:00
|
|
|
|
|
|
|
|
|
|
|
def resolve_hostname(hostname):
    """Resolve *hostname* and return ``(ipv4_addresses, ipv6_addresses)``.

    Address order follows ``socket.getaddrinfo``; duplicates are kept.
    """
    addr_info = socket.getaddrinfo(hostname, None)

    ipv4 = []
    ipv6 = []
    # info is (family, type, proto, canonname, sockaddr); sockaddr[0] is the IP.
    for family, _type, _proto, _canon, sockaddr in addr_info:
        if family == socket.AF_INET:
            ipv4.append(sockaddr[0])
        elif family == socket.AF_INET6:
            ipv6.append(sockaddr[0])

    return ipv4, ipv6
|
|
|
|
|
|
|
|
|
2024-03-24 07:40:27 +00:00
|
|
|
def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool:
    """Split loaded documents into chunks and store them under *collection_name*.

    Raises ValueError(ERROR_MESSAGES.EMPTY_CONTENT) when splitting produces no
    chunks. Returns the storage success flag from store_docs_in_vector_db.
    """
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )

    docs = text_splitter.split_documents(data)

    if len(docs) > 0:
        log.info(f"store_data_in_vector_db {docs}")
        # BUGFIX: the original returned a 2-tuple `(bool, None)` despite the
        # `-> bool` annotation; a tuple is always truthy, so callers checking
        # `if result:` treated failed storage as success. Return the flag only.
        return store_docs_in_vector_db(docs, collection_name, overwrite)
    else:
        raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)
|
2024-03-24 07:40:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
def store_text_in_vector_db(
    text, metadata, collection_name, overwrite: bool = False
) -> bool:
    """Chunk a raw text string and store it under *collection_name*.

    *metadata* is attached to every chunk. Returns the storage success flag.
    """
    splitter = RecursiveCharacterTextSplitter(
        chunk_size=app.state.CHUNK_SIZE,
        chunk_overlap=app.state.CHUNK_OVERLAP,
        add_start_index=True,
    )
    chunks = splitter.create_documents([text], metadatas=[metadata])
    return store_docs_in_vector_db(chunks, collection_name, overwrite)
|
|
|
|
|
|
|
|
|
2024-04-14 22:47:45 +00:00
|
|
|
def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> bool:
    """Embed *docs* and persist them as a Chroma collection.

    Returns True on success (or when the collection already exists),
    False on any other failure. Errors are logged, never raised.
    """
    log.info(f"store_docs_in_vector_db {docs} {collection_name}")

    texts = [doc.page_content for doc in docs]
    metadatas = [doc.metadata for doc in docs]

    try:
        if overwrite:
            # Drop a pre-existing collection of the same name before recreating it.
            for collection in CHROMA_CLIENT.list_collections():
                if collection_name == collection.name:
                    log.info(f"deleting existing collection {collection_name}")
                    CHROMA_CLIENT.delete_collection(name=collection_name)

        collection = CHROMA_CLIENT.create_collection(name=collection_name)

        # Build the embedding function from the current runtime configuration.
        embedding_func = get_embedding_function(
            app.state.RAG_EMBEDDING_ENGINE,
            app.state.RAG_EMBEDDING_MODEL,
            app.state.sentence_transformer_ef,
            app.state.OPENAI_API_KEY,
            app.state.OPENAI_API_BASE_URL,
        )

        # Newlines are flattened only for embedding; the original text is stored as-is.
        embedding_texts = list(map(lambda x: x.replace("\n", " "), texts))
        embeddings = embedding_func(embedding_texts)

        # create_batches presumably splits the payload to respect Chroma's
        # maximum batch size — confirm against chromadb.utils.batch_utils.
        for batch in create_batches(
            api=CHROMA_CLIENT,
            ids=[str(uuid.uuid1()) for _ in texts],
            metadatas=metadatas,
            embeddings=embeddings,
            documents=texts,
        ):
            collection.add(*batch)

        return True
    except Exception as e:
        log.exception(e)
        # Compared by class name to avoid importing Chroma's exception type here;
        # an already-existing collection is treated as success.
        if e.__class__.__name__ == "UniqueConstraintError":
            return True

        return False
|
|
|
|
|
|
|
|
|
2024-02-18 05:06:08 +00:00
|
|
|
def get_loader(filename: str, file_content_type: str, file_path: str):
    """Choose a document loader for an uploaded file.

    Dispatch is by file extension first, then MIME type. Returns a tuple
    ``(loader, known_type)``; ``known_type`` is False when nothing matched and
    the file is read with a plain text loader as a best-effort fallback.
    """
    file_ext = filename.split(".")[-1].lower()
    known_type = True

    # Source-code / plain-text extensions handled by TextLoader.
    # A set gives O(1) membership checks (the original used a list).
    known_source_ext = {
        "go",
        "py",
        "java",
        "sh",
        "bat",
        "ps1",
        "cmd",
        "js",
        "ts",
        "css",
        "cpp",
        "hpp",
        "h",
        "c",
        "cs",
        "sql",
        "log",
        "ini",
        "pl",
        "pm",
        "r",
        "dart",
        "dockerfile",
        "env",
        "php",
        "hs",
        "hsc",
        "lua",
        "nginxconf",
        "conf",
        "m",
        "mm",
        "plsql",
        "perl",
        "rb",
        "rs",
        "db2",
        "scala",
        "bash",
        "swift",
        "vue",
        "svelte",
    }

    if file_ext == "pdf":
        loader = PyPDFLoader(file_path, extract_images=app.state.PDF_EXTRACT_IMAGES)
    elif file_ext == "csv":
        loader = CSVLoader(file_path)
    elif file_ext == "rst":
        loader = UnstructuredRSTLoader(file_path, mode="elements")
    elif file_ext == "xml":
        loader = UnstructuredXMLLoader(file_path)
    elif file_ext in ["htm", "html"]:
        loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
    elif file_ext == "md":
        loader = UnstructuredMarkdownLoader(file_path)
    elif file_content_type == "application/epub+zip":
        loader = UnstructuredEPubLoader(file_path)
    elif (
        file_content_type
        == "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
        or file_ext in ["doc", "docx"]
    ):
        loader = Docx2txtLoader(file_path)
    elif file_content_type in [
        "application/vnd.ms-excel",
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    ] or file_ext in ["xls", "xlsx"]:
        loader = UnstructuredExcelLoader(file_path)
    elif file_ext in known_source_ext or (
        file_content_type and file_content_type.find("text/") >= 0
    ):
        loader = TextLoader(file_path, autodetect_encoding=True)
    else:
        # Unknown format: still attempt a plain-text read, but flag it.
        loader = TextLoader(file_path, autodetect_encoding=True)
        known_type = False

    return loader, known_type
|
|
|
|
|
|
|
|
|
2024-01-07 06:59:22 +00:00
|
|
|
@app.post("/doc")
def store_doc(
    collection_name: Optional[str] = Form(None),
    file: UploadFile = File(...),
    user=Depends(get_current_user),
):
    """Store an uploaded document in the vector DB.

    The file is saved under UPLOAD_DIR, loaded with a format-appropriate
    loader, and its chunks are stored in the collection `collection_name`
    (defaults to the SHA-256 of the file contents, truncated to Chroma's
    63-char collection-name limit).

    Returns a dict with the collection name, sanitized filename, and
    whether the file type was recognized.  Raises HTTP 400 on load errors
    (with a dedicated message when pandoc is missing) and HTTP 500 when
    vector-DB storage fails.
    """
    # "https://www.gutenberg.org/files/1727/1727-h/1727-h.htm"
    log.info(f"file.content_type: {file.content_type}")
    try:
        unsanitized_filename = file.filename
        # Strip any directory components to prevent path traversal.
        filename = os.path.basename(unsanitized_filename)

        # BUGFIX: was f"{UPLOAD_DIR}/(unknown)" — every upload was written
        # to the same literal "(unknown)" file instead of its own name.
        file_path = f"{UPLOAD_DIR}/{filename}"

        contents = file.file.read()
        with open(file_path, "wb") as f:
            f.write(contents)

        if collection_name is None:
            # Deterministic default: hash of the file contents,
            # truncated to Chroma's 63-character collection-name limit.
            with open(file_path, "rb") as f:
                collection_name = calculate_sha256(f)[:63]

        loader, known_type = get_loader(filename, file.content_type, file_path)
        data = loader.load()

        try:
            result = store_data_in_vector_db(data, collection_name)

            if result:
                return {
                    "status": True,
                    "collection_name": collection_name,
                    "filename": filename,
                    "known_type": known_type,
                }
        except Exception as e:
            # str(e): HTTPException detail must be JSON-serializable;
            # passing the exception object itself is not.
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail=str(e),
            )
    except Exception as e:
        log.exception(e)
        if "No pandoc was found" in str(e):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.PANDOC_NOT_INSTALLED,
            )
        else:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ERROR_MESSAGES.DEFAULT(e),
            )
|
2024-01-07 06:59:22 +00:00
|
|
|
|
|
|
|
|
2024-03-24 07:40:27 +00:00
|
|
|
class TextRAGForm(BaseModel):
    """Request body for POST /text: raw text to index in the vector DB."""

    # Display name stored as metadata alongside the text.
    name: str
    # The raw text content to embed and store.
    content: str
    # Target collection; when None the server derives one from the content hash.
    collection_name: Optional[str] = None
|
|
|
|
|
|
|
|
|
|
|
|
@app.post("/text")
def store_text(
    form_data: TextRAGForm,
    user=Depends(get_current_user),
):
    """Store raw text in the vector DB.

    The text is embedded into `form_data.collection_name`; when no
    collection name is given, a deterministic one is derived from the
    SHA-256 of the content so identical text maps to the same collection.

    Returns the collection name on success; raises HTTP 500 when the
    vector-DB write fails.
    """
    collection_name = form_data.collection_name
    if collection_name is None:
        collection_name = calculate_sha256_string(form_data.content)

    result = store_text_in_vector_db(
        form_data.content,
        metadata={"name": form_data.name, "created_by": user.id},
        collection_name=collection_name,
    )

    if result:
        return {"status": True, "collection_name": collection_name}
    else:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ERROR_MESSAGES.DEFAULT(),
        )
|
|
|
|
|
|
|
|
|
2024-02-18 05:06:08 +00:00
|
|
|
@app.get("/scan")
def scan_docs_dir(user=Depends(get_admin_user)):
    """Admin-only: walk DOCS_DIR recursively and index every readable file.

    For each non-hidden file: folder names under the docs root become tags,
    the content-hash becomes the collection name, and a Documents record is
    created when one does not already exist.  Failures on individual files
    are logged and skipped so one bad file cannot abort the scan.
    Always returns True.
    """
    for path in Path(DOCS_DIR).rglob("./**/*"):
        try:
            if path.is_file() and not path.name.startswith("."):
                tags = extract_folders_after_data_docs(path)
                filename = path.name
                # guess_type returns (type, encoding); only the type is used.
                file_content_type = mimetypes.guess_type(path)

                # `with` guarantees the handle closes even if hashing fails.
                with open(path, "rb") as f:
                    # 63 chars: Chroma's collection-name length limit.
                    collection_name = calculate_sha256(f)[:63]

                loader, known_type = get_loader(
                    filename, file_content_type[0], str(path)
                )
                data = loader.load()

                try:
                    result = store_data_in_vector_db(data, collection_name)

                    if result:
                        sanitized_filename = sanitize_filename(filename)
                        doc = Documents.get_doc_by_name(sanitized_filename)

                        if doc is None:
                            doc = Documents.insert_new_doc(
                                user.id,
                                DocumentForm(
                                    **{
                                        "name": sanitized_filename,
                                        "title": filename,
                                        "collection_name": collection_name,
                                        "filename": filename,
                                        "content": (
                                            json.dumps(
                                                {
                                                    "tags": [
                                                        {"name": name}
                                                        for name in tags
                                                    ]
                                                }
                                            )
                                            if tags
                                            else "{}"
                                        ),
                                    }
                                ),
                            )
                except Exception as e:
                    # Best-effort per file: log and continue the scan.
                    log.exception(e)

        except Exception as e:
            log.exception(e)

    return True
|
|
|
|
|
|
|
|
|
2024-01-07 09:40:36 +00:00
|
|
|
@app.get("/reset/db")
def reset_vector_db(user=Depends(get_admin_user)):
    """Admin-only: wipe every collection from the Chroma vector database."""
    CHROMA_CLIENT.reset()
|
2024-01-07 09:40:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
@app.get("/reset")
def reset(user=Depends(get_admin_user)) -> bool:
    """Admin-only: delete all uploaded files and reset the vector database.

    Removes every file, symlink, and subdirectory under UPLOAD_DIR, then
    resets the Chroma client.  Individual deletion failures are logged and
    skipped.  Always returns True.
    """
    folder = UPLOAD_DIR  # was f"{UPLOAD_DIR}" — redundant f-string wrap
    for filename in os.listdir(folder):
        file_path = os.path.join(folder, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception as e:
            log.error("Failed to delete %s. Reason: %s" % (file_path, e))

    try:
        CHROMA_CLIENT.reset()
    except Exception as e:
        log.exception(e)

    return True
|