Mirror of https://github.com/open-webui/open-webui (synced 2025-04-28 10:11:34 +00:00)
Fixed errors and added more support
commit 7b8e5d4e7c
parent 47b8412695
@@ -1330,16 +1330,16 @@ RAG_EMBEDDING_BATCH_SIZE = PersistentConfig(
     ),
 )
 
-RAG_EMBEDDING_PASSAGE_PREFIX = PersistentConfig(
-    "RAG_EMBEDDING_PASSAGE_PREFIX",
-    "rag.embedding_passage_prefix",
-    os.environ.get("RAG_EMBEDDING_PASSAGE_PREFIX", False),
-)
-
-RAG_EMBEDDING_QUERY_PREFIX = PersistentConfig(
-    "RAG_EMBEDDING_QUERY_PREFIX",
-    "rag.embedding_query_prefix",
-    os.environ.get("RAG_EMBEDDING_QUERY_PREFIX", False),
-)
+RAG_EMBEDDING_QUERY_PREFIX = (
+    os.environ.get("RAG_EMBEDDING_QUERY_PREFIX", None)
+)
+
+RAG_EMBEDDING_PASSAGE_PREFIX = (
+    os.environ.get("RAG_EMBEDDING_PASSAGE_PREFIX", None)
+)
+
+RAG_EMBEDDING_PREFIX_FIELD_NAME = (
+    os.environ.get("RAG_EMBEDDING_PREFIX_FIELD_NAME", "input_type")
+)
 
 RAG_RERANKING_MODEL = PersistentConfig(
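
Note on the config hunk above: the three prefix settings are now plain environment reads rather than PersistentConfig entries, with the prefixes defaulting to None and the field name defaulting to "input_type". A minimal sketch of how they might be set and read; the example values are illustrative, not defaults shipped by the commit.

    import os

    # Hypothetical environment (example values, not part of the commit):
    #   RAG_EMBEDDING_QUERY_PREFIX=query
    #   RAG_EMBEDDING_PASSAGE_PREFIX=passage
    #   RAG_EMBEDDING_PREFIX_FIELD_NAME=input_type
    query_prefix = os.environ.get("RAG_EMBEDDING_QUERY_PREFIX", None)
    passage_prefix = os.environ.get("RAG_EMBEDDING_PASSAGE_PREFIX", None)
    prefix_field = os.environ.get("RAG_EMBEDDING_PREFIX_FIELD_NAME", "input_type")

    # Unset prefixes stay None, which downstream code treats as "no prefix"
    # and leaves the embedding requests unchanged.
    print(query_prefix, passage_prefix, prefix_field)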
@@ -15,7 +15,11 @@ from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.utils.misc import get_last_user_message
 
 from open_webui.env import SRC_LOG_LEVELS, OFFLINE_MODE
-from open_webui.config import RAG_EMBEDDING_QUERY_PREFIX, RAG_EMBEDDING_PASSAGE_PREFIX
+from open_webui.config import (
+    RAG_EMBEDDING_QUERY_PREFIX,
+    RAG_EMBEDDING_PASSAGE_PREFIX,
+    RAG_EMBEDDING_PREFIX_FIELD_NAME
+)
 
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["RAG"])
 
@@ -265,7 +269,7 @@ def get_embedding_function(
                 embeddings.extend(func(query[i : i + embedding_batch_size], prefix))
             return embeddings
         else:
-            return func(query)
+            return func(query, prefix)
 
     return lambda query, prefix: generate_multiple(query, prefix, func)
 
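
With this hunk, the callable returned by get_embedding_function takes a (query, prefix) pair for single strings and lists alike. A small self-contained sketch of that calling convention; fake_embed is a hypothetical stand-in for the real provider call.

    from typing import List, Optional, Union

    def fake_embed(texts: Union[str, List[str]], prefix: Optional[str]) -> List[List[float]]:
        # Stand-in for a real provider call; returns dummy 2-d vectors.
        if isinstance(texts, str):
            texts = [texts]
        return [[0.0, 0.0] for _ in texts]

    def generate_multiple(query, prefix, func, embedding_batch_size=2):
        # Mirrors the batching logic in the hunk above.
        if isinstance(query, list):
            embeddings = []
            for i in range(0, len(query), embedding_batch_size):
                embeddings.extend(func(query[i : i + embedding_batch_size], prefix))
            return embeddings
        else:
            return func(query, prefix)

    embedding_function = lambda query, prefix: generate_multiple(query, prefix, fake_embed)

    print(embedding_function("single query", "query"))    # one string -> func(query, prefix)
    print(embedding_function(["a", "b", "c"], "passage"))  # list -> batched calls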
@@ -421,7 +425,7 @@ def generate_openai_batch_embeddings(
                 "Content-Type": "application/json",
                 "Authorization": f"Bearer {key}",
             },
-            json={"input": texts, "model": model} if not prefix else {"input": texts, "model": model, "prefix": prefix},
+            json={"input": texts, "model": model} if not prefix else {"input": texts, "model": model, RAG_EMBEDDING_PREFIX_FIELD_NAME: prefix},
         )
         r.raise_for_status()
         data = r.json()
@@ -444,7 +448,7 @@ def generate_ollama_batch_embeddings(
                 "Content-Type": "application/json",
                 "Authorization": f"Bearer {key}",
             },
-            json={"input": texts, "model": model} if not prefix else {"input": texts, "model": model, "prefix": prefix},
+            json={"input": texts, "model": model} if not prefix else {"input": texts, "model": model, RAG_EMBEDDING_PREFIX_FIELD_NAME: prefix},
         )
         r.raise_for_status()
         data = r.json()
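
Both the OpenAI and Ollama helpers now key the prefix under RAG_EMBEDDING_PREFIX_FIELD_NAME instead of a hard-coded "prefix" key. A sketch of the two payload shapes the conditional produces, assuming the default field name "input_type" from the config hunk; the model name is a placeholder.

    # Illustrative sketch of the request bodies built by the conditional above.
    RAG_EMBEDDING_PREFIX_FIELD_NAME = "input_type"  # assumed default from the config hunk

    def build_payload(texts, model, prefix=None):
        if not prefix:
            # No prefix configured: the request body keeps its previous shape.
            return {"input": texts, "model": model}
        # Prefix configured: it travels under the configurable field name.
        return {"input": texts, "model": model, RAG_EMBEDDING_PREFIX_FIELD_NAME: prefix}

    print(build_payload(["hello world"], "my-embedding-model"))
    print(build_payload(["hello world"], "my-embedding-model", prefix="query"))
    # -> {'input': ['hello world'], 'model': 'my-embedding-model'}
    # -> {'input': ['hello world'], 'model': 'my-embedding-model', 'input_type': 'query'}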
@@ -70,7 +70,6 @@ from open_webui.utils.misc import (
 )
 from open_webui.utils.auth import get_admin_user, get_verified_user
 
-
 from open_webui.config import (
     ENV,
     RAG_EMBEDDING_MODEL_AUTO_UPDATE,
@@ -79,7 +78,8 @@ from open_webui.config import (
     RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
     UPLOAD_DIR,
     DEFAULT_LOCALE,
-    RAG_EMBEDDING_PASSAGE_PREFIX
+    RAG_EMBEDDING_PASSAGE_PREFIX,
+    RAG_EMBEDDING_QUERY_PREFIX
 )
 from open_webui.env import (
     SRC_LOG_LEVELS,
@@ -1319,7 +1319,7 @@ def query_doc_handler(
         else:
             return query_doc(
                 collection_name=form_data.collection_name,
-                query_embedding=request.app.state.EMBEDDING_FUNCTION(form_data.query),
+                query_embedding=request.app.state.EMBEDDING_FUNCTION(form_data.query, RAG_EMBEDDING_QUERY_PREFIX),
                 k=form_data.k if form_data.k else request.app.state.config.TOP_K,
             )
     except Exception as e:
@@ -1438,7 +1438,7 @@ if ENV == "dev":
 
     @router.get("/ef/{text}")
     async def get_embeddings(request: Request, text: Optional[str] = "Hello World!"):
-        return {"result": request.app.state.EMBEDDING_FUNCTION(text)}
+        return {"result": request.app.state.EMBEDDING_FUNCTION(text, RAG_EMBEDDING_QUERY_PREFIX)}
 
 
 class BatchProcessFilesForm(BaseModel):
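
Taken together, the call sites above embed user queries with RAG_EMBEDDING_QUERY_PREFIX, while RAG_EMBEDDING_PASSAGE_PREFIX is presumably applied when documents are embedded at ingestion time (not shown in this diff). A hedged illustration of that split; embed() is a placeholder for request.app.state.EMBEDDING_FUNCTION and the prefix values are examples, not defaults.

    # Hedged illustration of the query-vs-passage split implied by the two prefixes.
    RAG_EMBEDDING_QUERY_PREFIX = "query"
    RAG_EMBEDDING_PASSAGE_PREFIX = "passage"

    def embed(texts, prefix):
        # Placeholder for the app-state embedding function; returns dummy vectors.
        items = texts if isinstance(texts, list) else [texts]
        return [[0.1, 0.2] for _ in items]

    # Search time: user queries use the query prefix (query_doc_handler, /ef/{text}).
    query_vec = embed("how do I configure embeddings?", RAG_EMBEDDING_QUERY_PREFIX)

    # Index time: stored chunks would presumably use the passage prefix instead.
    chunk_vecs = embed(["chunk one", "chunk two"], RAG_EMBEDDING_PASSAGE_PREFIX)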