Mirror of https://github.com/open-webui/open-webui (synced 2025-06-26 18:26:48 +00:00)

Commit 708fcdc6b7: Merge branch 'dev' of github.com:taylorwilsdon/open-webui into dev
@@ -73,8 +73,15 @@ def serve(
     os.environ["LD_LIBRARY_PATH"] = ":".join(LD_LIBRARY_PATH)

     import open_webui.main  # we need set environment variables before importing main
+    from open_webui.env import UVICORN_WORKERS  # Import the workers setting

-    uvicorn.run(open_webui.main.app, host=host, port=port, forwarded_allow_ips="*")
+    uvicorn.run(
+        open_webui.main.app,
+        host=host,
+        port=port,
+        forwarded_allow_ips="*",
+        workers=UVICORN_WORKERS
+    )


 @app.command()
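Note: a minimal standalone sketch of the same multi-worker launch, for reference only. uvicorn generally expects the application as an import string (e.g. "open_webui.main:app") when workers > 1, so each worker process can re-import the app; the host, port, and worker count below are placeholder values, not the project's defaults.

import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "open_webui.main:app",    # import string so each worker process can re-import the app
        host="0.0.0.0",           # placeholder
        port=8080,                # placeholder
        forwarded_allow_ips="*",
        workers=4,                # e.g. the value read from UVICORN_WORKERS
    )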
@@ -31,6 +31,7 @@ class ERROR_MESSAGES(str, Enum):
     USERNAME_TAKEN = (
         "Uh-oh! This username is already registered. Please choose another username."
     )
+    PASSWORD_TOO_LONG = "Uh-oh! The password you entered is too long. Please make sure your password is less than 72 bytes long."
     COMMAND_TAKEN = "Uh-oh! This command is already registered. Please choose another command string."
     FILE_EXISTS = "Uh-oh! This file is already registered. Please choose another file."

@@ -326,6 +326,20 @@ REDIS_URL = os.environ.get("REDIS_URL", "")
 REDIS_SENTINEL_HOSTS = os.environ.get("REDIS_SENTINEL_HOSTS", "")
 REDIS_SENTINEL_PORT = os.environ.get("REDIS_SENTINEL_PORT", "26379")

+####################################
+# UVICORN WORKERS
+####################################
+
+# Number of uvicorn worker processes for handling requests
+UVICORN_WORKERS = os.environ.get("UVICORN_WORKERS", "1")
+try:
+    UVICORN_WORKERS = int(UVICORN_WORKERS)
+    if UVICORN_WORKERS < 1:
+        UVICORN_WORKERS = 1
+except ValueError:
+    UVICORN_WORKERS = 1
+    log.info(f"Invalid UVICORN_WORKERS value, defaulting to {UVICORN_WORKERS}")
+
 ####################################
 # WEBUI_AUTH (Required for security)
 ####################################
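Note: the same parse-and-clamp pattern, factored into a small helper for illustration only; get_int_env is a hypothetical name, not part of the codebase.

import logging
import os

log = logging.getLogger(__name__)


def get_int_env(name: str, default: int, minimum: int = 1) -> int:
    # Read an integer environment variable, clamping to a sane minimum
    # and falling back to the default on malformed input.
    raw = os.environ.get(name, str(default))
    try:
        value = int(raw)
    except ValueError:
        log.info(f"Invalid {name} value {raw!r}, defaulting to {default}")
        return default
    return max(value, minimum)


UVICORN_WORKERS = get_int_env("UVICORN_WORKERS", default=1)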
@@ -77,6 +77,7 @@ def query_doc(
     collection_name: str, query_embedding: list[float], k: int, user: UserModel = None
 ):
     try:
+        log.debug(f"query_doc:doc {collection_name}")
         result = VECTOR_DB_CLIENT.search(
             collection_name=collection_name,
             vectors=[query_embedding],

@@ -94,6 +95,7 @@ def query_doc(

 def get_doc(collection_name: str, user: UserModel = None):
     try:
+        log.debug(f"get_doc:doc {collection_name}")
         result = VECTOR_DB_CLIENT.get(collection_name=collection_name)

         if result:

@@ -116,6 +118,7 @@ def query_doc_with_hybrid_search(
     r: float,
 ) -> dict:
     try:
+        log.debug(f"query_doc_with_hybrid_search:doc {collection_name}")
         bm25_retriever = BM25Retriever.from_texts(
             texts=collection_result.documents[0],
             metadatas=collection_result.metadatas[0],

@@ -168,6 +171,7 @@ def query_doc_with_hybrid_search(
         )
         return result
     except Exception as e:
+        log.exception(f"Error querying doc {collection_name} with hybrid search: {e}")
         raise e


@@ -257,6 +261,7 @@ def query_collection(
 ) -> dict:
     results = []
     for query in queries:
+        log.debug(f"query_collection:query {query}")
         query_embedding = embedding_function(query, prefix=RAG_EMBEDDING_QUERY_PREFIX)
         for collection_name in collection_names:
             if collection_name:

@@ -292,6 +297,7 @@ def query_collection_with_hybrid_search(
     collection_results = {}
     for collection_name in collection_names:
         try:
+            log.debug(f"query_collection_with_hybrid_search:VECTOR_DB_CLIENT.get:collection {collection_name}")
             collection_results[collection_name] = VECTOR_DB_CLIENT.get(
                 collection_name=collection_name
             )

@@ -613,6 +619,7 @@ def generate_openai_batch_embeddings(
     user: UserModel = None,
 ) -> Optional[list[list[float]]]:
     try:
+        log.debug(f"generate_openai_batch_embeddings:model {model} batch size: {len(texts)}")
         json_data = {"input": texts, "model": model}
         if isinstance(RAG_EMBEDDING_PREFIX_FIELD_NAME, str) and isinstance(prefix, str):
             json_data[RAG_EMBEDDING_PREFIX_FIELD_NAME] = prefix

@@ -655,6 +662,7 @@ def generate_ollama_batch_embeddings(
     user: UserModel = None,
 ) -> Optional[list[list[float]]]:
     try:
+        log.debug(f"generate_ollama_batch_embeddings:model {model} batch size: {len(texts)}")
         json_data = {"input": texts, "model": model}
         if isinstance(RAG_EMBEDDING_PREFIX_FIELD_NAME, str) and isinstance(prefix, str):
             json_data[RAG_EMBEDDING_PREFIX_FIELD_NAME] = prefix
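Note: the added log.debug() calls only appear once the retrieval logger runs at DEBUG. A minimal sketch, assuming the usual per-source {SOURCE}_LOG_LEVEL convention behind SRC_LOG_LEVELS; set these before the server process imports open_webui.main.

import os

os.environ["GLOBAL_LOG_LEVEL"] = "INFO"  # assumption: global default log level
os.environ["RAG_LOG_LEVEL"] = "DEBUG"    # assumption: per-source override for the retrieval module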
@@ -3,6 +3,7 @@ from typing import Optional

 from open_webui.retrieval.web.main import SearchResult, get_filtered_results
 from duckduckgo_search import DDGS
+from duckduckgo_search.exceptions import RatelimitException
 from open_webui.env import SRC_LOG_LEVELS

 log = logging.getLogger(__name__)

@@ -22,16 +23,15 @@ def search_duckduckgo(
         list[SearchResult]: A list of search results
     """
     # Use the DDGS context manager to create a DDGS object
+    search_results = []
     with DDGS() as ddgs:
         # Use the ddgs.text() method to perform the search
-        ddgs_gen = ddgs.text(
-            query, safesearch="moderate", max_results=count, backend="api"
-        )
-        # Check if there are search results
-        if ddgs_gen:
-            # Convert the search results into a list
-            search_results = [r for r in ddgs_gen]
+        try:
+            search_results = ddgs.text(
+                query, safesearch="moderate", max_results=count, backend="lite"
+            )
+        except RatelimitException as e:
+            log.error(f"RatelimitException: {e}")

     if filter_list:
         search_results = get_filtered_results(search_results, filter_list)
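Note: a caller-side sketch of the same rate-limit handling with a simple exponential backoff added on top; the retry loop and the function name are illustrative, not part of this commit.

import logging
import time

from duckduckgo_search import DDGS
from duckduckgo_search.exceptions import RatelimitException

log = logging.getLogger(__name__)


def ddg_text_with_backoff(query: str, count: int, retries: int = 3) -> list:
    # Query DuckDuckGo, backing off briefly whenever the API rate-limits us.
    for attempt in range(retries):
        try:
            with DDGS() as ddgs:
                return ddgs.text(
                    query, safesearch="moderate", max_results=count, backend="lite"
                )
        except RatelimitException as e:
            log.error(f"RatelimitException: {e}")
            time.sleep(2**attempt)  # wait 1s, 2s, 4s, ...
    return []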
@@ -454,6 +454,13 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
     # Disable signup after the first user is created
     request.app.state.config.ENABLE_SIGNUP = False

+    # The password passed to bcrypt must be 72 bytes or fewer. If it is longer, it will be truncated before hashing.
+    if len(form_data.password.encode("utf-8")) > 72:
+        raise HTTPException(
+            status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.PASSWORD_TOO_LONG,
+        )
+
     hashed = get_password_hash(form_data.password)
     user = Auths.insert_new_auth(
         form_data.email.lower(),
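Note: the check counts bytes rather than characters because bcrypt truncates its input at 72 bytes, and multibyte UTF-8 characters make the two differ. A quick illustration:

password = "pässwörd" * 10                # 80 characters, but not 80 bytes
print(len(password))                      # 80 characters
print(len(password.encode("utf-8")))      # 100 bytes -- this is the limit bcrypt cares about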
@@ -98,7 +98,7 @@ pytube==15.0.0

 extract_msg
 pydub
-duckduckgo-search~=7.3.2
+duckduckgo-search~=7.5.5

 ## Google Drive
 google-api-python-client

@@ -127,11 +127,11 @@ firecrawl-py==1.12.0
 opentelemetry-api==1.31.1
 opentelemetry-sdk==1.31.1
 opentelemetry-exporter-otlp==1.31.1
-opentelemetry-instrumentation==0.52b0
-opentelemetry-instrumentation-fastapi==0.52b0
-opentelemetry-instrumentation-sqlalchemy==0.52b0
-opentelemetry-instrumentation-redis==0.52b0
-opentelemetry-instrumentation-requests==0.52b0
-opentelemetry-instrumentation-logging==0.52b0
-opentelemetry-instrumentation-httpx==0.52b0
-opentelemetry-instrumentation-aiohttp-client==0.52b0
+opentelemetry-instrumentation==0.52b1
+opentelemetry-instrumentation-fastapi==0.52b1
+opentelemetry-instrumentation-sqlalchemy==0.52b1
+opentelemetry-instrumentation-redis==0.52b1
+opentelemetry-instrumentation-requests==0.52b1
+opentelemetry-instrumentation-logging==0.52b1
+opentelemetry-instrumentation-httpx==0.52b1
+opentelemetry-instrumentation-aiohttp-client==0.52b1
@@ -65,4 +65,4 @@ if [ -n "$SPACE_ID" ]; then
     export WEBUI_URL=${SPACE_HOST}
 fi

-WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn open_webui.main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*'
+WEBUI_SECRET_KEY="$WEBUI_SECRET_KEY" exec uvicorn open_webui.main:app --host "$HOST" --port "$PORT" --forwarded-allow-ips '*' --workers "${UVICORN_WORKERS:-1}"
@@ -41,5 +41,6 @@ IF "%WEBUI_SECRET_KEY%%WEBUI_JWT_SECRET_KEY%" == " " (

 :: Execute uvicorn
 SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%"
-uvicorn open_webui.main:app --host "%HOST%" --port "%PORT%" --forwarded-allow-ips '*' --ws auto
+IF "%UVICORN_WORKERS%"=="" SET UVICORN_WORKERS=1
+uvicorn open_webui.main:app --host "%HOST%" --port "%PORT%" --forwarded-allow-ips '*' --workers %UVICORN_WORKERS% --ws auto
 :: For ssl user uvicorn open_webui.main:app --host "%HOST%" --port "%PORT%" --forwarded-allow-ips '*' --ssl-keyfile "key.pem" --ssl-certfile "cert.pem" --ws auto
@@ -104,7 +104,7 @@ dependencies = [

     "extract_msg",
     "pydub",
-    "duckduckgo-search~=7.3.2",
+    "duckduckgo-search~=7.5.5",

     "google-api-python-client",
     "google-auth-httplib2",
@@ -15,7 +15,7 @@

     let selectedTools = [];

-    $: selectedTools = $tools.filter((tool) => selectedToolIds.includes(tool.id));
+    $: selectedTools = ($tools ?? []).filter((tool) => selectedToolIds.includes(tool.id));

     const i18n = getContext('i18n');
 </script>