Mirror of https://github.com/open-webui/open-webui, synced 2025-05-21 13:36:35 +00:00

Merge pull request #11507 from OrenZhang/feat_ot

feat(trace): opentelemetry instrument

Commit adfa67d9d2
backend/open_webui/config.py
@@ -1580,7 +1580,9 @@ QDRANT_API_KEY = os.environ.get("QDRANT_API_KEY", None)
 # OpenSearch
 OPENSEARCH_URI = os.environ.get("OPENSEARCH_URI", "https://localhost:9200")
 OPENSEARCH_SSL = os.environ.get("OPENSEARCH_SSL", "true").lower() == "true"
-OPENSEARCH_CERT_VERIFY = os.environ.get("OPENSEARCH_CERT_VERIFY", "false").lower() == "true"
+OPENSEARCH_CERT_VERIFY = (
+    os.environ.get("OPENSEARCH_CERT_VERIFY", "false").lower() == "true"
+)
 OPENSEARCH_USERNAME = os.environ.get("OPENSEARCH_USERNAME", None)
 OPENSEARCH_PASSWORD = os.environ.get("OPENSEARCH_PASSWORD", None)
 
backend/open_webui/env.py
@@ -105,7 +105,6 @@ for source in log_sources:
 
 log.setLevel(SRC_LOG_LEVELS["CONFIG"])
 
-
 WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
 if WEBUI_NAME != "Open WebUI":
     WEBUI_NAME += " (Open WebUI)"
@@ -130,7 +129,6 @@ else:
     except Exception:
         PACKAGE_DATA = {"version": "0.0.0"}
 
-
 VERSION = PACKAGE_DATA["version"]
 
 
@@ -161,7 +159,6 @@ try:
 except Exception:
     changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
 
-
 # Convert markdown content to HTML
 html_content = markdown.markdown(changelog_content)
 
@@ -192,7 +189,6 @@ for version in soup.find_all("h2"):
 
     changelog_json[version_number] = version_data
 
-
 CHANGELOG = changelog_json
 
 ####################################
@@ -209,7 +205,6 @@ ENABLE_FORWARD_USER_INFO_HEADERS = (
     os.environ.get("ENABLE_FORWARD_USER_INFO_HEADERS", "False").lower() == "true"
 )
 
-
 ####################################
 # WEBUI_BUILD_HASH
 ####################################
@@ -244,7 +239,6 @@ if FROM_INIT_PY:
 
     DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data"))
 
-
 STATIC_DIR = Path(os.getenv("STATIC_DIR", OPEN_WEBUI_DIR / "static"))
 
 FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts"))
@@ -256,7 +250,6 @@ if FROM_INIT_PY:
         os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend")
     ).resolve()
 
-
 ####################################
 # Database
 ####################################
@@ -321,7 +314,6 @@ RESET_CONFIG_ON_START = (
     os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
 )
 
-
 ENABLE_REALTIME_CHAT_SAVE = (
     os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "False").lower() == "true"
 )
@@ -402,7 +394,6 @@ AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = os.environ.get(
     os.environ.get("AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST", ""),
 )
 
-
 if AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST == "":
     AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = None
 else:
@@ -411,7 +402,6 @@ else:
     except Exception:
         AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = 5
 
-
 ####################################
 # OFFLINE_MODE
 ####################################
@@ -442,3 +432,19 @@ AUDIT_EXCLUDED_PATHS = os.getenv("AUDIT_EXCLUDED_PATHS", "/chats,/chat,/folders"
 )
 AUDIT_EXCLUDED_PATHS = [path.strip() for path in AUDIT_EXCLUDED_PATHS]
 AUDIT_EXCLUDED_PATHS = [path.lstrip("/") for path in AUDIT_EXCLUDED_PATHS]
+
+####################################
+# OPENTELEMETRY
+####################################
+
+ENABLE_OTEL = os.environ.get("ENABLE_OTEL", "False").lower() == "true"
+OTEL_EXPORTER_OTLP_ENDPOINT = os.environ.get(
+    "OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317"
+)
+OTEL_SERVICE_NAME = os.environ.get("OTEL_SERVICE_NAME", "open-webui")
+OTEL_RESOURCE_ATTRIBUTES = os.environ.get(
+    "OTEL_RESOURCE_ATTRIBUTES", ""
+)  # e.g. key1=val1,key2=val2
+OTEL_TRACES_SAMPLER = os.environ.get(
+    "OTEL_TRACES_SAMPLER", "parentbased_always_on"
+).lower()
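Illustrative sketch (not part of this diff): OTEL_RESOURCE_ATTRIBUTES uses the standard comma-separated key=value format mentioned in the comment above; the attribute names below are made-up examples.

# Parsing the key1=val1,key2=val2 format into a dict of resource attributes.
raw = "service.namespace=openwebui,deployment.environment=dev"  # hypothetical value
attributes = dict(pair.split("=", 1) for pair in raw.split(",") if "=" in pair)
print(attributes)  # {'service.namespace': 'openwebui', 'deployment.environment': 'dev'}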
backend/open_webui/main.py
@@ -84,7 +84,7 @@ from open_webui.routers.retrieval import (
     get_rf,
 )
 
-from open_webui.internal.db import Session
+from open_webui.internal.db import Session, engine
 
 from open_webui.models.functions import Functions
 from open_webui.models.models import Models
@@ -330,6 +330,7 @@ from open_webui.env import (
     BYPASS_MODEL_ACCESS_CONTROL,
     RESET_CONFIG_ON_START,
     OFFLINE_MODE,
+    ENABLE_OTEL,
 )
 
 
@@ -356,7 +357,7 @@ from open_webui.utils.oauth import OAuthManager
 from open_webui.utils.security_headers import SecurityHeadersMiddleware
 
 from open_webui.tasks import stop_task, list_tasks  # Import from tasks.py
-
+from open_webui.utils.telemetry.setup import setup as setup_opentelemetry
 
 if SAFE_MODE:
     print("SAFE MODE ENABLED")
@@ -426,6 +427,17 @@ app.state.config = AppConfig(redis_url=REDIS_URL)
 app.state.WEBUI_NAME = WEBUI_NAME
 app.state.LICENSE_METADATA = None
 
+
+########################################
+#
+# OPENTELEMETRY
+#
+########################################
+
+if ENABLE_OTEL:
+    setup_opentelemetry(app=app, db_engine=engine)
+
+
 ########################################
 #
 # OLLAMA
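Minimal smoke-test sketch (not part of the diff): once setup_opentelemetry() has run at startup and an OTLP collector is reachable at OTEL_EXPORTER_OTLP_ENDPOINT, spans produced through the global tracer provider should be exported. The tracer and attribute names below are hypothetical.

from opentelemetry import trace

tracer = trace.get_tracer("open-webui.smoke-test")  # hypothetical tracer name
with tracer.start_as_current_span("startup-check") as span:
    span.set_attribute("component", "smoke-test")  # shipped via the registered span processor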
backend/open_webui/retrieval/loaders/tavily.py
@@ -9,6 +9,7 @@ from open_webui.env import SRC_LOG_LEVELS
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["RAG"])
 
+
 class TavilyLoader(BaseLoader):
     """Extract web page content from URLs using Tavily Extract API.
 
@@ -21,6 +22,7 @@ class TavilyLoader(BaseLoader):
         extract_depth: Depth of extraction, either "basic" or "advanced".
         continue_on_failure: Whether to continue if extraction of a URL fails.
     """
+
     def __init__(
         self,
         urls: Union[str, List[str]],
@@ -58,20 +60,13 @@ class TavilyLoader(BaseLoader):
             try:
                 headers = {
                     "Content-Type": "application/json",
-                    "Authorization": f"Bearer {self.api_key}"
+                    "Authorization": f"Bearer {self.api_key}",
                 }
                 # Use string for single URL, array for multiple URLs
                 urls_param = batch_urls[0] if len(batch_urls) == 1 else batch_urls
-                payload = {
-                    "urls": urls_param,
-                    "extract_depth": self.extract_depth
-                }
+                payload = {"urls": urls_param, "extract_depth": self.extract_depth}
                 # Make the API call
-                response = requests.post(
-                    self.api_url,
-                    headers=headers,
-                    json=payload
-                )
+                response = requests.post(self.api_url, headers=headers, json=payload)
                 response.raise_for_status()
                 response_data = response.json()
                 # Process successful results
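Illustrative sketch (not part of the diff): the collapsed payload built above is unchanged in behavior; URL and depth values below are made up.

batch_urls = ["https://example.com/a"]
extract_depth = "basic"
# Use string for single URL, array for multiple URLs (same rule as in the loader)
urls_param = batch_urls[0] if len(batch_urls) == 1 else batch_urls
payload = {"urls": urls_param, "extract_depth": extract_depth}
print(payload)  # {'urls': 'https://example.com/a', 'extract_depth': 'basic'}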
backend/open_webui/retrieval/vector/dbs/opensearch.py
@@ -56,16 +56,15 @@ class OpenSearchClient:
             metadatas.append(hit["_source"].get("metadata"))
 
         return SearchResult(
-            ids=[ids], distances=[distances], documents=[documents], metadatas=[metadatas]
+            ids=[ids],
+            distances=[distances],
+            documents=[documents],
+            metadatas=[metadatas],
         )
 
     def _create_index(self, collection_name: str, dimension: int):
         body = {
-            "settings": {
-                "index": {
-                    "knn": True
-                }
-            },
+            "settings": {"index": {"knn": True}},
             "mappings": {
                 "properties": {
                     "id": {"type": "keyword"},
@@ -81,13 +80,13 @@ class OpenSearchClient:
                             "parameters": {
                                 "ef_construction": 128,
                                 "m": 16,
-                            }
+                            },
                         },
                     },
                     "text": {"type": "text"},
                     "metadata": {"type": "object"},
                 }
-            }
+            },
         }
         self.client.indices.create(
             index=self._get_index_name(collection_name), body=body
@@ -100,9 +99,7 @@ class OpenSearchClient:
     def has_collection(self, collection_name: str) -> bool:
         # has_collection here means has index.
        # We are simply adapting to the norms of the other DBs.
-        return self.client.indices.exists(
-            index=self._get_index_name(collection_name)
-        )
+        return self.client.indices.exists(index=self._get_index_name(collection_name))
 
     def delete_collection(self, collection_name: str):
         # delete_collection here means delete index.
@@ -121,14 +118,12 @@ class OpenSearchClient:
             "_source": ["text", "metadata"],
             "query": {
                 "script_score": {
-                    "query": {
-                        "match_all": {}
-                    },
+                    "query": {"match_all": {}},
                     "script": {
                         "source": "cosineSimilarity(params.query_value, doc[params.field]) + 1.0",
                         "params": {
                             "field": "vector",
-                            "query_value": vectors[0]
+                            "query_value": vectors[0],
                         },  # Assuming single query vector
                     },
                 }
@@ -136,8 +131,7 @@ class OpenSearchClient:
         }
 
         result = self.client.search(
-            index=self._get_index_name(collection_name),
-            body=query
+            index=self._get_index_name(collection_name), body=query
         )
 
         return self._result_to_search_result(result)
@@ -152,20 +146,14 @@ class OpenSearchClient:
             return None
 
         query_body = {
-            "query": {
-                "bool": {
-                    "filter": []
-                }
-            },
+            "query": {"bool": {"filter": []}},
            "_source": ["text", "metadata"],
         }
 
         for field, value in filter.items():
-            query_body["query"]["bool"]["filter"].append({
-                "match": {
-                    "metadata." + str(field): value
-                }
-            })
+            query_body["query"]["bool"]["filter"].append(
+                {"match": {"metadata." + str(field): value}}
+            )
 
         size = limit if limit else 10
 
@@ -236,7 +224,12 @@ class OpenSearchClient:
         ]
         bulk(self.client, actions)
 
-    def delete(self, collection_name: str, ids: Optional[list[str]] = None, filter: Optional[dict] = None):
+    def delete(
+        self,
+        collection_name: str,
+        ids: Optional[list[str]] = None,
+        filter: Optional[dict] = None,
+    ):
         if ids:
             actions = [
                 {
@@ -249,19 +242,15 @@ class OpenSearchClient:
             bulk(self.client, actions)
         elif filter:
             query_body = {
-                "query": {
-                    "bool": {
-                        "filter": []
-                    }
-                },
+                "query": {"bool": {"filter": []}},
             }
             for field, value in filter.items():
-                query_body["query"]["bool"]["filter"].append({
-                    "match": {
-                        "metadata." + str(field): value
-                    }
-                })
-            self.client.delete_by_query(index=self._get_index_name(collection_name), body=query_body)
+                query_body["query"]["bool"]["filter"].append(
+                    {"match": {"metadata." + str(field): value}}
+                )
+            self.client.delete_by_query(
+                index=self._get_index_name(collection_name), body=query_body
+            )
 
     def reset(self):
         indices = self.client.indices.get(index=f"{self.index_prefix}_*")
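Illustrative sketch (not part of the diff): the filter handling above builds the same OpenSearch body before and after the reformatting; the field and value below are made up.

filter = {"name": "docs"}  # hypothetical metadata filter
query_body = {"query": {"bool": {"filter": []}}, "_source": ["text", "metadata"]}
for field, value in filter.items():
    query_body["query"]["bool"]["filter"].append(
        {"match": {"metadata." + str(field): value}}
    )
print(query_body["query"]["bool"]["filter"])  # [{'match': {'metadata.name': 'docs'}}]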
backend/open_webui/retrieval/web/utils.py
@@ -477,7 +477,6 @@ class SafePlaywrightURLLoader(PlaywrightURLLoader, RateLimitMixin, URLProcessingMixin):
             await browser.close()
 
 
-
 class SafeWebBaseLoader(WebBaseLoader):
     """WebBaseLoader with enhanced error handling for URLs."""
 
backend/open_webui/utils/telemetry/__init__.py (new file, 0 lines)

backend/open_webui/utils/telemetry/constants.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from opentelemetry.semconv.trace import SpanAttributes as _SpanAttributes

# Span Tags
SPAN_DB_TYPE = "mysql"
SPAN_REDIS_TYPE = "redis"
SPAN_DURATION = "duration"
SPAN_SQL_STR = "sql"
SPAN_SQL_EXPLAIN = "explain"
SPAN_ERROR_TYPE = "error"


class SpanAttributes(_SpanAttributes):
    """
    Span Attributes
    """

    DB_INSTANCE = "db.instance"
    DB_TYPE = "db.type"
    DB_IP = "db.ip"
    DB_PORT = "db.port"
    ERROR_KIND = "error.kind"
    ERROR_OBJECT = "error.object"
    ERROR_MESSAGE = "error.message"
    RESULT_CODE = "result.code"
    RESULT_MESSAGE = "result.message"
    RESULT_ERRORS = "result.errors"
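Quick sketch (not part of the diff): the subclass extends the semantic-convention constants from opentelemetry.semconv.trace rather than replacing them, so callers can use both the custom keys and the inherited ones. Assumes the open_webui package and opentelemetry-semantic-conventions are installed.

from open_webui.utils.telemetry.constants import SpanAttributes

print(SpanAttributes.DB_TYPE)      # "db.type"      (defined above)
print(SpanAttributes.HTTP_METHOD)  # "http.method"  (inherited from _SpanAttributes)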
backend/open_webui/utils/telemetry/exporters.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import threading

from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import BatchSpanProcessor


class LazyBatchSpanProcessor(BatchSpanProcessor):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.done = True
        with self.condition:
            self.condition.notify_all()
        self.worker_thread.join()
        self.done = False
        self.worker_thread = None

    def on_end(self, span: ReadableSpan) -> None:
        if self.worker_thread is None:
            self.worker_thread = threading.Thread(
                name=self.__class__.__name__, target=self.worker, daemon=True
            )
            self.worker_thread.start()
        super().on_end(span)

    def shutdown(self) -> None:
        self.done = True
        with self.condition:
            self.condition.notify_all()
        if self.worker_thread:
            self.worker_thread.join()
        self.span_exporter.shutdown()
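LazyBatchSpanProcessor stops the batch worker thread created by the parent class and only restarts it once the first span ends, so attaching it costs nothing while no spans are produced. A minimal usage sketch (not part of the diff), wiring it to a console exporter instead of OTLP:

from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter

from open_webui.utils.telemetry.exporters import LazyBatchSpanProcessor

provider = TracerProvider()
provider.add_span_processor(LazyBatchSpanProcessor(ConsoleSpanExporter()))

tracer = provider.get_tracer("demo")
with tracer.start_as_current_span("first-span"):
    pass  # on_end() runs here and lazily starts the export worker thread

provider.shutdown()  # drains the queue and stops the worker via shutdown()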
backend/open_webui/utils/telemetry/instrumentors.py (new file, 202 lines)
@@ -0,0 +1,202 @@
import logging
import traceback
from typing import Collection, Union

from aiohttp import (
    TraceRequestStartParams,
    TraceRequestEndParams,
    TraceRequestExceptionParams,
)
from chromadb.telemetry.opentelemetry.fastapi import instrument_fastapi
from fastapi import FastAPI
from opentelemetry.instrumentation.httpx import (
    HTTPXClientInstrumentor,
    RequestInfo,
    ResponseInfo,
)
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.logging import LoggingInstrumentor
from opentelemetry.instrumentation.redis import RedisInstrumentor
from opentelemetry.instrumentation.requests import RequestsInstrumentor
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor
from opentelemetry.trace import Span, StatusCode
from redis import Redis
from requests import PreparedRequest, Response
from sqlalchemy import Engine
from fastapi import status

from open_webui.utils.telemetry.constants import SPAN_REDIS_TYPE, SpanAttributes

from open_webui.env import SRC_LOG_LEVELS

logger = logging.getLogger(__name__)
logger.setLevel(SRC_LOG_LEVELS["MAIN"])


def requests_hook(span: Span, request: PreparedRequest):
    """
    Http Request Hook
    """

    span.update_name(f"{request.method} {request.url}")
    span.set_attributes(
        attributes={
            SpanAttributes.HTTP_URL: request.url,
            SpanAttributes.HTTP_METHOD: request.method,
        }
    )


def response_hook(span: Span, request: PreparedRequest, response: Response):
    """
    HTTP Response Hook
    """

    span.set_attributes(
        attributes={
            SpanAttributes.HTTP_STATUS_CODE: response.status_code,
        }
    )
    span.set_status(StatusCode.ERROR if response.status_code >= 400 else StatusCode.OK)


def redis_request_hook(span: Span, instance: Redis, args, kwargs):
    """
    Redis Request Hook
    """

    try:
        connection_kwargs: dict = instance.connection_pool.connection_kwargs
        host = connection_kwargs.get("host")
        port = connection_kwargs.get("port")
        db = connection_kwargs.get("db")
        span.set_attributes(
            {
                SpanAttributes.DB_INSTANCE: f"{host}/{db}",
                SpanAttributes.DB_NAME: f"{host}/{db}",
                SpanAttributes.DB_TYPE: SPAN_REDIS_TYPE,
                SpanAttributes.DB_PORT: port,
                SpanAttributes.DB_IP: host,
                SpanAttributes.DB_STATEMENT: " ".join([str(i) for i in args]),
                SpanAttributes.DB_OPERATION: str(args[0]),
            }
        )
    except Exception:  # pylint: disable=W0718
        logger.error(traceback.format_exc())


def httpx_request_hook(span: Span, request: RequestInfo):
    """
    HTTPX Request Hook
    """

    span.update_name(f"{request.method.decode()} {str(request.url)}")
    span.set_attributes(
        attributes={
            SpanAttributes.HTTP_URL: str(request.url),
            SpanAttributes.HTTP_METHOD: request.method.decode(),
        }
    )


def httpx_response_hook(span: Span, request: RequestInfo, response: ResponseInfo):
    """
    HTTPX Response Hook
    """

    span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, response.status_code)
    span.set_status(
        StatusCode.ERROR
        if response.status_code >= status.HTTP_400_BAD_REQUEST
        else StatusCode.OK
    )


async def httpx_async_request_hook(span: Span, request: RequestInfo):
    """
    Async Request Hook
    """

    httpx_request_hook(span, request)


async def httpx_async_response_hook(
    span: Span, request: RequestInfo, response: ResponseInfo
):
    """
    Async Response Hook
    """

    httpx_response_hook(span, request, response)


def aiohttp_request_hook(span: Span, request: TraceRequestStartParams):
    """
    Aiohttp Request Hook
    """

    span.update_name(f"{request.method} {str(request.url)}")
    span.set_attributes(
        attributes={
            SpanAttributes.HTTP_URL: str(request.url),
            SpanAttributes.HTTP_METHOD: request.method,
        }
    )


def aiohttp_response_hook(
    span: Span, response: Union[TraceRequestExceptionParams, TraceRequestEndParams]
):
    """
    Aiohttp Response Hook
    """

    if isinstance(response, TraceRequestEndParams):
        span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, response.response.status)
        span.set_status(
            StatusCode.ERROR
            if response.response.status >= status.HTTP_400_BAD_REQUEST
            else StatusCode.OK
        )
    elif isinstance(response, TraceRequestExceptionParams):
        span.set_status(StatusCode.ERROR)
        span.set_attribute(SpanAttributes.ERROR_MESSAGE, str(response.exception))


class Instrumentor(BaseInstrumentor):
    """
    Instrument OT
    """

    def __init__(self, app: FastAPI, db_engine: Engine):
        self.app = app
        self.db_engine = db_engine

    def instrumentation_dependencies(self) -> Collection[str]:
        return []

    def _instrument(self, **kwargs):
        instrument_fastapi(app=self.app)
        SQLAlchemyInstrumentor().instrument(engine=self.db_engine)
        RedisInstrumentor().instrument(request_hook=redis_request_hook)
        RequestsInstrumentor().instrument(
            request_hook=requests_hook, response_hook=response_hook
        )
        LoggingInstrumentor().instrument()
        HTTPXClientInstrumentor().instrument(
            request_hook=httpx_request_hook,
            response_hook=httpx_response_hook,
            async_request_hook=httpx_async_request_hook,
            async_response_hook=httpx_async_response_hook,
        )
        AioHttpClientInstrumentor().instrument(
            request_hook=aiohttp_request_hook,
            response_hook=aiohttp_response_hook,
        )

    def _uninstrument(self, **kwargs):
        if getattr(self, "instrumentors", None) is None:
            return
        for instrumentor in self.instrumentors:
            instrumentor.uninstrument()
backend/open_webui/utils/telemetry/setup.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from fastapi import FastAPI
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from sqlalchemy import Engine

from open_webui.utils.telemetry.exporters import LazyBatchSpanProcessor
from open_webui.utils.telemetry.instrumentors import Instrumentor
from open_webui.env import OTEL_SERVICE_NAME, OTEL_EXPORTER_OTLP_ENDPOINT


def setup(app: FastAPI, db_engine: Engine):
    # set up trace
    trace.set_tracer_provider(
        TracerProvider(
            resource=Resource.create(attributes={SERVICE_NAME: OTEL_SERVICE_NAME})
        )
    )
    # otlp export
    exporter = OTLPSpanExporter(endpoint=OTEL_EXPORTER_OTLP_ENDPOINT)
    trace.get_tracer_provider().add_span_processor(LazyBatchSpanProcessor(exporter))
    Instrumentor(app=app, db_engine=db_engine).instrument()
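Sketch of exercising setup() outside the app (not part of the diff; the SQLite URL is a placeholder), assuming an OTLP collector listens on OTEL_EXPORTER_OTLP_ENDPOINT:

from fastapi import FastAPI
from sqlalchemy import create_engine

from open_webui.utils.telemetry.setup import setup

app = FastAPI()
engine = create_engine("sqlite:///demo.db")  # placeholder engine for illustration

setup(app=app, db_engine=engine)  # installs tracer provider, OTLP exporter, instrumentors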
backend/requirements.txt
@@ -37,7 +37,7 @@ asgiref==3.8.1
 # AI libraries
 openai
 anthropic
-google-generativeai==0.7.2
+google-generativeai==0.8.4
 tiktoken
 
 langchain==0.3.19
@@ -118,3 +118,16 @@ ldap3==2.9.1
 
 ## Firecrawl
 firecrawl-py==1.12.0
+
+## Trace
+opentelemetry-api==1.30.0
+opentelemetry-sdk==1.30.0
+opentelemetry-exporter-otlp==1.30.0
+opentelemetry-instrumentation==0.51b0
+opentelemetry-instrumentation-fastapi==0.51b0
+opentelemetry-instrumentation-sqlalchemy==0.51b0
+opentelemetry-instrumentation-redis==0.51b0
+opentelemetry-instrumentation-requests==0.51b0
+opentelemetry-instrumentation-logging==0.51b0
+opentelemetry-instrumentation-httpx==0.51b0
+opentelemetry-instrumentation-aiohttp-client==0.51b0