Mirror of https://github.com/open-webui/open-webui
Merge pull request #11507 from OrenZhang/feat_ot
feat(trace): opentelemetry instrument

Commit: adfa67d9d2
@@ -1580,7 +1580,9 @@ QDRANT_API_KEY = os.environ.get("QDRANT_API_KEY", None)
 # OpenSearch
 OPENSEARCH_URI = os.environ.get("OPENSEARCH_URI", "https://localhost:9200")
 OPENSEARCH_SSL = os.environ.get("OPENSEARCH_SSL", "true").lower() == "true"
-OPENSEARCH_CERT_VERIFY = os.environ.get("OPENSEARCH_CERT_VERIFY", "false").lower() == "true"
+OPENSEARCH_CERT_VERIFY = (
+    os.environ.get("OPENSEARCH_CERT_VERIFY", "false").lower() == "true"
+)
 OPENSEARCH_USERNAME = os.environ.get("OPENSEARCH_USERNAME", None)
 OPENSEARCH_PASSWORD = os.environ.get("OPENSEARCH_PASSWORD", None)
@@ -105,7 +105,6 @@ for source in log_sources:
 
     log.setLevel(SRC_LOG_LEVELS["CONFIG"])
 
-
 WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
 if WEBUI_NAME != "Open WebUI":
     WEBUI_NAME += " (Open WebUI)"
@@ -130,7 +129,6 @@ else:
 except Exception:
     PACKAGE_DATA = {"version": "0.0.0"}
 
-
 VERSION = PACKAGE_DATA["version"]
 
 
@@ -161,7 +159,6 @@ try:
 except Exception:
     changelog_content = (pkgutil.get_data("open_webui", "CHANGELOG.md") or b"").decode()
 
-
 # Convert markdown content to HTML
 html_content = markdown.markdown(changelog_content)
 
@@ -192,7 +189,6 @@ for version in soup.find_all("h2"):
 
     changelog_json[version_number] = version_data
 
-
 CHANGELOG = changelog_json
 
 ####################################
@@ -209,7 +205,6 @@ ENABLE_FORWARD_USER_INFO_HEADERS = (
     os.environ.get("ENABLE_FORWARD_USER_INFO_HEADERS", "False").lower() == "true"
 )
 
-
 ####################################
 # WEBUI_BUILD_HASH
 ####################################
@@ -244,7 +239,6 @@ if FROM_INIT_PY:
 
 DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data"))
 
-
 STATIC_DIR = Path(os.getenv("STATIC_DIR", OPEN_WEBUI_DIR / "static"))
 
 FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts"))
@@ -256,7 +250,6 @@ if FROM_INIT_PY:
         os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend")
     ).resolve()
 
-
 ####################################
 # Database
 ####################################
@@ -321,7 +314,6 @@ RESET_CONFIG_ON_START = (
     os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
 )
 
-
 ENABLE_REALTIME_CHAT_SAVE = (
     os.environ.get("ENABLE_REALTIME_CHAT_SAVE", "False").lower() == "true"
 )
@@ -402,7 +394,6 @@ AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = os.environ.get(
     os.environ.get("AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST", ""),
 )
 
-
 if AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST == "":
     AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = None
 else:
@@ -411,7 +402,6 @@ else:
     except Exception:
         AIOHTTP_CLIENT_TIMEOUT_MODEL_LIST = 5
 
-
 ####################################
 # OFFLINE_MODE
 ####################################
@@ -442,3 +432,19 @@ AUDIT_EXCLUDED_PATHS = os.getenv("AUDIT_EXCLUDED_PATHS", "/chats,/chat,/folders"
 )
 AUDIT_EXCLUDED_PATHS = [path.strip() for path in AUDIT_EXCLUDED_PATHS]
 AUDIT_EXCLUDED_PATHS = [path.lstrip("/") for path in AUDIT_EXCLUDED_PATHS]
+
+####################################
+# OPENTELEMETRY
+####################################
+
+ENABLE_OTEL = os.environ.get("ENABLE_OTEL", "False").lower() == "true"
+OTEL_EXPORTER_OTLP_ENDPOINT = os.environ.get(
+    "OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317"
+)
+OTEL_SERVICE_NAME = os.environ.get("OTEL_SERVICE_NAME", "open-webui")
+OTEL_RESOURCE_ATTRIBUTES = os.environ.get(
+    "OTEL_RESOURCE_ATTRIBUTES", ""
+)  # e.g. key1=val1,key2=val2
+OTEL_TRACES_SAMPLER = os.environ.get(
+    "OTEL_TRACES_SAMPLER", "parentbased_always_on"
+).lower()
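Note that OTEL_RESOURCE_ATTRIBUTES is kept as a raw "key1=val1,key2=val2" string here, and the diff does not show where it is parsed (the OpenTelemetry SDK also reads this variable from the environment on its own in Resource.create()). A minimal sketch of one way to turn such a string into a dict; the helper name is illustrative, not from this PR:

def parse_resource_attributes(raw: str) -> dict:
    # Split "key1=val1,key2=val2" into {"key1": "val1", "key2": "val2"};
    # entries without "=" are silently ignored.
    pairs = (item.split("=", 1) for item in raw.split(",") if "=" in item)
    return {key.strip(): value.strip() for key, value in pairs}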
@@ -84,7 +84,7 @@ from open_webui.routers.retrieval import (
     get_rf,
 )
 
-from open_webui.internal.db import Session
+from open_webui.internal.db import Session, engine
 
 from open_webui.models.functions import Functions
 from open_webui.models.models import Models
@@ -330,6 +330,7 @@ from open_webui.env import (
     BYPASS_MODEL_ACCESS_CONTROL,
     RESET_CONFIG_ON_START,
     OFFLINE_MODE,
+    ENABLE_OTEL,
 )
 
 
@@ -356,7 +357,7 @@ from open_webui.utils.oauth import OAuthManager
 from open_webui.utils.security_headers import SecurityHeadersMiddleware
 
 from open_webui.tasks import stop_task, list_tasks  # Import from tasks.py
-
+from open_webui.utils.telemetry.setup import setup as setup_opentelemetry
 
 if SAFE_MODE:
     print("SAFE MODE ENABLED")
@@ -426,6 +427,17 @@ app.state.config = AppConfig(redis_url=REDIS_URL)
 app.state.WEBUI_NAME = WEBUI_NAME
 app.state.LICENSE_METADATA = None
 
+
+########################################
+#
+# OPENTELEMETRY
+#
+########################################
+
+if ENABLE_OTEL:
+    setup_opentelemetry(app=app, db_engine=engine)
+
+
 ########################################
 #
 # OLLAMA
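Taken together, these hunks make tracing opt-in: the whole telemetry stack is initialized with a single call only when ENABLE_OTEL is set. A minimal standalone sketch of the same wiring, assuming the modules added by this PR are importable; the in-memory SQLite engine is a throwaway stand-in, open-webui passes its real SQLAlchemy engine:

from fastapi import FastAPI
from sqlalchemy import create_engine

from open_webui.env import ENABLE_OTEL
from open_webui.utils.telemetry.setup import setup as setup_opentelemetry

app = FastAPI()
engine = create_engine("sqlite:///:memory:")  # stand-in engine for illustration

if ENABLE_OTEL:
    setup_opentelemetry(app=app, db_engine=engine)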
@@ -9,18 +9,20 @@ from open_webui.env import SRC_LOG_LEVELS
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["RAG"])
 
 
 class TavilyLoader(BaseLoader):
     """Extract web page content from URLs using Tavily Extract API.
+
     This is a LangChain document loader that uses Tavily's Extract API to
     retrieve content from web pages and return it as Document objects.
+
     Args:
         urls: URL or list of URLs to extract content from.
         api_key: The Tavily API key.
         extract_depth: Depth of extraction, either "basic" or "advanced".
         continue_on_failure: Whether to continue if extraction of a URL fails.
     """
 
     def __init__(
         self,
         urls: Union[str, List[str]],
@@ -29,13 +31,13 @@ class TavilyLoader(BaseLoader):
         continue_on_failure: bool = True,
     ) -> None:
         """Initialize Tavily Extract client.
 
         Args:
             urls: URL or list of URLs to extract content from.
             api_key: The Tavily API key.
             include_images: Whether to include images in the extraction.
             extract_depth: Depth of extraction, either "basic" or "advanced".
                 advanced extraction retrieves more data, including tables and
                 embedded content, with higher success but may increase latency.
                 basic costs 1 credit per 5 successful URL extractions,
                 advanced costs 2 credits per 5 successful URL extractions.
@@ -43,35 +45,28 @@ class TavilyLoader(BaseLoader):
         """
         if not urls:
             raise ValueError("At least one URL must be provided.")
 
         self.api_key = api_key
         self.urls = urls if isinstance(urls, list) else [urls]
         self.extract_depth = extract_depth
         self.continue_on_failure = continue_on_failure
         self.api_url = "https://api.tavily.com/extract"
 
     def lazy_load(self) -> Iterator[Document]:
         """Extract and yield documents from the URLs using Tavily Extract API."""
         batch_size = 20
         for i in range(0, len(self.urls), batch_size):
-            batch_urls = self.urls[i:i + batch_size]
+            batch_urls = self.urls[i : i + batch_size]
             try:
                 headers = {
                     "Content-Type": "application/json",
-                    "Authorization": f"Bearer {self.api_key}"
+                    "Authorization": f"Bearer {self.api_key}",
                 }
                 # Use string for single URL, array for multiple URLs
                 urls_param = batch_urls[0] if len(batch_urls) == 1 else batch_urls
-                payload = {
-                    "urls": urls_param,
-                    "extract_depth": self.extract_depth
-                }
+                payload = {"urls": urls_param, "extract_depth": self.extract_depth}
                 # Make the API call
-                response = requests.post(
-                    self.api_url,
-                    headers=headers,
-                    json=payload
-                )
+                response = requests.post(self.api_url, headers=headers, json=payload)
                 response.raise_for_status()
                 response_data = response.json()
                 # Process successful results
@@ -95,4 +90,4 @@ class TavilyLoader(BaseLoader):
                 if self.continue_on_failure:
                     log.error(f"Error extracting content from batch {batch_urls}: {e}")
                 else:
                     raise e
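For context, a hypothetical caller of the reformatted loader above; the import path and API key are placeholders, not taken from the diff. lazy_load() batches URLs twenty at a time internally:

from open_webui.retrieval.web.utils import TavilyLoader  # assumed import path

loader = TavilyLoader(
    urls=["https://example.com/a", "https://example.com/b"],
    api_key="tvly-...",  # placeholder
    extract_depth="basic",
    continue_on_failure=True,
)
for doc in loader.lazy_load():
    # Each yielded item is a LangChain Document with the extracted page text.
    print(len(doc.page_content))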
@@ -21,14 +21,14 @@ class OpenSearchClient:
             verify_certs=OPENSEARCH_CERT_VERIFY,
             http_auth=(OPENSEARCH_USERNAME, OPENSEARCH_PASSWORD),
         )
 
     def _get_index_name(self, collection_name: str) -> str:
         return f"{self.index_prefix}_{collection_name}"
 
     def _result_to_get_result(self, result) -> GetResult:
         if not result["hits"]["hits"]:
             return None
 
         ids = []
         documents = []
         metadatas = []
@@ -43,7 +43,7 @@ class OpenSearchClient:
     def _result_to_search_result(self, result) -> SearchResult:
         if not result["hits"]["hits"]:
             return None
 
         ids = []
         distances = []
         documents = []
@@ -56,16 +56,15 @@ class OpenSearchClient:
             metadatas.append(hit["_source"].get("metadata"))
 
         return SearchResult(
-            ids=[ids], distances=[distances], documents=[documents], metadatas=[metadatas]
+            ids=[ids],
+            distances=[distances],
+            documents=[documents],
+            metadatas=[metadatas],
         )
 
     def _create_index(self, collection_name: str, dimension: int):
         body = {
-            "settings": {
-                "index": {
-                    "knn": True
-                }
-            },
+            "settings": {"index": {"knn": True}},
             "mappings": {
                 "properties": {
                     "id": {"type": "keyword"},
@@ -81,13 +80,13 @@ class OpenSearchClient:
                             "parameters": {
                                 "ef_construction": 128,
                                 "m": 16,
-                            }
+                            },
                         },
                     },
                     "text": {"type": "text"},
                     "metadata": {"type": "object"},
                 }
-            }
+            },
         }
         self.client.indices.create(
             index=self._get_index_name(collection_name), body=body
@@ -100,9 +99,7 @@ class OpenSearchClient:
     def has_collection(self, collection_name: str) -> bool:
         # has_collection here means has index.
         # We are simply adapting to the norms of the other DBs.
-        return self.client.indices.exists(
-            index=self._get_index_name(collection_name)
-        )
+        return self.client.indices.exists(index=self._get_index_name(collection_name))
 
     def delete_collection(self, collection_name: str):
         # delete_collection here means delete index.
@@ -115,33 +112,30 @@ class OpenSearchClient:
         try:
             if not self.has_collection(collection_name):
                 return None
 
             query = {
                 "size": limit,
                 "_source": ["text", "metadata"],
                 "query": {
                     "script_score": {
-                        "query": {
-                            "match_all": {}
-                        },
+                        "query": {"match_all": {}},
                         "script": {
                             "source": "cosineSimilarity(params.query_value, doc[params.field]) + 1.0",
                             "params": {
                                 "field": "vector",
-                                "query_value": vectors[0]
+                                "query_value": vectors[0],
                             },  # Assuming single query vector
                         },
                     }
                 },
             }
 
             result = self.client.search(
-                index=self._get_index_name(collection_name),
-                body=query
+                index=self._get_index_name(collection_name), body=query
             )
 
             return self._result_to_search_result(result)
 
         except Exception as e:
             return None
 
@@ -152,20 +146,14 @@ class OpenSearchClient:
             return None
 
         query_body = {
-            "query": {
-                "bool": {
-                    "filter": []
-                }
-            },
+            "query": {"bool": {"filter": []}},
             "_source": ["text", "metadata"],
         }
 
         for field, value in filter.items():
-            query_body["query"]["bool"]["filter"].append({
-                "match": {
-                    "metadata." + str(field): value
-                }
-            })
+            query_body["query"]["bool"]["filter"].append(
+                {"match": {"metadata." + str(field): value}}
+            )
 
         size = limit if limit else 10
 
@@ -201,9 +189,9 @@ class OpenSearchClient:
         for batch in self._create_batches(items):
             actions = [
                 {
                     "_op_type": "index",
                     "_index": self._get_index_name(collection_name),
                     "_id": item["id"],
                     "_source": {
                         "vector": item["vector"],
                         "text": item["text"],
@@ -222,9 +210,9 @@ class OpenSearchClient:
         for batch in self._create_batches(items):
             actions = [
                 {
                     "_op_type": "update",
                     "_index": self._get_index_name(collection_name),
                     "_id": item["id"],
                     "doc": {
                         "vector": item["vector"],
                         "text": item["text"],
@@ -236,7 +224,12 @@ class OpenSearchClient:
             ]
             bulk(self.client, actions)
 
-    def delete(self, collection_name: str, ids: Optional[list[str]] = None, filter: Optional[dict] = None):
+    def delete(
+        self,
+        collection_name: str,
+        ids: Optional[list[str]] = None,
+        filter: Optional[dict] = None,
+    ):
         if ids:
             actions = [
                 {
@@ -249,20 +242,16 @@ class OpenSearchClient:
             bulk(self.client, actions)
         elif filter:
             query_body = {
-                "query": {
-                    "bool": {
-                        "filter": []
-                    }
-                },
+                "query": {"bool": {"filter": []}},
             }
             for field, value in filter.items():
-                query_body["query"]["bool"]["filter"].append({
-                    "match": {
-                        "metadata." + str(field): value
-                    }
-                })
-            self.client.delete_by_query(index=self._get_index_name(collection_name), body=query_body)
+                query_body["query"]["bool"]["filter"].append(
+                    {"match": {"metadata." + str(field): value}}
+                )
+            self.client.delete_by_query(
+                index=self._get_index_name(collection_name), body=query_body
+            )
 
     def reset(self):
         indices = self.client.indices.get(index=f"{self.index_prefix}_*")
         for index in indices:
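The delete() reflow above does not change behavior: ids still drive a bulk delete, while a filter builds one bool/match clause per metadata field and hands the query to delete_by_query. A hypothetical call, with client standing in for an OpenSearchClient instance and placeholder argument values:

# Remove every chunk whose metadata "source" field matches the given URL.
client.delete(
    collection_name="knowledge_base",  # placeholder collection
    filter={"source": "https://example.com/a"},
)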
@@ -136,18 +136,18 @@ class RateLimitMixin:
         self.last_request_time = datetime.now()
 
 
 class URLProcessingMixin:
     def _verify_ssl_cert(self, url: str) -> bool:
         """Verify SSL certificate for a URL."""
         return verify_ssl_cert(url)
 
     async def _safe_process_url(self, url: str) -> bool:
         """Perform safety checks before processing a URL."""
         if self.verify_ssl and not self._verify_ssl_cert(url):
             raise ValueError(f"SSL certificate verification failed for {url}")
         await self._wait_for_rate_limit()
         return True
 
     def _safe_process_url_sync(self, url: str) -> bool:
         """Synchronous version of safety checks."""
         if self.verify_ssl and not self._verify_ssl_cert(url):
@@ -286,7 +286,7 @@ class SafeTavilyLoader(BaseLoader, RateLimitMixin, URLProcessingMixin):
                 proxy["server"] = env_proxy_server
             else:
                 proxy = {"server": env_proxy_server}
 
         # Store parameters for creating TavilyLoader instances
         self.web_paths = web_paths if isinstance(web_paths, list) else [web_paths]
         self.api_key = api_key
@@ -295,7 +295,7 @@ class SafeTavilyLoader(BaseLoader, RateLimitMixin, URLProcessingMixin):
         self.verify_ssl = verify_ssl
         self.trust_env = trust_env
         self.proxy = proxy
 
         # Add rate limiting
         self.requests_per_second = requests_per_second
         self.last_request_time = None
@@ -329,7 +329,7 @@ class SafeTavilyLoader(BaseLoader, RateLimitMixin, URLProcessingMixin):
                     log.exception(e, "Error extracting content from URLs")
                 else:
                     raise e
 
     async def alazy_load(self) -> AsyncIterator[Document]:
         """Async version with rate limiting and SSL verification."""
         valid_urls = []
@@ -341,13 +341,13 @@ class SafeTavilyLoader(BaseLoader, RateLimitMixin, URLProcessingMixin):
                     log.warning(f"SSL verification failed for {url}: {str(e)}")
                     if not self.continue_on_failure:
                         raise e
 
         if not valid_urls:
             if self.continue_on_failure:
                 log.warning("No valid URLs to process after SSL verification")
                 return
             raise ValueError("No valid URLs to process after SSL verification")
 
         try:
             loader = TavilyLoader(
                 urls=valid_urls,
@@ -477,7 +477,6 @@ class SafePlaywrightURLLoader(PlaywrightURLLoader, RateLimitMixin, URLProcessingMixin):
             await browser.close()
 
 
-
 class SafeWebBaseLoader(WebBaseLoader):
     """WebBaseLoader with enhanced error handling for URLs."""
 
backend/open_webui/utils/telemetry/__init__.py (new file, empty)

backend/open_webui/utils/telemetry/constants.py (new file)
@@ -0,0 +1,26 @@
+from opentelemetry.semconv.trace import SpanAttributes as _SpanAttributes
+
+# Span Tags
+SPAN_DB_TYPE = "mysql"
+SPAN_REDIS_TYPE = "redis"
+SPAN_DURATION = "duration"
+SPAN_SQL_STR = "sql"
+SPAN_SQL_EXPLAIN = "explain"
+SPAN_ERROR_TYPE = "error"
+
+
+class SpanAttributes(_SpanAttributes):
+    """
+    Span Attributes
+    """
+
+    DB_INSTANCE = "db.instance"
+    DB_TYPE = "db.type"
+    DB_IP = "db.ip"
+    DB_PORT = "db.port"
+    ERROR_KIND = "error.kind"
+    ERROR_OBJECT = "error.object"
+    ERROR_MESSAGE = "error.message"
+    RESULT_CODE = "result.code"
+    RESULT_MESSAGE = "result.message"
+    RESULT_ERRORS = "result.errors"
backend/open_webui/utils/telemetry/exporters.py (new file)
@@ -0,0 +1,31 @@
+import threading
+
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import BatchSpanProcessor
+
+
+class LazyBatchSpanProcessor(BatchSpanProcessor):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.done = True
+        with self.condition:
+            self.condition.notify_all()
+        self.worker_thread.join()
+        self.done = False
+        self.worker_thread = None
+
+    def on_end(self, span: ReadableSpan) -> None:
+        if self.worker_thread is None:
+            self.worker_thread = threading.Thread(
+                name=self.__class__.__name__, target=self.worker, daemon=True
+            )
+            self.worker_thread.start()
+        super().on_end(span)
+
+    def shutdown(self) -> None:
+        self.done = True
+        with self.condition:
+            self.condition.notify_all()
+        if self.worker_thread:
+            self.worker_thread.join()
+        self.span_exporter.shutdown()
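LazyBatchSpanProcessor subclasses BatchSpanProcessor but immediately joins the export worker thread the parent constructor starts, and respawns it only on the first on_end() call; no export thread (and no exporter connection) exists until a span is actually recorded. A hedged sketch of exercising it locally, with ConsoleSpanExporter standing in for the OTLP exporter used by this PR:

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter

from open_webui.utils.telemetry.exporters import LazyBatchSpanProcessor

provider = TracerProvider()
# The worker thread starts only when the first span ends.
provider.add_span_processor(LazyBatchSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)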
backend/open_webui/utils/telemetry/instrumentors.py (new file)
@@ -0,0 +1,202 @@
+import logging
+import traceback
+from typing import Collection, Union
+
+from aiohttp import (
+    TraceRequestStartParams,
+    TraceRequestEndParams,
+    TraceRequestExceptionParams,
+)
+from chromadb.telemetry.opentelemetry.fastapi import instrument_fastapi
+from fastapi import FastAPI
+from opentelemetry.instrumentation.httpx import (
+    HTTPXClientInstrumentor,
+    RequestInfo,
+    ResponseInfo,
+)
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.instrumentation.logging import LoggingInstrumentor
+from opentelemetry.instrumentation.redis import RedisInstrumentor
+from opentelemetry.instrumentation.requests import RequestsInstrumentor
+from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
+from opentelemetry.instrumentation.aiohttp_client import AioHttpClientInstrumentor
+from opentelemetry.trace import Span, StatusCode
+from redis import Redis
+from requests import PreparedRequest, Response
+from sqlalchemy import Engine
+from fastapi import status
+
+from open_webui.utils.telemetry.constants import SPAN_REDIS_TYPE, SpanAttributes
+
+from open_webui.env import SRC_LOG_LEVELS
+
+logger = logging.getLogger(__name__)
+logger.setLevel(SRC_LOG_LEVELS["MAIN"])
+
+
+def requests_hook(span: Span, request: PreparedRequest):
+    """
+    Http Request Hook
+    """
+
+    span.update_name(f"{request.method} {request.url}")
+    span.set_attributes(
+        attributes={
+            SpanAttributes.HTTP_URL: request.url,
+            SpanAttributes.HTTP_METHOD: request.method,
+        }
+    )
+
+
+def response_hook(span: Span, request: PreparedRequest, response: Response):
+    """
+    HTTP Response Hook
+    """
+
+    span.set_attributes(
+        attributes={
+            SpanAttributes.HTTP_STATUS_CODE: response.status_code,
+        }
+    )
+    span.set_status(StatusCode.ERROR if response.status_code >= 400 else StatusCode.OK)
+
+
+def redis_request_hook(span: Span, instance: Redis, args, kwargs):
+    """
+    Redis Request Hook
+    """
+
+    try:
+        connection_kwargs: dict = instance.connection_pool.connection_kwargs
+        host = connection_kwargs.get("host")
+        port = connection_kwargs.get("port")
+        db = connection_kwargs.get("db")
+        span.set_attributes(
+            {
+                SpanAttributes.DB_INSTANCE: f"{host}/{db}",
+                SpanAttributes.DB_NAME: f"{host}/{db}",
+                SpanAttributes.DB_TYPE: SPAN_REDIS_TYPE,
+                SpanAttributes.DB_PORT: port,
+                SpanAttributes.DB_IP: host,
+                SpanAttributes.DB_STATEMENT: " ".join([str(i) for i in args]),
+                SpanAttributes.DB_OPERATION: str(args[0]),
+            }
+        )
+    except Exception:  # pylint: disable=W0718
+        logger.error(traceback.format_exc())
+
+
+def httpx_request_hook(span: Span, request: RequestInfo):
+    """
+    HTTPX Request Hook
+    """
+
+    span.update_name(f"{request.method.decode()} {str(request.url)}")
+    span.set_attributes(
+        attributes={
+            SpanAttributes.HTTP_URL: str(request.url),
+            SpanAttributes.HTTP_METHOD: request.method.decode(),
+        }
+    )
+
+
+def httpx_response_hook(span: Span, request: RequestInfo, response: ResponseInfo):
+    """
+    HTTPX Response Hook
+    """
+
+    span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, response.status_code)
+    span.set_status(
+        StatusCode.ERROR
+        if response.status_code >= status.HTTP_400_BAD_REQUEST
+        else StatusCode.OK
+    )
+
+
+async def httpx_async_request_hook(span: Span, request: RequestInfo):
+    """
+    Async Request Hook
+    """
+
+    httpx_request_hook(span, request)
+
+
+async def httpx_async_response_hook(
+    span: Span, request: RequestInfo, response: ResponseInfo
+):
+    """
+    Async Response Hook
+    """
+
+    httpx_response_hook(span, request, response)
+
+
+def aiohttp_request_hook(span: Span, request: TraceRequestStartParams):
+    """
+    Aiohttp Request Hook
+    """
+
+    span.update_name(f"{request.method} {str(request.url)}")
+    span.set_attributes(
+        attributes={
+            SpanAttributes.HTTP_URL: str(request.url),
+            SpanAttributes.HTTP_METHOD: request.method,
+        }
+    )
+
+
+def aiohttp_response_hook(
+    span: Span, response: Union[TraceRequestExceptionParams, TraceRequestEndParams]
+):
+    """
+    Aiohttp Response Hook
+    """
+
+    if isinstance(response, TraceRequestEndParams):
+        span.set_attribute(SpanAttributes.HTTP_STATUS_CODE, response.response.status)
+        span.set_status(
+            StatusCode.ERROR
+            if response.response.status >= status.HTTP_400_BAD_REQUEST
+            else StatusCode.OK
+        )
+    elif isinstance(response, TraceRequestExceptionParams):
+        span.set_status(StatusCode.ERROR)
+        span.set_attribute(SpanAttributes.ERROR_MESSAGE, str(response.exception))
+
+
+class Instrumentor(BaseInstrumentor):
+    """
+    Instrument OT
+    """
+
+    def __init__(self, app: FastAPI, db_engine: Engine):
+        self.app = app
+        self.db_engine = db_engine
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return []
+
+    def _instrument(self, **kwargs):
+        instrument_fastapi(app=self.app)
+        SQLAlchemyInstrumentor().instrument(engine=self.db_engine)
+        RedisInstrumentor().instrument(request_hook=redis_request_hook)
+        RequestsInstrumentor().instrument(
+            request_hook=requests_hook, response_hook=response_hook
+        )
+        LoggingInstrumentor().instrument()
+        HTTPXClientInstrumentor().instrument(
+            request_hook=httpx_request_hook,
+            response_hook=httpx_response_hook,
+            async_request_hook=httpx_async_request_hook,
+            async_response_hook=httpx_async_response_hook,
+        )
+        AioHttpClientInstrumentor().instrument(
+            request_hook=aiohttp_request_hook,
+            response_hook=aiohttp_response_hook,
+        )
+
+    def _uninstrument(self, **kwargs):
+        if getattr(self, "instrumentors", None) is None:
+            return
+        for instrumentor in self.instrumentors:
+            instrumentor.uninstrument()
backend/open_webui/utils/telemetry/setup.py (new file)
@@ -0,0 +1,23 @@
+from fastapi import FastAPI
+from opentelemetry import trace
+from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
+from opentelemetry.sdk.resources import SERVICE_NAME, Resource
+from opentelemetry.sdk.trace import TracerProvider
+from sqlalchemy import Engine
+
+from open_webui.utils.telemetry.exporters import LazyBatchSpanProcessor
+from open_webui.utils.telemetry.instrumentors import Instrumentor
+from open_webui.env import OTEL_SERVICE_NAME, OTEL_EXPORTER_OTLP_ENDPOINT
+
+
+def setup(app: FastAPI, db_engine: Engine):
+    # set up trace
+    trace.set_tracer_provider(
+        TracerProvider(
+            resource=Resource.create(attributes={SERVICE_NAME: OTEL_SERVICE_NAME})
+        )
+    )
+    # otlp export
+    exporter = OTLPSpanExporter(endpoint=OTEL_EXPORTER_OTLP_ENDPOINT)
+    trace.get_tracer_provider().add_span_processor(LazyBatchSpanProcessor(exporter))
+    Instrumentor(app=app, db_engine=db_engine).instrument()
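Once setup() has run, spans from the instrumented libraries (FastAPI, SQLAlchemy, Redis, requests, httpx, aiohttp, logging) and any manually created spans flow through the lazy batch processor to the OTLP endpoint. A minimal sketch, not part of the PR, of creating a span by hand afterwards:

from opentelemetry import trace

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("example-operation") as span:
    span.set_attribute("example.key", "example.value")  # illustrative attribute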
@@ -37,7 +37,7 @@ asgiref==3.8.1
 # AI libraries
 openai
 anthropic
-google-generativeai==0.7.2
+google-generativeai==0.8.4
 tiktoken
 
 langchain==0.3.19
@@ -118,3 +118,16 @@ ldap3==2.9.1
 
 ## Firecrawl
 firecrawl-py==1.12.0
+
+## Trace
+opentelemetry-api==1.30.0
+opentelemetry-sdk==1.30.0
+opentelemetry-exporter-otlp==1.30.0
+opentelemetry-instrumentation==0.51b0
+opentelemetry-instrumentation-fastapi==0.51b0
+opentelemetry-instrumentation-sqlalchemy==0.51b0
+opentelemetry-instrumentation-redis==0.51b0
+opentelemetry-instrumentation-requests==0.51b0
+opentelemetry-instrumentation-logging==0.51b0
+opentelemetry-instrumentation-httpx==0.51b0
+opentelemetry-instrumentation-aiohttp-client==0.51b0