From 121a13d4ed62a77ea6c1bc9c0b5628644c264112 Mon Sep 17 00:00:00 2001
From: Rory <16675082+roryeckel@users.noreply.github.com>
Date: Mon, 3 Feb 2025 17:37:20 -0600
Subject: [PATCH 1/2] fix: Filter to valid RAG web search URLs

---
 backend/open_webui/retrieval/web/main.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/backend/open_webui/retrieval/web/main.py b/backend/open_webui/retrieval/web/main.py
index 1af8a70aa..28a749e7d 100644
--- a/backend/open_webui/retrieval/web/main.py
+++ b/backend/open_webui/retrieval/web/main.py
@@ -1,3 +1,5 @@
+import validators
+
 from typing import Optional
 from urllib.parse import urlparse
 
@@ -10,6 +12,8 @@ def get_filtered_results(results, filter_list):
     filtered_results = []
     for result in results:
         url = result.get("url") or result.get("link", "")
+        if not validators.url(url):
+            continue
         domain = urlparse(url).netloc
         if any(domain.endswith(filtered_domain) for filtered_domain in filter_list):
             filtered_results.append(result)


From 3db6b4352fdb75d469fc58e272d2de2e0b071b5c Mon Sep 17 00:00:00 2001
From: Rory <16675082+roryeckel@users.noreply.github.com>
Date: Mon, 3 Feb 2025 18:18:49 -0600
Subject: [PATCH 2/2] fix: Filter out invalid RAG web URLs (continued)

---
 backend/open_webui/retrieval/web/utils.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/backend/open_webui/retrieval/web/utils.py b/backend/open_webui/retrieval/web/utils.py
index a322bbbfc..d91c7da15 100644
--- a/backend/open_webui/retrieval/web/utils.py
+++ b/backend/open_webui/retrieval/web/utils.py
@@ -42,6 +42,15 @@ def validate_url(url: Union[str, Sequence[str]]):
     else:
         return False
 
+def safe_validate_urls(url: Sequence[str]) -> Sequence[str]:
+    valid_urls = []
+    for u in url:
+        try:
+            if validate_url(u):
+                valid_urls.append(u)
+        except ValueError:
+            continue
+    return valid_urls
 
 def resolve_hostname(hostname):
     # Get address information
@@ -86,11 +95,11 @@ def get_web_loader(
     verify_ssl: bool = True,
     requests_per_second: int = 2,
 ):
-    # Check if the URL is valid
-    if not validate_url(urls):
-        raise ValueError(ERROR_MESSAGES.INVALID_URL)
+    # Check if the URLs are valid
+    safe_urls = safe_validate_urls([urls] if isinstance(urls, str) else urls)
+
     return SafeWebBaseLoader(
-        urls,
+        safe_urls,
         verify_ssl=verify_ssl,
         requests_per_second=requests_per_second,
         continue_on_failure=True,
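
Reviewer note: below is a minimal, self-contained sketch of the guard these patches introduce, for anyone verifying the behavior locally. It assumes only the third-party validators package (pip install validators), whose url() check returns a falsy result for malformed input; the example results are hypothetical, not taken from the patches.

import validators

# Hypothetical search results in the shape get_filtered_results expects:
# each entry carries its address under "url" or "link".
results = [
    {"url": "https://docs.example.com/page"},  # well-formed, kept
    {"link": "notaurl"},                       # malformed, skipped
    {"url": ""},                               # empty, skipped
]

# Same guard as the patched get_filtered_results in main.py: malformed
# URLs are skipped before urlparse() ever sees them.
filtered = []
for result in results:
    url = result.get("url") or result.get("link", "")
    if not validators.url(url):
        continue
    filtered.append(result)

print(filtered)  # -> [{'url': 'https://docs.example.com/page'}]

# PATCH 2/2 applies the same idea at load time: safe_validate_urls() keeps
# only the URLs that validate_url() accepts, swallowing the ValueError it can
# raise, so get_web_loader() now skips bad URLs instead of aborting the batch.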