diff --git a/README.md b/README.md
index 4ac495a47..8c7e4cdf7 100644
--- a/README.md
+++ b/README.md
@@ -185,6 +185,14 @@ If you want to try out the latest bleeding-edge features and are okay with occas
 docker run -d -p 3000:8080 -v open-webui:/app/backend/data --name open-webui --add-host=host.docker.internal:host-gateway --restart always ghcr.io/open-webui/open-webui:dev
 ```
 
+### Offline Mode
+
+If you are running Open WebUI in an offline environment, you can set the `HF_HUB_OFFLINE` environment variable to `1` to prevent attempts to download models from the internet.
+
+```bash
+export HF_HUB_OFFLINE=1
+```
+
 ## What's Next? 🌟
 
 Discover upcoming features on our roadmap in the [Open WebUI Documentation](https://docs.openwebui.com/roadmap/).
diff --git a/backend/open_webui/env.py b/backend/open_webui/env.py
index a5f848b62..5d79623d2 100644
--- a/backend/open_webui/env.py
+++ b/backend/open_webui/env.py
@@ -392,3 +392,6 @@ else:
 ####################################
 
 OFFLINE_MODE = os.environ.get("OFFLINE_MODE", "false").lower() == "true"
+
+if OFFLINE_MODE:
+    os.environ["HF_HUB_OFFLINE"] = "1"
diff --git a/backend/open_webui/retrieval/utils.py b/backend/open_webui/retrieval/utils.py
index 17f1438da..c95367e6c 100644
--- a/backend/open_webui/retrieval/utils.py
+++ b/backend/open_webui/retrieval/utils.py
@@ -14,7 +14,7 @@ from langchain_core.documents import Document
 from open_webui.retrieval.vector.connector import VECTOR_DB_CLIENT
 from open_webui.utils.misc import get_last_user_message
 
-from open_webui.env import SRC_LOG_LEVELS
+from open_webui.env import SRC_LOG_LEVELS, OFFLINE_MODE
 
 log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["RAG"])
@@ -375,6 +375,9 @@ def get_model_path(model: str, update_model: bool = False):
 
     local_files_only = not update_model
 
+    if OFFLINE_MODE:
+        local_files_only = True
+
     snapshot_kwargs = {
         "cache_dir": cache_dir,
         "local_files_only": local_files_only,
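
The combined effect of the three hunks can be pictured with a minimal sketch. This is not the actual Open WebUI code: it assumes that `get_model_path` ultimately calls `huggingface_hub.snapshot_download` with the `snapshot_kwargs` shown above, and the `resolve_model_path` name and the `SENTENCE_TRANSFORMERS_HOME` cache directory are illustrative assumptions, not taken from the diff.

```python
# Sketch only: approximates how OFFLINE_MODE propagates to Hugging Face Hub
# lookups. resolve_model_path and SENTENCE_TRANSFORMERS_HOME are assumptions.
import os

from huggingface_hub import snapshot_download

# Same toggle as backend/open_webui/env.py: OFFLINE_MODE=true enables it.
OFFLINE_MODE = os.environ.get("OFFLINE_MODE", "false").lower() == "true"

if OFFLINE_MODE:
    # huggingface_hub honours HF_HUB_OFFLINE and skips network requests when
    # it is set, so every lookup is served from the local cache.
    os.environ["HF_HUB_OFFLINE"] = "1"


def resolve_model_path(model: str, update_model: bool = False) -> str:
    """Return the local snapshot path for `model`, never downloading when offline."""
    local_files_only = not update_model

    if OFFLINE_MODE:
        # Offline mode always wins, even if the caller asked for an update.
        local_files_only = True

    # With local_files_only=True, snapshot_download resolves the snapshot from
    # the local cache and raises an error if the model was never downloaded,
    # instead of reaching out to the Hub.
    return snapshot_download(
        repo_id=model,
        cache_dir=os.getenv("SENTENCE_TRANSFORMERS_HOME"),
        local_files_only=local_files_only,
    )
```

Under these assumptions, a deployment needs its model cache populated while still online (for example, one run without `OFFLINE_MODE`) before the offline switch is flipped; afterwards, neither a user-requested model update nor a cache miss will trigger a download.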