diff --git a/backend/open_webui/apps/ollama/main.py b/backend/open_webui/apps/ollama/main.py
index c13a1cbc3..b44f68017 100644
--- a/backend/open_webui/apps/ollama/main.py
+++ b/backend/open_webui/apps/ollama/main.py
@@ -9,6 +9,8 @@ from typing import Optional, Union
 from urllib.parse import urlparse
 
 import aiohttp
+from aiocache import cached
+
 import requests
 from open_webui.apps.webui.models.models import Models
 from open_webui.config import (
@@ -256,6 +258,7 @@ def merge_models_lists(model_lists):
     return list(merged_models.values())
 
 
+@cached(ttl=3)
 async def get_all_models():
     log.info("get_all_models()")
     if app.state.config.ENABLE_OLLAMA_API:
@@ -295,8 +298,6 @@ async def get_all_models():
                     for model in response.get("models", []):
                         model["model"] = f"{prefix_id}.{model['model']}"
 
-        print(responses)
-
         models = {
             "models": merge_models_lists(
                 map(
diff --git a/backend/open_webui/apps/openai/main.py b/backend/open_webui/apps/openai/main.py
index ee3443ac9..6d6ac50c6 100644
--- a/backend/open_webui/apps/openai/main.py
+++ b/backend/open_webui/apps/openai/main.py
@@ -6,7 +6,10 @@ from pathlib import Path
 from typing import Literal, Optional, overload
 
 import aiohttp
+from aiocache import cached
 import requests
+
+
 from open_webui.apps.webui.models.models import Models
 from open_webui.config import (
     CACHE_DIR,
@@ -325,6 +328,7 @@ async def get_all_models_responses() -> list:
     return responses
 
 
+@cached(ttl=3)
 async def get_all_models() -> dict[str, list]:
     log.info("get_all_models()")
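
Note (illustration, not part of the diff): a minimal, self-contained sketch of what the newly applied `@cached(ttl=3)` decorator from aiocache does to an async function, assuming the library's default in-memory cache and default key builder. `fetch_models` and its body are hypothetical stand-ins for `get_all_models()`; only the decorator usage mirrors the change above.

```python
# Hypothetical demo, not from the PR: shows how aiocache's @cached(ttl=...)
# memoizes an async function's result in the default in-memory cache.
import asyncio

from aiocache import cached


@cached(ttl=3)  # same TTL the diff applies to get_all_models()
async def fetch_models() -> list[str]:
    # Stand-in for the upstream /api/tags or /v1/models requests that the
    # real get_all_models() performs; here we just simulate the latency.
    print("cache miss: fetching from upstream")
    await asyncio.sleep(0.1)
    return ["llama3.1", "gpt-4o"]


async def main() -> None:
    await fetch_models()      # miss: body runs
    await fetch_models()      # hit within the TTL: served from memory, no print
    await asyncio.sleep(3.5)  # let the 3-second TTL expire
    await fetch_models()      # miss again: body runs once more


if __name__ == "__main__":
    asyncio.run(main())
```

The practical effect of the 3-second TTL is that bursts of model-list requests (for example, the UI refreshing several views at once) reach the upstream Ollama/OpenAI endpoints at most once every few seconds, while the list still stays effectively fresh for interactive use.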