diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py
index e825848d4..a3d506449 100644
--- a/backend/open_webui/routers/ollama.py
+++ b/backend/open_webui/routers/ollama.py
@@ -115,7 +115,7 @@ async def send_post_request(
     stream: bool = True,
     key: Optional[str] = None,
     content_type: Optional[str] = None,
-    user: UserModel = None
+    user: UserModel = None,
 ):
 
     r = None
@@ -296,7 +296,7 @@ async def update_config(
 
 
 @cached(ttl=3)
-async def get_all_models(request: Request, user: UserModel=None):
+async def get_all_models(request: Request, user: UserModel = None):
     log.info("get_all_models()")
     if request.app.state.config.ENABLE_OLLAMA_API:
         request_tasks = []
@@ -317,7 +317,9 @@ async def get_all_models(request: Request, user: UserModel=None):
                 key = api_config.get("key", None)
 
                 if enable:
-                    request_tasks.append(send_get_request(f"{url}/api/tags", key, user=user))
+                    request_tasks.append(
+                        send_get_request(f"{url}/api/tags", key, user=user)
+                    )
                 else:
                     request_tasks.append(asyncio.ensure_future(asyncio.sleep(0, None)))
 
@@ -531,7 +533,7 @@ async def get_ollama_loaded_models(request: Request, user=Depends(get_verified_u
                     url, {}
                 ),  # Legacy support
             ).get("key", None),
-            user=user
+            user=user,
         )
         for idx, url in enumerate(request.app.state.config.OLLAMA_BASE_URLS)
     ]
diff --git a/backend/open_webui/routers/openai.py b/backend/open_webui/routers/openai.py
index f0d5d81dd..1ef913df4 100644
--- a/backend/open_webui/routers/openai.py
+++ b/backend/open_webui/routers/openai.py
@@ -52,7 +52,7 @@ log.setLevel(SRC_LOG_LEVELS["OPENAI"])
 ##########################################
 
 
-async def send_get_request(url, key=None, user: UserModel=None):
+async def send_get_request(url, key=None, user: UserModel = None):
     timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
     try:
         async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
@@ -70,7 +70,7 @@ async def send_get_request(url, key=None, user: UserModel=None):
                         if ENABLE_FORWARD_USER_INFO_HEADERS
                         else {}
                     ),
-                }
+                },
             ) as response:
                 return await response.json()
     except Exception as e:
diff --git a/backend/open_webui/utils/models.py b/backend/open_webui/utils/models.py
index 00f8fd666..872049f0f 100644
--- a/backend/open_webui/utils/models.py
+++ b/backend/open_webui/utils/models.py
@@ -30,7 +30,7 @@ log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["MAIN"])
 
 
-async def get_all_base_models(request: Request, user: UserModel=None):
+async def get_all_base_models(request: Request, user: UserModel = None):
     function_models = []
     openai_models = []
     ollama_models = []
@@ -59,7 +59,7 @@ async def get_all_base_models(request: Request, user: UserModel=None):
     return models
 
 
-async def get_all_models(request, user: UserModel=None):
+async def get_all_models(request, user: UserModel = None):
     models = await get_all_base_models(request, user=user)
 
     # If there are no models, return an empty list