Mirror of https://github.com/open-webui/open-webui, synced 2025-04-15 21:13:44 +00:00
black formatting
commit 06062568c7
parent 6d62e71c34
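The hunks below are mechanical reformatting from the black code formatter: spaces are added around "=" when a keyword default carries a type annotation (per PEP 8), a trailing comma is added to the last element of any construct black splits across multiple lines, and calls that exceed the line-length limit are wrapped. A minimal sketch of the conventions involved, with illustrative names only (not lines taken verbatim from the repository); the same output can typically be reproduced by running python -m black over the affected source files:

    # Annotated keyword defaults get spaces around "=", plain defaults stay tight:
    #   before black:  def fetch(url, key=None, user: UserModel=None): ...
    #   after black:   def fetch(url, key=None, user: UserModel = None): ...

    # Constructs split over multiple lines get a trailing comma on the last element,
    # and over-long single-line calls are wrapped (illustrative call, hypothetical args):
    tasks.append(
        fetch(f"{url}/api/tags", key, user=user),  # trailing comma added by black
    )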
@@ -115,7 +115,7 @@ async def send_post_request(
     stream: bool = True,
     key: Optional[str] = None,
     content_type: Optional[str] = None,
-    user: UserModel = None
+    user: UserModel = None,
 ):
 
     r = None
@@ -296,7 +296,7 @@ async def update_config(
 
 
 @cached(ttl=3)
-async def get_all_models(request: Request, user: UserModel=None):
+async def get_all_models(request: Request, user: UserModel = None):
     log.info("get_all_models()")
     if request.app.state.config.ENABLE_OLLAMA_API:
         request_tasks = []
@@ -317,7 +317,9 @@ async def get_all_models(request: Request, user: UserModel=None):
                 key = api_config.get("key", None)
 
                 if enable:
-                    request_tasks.append(send_get_request(f"{url}/api/tags", key, user=user))
+                    request_tasks.append(
+                        send_get_request(f"{url}/api/tags", key, user=user)
+                    )
                 else:
                     request_tasks.append(asyncio.ensure_future(asyncio.sleep(0, None)))
 
@@ -531,7 +533,7 @@ async def get_ollama_loaded_models(request: Request, user=Depends(get_verified_u
                         url, {}
                     ), # Legacy support
                 ).get("key", None),
-                user=user
+                user=user,
             )
             for idx, url in enumerate(request.app.state.config.OLLAMA_BASE_URLS)
         ]
@ -52,7 +52,7 @@ log.setLevel(SRC_LOG_LEVELS["OPENAI"])
|
||||
##########################################
|
||||
|
||||
|
||||
async def send_get_request(url, key=None, user: UserModel=None):
|
||||
async def send_get_request(url, key=None, user: UserModel = None):
|
||||
timeout = aiohttp.ClientTimeout(total=AIOHTTP_CLIENT_TIMEOUT_OPENAI_MODEL_LIST)
|
||||
try:
|
||||
async with aiohttp.ClientSession(timeout=timeout, trust_env=True) as session:
|
||||
@@ -70,7 +70,7 @@ async def send_get_request(url, key=None, user: UserModel=None):
                         if ENABLE_FORWARD_USER_INFO_HEADERS
                         else {}
                     ),
-            }
+                },
             ) as response:
                 return await response.json()
     except Exception as e:
@@ -30,7 +30,7 @@ log = logging.getLogger(__name__)
 log.setLevel(SRC_LOG_LEVELS["MAIN"])
 
 
-async def get_all_base_models(request: Request, user: UserModel=None):
+async def get_all_base_models(request: Request, user: UserModel = None):
     function_models = []
     openai_models = []
     ollama_models = []
@@ -59,7 +59,7 @@ async def get_all_base_models(request: Request, user: UserModel=None):
     return models
 
 
-async def get_all_models(request, user: UserModel=None):
+async def get_all_models(request, user: UserModel = None):
     models = await get_all_base_models(request, user=user)
 
     # If there are no models, return an empty list