diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py
index 39106ac17..6bea03ab8 100644
--- a/backend/apps/ollama/main.py
+++ b/backend/apps/ollama/main.py
@@ -33,7 +33,13 @@
 from constants import ERROR_MESSAGES
 from utils.utils import decode_token, get_current_user, get_admin_user
 
-from config import SRC_LOG_LEVELS, OLLAMA_BASE_URLS, MODEL_FILTER_ENABLED, MODEL_FILTER_LIST, UPLOAD_DIR
+from config import (
+    SRC_LOG_LEVELS,
+    OLLAMA_BASE_URLS,
+    MODEL_FILTER_ENABLED,
+    MODEL_FILTER_LIST,
+    UPLOAD_DIR,
+)
 from utils.misc import calculate_sha256
 
 log = logging.getLogger(__name__)
@@ -203,7 +209,8 @@ async def get_ollama_versions(url_idx: Optional[int] = None):
 
     if len(responses) > 0:
         lowest_version = min(
-            responses, key=lambda x: tuple(map(int, x["version"].replace("-rc", ".").split(".")))
+            responses,
+            key=lambda x: tuple(map(int, x["version"].split("-")[0].split("."))),
         )
 
         return {"version": lowest_version["version"]}
@@ -770,7 +777,11 @@ async def generate_chat_completion(
 
     r = None
 
-    log.debug("form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(form_data.model_dump_json(exclude_none=True).encode()))
+    log.debug(
+        "form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(
+            form_data.model_dump_json(exclude_none=True).encode()
+        )
+    )
 
     def get_request():
         nonlocal form_data
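
Note (not part of the patch): the functional change here is the version-comparison key in get_ollama_versions. The old key only rewrote "-rc", so any other pre-release suffix (e.g. "0.1.30-dev") still reached int("30-dev") and raised ValueError; splitting on the first "-" keeps only the numeric portion for every suffix. A minimal sketch of the new behavior, with made-up version strings for illustration:

# Sketch of the new comparison key (illustrative, not part of the patch).
def version_key(version: str) -> tuple:
    # "0.1.30-rc1" -> "0.1.30" -> (0, 1, 30)
    return tuple(map(int, version.split("-")[0].split(".")))

# Hypothetical responses, for illustration only.
responses = [{"version": "0.1.32"}, {"version": "0.1.30-rc1"}, {"version": "0.1.31"}]
lowest = min(responses, key=lambda x: version_key(x["version"]))
assert lowest["version"] == "0.1.30-rc1"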