Python Formatting (Failed CI - fixed)

DmitriyAlergant-T1A 2024-11-22 23:11:46 -05:00
parent d24c21b40f
commit 374d6cad18
2 changed files with 36 additions and 21 deletions
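The hunks below are pure layout changes of the kind an auto-formatter applies to satisfy a line-length check: long calls are wrapped so the argument sits on its own line, and multi-line literals get one item per line plus a trailing comma. A minimal standalone sketch of the pattern, using placeholder names and values rather than the application's actual code (the output style matches Black's defaults, though the exact formatter is an assumption):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)

    # Placeholder values for illustration only.
    task_model_id = "example-model"
    user_email = "user@example.com"

    # Before: the single-line form flagged by the line-length check.
    log.debug(f"generating chat tags using model {task_model_id} for user {user_email}")

    # After: the argument moves to its own indented line, as in the hunks below.
    log.debug(
        f"generating chat tags using model {task_model_id} for user {user_email}"
    )

    # Long literals are likewise expanded to one element per line,
    # with a trailing comma after the last element.
    metadata = {
        "task": "tags_generation",
        "task_body": {},
        "chat_id": None,
    }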

Changed file 1 of 2

@@ -272,7 +272,9 @@ async def get_pipe_models():
log.exception(e)
sub_pipes = []
log.debug(f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}")
log.debug(
f"get_pipe_models: function '{pipe.id}' is a manifold of {sub_pipes}"
)
for p in sub_pipes:
sub_pipe_id = f'{pipe.id}.{p["id"]}'
@@ -282,7 +284,7 @@ async def get_pipe_models():
sub_pipe_name = f"{function_module.name}{sub_pipe_name}"
pipe_flag = {"type": pipe.type}
pipe_models.append(
{
"id": sub_pipe_id,
@@ -296,8 +298,10 @@ async def get_pipe_models():
else:
pipe_flag = {"type": "pipe"}
log.debug(f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}")
log.debug(
f"get_pipe_models: function '{pipe.id}' is a single pipe {{ 'id': {pipe.id}, 'name': {pipe.name} }}"
)
pipe_models.append(
{
"id": pipe.id,

Changed file 2 of 2

@@ -539,7 +539,6 @@ async def chat_completion_files_handler(
if len(queries) == 0:
queries = [get_last_user_message(body["messages"])]
sources = get_sources_from_files(
files=files,
queries=queries,
@@ -969,7 +968,7 @@ app.add_middleware(SecurityHeadersMiddleware)
@app.middleware("http")
async def commit_session_after_request(request: Request, call_next):
response = await call_next(request)
-#log.debug("Commit session after request")
+# log.debug("Commit session after request")
Session.commit()
return response
@@ -1215,7 +1214,9 @@ async def get_models(user=Depends(get_verified_user)):
filtered_models.append(model)
models = filtered_models
log.debug(f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}")
log.debug(
f"/api/models returned filtered models accessible to the user: {json.dumps([model['id'] for model in models])}"
)
return {"data": models}
@@ -1727,7 +1728,9 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)):
models,
)
log.debug(f"generating chat title using model {task_model_id} for user {user.email} ")
log.debug(
f"generating chat title using model {task_model_id} for user {user.email} "
)
if app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE != "":
template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE
@@ -1767,10 +1770,10 @@ Artificial Intelligence in Healthcare
}
),
"metadata": {
"task": str(TASKS.TITLE_GENERATION),
"task_body": form_data,
"chat_id": form_data.get("chat_id", None)
},
"task": str(TASKS.TITLE_GENERATION),
"task_body": form_data,
"chat_id": form_data.get("chat_id", None),
},
}
# Handle pipeline filters
@@ -1820,8 +1823,10 @@ async def generate_chat_tags(form_data: dict, user=Depends(get_verified_user)):
app.state.config.TASK_MODEL_EXTERNAL,
models,
)
log.debug(f"generating chat tags using model {task_model_id} for user {user.email} ")
log.debug(
f"generating chat tags using model {task_model_id} for user {user.email} "
)
if app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE != "":
template = app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE
@@ -1853,10 +1858,10 @@ JSON format: { "tags": ["tag1", "tag2", "tag3"] }
"messages": [{"role": "user", "content": content}],
"stream": False,
"metadata": {
"task": str(TASKS.TAGS_GENERATION),
"task": str(TASKS.TAGS_GENERATION),
"task_body": form_data,
"chat_id": form_data.get("chat_id", None)
}
"chat_id": form_data.get("chat_id", None),
},
}
# Handle pipeline filters
@@ -1914,8 +1919,10 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
app.state.config.TASK_MODEL_EXTERNAL,
models,
)
log.debug(f"generating {type} queries using model {task_model_id} for user {user.email}")
log.debug(
f"generating {type} queries using model {task_model_id} for user {user.email}"
)
if app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE != "":
template = app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE
@@ -1930,7 +1937,11 @@ async def generate_queries(form_data: dict, user=Depends(get_verified_user)):
"model": task_model_id,
"messages": [{"role": "user", "content": content}],
"stream": False,
"metadata": {"task": str(TASKS.QUERY_GENERATION), "task_body": form_data, "chat_id": form_data.get("chat_id", None)},
"metadata": {
"task": str(TASKS.QUERY_GENERATION),
"task_body": form_data,
"chat_id": form_data.get("chat_id", None),
},
}
# Handle pipeline filters
@@ -2047,7 +2058,7 @@ async def generate_moa_response(form_data: dict, user=Depends(get_verified_user)
app.state.config.TASK_MODEL_EXTERNAL,
models,
)
log.debug(f"generating MOA model {task_model_id} for user {user.email} ")
template = """You have been provided with a set of responses from various models to the latest user query: "{{prompt}}"