From a1f3ece5289b8d862c8e6265357aa3d2ae21b4af Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Fri, 13 Sep 2024 01:07:03 -0400
Subject: [PATCH] refac

---
 backend/open_webui/main.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/backend/open_webui/main.py b/backend/open_webui/main.py
index 15f41be69..cb2364da6 100644
--- a/backend/open_webui/main.py
+++ b/backend/open_webui/main.py
@@ -586,13 +586,16 @@ class ChatCompletionMiddleware(BaseHTTPMiddleware):
         if len(contexts) > 0:
             context_string = "/n".join(contexts).strip()
             prompt = get_last_user_message(body["messages"])
+
             if prompt is None:
                 raise Exception("No user message found")
-            if rag_app.state.config.RELEVANCE_THRESHOLD == 0:
-                if context_string.strip() == "":
-                    log.debug(
-                        f"With a 0 relevancy threshold for RAG, the context cannot be empty"
-                    )
+            if (
+                rag_app.state.config.RELEVANCE_THRESHOLD == 0
+                and context_string.strip() == ""
+            ):
+                log.debug(
+                    f"With a 0 relevancy threshold for RAG, the context cannot be empty"
+                )
 
             # Workaround for Ollama 2.0+ system prompt issue
             # TODO: replace with add_or_update_system_message
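
A minimal standalone sketch of the check this hunk refactors, using local
RELEVANCE_THRESHOLD and context_string variables as stand-ins for
rag_app.state.config.RELEVANCE_THRESHOLD and the joined RAG contexts
(values chosen only for illustration):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)

    RELEVANCE_THRESHOLD = 0  # stand-in for rag_app.state.config.RELEVANCE_THRESHOLD
    context_string = ""      # stand-in for the joined RAG contexts

    # Before the patch: two nested ifs.
    if RELEVANCE_THRESHOLD == 0:
        if context_string.strip() == "":
            log.debug("With a 0 relevancy threshold for RAG, the context cannot be empty")

    # After the patch: the same check as a single compound condition.
    if RELEVANCE_THRESHOLD == 0 and context_string.strip() == "":
        log.debug("With a 0 relevancy threshold for RAG, the context cannot be empty")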