From 741ed5dc4c150edf8e059fedc4afd8190c1bcdb8 Mon Sep 17 00:00:00 2001 From: "Timothy J. Baek" Date: Sun, 14 Apr 2024 19:56:33 -0400 Subject: [PATCH] fix: store document texts in vector DB batches and expand RAG debug logging --- backend/apps/rag/main.py | 1 + backend/apps/rag/utils.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/backend/apps/rag/main.py b/backend/apps/rag/main.py index 04554c3d4..5e9564f7d 100644 --- a/backend/apps/rag/main.py +++ b/backend/apps/rag/main.py @@ -495,6 +495,7 @@ def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> b ids=[str(uuid.uuid1()) for _ in texts], metadatas=metadatas, embeddings=embeddings, + documents=texts, ): collection.add(*batch) diff --git a/backend/apps/rag/utils.py b/backend/apps/rag/utils.py index 140fd88ec..daea36863 100644 --- a/backend/apps/rag/utils.py +++ b/backend/apps/rag/utils.py @@ -43,6 +43,8 @@ def query_embeddings_doc(collection_name: str, query_embeddings, k: int): query_embeddings=[query_embeddings], n_results=k, ) + + log.info(f"query_embeddings_doc:result {result}") return result except Exception as e: raise e @@ -155,7 +157,9 @@ def rag_messages( openai_key, openai_url, ): - log.debug(f"docs: {docs}") + log.debug( + f"docs: {docs} {messages} {embedding_engine} {embedding_model} {embedding_function} {openai_key} {openai_url}" + ) last_user_message_idx = None for i in range(len(messages) - 1, -1, -1):