Merge pull request #5728 from sp301415/dev

fix: Fix OpenAI batch embedding
Authored by Timothy Jaeryang Baek on 2024-09-26 14:08:11 +02:00, committed via GitHub
commit 3af50f08bd
3 changed files with 5 additions and 7 deletions


@@ -1112,13 +1112,17 @@ def store_docs_in_vector_db(
             app.state.config.RAG_EMBEDDING_OPENAI_BATCH_SIZE,
         )
 
+        embedding_texts = embedding_function(
+            list(map(lambda x: x.replace("\n", " "), texts))
+        )
+
         VECTOR_DB_CLIENT.insert(
             collection_name=collection_name,
             items=[
                 {
                     "id": str(uuid.uuid4()),
                     "text": text,
-                    "vector": embedding_function(text.replace("\n", " ")),
+                    "vector": embedding_texts[idx],
                     "metadata": metadatas[idx],
                 }
                 for idx, text in enumerate(texts)

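The hunk above is the heart of the fix: instead of invoking embedding_function once per chunk inside the list comprehension, all chunks are embedded in a single batched call and each inserted item simply indexes into the precomputed vectors. Below is a minimal standalone sketch of the same pattern, outside the diff; the texts, metadatas, and toy embedding_function are hypothetical stand-ins for the app's real objects.

import uuid

# Hypothetical inputs standing in for the app's chunked documents.
texts = ["first chunk\nof text", "second chunk"]
metadatas = [{"source": "a.txt"}, {"source": "b.txt"}]

def embedding_function(batch):
    # Toy batch embedder: a real one would send the whole list to the
    # embeddings endpoint once and return one vector per input text.
    return [[float(len(t)), 0.0] for t in batch]

# One batched call for all chunks (newlines flattened, as in the diff) ...
embedding_texts = embedding_function([t.replace("\n", " ") for t in texts])

# ... then each item only looks up its precomputed vector by position,
# rather than re-invoking the embedding function per chunk.
items = [
    {
        "id": str(uuid.uuid4()),
        "text": text,
        "vector": embedding_texts[idx],
        "metadata": metadatas[idx],
    }
    for idx, text in enumerate(texts)
]

print(len(items), items[0]["vector"])  # 2 [19.0, 0.0]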

@@ -76,8 +76,6 @@ def query_doc(
             limit=k,
         )
 
-        print("result", result)
         log.info(f"query_doc:result {result}")
         return result
     except Exception as e:

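The only other change in this file is dropping the bare print in favor of the module logger the surrounding code already uses. A small sketch of that pattern follows; the function body is a placeholder, not the app's actual query_doc.

import logging

# Module-level logger, mirroring the `log` name used in the diff.
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

def query_doc_sketch(collection_name: str, k: int):
    # Placeholder result standing in for a real vector-DB search.
    result = {"collection": collection_name, "limit": k}
    # Goes through the logging pipeline (levels, handlers, formatting)
    # instead of writing straight to stdout with print().
    log.info(f"query_doc:result {result}")
    return result

query_doc_sketch("docs", k=3)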

@@ -16,8 +16,6 @@ class MilvusClient:
         self.client = Client(uri=MILVUS_URI)
 
     def _result_to_get_result(self, result) -> GetResult:
-        print(result)
         ids = []
         documents = []
         metadatas = []
@@ -45,8 +43,6 @@ class MilvusClient:
         )
 
     def _result_to_search_result(self, result) -> SearchResult:
-        print(result)
         ids = []
         distances = []
         documents = []
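For context on the two methods the debug prints were removed from: they fold the raw Milvus response into the client's GetResult / SearchResult containers (ids, documents, metadatas, plus distances for searches). The sketch below shows that general shape under explicit assumptions; the container classes and the per-hit field names (`id`, `distance`, `entity`, ...) are illustrative guesses, not the codebase's exact definitions.

from dataclasses import dataclass
from typing import Any, Optional

@dataclass
class GetResult:  # assumed container, defined elsewhere in the real code
    ids: list[list[str]]
    documents: list[list[str]]
    metadatas: list[list[dict[str, Any]]]

@dataclass
class SearchResult(GetResult):  # assumed to extend GetResult with distances
    distances: Optional[list[list[float]]] = None

def result_to_search_result(result: list[list[dict[str, Any]]]) -> SearchResult:
    # Assumes `result` is a list of hit-lists (one per query vector), where
    # each hit exposes an id, a distance, and the stored entity fields.
    ids, distances, documents, metadatas = [], [], [], []
    for hits in result:
        ids.append([str(hit["id"]) for hit in hits])
        distances.append([hit["distance"] for hit in hits])
        documents.append([hit["entity"].get("text", "") for hit in hits])
        metadatas.append([hit["entity"].get("metadata", {}) for hit in hits])
    return SearchResult(
        ids=ids, documents=documents, metadatas=metadatas, distances=distances
    )

# Example with a fabricated one-hit response:
hits = [[{"id": 1, "distance": 0.12, "entity": {"text": "chunk", "metadata": {}}}]]
print(result_to_search_result(hits))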