Formatting and Client changes (#2247)
@@ -118,8 +118,7 @@ class AzureAISearch(VectorStoreBase):
         logger.info(f"Inserting {len(vectors)} vectors into index {self.index_name}")

         documents = [
-            self._generate_document(vector, payload, id)
-            for id, vector, payload in zip(ids, vectors, payloads)
+            self._generate_document(vector, payload, id) for id, vector, payload in zip(ids, vectors, payloads)
         ]
         self.search_client.upload_documents(documents)

@@ -133,7 +132,7 @@ class AzureAISearch(VectorStoreBase):
                 condition = f"{key} eq {value}"
             filter_conditions.append(condition)
         # Use 'and' to join multiple conditions
-        filter_expression = ' and '.join(filter_conditions)
+        filter_expression = " and ".join(filter_conditions)
         return filter_expression

     def search(self, query, limit=5, filters=None):
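Note (not part of the diff): the string built by `_build_filter_expression` is a plain OData-style filter. A minimal standalone sketch of the same joining logic, with a hypothetical `filters` dict used only for illustration:

    # Standalone sketch of the filter-joining logic in the hunk above.
    # The example `filters` dict is hypothetical.
    filters = {"user_id": "alice", "agent_id": 42}

    filter_conditions = []
    for key, value in filters.items():
        # Only the non-string branch is visible in this hunk; string values
        # would typically need single quotes in an OData filter expression.
        condition = f"{key} eq {value}"
        filter_conditions.append(condition)

    filter_expression = " and ".join(filter_conditions)
    print(filter_expression)  # user_id eq alice and agent_id eq 42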
@@ -152,14 +151,8 @@ class AzureAISearch(VectorStoreBase):
         if filters:
             filter_expression = self._build_filter_expression(filters)

-        vector_query = VectorizedQuery(
-            vector=query, k_nearest_neighbors=limit, fields="vector"
-        )
-        search_results = self.search_client.search(
-            vector_queries=[vector_query],
-            filter=filter_expression,
-            top=limit
-        )
+        vector_query = VectorizedQuery(vector=query, k_nearest_neighbors=limit, fields="vector")
+        search_results = self.search_client.search(vector_queries=[vector_query], filter=filter_expression, top=limit)

         results = []
         for result in search_results:
@@ -245,11 +238,7 @@ class AzureAISearch(VectorStoreBase):
         if filters:
             filter_expression = self._build_filter_expression(filters)

-        search_results = self.search_client.search(
-            search_text="*",
-            filter=filter_expression,
-            top=limit
-        )
+        search_results = self.search_client.search(search_text="*", filter=filter_expression, top=limit)
         results = []
         for result in search_results:
             payload = json.loads(result["payload"])
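Note (not part of the diff): the one-line calls above use the azure-search-documents query API. A rough usage sketch, with placeholder endpoint, key, index name, embedding, and filter string (all hypothetical):

    # Hypothetical standalone use of the same Azure AI Search calls shown above.
    from azure.core.credentials import AzureKeyCredential
    from azure.search.documents import SearchClient
    from azure.search.documents.models import VectorizedQuery

    search_client = SearchClient(
        endpoint="https://<service>.search.windows.net",  # placeholder
        index_name="<index-name>",                        # placeholder
        credential=AzureKeyCredential("<api-key>"),       # placeholder
    )

    query = [0.1] * 1536  # placeholder embedding; must match the index's vector dimension
    vector_query = VectorizedQuery(vector=query, k_nearest_neighbors=5, fields="vector")
    search_results = search_client.search(
        vector_queries=[vector_query],
        filter="user_id eq alice",  # placeholder, same style as _build_filter_expression
        top=5,
    )
    for result in search_results:
        print(result["@search.score"], result["payload"])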
@@ -49,30 +49,14 @@ class ElasticsearchDB(VectorStoreBase):
     def create_index(self) -> None:
         """Create Elasticsearch index with proper mappings if it doesn't exist"""
         index_settings = {
-            "settings": {
-                "index": {
-                    "number_of_replicas": 1,
-                    "number_of_shards": 5,
-                    "refresh_interval": "1s"
-                }
-            },
+            "settings": {"index": {"number_of_replicas": 1, "number_of_shards": 5, "refresh_interval": "1s"}},
             "mappings": {
                 "properties": {
                     "text": {"type": "text"},
-                    "vector": {
-                        "type": "dense_vector",
-                        "dims": self.vector_dim,
-                        "index": True,
-                        "similarity": "cosine"
-                    },
-                    "metadata": {
-                        "type": "object",
-                        "properties": {
-                            "user_id": {"type": "keyword"}
-                        }
-                    }
+                    "vector": {"type": "dense_vector", "dims": self.vector_dim, "index": True, "similarity": "cosine"},
+                    "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
                 }
-            }
+            },
         }

         if not self.client.indices.exists(index=self.collection_name):
@@ -114,8 +98,8 @@ class ElasticsearchDB(VectorStoreBase):
                 "_id": id_,
                 "_source": {
                     "vector": vec,
-                    "metadata": payloads[i]  # Store all metadata in the metadata field
-                }
+                    "metadata": payloads[i],  # Store all metadata in the metadata field
+                },
             }
             actions.append(action)

@@ -127,7 +111,7 @@ class ElasticsearchDB(VectorStoreBase):
                 OutputData(
                     id=id_,
                     score=1.0,  # Default score for inserts
-                    payload=payloads[i]
+                    payload=payloads[i],
                 )
             )
         return results
@@ -136,35 +120,20 @@ class ElasticsearchDB(VectorStoreBase):
         """Search for similar vectors using KNN search with pre-filtering."""
         if not filters:
             # If no filters, just do KNN search
-            search_query = {
-                "knn": {
-                    "field": "vector",
-                    "query_vector": query,
-                    "k": limit,
-                    "num_candidates": limit * 2
-                }
-            }
+            search_query = {"knn": {"field": "vector", "query_vector": query, "k": limit, "num_candidates": limit * 2}}
         else:
             # If filters exist, apply them with KNN search
             filter_conditions = []
             for key, value in filters.items():
-                filter_conditions.append({
-                    "term": {
-                        f"metadata.{key}": value
-                    }
-                })
+                filter_conditions.append({"term": {f"metadata.{key}": value}})

             search_query = {
                 "knn": {
                     "field": "vector",
                     "query_vector": query,
                     "k": limit,
                     "num_candidates": limit * 2,
-                    "filter": {
-                        "bool": {
-                            "must": filter_conditions
-                        }
-                    }
+                    "filter": {"bool": {"must": filter_conditions}},
                 }
             }

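Note (not part of the diff): for illustration, with hypothetical placeholder inputs the filtered branch above builds a request body of this shape:

    # Reconstruction of the query body built in the filtered branch above,
    # using hypothetical placeholder values for the query vector, limit, and filters.
    query = [0.1, 0.2, 0.3]  # placeholder query embedding
    limit = 5
    filters = {"user_id": "alice"}

    filter_conditions = [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]

    search_query = {
        "knn": {
            "field": "vector",
            "query_vector": query,
            "k": limit,
            "num_candidates": limit * 2,
            "filter": {"bool": {"must": filter_conditions}},
        }
    }
    print(search_query)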
@@ -173,11 +142,7 @@ class ElasticsearchDB(VectorStoreBase):
         results = []
         for hit in response["hits"]["hits"]:
             results.append(
-                OutputData(
-                    id=hit["_id"],
-                    score=hit["_score"],
-                    payload=hit.get("_source", {}).get("metadata", {})
-                )
+                OutputData(id=hit["_id"], score=hit["_score"], payload=hit.get("_source", {}).get("metadata", {}))
             )

         return results
@@ -203,7 +168,7 @@ class ElasticsearchDB(VectorStoreBase):
             return OutputData(
                 id=response["_id"],
                 score=1.0,  # Default score for direct get
-                payload=response["_source"].get("metadata", {})
+                payload=response["_source"].get("metadata", {}),
             )
         except KeyError as e:
             logger.warning(f"Missing key in Elasticsearch response: {e}")
@@ -234,16 +199,8 @@ class ElasticsearchDB(VectorStoreBase):
         if filters:
             filter_conditions = []
             for key, value in filters.items():
-                filter_conditions.append({
-                    "term": {
-                        f"metadata.{key}": value
-                    }
-                })
-            query["query"] = {
-                "bool": {
-                    "must": filter_conditions
-                }
-            }
+                filter_conditions.append({"term": {f"metadata.{key}": value}})
+            query["query"] = {"bool": {"must": filter_conditions}}

         if limit:
             query["size"] = limit
@@ -256,7 +213,7 @@ class ElasticsearchDB(VectorStoreBase):
                 OutputData(
                     id=hit["_id"],
                     score=1.0,  # Default score for list operation
-                    payload=hit.get("_source", {}).get("metadata", {})
+                    payload=hit.get("_source", {}).get("metadata", {}),
                 )
             )

@@ -50,10 +50,7 @@ class OpenSearchDB(VectorStoreBase):
             "mappings": {
                 "properties": {
                     "text": {"type": "text"},
-                    "vector": {
-                        "type": "knn_vector",
-                        "dimension": self.vector_dim
-                    },
+                    "vector": {"type": "knn_vector", "dimension": self.vector_dim},
                     "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
                 }
             },
@@ -73,7 +70,7 @@ class OpenSearchDB(VectorStoreBase):
                     "vector": {
                         "type": "knn_vector",
                         "dimension": vector_size,
-                        "method": { "engine": "lucene", "name": "hnsw", "space_type": "cosinesimil"},
+                        "method": {"engine": "lucene", "name": "hnsw", "space_type": "cosinesimil"},
                     },
                     "payload": {"type": "object"},
                     "id": {"type": "keyword"},
@@ -125,12 +122,12 @@ class OpenSearchDB(VectorStoreBase):
                         "k": limit,
                     }
                 }
-            }
+            },
         }

         if filters:
             filter_conditions = [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]
-            search_query["query"]["knn"]["vector"]["filter"] = { "bool": {"filter": filter_conditions} }
+            search_query["query"]["knn"]["vector"]["filter"] = {"bool": {"filter": filter_conditions}}

         response = self.client.search(index=self.collection_name, body=search_query)

@@ -180,10 +177,17 @@ class OpenSearchDB(VectorStoreBase):
         query = {"query": {"match_all": {}}}

         if filters:
-            query["query"] = {"bool": {"must": [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]}}
+            query["query"] = {
+                "bool": {"must": [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]}
+            }

         if limit:
             query["size"] = limit

         response = self.client.search(index=self.collection_name, body=query)
-        return [[OutputData(id=hit["_id"], score=1.0, payload=hit["_source"].get("metadata", {})) for hit in response["hits"]["hits"]]]
+        return [
+            [
+                OutputData(id=hit["_id"], score=1.0, payload=hit["_source"].get("metadata", {}))
+                for hit in response["hits"]["hits"]
+            ]
+        ]
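Note (not part of the diff): with hypothetical inputs such as filters = {"user_id": "alice"} and limit = 10, the list() body reformatted above builds this query:

    # Mirror of the query construction in the hunk above, with placeholder inputs.
    filters = {"user_id": "alice"}
    limit = 10

    query = {"query": {"match_all": {}}}
    if filters:
        query["query"] = {
            "bool": {"must": [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]}
        }
    if limit:
        query["size"] = limit

    print(query)
    # {'query': {'bool': {'must': [{'term': {'metadata.user_id': 'alice'}}]}}, 'size': 10}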