@@ -56,7 +56,9 @@ class QdrantVectorSearchTool(BaseTool):
56
56
openai_client : Any = None # Added for lazy initialization
57
57
openai_async_client : Any = None # Added for lazy initialization
58
58
name : str = "QdrantVectorSearchTool"
59
- description : str = "A tool to search the Qdrant database for relevant information on internal documents."
59
+ description : str = (
60
+ "A tool to search the Qdrant database for relevant information on internal documents."
61
+ )
60
62
args_schema : type [BaseModel ] = QdrantToolSchema
61
63
query : str | None = None
62
64
filter_by : str | None = None
@@ -97,7 +99,7 @@ def __init__(self, **kwargs):
97
99
):
98
100
import subprocess
99
101
100
- subprocess .run (["uv" , "add" , "qdrant-client" ], check = True )
102
+ subprocess .run (["/usr/bin/uv" , "add" , "qdrant-client" ], check = True )
101
103
else :
102
104
raise ImportError (
103
105
"The 'qdrant-client' package is required to use the QdrantVectorSearchTool. "
@@ -130,7 +132,11 @@ def _run(
130
132
# Create filter if filter parameters are provided
131
133
search_filter = None
132
134
if filter_by and filter_value :
133
- search_filter = Filter (must = [FieldCondition (key = filter_by , match = MatchValue (value = filter_value ))])
135
+ search_filter = Filter (
136
+ must = [
137
+ FieldCondition (key = filter_by , match = MatchValue (value = filter_value ))
138
+ ]
139
+ )
134
140
135
141
# Search in Qdrant using the built-in query method
136
142
query_vector = (
@@ -214,11 +220,17 @@ async def _arun(
214
220
# Create filter if filter parameters are provided
215
221
search_filter = None
216
222
if filter_by and filter_value :
217
- search_filter = Filter (must = [FieldCondition (key = filter_by , match = MatchValue (value = filter_value ))])
223
+ search_filter = Filter (
224
+ must = [
225
+ FieldCondition (key = filter_by , match = MatchValue (value = filter_value ))
226
+ ]
227
+ )
218
228
219
229
# Search in Qdrant using the built-in query method
220
230
query_vector = (
221
- await self ._vectorize_query_async (query , embedding_model = "text-embedding-3-large" )
231
+ await self ._vectorize_query_async (
232
+ query , embedding_model = "text-embedding-3-large"
233
+ )
222
234
if not self .custom_embedding_fn
223
235
else self .custom_embedding_fn (query )
224
236
)
@@ -243,7 +255,9 @@ async def _arun(
243
255
244
256
return json .dumps (results , indent = 2 )
245
257
246
- async def _vectorize_query_async (self , query : str , embedding_model : str ) -> list [float ]:
258
+ async def _vectorize_query_async (
259
+ self , query : str , embedding_model : str
260
+ ) -> list [float ]:
247
261
"""Default async vectorization function with openai.
248
262
249
263
Args:
0 commit comments