Skip to content

Commit c87ed59

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 22af50f commit c87ed59

31 files changed

+1488
-1477
lines changed

src/vdf_io/import_vdf/astradb_import.py

Lines changed: 12 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -124,7 +124,7 @@ def upsert_data(self, via_cql=False):
124124
data_path = namespace_meta["data_path"]
125125
final_data_path = self.get_final_data_path(data_path)
126126
new_index_name = index_name + (
127-
f'_{namespace_meta["namespace"]}'
127+
f"_{namespace_meta['namespace']}"
128128
if namespace_meta["namespace"]
129129
else ""
130130
)
@@ -162,7 +162,7 @@ def upsert_data(self, via_cql=False):
162162

163163
self.session.execute(
164164
f"CREATE TABLE IF NOT EXISTS {self.args['keyspace']}.{new_index_name}"
165-
f" (id text PRIMARY KEY, \"$vector\" vector<float,{namespace_meta['dimensions']}>)"
165+
f' (id text PRIMARY KEY, "$vector" vector<float,{namespace_meta["dimensions"]}>)'
166166
)
167167
parquet_files = self.get_parquet_files(final_data_path)
168168
vectors = {}
@@ -208,7 +208,7 @@ def flush_to_db(self, vectors, metadata, collection, via_cql, parallel=True):
208208
keys = list(set(vectors.keys()).union(set(metadata.keys())))
209209
for id in keys:
210210
self.session.execute(
211-
f"INSERT INTO {self.args['keyspace']}.{collection.name} (id, \"$vector\", {', '.join(metadata[id].keys())}) "
211+
f'INSERT INTO {self.args["keyspace"]}.{collection.name} (id, "$vector", {", ".join(metadata[id].keys())}) '
212212
f"VALUES ('{id}', {vectors[id]}, {', '.join([str(v) for v in metadata[id].values()])})"
213213
)
214214
return len(vectors)
@@ -248,12 +248,15 @@ def flush_batch_to_db(collection, keys, vectors, metadata):
248248
for i in range(0, total_points, BATCH_SIZE)
249249
]
250250

251-
with concurrent.futures.ThreadPoolExecutor(
252-
max_workers=num_parallel_threads
253-
) as executor, tqdm(
254-
total=total_points,
255-
desc=f"Flushing to DB in batches of {BATCH_SIZE} in {num_parallel_threads} threads",
256-
) as pbar:
251+
with (
252+
concurrent.futures.ThreadPoolExecutor(
253+
max_workers=num_parallel_threads
254+
) as executor,
255+
tqdm(
256+
total=total_points,
257+
desc=f"Flushing to DB in batches of {BATCH_SIZE} in {num_parallel_threads} threads",
258+
) as pbar,
259+
):
257260
future_to_batch = {
258261
executor.submit(flush_batch_to_db, collection, *batch): batch
259262
for batch in batches

src/vdf_io/import_vdf/chroma_import.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -123,7 +123,7 @@ def upsert_data(self):
123123
parquet_files = self.get_parquet_files(final_data_path)
124124

125125
new_index_name = index_name + (
126-
f'_{namespace_meta["namespace"]}'
126+
f"_{namespace_meta['namespace']}"
127127
if namespace_meta["namespace"]
128128
else ""
129129
)

src/vdf_io/import_vdf/kdbai_import.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -99,7 +99,7 @@ def upsert_data(self):
9999
data_path = namespace_meta["data_path"]
100100
final_data_path = self.get_final_data_path(data_path)
101101
index_name = index_name + (
102-
f'_{namespace_meta["namespace"]}'
102+
f"_{namespace_meta['namespace']}"
103103
if namespace_meta["namespace"]
104104
else ""
105105
)

src/vdf_io/import_vdf/lancedb_import.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -89,7 +89,7 @@ def upsert_data(self):
8989
parquet_files = self.get_parquet_files(final_data_path)
9090

9191
new_index_name = index_name + (
92-
f'_{namespace_meta["namespace"]}'
92+
f"_{namespace_meta['namespace']}"
9393
if namespace_meta["namespace"]
9494
else ""
9595
)

src/vdf_io/import_vdf/milvus_import.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -82,7 +82,7 @@ def upsert_data(self):
8282
self.set_dims(namespace_meta, collection_name)
8383
data_path = namespace_meta["data_path"]
8484
index_name = collection_name + (
85-
f'_{namespace_meta["namespace"]}'
85+
f"_{namespace_meta['namespace']}"
8686
if namespace_meta["namespace"]
8787
else ""
8888
)

src/vdf_io/import_vdf/qdrant_import.py

Lines changed: 9 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -313,12 +313,15 @@ def get_nested_config(config, keys, default=None):
313313
total_points = len(points)
314314

315315
num_parallel_threads = self.args.get("parallel", 5) or 5
316-
with concurrent.futures.ThreadPoolExecutor(
317-
max_workers=num_parallel_threads
318-
) as executor, tqdm(
319-
total=total_points,
320-
desc=f"Uploading points in batches of {BATCH_SIZE} in {num_parallel_threads} threads",
321-
) as pbar:
316+
with (
317+
concurrent.futures.ThreadPoolExecutor(
318+
max_workers=num_parallel_threads
319+
) as executor,
320+
tqdm(
321+
total=total_points,
322+
desc=f"Uploading points in batches of {BATCH_SIZE} in {num_parallel_threads} threads",
323+
) as pbar,
324+
):
322325
# Create a future to batch mapping to update progress bar correctly after each batch completion
323326
future_to_batch = {
324327
executor.submit(

src/vdf_io/import_vdf/turbopuffer_import.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -73,7 +73,7 @@ def upsert_data(self):
7373
parquet_files = self.get_parquet_files(final_data_path)
7474

7575
new_index_name = index_name + (
76-
f'_{namespace_meta["namespace"]}'
76+
f"_{namespace_meta['namespace']}"
7777
if namespace_meta["namespace"]
7878
else ""
7979
)

src/vdf_io/marqo_vespa_util.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -85,7 +85,7 @@ def get_all_documents(
8585
[f"{key}={value}" for key, value in query_params.items() if value]
8686
)
8787
url = f"{self.document_url}/document/v1/{schema}/{schema}/docid"
88-
url = f'{url.strip("?")}?{query_string}'
88+
url = f"{url.strip('?')}?{query_string}"
8989
print(f"{url=}")
9090
resp = self.http_client.get(url)
9191
except httpx.HTTPError as e:

0 commit comments

Comments (0)