
Commit be873e6

fix(fs): add dirs caching, make it a bit more robust (#322)
1 parent 6f38536 commit be873e6

1 file changed: +25 -22 lines changed


pydrive2/fs/spec.py

Lines changed: 25 additions & 22 deletions
@@ -261,8 +261,8 @@ def _ids_cache(self):
     def _cache_path_id(self, path, *item_ids, cache=None):
         cache = cache or self._ids_cache
         for item_id in item_ids:
-            cache["dirs"][path].append(item_id)
             cache["ids"][item_id] = path
+            cache["dirs"][path].append(item_id)

     @cached_property
     def _list_params(self):
@@ -316,7 +316,9 @@ def _gdrive_list_ids(self, query_ids):
         query = f"({query}) and trashed=false"
         return self._gdrive_list(query)

-    def _get_remote_item_ids(self, parent_ids, title):
+    def _get_remote_item_ids(
+        self, parent_ids, parent_path, title, use_cache=True
+    ):
         if not parent_ids:
             return None
         query = "trashed=false and ({})".format(
@@ -326,13 +328,19 @@ def _get_remote_item_ids(self, parent_ids, title):
         )
         query += " and title='{}'".format(title.replace("'", "\\'"))

-        # GDrive list API is case insensitive, we need to compare
-        # all results and pick the ones with the right title
-        return [
-            item["id"]
-            for item in self._gdrive_list(query)
-            if item["title"] == title
-        ]
+        res = []
+        for item in self._gdrive_list(query):
+            # GDrive list API is case insensitive, we need to compare
+            # all results and pick the ones with the right title
+            if item["title"] == title:
+                res.append(item["id"])
+
+            if item["mimeType"] == FOLDER_MIME_TYPE and use_cache:
+                self._cache_path_id(
+                    posixpath.join(parent_path, item["title"]), item["id"]
+                )
+
+        return res

     def _get_cached_item_ids(self, path, use_cache):
         if not path:
@@ -348,7 +356,9 @@ def _path_to_item_ids(self, path, create=False, use_cache=True):

         parent_path, title = posixpath.split(path)
         parent_ids = self._path_to_item_ids(parent_path, create, use_cache)
-        item_ids = self._get_remote_item_ids(parent_ids, title)
+        item_ids = self._get_remote_item_ids(
+            parent_ids, parent_path, title, use_cache
+        )
         if item_ids:
             return item_ids

@@ -418,11 +428,7 @@ def info(self, path):
     def ls(self, path, detail=False):
         bucket, base = self.split_path(path)

-        cached = base in self._ids_cache["dirs"]
-        if cached:
-            dir_ids = self._ids_cache["dirs"][base]
-        else:
-            dir_ids = self._path_to_item_ids(base)
+        dir_ids = self._path_to_item_ids(base)

         if not dir_ids:
             raise FileNotFoundError(
@@ -452,9 +458,6 @@
                 }
             )

-        if not cached:
-            self._cache_path_id(root_path, *dir_ids)
-
         if detail:
             return contents
         else:
@@ -464,10 +467,10 @@ def find(self, path, detail=False, **kwargs):
         bucket, base = self.split_path(path)

         seen_paths = set()
-        cached = base in self._ids_cache["dirs"]
-        if not cached:
-            dir_ids = self._path_to_item_ids(base)
-            self._cache_path_id(base, *dir_ids)
+
+        # Make sure the base path is cached and dir_ids below has some
+        # dirs relevant to this call
+        self._path_to_item_ids(base)

         dir_ids = [self._ids_cache["ids"].copy()]
         contents = []
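For context, below is a minimal standalone sketch of the caching pattern this commit applies: a two-way cache (path -> item ids, item id -> path) that is also populated opportunistically for every folder encountered while listing a parent, so later ls()/find() calls can resolve those paths without extra API round trips. The class ItemCacheSketch, its method names, and the example data are hypothetical illustrations, not PyDrive2's actual API; only _cache_path_id and _get_remote_item_ids in the diff above are the real methods, and the folder MIME type value is the standard Google Drive one.

    from collections import defaultdict
    import posixpath

    # Standard Google Drive folder MIME type (assumed to match FOLDER_MIME_TYPE in spec.py).
    FOLDER_MIME_TYPE = "application/vnd.google-apps.folder"


    class ItemCacheSketch:
        """Hypothetical stand-in for the fs-level cache touched by this commit."""

        def __init__(self):
            # "dirs" maps a path to every item id known for it (a path can map to
            # several ids); "ids" maps a single item id back to its path.
            self.cache = {"dirs": defaultdict(list), "ids": {}}

        def cache_path_id(self, path, *item_ids):
            # Same ordering as the fixed _cache_path_id: record the id -> path
            # mapping, then append the id to the path's dir list.
            for item_id in item_ids:
                self.cache["ids"][item_id] = path
                self.cache["dirs"][path].append(item_id)

        def remember_listed_folders(self, parent_path, listed_items, use_cache=True):
            # Mirrors the new behaviour of _get_remote_item_ids: every folder seen
            # while answering a query gets cached under its full path.
            for item in listed_items:
                if use_cache and item["mimeType"] == FOLDER_MIME_TYPE:
                    self.cache_path_id(
                        posixpath.join(parent_path, item["title"]), item["id"]
                    )


    # Example: of two listed children, only the folder ends up in the cache.
    sketch = ItemCacheSketch()
    sketch.remember_listed_folders(
        "data",
        [
            {"title": "raw", "id": "folder-id-1", "mimeType": FOLDER_MIME_TYPE},
            {"title": "notes.txt", "id": "file-id-2", "mimeType": "text/plain"},
        ],
    )
    print(sketch.cache["dirs"]["data/raw"])   # ['folder-id-1']
    print(sketch.cache["ids"]["folder-id-1"])  # 'data/raw'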
