From aa18aa6aba9d8fe06d1549eea47f008354054e5d Mon Sep 17 00:00:00 2001 From: Martin Durant Date: Sun, 20 Oct 2024 14:17:49 -0400 Subject: [PATCH] Be sure to invalidate cache for both branches of pipe_file --- s3fs/core.py | 2 +- s3fs/tests/test_s3fs.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/s3fs/core.py b/s3fs/core.py index 6f8d071a..24dbffc0 100644 --- a/s3fs/core.py +++ b/s3fs/core.py @@ -1141,7 +1141,7 @@ async def _pipe_file( size = len(data) # 5 GB is the limit for an S3 PUT if size < min(5 * 2**30, 2 * chunksize): - return await self._call_s3( + await self._call_s3( "put_object", Bucket=bucket, Key=key, Body=data, **kwargs ) else: diff --git a/s3fs/tests/test_s3fs.py b/s3fs/tests/test_s3fs.py index 970d2b29..3379da52 100644 --- a/s3fs/tests/test_s3fs.py +++ b/s3fs/tests/test_s3fs.py @@ -2802,3 +2802,13 @@ def test_upload_part_with_prime_pads(s3_fixed_upload_size): with s3_fixed_upload_size.open(a, "r") as f: assert len(f.read()) == 2 * block + pad1 + pad2 + + +@pytest.mark.asyncio +async def test_invalidate_cache(s3: s3fs.S3FileSystem) -> None: + await s3._call_s3("put_object", Bucket=test_bucket_name, Key="a/b.txt", Body=b"abc") + before = await s3._ls(f"{test_bucket_name}/a/") + assert sorted(before) == ["test/a/b.txt"] + await s3._pipe_file(f"{test_bucket_name}/a/c.txt", data=b"abc") + after = await s3._ls(f"{test_bucket_name}/a/") + assert sorted(after) == ["test/a/b.txt", "test/a/c.txt"]