Skip to content

Commit

Permalink
Added a test against file truncation and use only a single code path
Browse files Browse the repository at this point in the history
  • Loading branch information
nils-braun committed Nov 6, 2024
1 parent dff2a85 commit bedae4a
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 10 deletions.
17 changes: 7 additions & 10 deletions s3fs/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -1275,17 +1275,14 @@ async def _upload_chunk(chunk, part_number):
chunks.append(chunk)
if not chunks:
break
if len(chunks) > 1:
out.extend(
await asyncio.gather(
*[
_upload_chunk(chunk, len(out) + i)
for i, chunk in enumerate(chunks, 1)
]
)
out.extend(
await asyncio.gather(
*[
_upload_chunk(chunk, len(out) + i)
for i, chunk in enumerate(chunks, 1)
]
)
else:
out.append(await _upload_chunk(chunks[0], len(out) + 1))
)
return out

async def _get_file(
Expand Down
18 changes: 18 additions & 0 deletions s3fs/tests/test_s3fs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1017,6 +1017,24 @@ def test_put_file_with_callback(s3, tmpdir, size):
assert cb.size == os.stat(test_file).st_size
assert cb.value == cb.size

assert s3.size(test_bucket_name + "/temp") == 11 * size


@pytest.mark.parametrize("factor", [1, 5, 6])
def test_put_file_does_not_truncate(s3, tmpdir, factor):
    """Regression test: put_file must upload the whole file, never a truncated prefix.

    Writes ``factor`` multipart-sized chunks locally, uploads with concurrency,
    and checks the remote object size matches the local file exactly.
    """
    source_path = str(tmpdir.join("test.json"))

    # 5 MiB — the minimum S3 multipart part size, so `factor` controls
    # how many upload parts the transfer is split into.
    part_size = 5 * 2**20
    payload = (b"x" * part_size) * factor

    with open(source_path, "wb") as handle:
        handle.write(payload)

    s3.put_file(
        source_path, test_bucket_name + "/temp", max_concurrency=5, chunksize=part_size
    )
    # Remote object must be byte-for-byte as large as what we wrote locally.
    assert s3.size(test_bucket_name + "/temp") == factor * part_size


@pytest.mark.parametrize("size", [2**10, 2**20, 10 * 2**20])
def test_pipe_cat_big(s3, size):
Expand Down

0 comments on commit bedae4a

Please sign in to comment.