Skip to content

Commit e328e70

Browse files
committed
File session id added
1 parent 2628e49 commit e328e70

File tree

1 file changed

+5
-2
lines changed

1 file changed

+5
-2
lines changed

azure/datalake/store/multithread.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,14 @@
2121
import pickle
2222
import time
2323
import errno
24+
import uuid
2425

2526
from io import open
2627
from .core import AzureDLPath, _fetch_range
2728
from .exceptions import FileExistsError, FileNotFoundError
2829
from .transfer import ADLTransferClient
2930
from .utils import datadir, read_block, tokenize
31+
from .retry import ExponentialRetryPolicy
3032

3133
logger = logging.getLogger(__name__)
3234

@@ -284,9 +286,9 @@ def get_chunk(adlfs, src, dst, offset, size, buffersize, blocksize,
284286
"""
285287
err = None
286288
total_bytes_downloaded = 0
287-
from azure.datalake.store.retry import ExponentialRetryPolicy
288289
retry_policy = ExponentialRetryPolicy(max_retries=retries, exponential_retry_interval=delay,
289290
exponential_factor=backoff)
291+
filesessionid = str(uuid.uuid4())
290292
try:
291293
nbytes = 0
292294
start = offset
@@ -295,7 +297,8 @@ def get_chunk(adlfs, src, dst, offset, size, buffersize, blocksize,
295297
fout.seek(start)
296298
while start < offset+size:
297299
with closing(_fetch_range(adlfs.azure, src, start=start,
298-
end=min(start+blocksize, offset+size), stream=True, retry_policy=retry_policy)) as response:
300+
end=min(start+blocksize, offset+size), stream=True,
301+
retry_policy=retry_policy, filesessionid=filesessionid)) as response:
299302
chunk = response.content
300303
if shutdown_event and shutdown_event.is_set():
301304
return total_bytes_downloaded, None

0 commit comments

Comments (0)