Skip to content

Commit

Permalink
[Storage] Add read to datalake StorageStreamDownloader (#25707)
Browse files Browse the repository at this point in the history
  • Loading branch information
jalauzon-msft authored Aug 15, 2022
1 parent 92d9c39 commit ba5059c
Show file tree
Hide file tree
Showing 7 changed files with 1,428 additions and 12 deletions.
1 change: 1 addition & 0 deletions sdk/storage/azure-storage-file-datalake/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ This version and all future versions will require Python 3.7+. Python 3.6 is no
- Added support for `flush` to `append_data` API, allowing for append and flush in one operation.
- Encryption Scope is now supported for both `create_file_system` APIs (`FileSystemClient`, `DataLakeServiceClient`).
- Encryption Scope is now supported as a SAS permission.
- Added standard `read` method to `StorageStreamDownloader`.

## 12.8.0 (2022-07-07)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import Iterator
from typing import Iterator, Optional

from ._deserialize import from_blob_properties

Expand All @@ -30,24 +30,36 @@ def __init__(self, downloader):
def __len__(self):
    """Return the total size of the download stream, in bytes.

    Delegates to ``self.size``, which is populated from the underlying
    blob downloader when this object is constructed.
    """
    return self.size

def chunks(self):
# type: () -> Iterator[bytes]
def chunks(self) -> Iterator[bytes]:
    """Return an iterator over the chunks of the download stream.

    :rtype: Iterator[bytes]
    """
    # The underlying blob downloader owns the chunking logic; hand its
    # iterator straight back to the caller.
    chunk_iterator = self._downloader.chunks()
    return chunk_iterator

def readall(self):
# type: () -> bytes
def read(self, size: Optional[int] = -1) -> bytes:
    """Read up to ``size`` bytes from the stream and return them.

    If ``size`` is unspecified or is -1, all bytes will be read.

    :param size:
        The number of bytes to download from the stream. Leave unspecified
        or set to -1 to download all bytes.
    :returns:
        The requested data as bytes. If the return value is empty,
        there is no more data to read.
    :rtype: bytes
    """
    # Delegate to the underlying (blob) downloader, which performs the
    # actual range requests and buffering.
    return self._downloader.read(size)

def readall(self) -> bytes:
    """Download and return the entire contents of this file.

    This call blocks until every byte has been downloaded.

    :rtype: bytes
    """
    contents = self._downloader.readall()
    return contents

def readinto(self, stream):
def readinto(self, stream) -> int:
"""Download the contents of this file to a stream.
:param stream:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import AsyncIterator
from typing import AsyncIterator, Optional

from .._deserialize import from_blob_properties

Expand All @@ -30,24 +30,36 @@ def __init__(self, downloader):
def __len__(self):
    """Return the total size of the download stream, in bytes.

    Delegates to ``self.size``, which is populated from the underlying
    blob downloader when this object is constructed.
    """
    return self.size

def chunks(self):
# type: () -> AsyncIterator[bytes]
def chunks(self) -> AsyncIterator[bytes]:
    """Return an async iterator over the chunks of the download stream.

    :rtype: AsyncIterator[bytes]
    """
    # The underlying blob downloader owns the chunking logic; hand its
    # async iterator straight back to the caller.
    chunk_iterator = self._downloader.chunks()
    return chunk_iterator

async def readall(self):
# type: () -> bytes
async def read(self, size: Optional[int] = -1) -> bytes:
    """Read up to ``size`` bytes from the stream and return them.

    If ``size`` is unspecified or is -1, all bytes will be read.

    :param size:
        The number of bytes to download from the stream. Leave unspecified
        or set to -1 to download all bytes.
    :returns:
        The requested data as bytes. If the return value is empty,
        there is no more data to read.
    :rtype: bytes
    """
    # Delegate to the underlying (blob) downloader, which performs the
    # actual range requests and buffering.
    return await self._downloader.read(size)

async def readall(self) -> bytes:
    """Download and return the entire contents of this file.

    The coroutine completes only once all data has been downloaded.

    :rtype: bytes
    """
    contents = await self._downloader.readall()
    return contents

async def readinto(self, stream):
async def readinto(self, stream) -> int:
"""Download the contents of this file to a stream.
:param stream:
Expand Down
Loading

0 comments on commit ba5059c

Please sign in to comment.