Commit

Support blob streaming for file-like objects
When uploading file-like objects without a specified size, upload in chunks instead of exhausting the stream and keeping the data in memory.
linar-jether authored Jun 27, 2016
1 parent f2fae7b commit 942c5d0
Showing 1 changed file with 8 additions and 3 deletions.
gcloud/storage/blob.py (8 additions, 3 deletions)
@@ -492,9 +492,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         if total_bytes is None:
             if hasattr(file_obj, 'fileno'):
                 total_bytes = os.fstat(file_obj.fileno()).st_size
-            else:
-                raise ValueError('total bytes could not be determined. Please '
-                                 'pass an explicit size.')
+
         headers = {
             'Accept': 'application/json',
             'Accept-Encoding': 'gzip, deflate',
@@ -510,6 +508,12 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         if self.chunk_size is not None:
             upload.chunksize = self.chunk_size
 
+        if total_bytes is None:
+            upload.strategy = RESUMABLE_UPLOAD
+        elif self.chunk_size is None and total_bytes is None:
+            raise ValueError('total bytes could not be determined. Please '
+                             'pass an explicit size, or supply a chunk size for a streaming transfer.')
+
         url_builder = _UrlBuilder(bucket_name=self.bucket.name,
                                   object_name=self.name)
         upload_config = _UploadConfig()
@@ -522,6 +526,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None,
         # Use apitools 'Upload' facility.
         request = Request(upload_url, 'POST', headers)
 
+
         upload.configure_request(upload_config, request, url_builder)
         query_params = url_builder.query_params
         base_url = connection.API_BASE_URL + '/upload'
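For illustration, here is a minimal usage sketch of the streaming path this change enables. It is not part of the commit: the bucket name, object name, chunk generator, and the ChunkStream helper are invented for the example, and the client calls assume the gcloud storage API of this era (storage.Client, storage.Blob, Blob.upload_from_file).

```python
# Hypothetical sketch of uploading a sizeless stream after this commit.
from gcloud import storage


class ChunkStream(object):
    """Minimal file-like object with no fileno(), so its size is unknown."""

    def __init__(self, chunks):
        self._chunks = iter(chunks)
        self._buffer = b''

    def read(self, size=-1):
        # Pull byte chunks from the iterator until the request can be
        # satisfied (or the iterator is exhausted), then return at most
        # `size` bytes; size < 0 drains everything that is left.
        while size < 0 or len(self._buffer) < size:
            try:
                self._buffer += next(self._chunks)
            except StopIteration:
                break
        if size < 0:
            data, self._buffer = self._buffer, b''
        else:
            data, self._buffer = self._buffer[:size], self._buffer[size:]
        return data


client = storage.Client()
bucket = client.get_bucket('my-bucket')  # assumed bucket name

# A chunk_size (a multiple of 256 KB, as the resumable protocol expects)
# is passed through to apitools' upload.chunksize.
blob = storage.Blob('streamed-object', bucket=bucket, chunk_size=1024 * 1024)

# The stream exposes no usable size, so before this commit upload_from_file()
# raised ValueError; with the change the upload strategy switches to
# RESUMABLE_UPLOAD and the data is sent chunk by chunk instead of being
# buffered entirely in memory.
blob.upload_from_file(ChunkStream(b'x' * 4096 for _ in range(1000)))
```

If the stream does expose a real fileno(), its size is still taken from os.fstat() as before and the existing code path applies.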
