Removing redundant Bucket.upload* methods. #1002

Merged 1 commit on Jul 21, 2015.
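For downstream users, the change amounts to constructing a Blob explicitly and calling its upload methods instead of the removed Bucket shortcuts. The following is a minimal sketch only; the bucket and file names are placeholders, and it uses the Blob.upload_from_filename and Blob.upload_from_file APIs that this diff migrates to::

    # Before (methods removed by this PR):
    #     blob = bucket.upload_file('/local/path.txt', 'remote-text-file.txt')
    #     blob = bucket.upload_file_object(open('/local/path.txt'), 'remote-text-file.txt')
    #
    # After: build the Blob yourself, then upload through it.
    from gcloud import storage

    client = storage.Client()
    bucket = client.get_bucket('my-bucket')       # placeholder bucket name

    blob = storage.Blob('remote-text-file.txt', bucket=bucket)
    blob.upload_from_filename('/local/path.txt')  # replaces Bucket.upload_file

    # Replaces Bucket.upload_file_object: pass an open file handle instead.
    with open('/local/path.txt', 'rb') as file_obj:
        blob.upload_from_file(file_obj)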
README.rst: 3 changes (2 additions & 1 deletion)

@@ -107,7 +107,8 @@ how to create a bucket.
blob = bucket.get_blob('/remote/path/to/file.txt')
print blob.download_as_string()
blob.upload_from_string('New contents!')
bucket.upload_file('/remote/path/storage.txt', '/local/path.txt')
blob2 = storage.Blob('/remote/path/storage.txt', bucket)
blob2.upload_from_filename(filename='/local/path.txt')

Contributing
------------
docs/_components/storage-getting-started.rst: 3 changes (2 additions & 1 deletion)

@@ -116,7 +116,8 @@ Then you can look at the file in a terminal::
And what about when you're not dealing with text?
That's pretty simple too::

>>> blob = bucket.upload_file('kitten.jpg')
>>> blob = storage.Blob('kitten.jpg', bucket)
>>> blob.upload_from_filename('kitten.jpg')

And to test whether it worked?

docs/index.rst: 2 changes (1 addition & 1 deletion)

@@ -55,4 +55,4 @@ Cloud Storage
client = storage.Client()
bucket = client.get_bucket('<your-bucket-name>')
blob = storage.Blob('my-test-file.txt', bucket=bucket)
blob = blob.upload_contents_from_string('this is test content!')
blob.upload_from_string('this is test content!')
gcloud/storage/__init__.py: 3 changes (2 additions & 1 deletion)

@@ -23,7 +23,8 @@
>>> blob = bucket.get_blob('/remote/path/to/file.txt')
>>> print blob.download_as_string()
>>> blob.upload_from_string('New contents!')
>>> bucket.upload_file('/remote/path/storage.txt', '/local/path.txt')
>>> blob2 = storage.Blob('/remote/path/storage.txt', bucket)
>>> blob2.upload_from_filename(filename='/local/path.txt')

The main concepts with this API are:

gcloud/storage/bucket.py: 93 changes (0 additions & 93 deletions)

@@ -16,7 +16,6 @@

import datetime
import copy
import os

import pytz
import six
@@ -442,98 +441,6 @@ def copy_blob(self, blob, destination_bucket, new_name=None,
new_blob._set_properties(copy_result)
return new_blob

def upload_file(self, filename, blob_name=None, client=None):
"""Shortcut method to upload a file into this bucket.

Use this method to quickly put a local file in Cloud Storage.

For example::

>>> from gcloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket('my-bucket')
>>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt')
>>> print bucket.list_blobs()
[<Blob: my-bucket, remote-text-file.txt>]

If you don't provide a blob name, we will try to upload the file
using the local filename (**not** the complete path)::

>>> from gcloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket('my-bucket')
>>> bucket.upload_file('~/my-file.txt')
>>> print bucket.list_blobs()
[<Blob: my-bucket, my-file.txt>]

:type filename: string
:param filename: Local path to the file you want to upload.

:type blob_name: string
:param blob_name: The name of the blob to upload the file to. If this
is blank, we will try to upload the file to the root
of the bucket with the same name as on your local
file system.

:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.

:rtype: :class:`Blob`
:returns: The updated Blob object.
"""
if blob_name is None:
blob_name = os.path.basename(filename)
blob = Blob(bucket=self, name=blob_name)
blob.upload_from_filename(filename, client=client)
return blob

def upload_file_object(self, file_obj, blob_name=None, client=None):
"""Shortcut method to upload a file object into this bucket.

Use this method to quickly put a local file in Cloud Storage.

For example::

>>> from gcloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket('my-bucket')
>>> bucket.upload_file_object(open('~/my-file.txt'), 'remote-text-file.txt')
>>> print bucket.list_blobs()
[<Blob: my-bucket, remote-text-file.txt>]

If you don't provide a blob name, we will try to upload the file
using the local filename (**not** the complete path)::

>>> from gcloud import storage
>>> client = storage.Client()
>>> bucket = client.get_bucket('my-bucket')
>>> bucket.upload_file_object(open('~/my-file.txt'))
>>> print bucket.list_blobs()
[<Blob: my-bucket, my-file.txt>]

:type file_obj: file
:param file_obj: A file handle open for reading.

:type blob_name: string
:param blob_name: The name of the blob to upload the file to. If this
is blank, we will try to upload the file to the root
of the bucket with the same name as on your local
file system.

:type client: :class:`gcloud.storage.client.Client` or ``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.

:rtype: :class:`Blob`
:returns: The updated Blob object.
"""
if blob_name is None:
blob_name = os.path.basename(file_obj.name)
blob = Blob(bucket=self, name=blob_name)
blob.upload_from_file(file_obj, client=client)
return blob

@property
def cors(self):
"""Retrieve CORS policies configured for this bucket.
gcloud/storage/test_bucket.py: 103 changes (0 additions & 103 deletions)

@@ -12,8 +12,6 @@
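If existing code relied on the removed shortcut's default blob name (the file's basename, as the deleted upload_file implementation above shows), a small local helper can reproduce that behavior. This is a hedged sketch, not part of the library: the helper name and structure are ours, and it only reuses the Blob constructor and the upload_from_filename(..., client=...) call visible in the removed code::

    import os

    from gcloud import storage


    def upload_file(bucket, filename, blob_name=None, client=None):
        # Mirror the removed Bucket.upload_file default: fall back to the
        # local file's basename when no blob name is given.
        if blob_name is None:
            blob_name = os.path.basename(filename)
        blob = storage.Blob(blob_name, bucket=bucket)
        blob.upload_from_filename(filename, client=client)
        return blob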
# See the License for the specific language governing permissions and
# limitations under the License.

import io

import unittest2


@@ -506,99 +504,6 @@ class _Blob(object):
self.assertEqual(kw['method'], 'POST')
self.assertEqual(kw['path'], COPY_PATH)

def test_upload_file_default_blob_name(self):
from gcloud._testing import _Monkey
from gcloud.storage import bucket as MUT
BASENAME = 'file.ext'
FILENAME = '/path/to/%s' % BASENAME
_uploaded = []

class _Blob(object):

def __init__(self, bucket, name):
self._bucket = bucket
self._name = name

def upload_from_filename(self, filename, client=None):
_uploaded.append((self._bucket, self._name, filename,
client))

bucket = self._makeOne()
with _Monkey(MUT, Blob=_Blob):
bucket.upload_file(FILENAME)
self.assertEqual(_uploaded, [(bucket, BASENAME, FILENAME, None)])

def test_upload_file_blob_w_blob_name(self):
from gcloud._testing import _Monkey
from gcloud.storage import bucket as MUT
FILENAME = '/path/to/file'
BLOB_NAME = 'blob-name'
_uploaded = []

class _Blob(object):

def __init__(self, bucket, name):
self._bucket = bucket
self._name = name

def upload_from_filename(self, filename, client=None):
_uploaded.append((self._bucket, self._name, filename,
client))

bucket = self._makeOne()
with _Monkey(MUT, Blob=_Blob):
bucket.upload_file(FILENAME, BLOB_NAME)
self.assertEqual(_uploaded, [(bucket, BLOB_NAME, FILENAME, None)])

def test_upload_file_object_no_blob(self):
from gcloud._testing import _Monkey
from gcloud.storage import bucket as MUT
FILENAME = 'file.txt'
FILEOBJECT = MockFile(FILENAME)
_uploaded = []

class _Blob(object):

def __init__(self, bucket, name):
self._bucket = bucket
self._name = name

def upload_from_file(self, fh, client=None):
_uploaded.append((self._bucket, self._name, fh, client))

bucket = self._makeOne()
with _Monkey(MUT, Blob=_Blob):
found = bucket.upload_file_object(FILEOBJECT)
self.assertEqual(_uploaded, [(bucket, FILENAME, FILEOBJECT, None)])
self.assertTrue(isinstance(found, _Blob))
self.assertEqual(found._name, FILENAME)
self.assertTrue(found._bucket is bucket)

def test_upload_file_object_blob(self):
from gcloud._testing import _Monkey
from gcloud.storage import bucket as MUT
FILENAME = 'file.txt'
FILEOBJECT = MockFile(FILENAME)
BLOB_NAME = 'blob-name'
_uploaded = []

class _Blob(object):

def __init__(self, bucket, name):
self._bucket = bucket
self._name = name

def upload_from_file(self, fh, client=None):
_uploaded.append((self._bucket, self._name, fh, client))

bucket = self._makeOne()
with _Monkey(MUT, Blob=_Blob):
found = bucket.upload_file_object(FILEOBJECT, BLOB_NAME)
self.assertEqual(_uploaded, [(bucket, BLOB_NAME, FILEOBJECT, None)])
self.assertTrue(isinstance(found, _Blob))
self.assertEqual(found._name, BLOB_NAME)
self.assertTrue(found._bucket is bucket)

def test_etag(self):
ETAG = 'ETAG'
properties = {'etag': ETAG}
@@ -1018,14 +923,6 @@ def __init__(self, client=None):
self.client = client


class MockFile(io.StringIO):
name = None

def __init__(self, name, buffer_=None):
super(MockFile, self).__init__(buffer_)
self.name = name


class _Client(object):

def __init__(self, connection, project=None):
system_tests/storage.py: 18 changes (13 additions & 5 deletions)

@@ -13,6 +13,7 @@
# limitations under the License.

import httplib2
import os
import six
import tempfile
import time
@@ -141,7 +142,11 @@ def test_small_file_write_from_filename(self):
self.assertEqual(md5_hash, file_data['hash'])

def test_write_metadata(self):
blob = self.bucket.upload_file(self.FILES['logo']['path'])
filename = self.FILES['logo']['path']
blob_name = os.path.basename(filename)

blob = storage.Blob(blob_name, bucket=self.bucket)
blob.upload_from_filename(filename)
self.case_blobs_to_delete.append(blob)

# NOTE: This should not be necessary. We should be able to pass
@@ -167,8 +172,9 @@ def test_direct_write_and_read_into_file(self):
self.assertEqual(file_contents, stored_contents)

def test_copy_existing_file(self):
blob = self.bucket.upload_file(self.FILES['logo']['path'],
blob_name='CloudLogo')
filename = self.FILES['logo']['path']
blob = storage.Blob('CloudLogo', bucket=self.bucket)
blob.upload_from_filename(filename)
self.case_blobs_to_delete.append(blob)

new_blob = self.bucket.copy_blob(blob, self.bucket, 'CloudLogoCopy')
@@ -191,7 +197,8 @@ def setUpClass(cls):
blob.delete()

logo_path = cls.FILES['logo']['path']
blob = cls.bucket.upload_file(logo_path, blob_name=cls.FILENAMES[0])
blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
blob.upload_from_filename(logo_path)
cls.suite_blobs_to_delete = [blob]

# Copy main blob onto remaining in FILENAMES.
@@ -242,7 +249,8 @@ def setUpClass(cls):
blob.delete()

simple_path = cls.FILES['simple']['path']
blob = cls.bucket.upload_file(simple_path, blob_name=cls.FILENAMES[0])
blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket)
blob.upload_from_filename(simple_path)
cls.suite_blobs_to_delete = [blob]
for filename in cls.FILENAMES[1:]:
new_blob = cls.bucket.copy_blob(blob, cls.bucket, filename)