diff --git a/docs/_components/storage-getting-started.rst b/docs/_components/storage-getting-started.rst
index d10671f44e30..65b7e99627dd 100644
--- a/docs/_components/storage-getting-started.rst
+++ b/docs/_components/storage-getting-started.rst
@@ -1,15 +1,12 @@
 Getting started with Cloud Storage
 ==================================
 
-This tutorial focuses on using ``gcloud`` to access
-Google Cloud Storage.
-We'll go through the basic concepts,
-how to operate on buckets and blobs,
-and how to handle access control,
-among other things.
+This tutorial focuses on using ``gcloud`` to access Google Cloud Storage.
+We'll go through the basic concepts, how to operate on buckets and blobs,
+and how to handle access control, among other things.
 
-We're going to assume that you've already downloaded
-and installed the library.
+We're going to assume that you've already downloaded and installed the
+library.
 
 Creating a project
 ------------------
@@ -19,19 +16,15 @@ Creating a project
 Enabling the API
 ----------------
 
-Now that you created a project,
-you need to **turn on** the Google Cloud Storage API.
-This is sort of like telling Google
-which services you intend to use for this project.
+Now that you created a project, you need to **turn on** the Google Cloud
+Storage API. This is sort of like telling Google which services you intend
+to use for this project.
 
-* **Click on APIs & Auth**
-  on the left hand side,
-  and scroll down to where it says
-  "Google Cloud Storage JSON API".
+* **Click on APIs & Auth** on the left hand side, and scroll down to where
+  it says "Google Cloud Storage JSON API".
 
-* **Click the "Off" button**
-  on the right side
-  to turn it into an "On" button.
+* **Click the "Off" button** on the right side to turn it into an "On"
+  button.
 
 Enabling a service account
 --------------------------
@@ -41,31 +34,25 @@ Enabling a service account
 Creating a connection
 ---------------------
 
-The first step in accessing Cloud Storage
-is to create a connection to the service::
+The first step in accessing Cloud Storage is to create a connection to the
+service::
 
   >>> from gcloud import storage
   >>> connection = storage.get_connection(project_name)
 
-We're going to use this
-:class:`connection <gcloud.storage.connection.Connection>` object
-for the rest of this guide.
+We're going to use this :class:`connection
+<gcloud.storage.connection.Connection>` object for the rest of this guide.
 
 Creating a bucket
 -----------------
 
-Once you've established a connection to Cloud Storage,
-the first thing you typically want to do is
-create a new bucket.
-A bucket is a container used to store
-objects in Cloud Storage
-(if you're familiar with S3,
-buckets on Cloud Storage mean the same thing).
-Think of each bucket as a single "disk drive",
-where you can store lots of files on each.
-How you organize your data is up to you,
-but it's typical to group common data
-in a single bucket.
+Once you've established a connection to Cloud Storage, the first thing you
+typically want to do is create a new bucket. A bucket is a container used
+to store objects in Cloud Storage (if you're familiar with S3, buckets on
+Cloud Storage mean the same thing). Think of each bucket as a single "disk
+drive", where you can store lots of files on each. How you organize your
+data is up to you, but it's typical to group common data in a single
+bucket.
 
 Let's create a bucket:
 
@@ -74,59 +61,41 @@ Let's create a bucket:
     File "<stdin>", line 1, in <module>
     File "gcloud/storage/connection.py", line 340, in create_bucket
       data={'name': bucket.name})
-    File "gcloud/storage/connection.py", line 224, in api_request
-      raise exceptions.ConnectionError(response, content)
-  gcloud.storage.exceptions.ConnectionError: {'status': '409', 'alternate-protocol': '443:quic', 'content-length': '271', 'x-xss-protection': '1; mode=block', 'x-content-type-options': 'nosniff', 'expires': 'Sat, 15 Mar 2014 19:19:47 GMT', 'server': 'GSE', '-content-encoding': 'gzip', 'cache-control': 'private, max-age=0', 'date': 'Sat, 15 Mar 2014 19:19:47 GMT', 'x-frame-options': 'SAMEORIGIN', 'content-type': 'application/json; charset=UTF-8'}{
-    "error": {
-      "errors": [
-        {
-          "domain": "global",
-          "reason": "conflict",
-          "message": "Sorry, that name is not available. Please try a different one."
-        }
-      ],
-      "code": 409,
-      "message": "Sorry, that name is not available. Please try a different one."
-    }
-  }
+    File "gcloud/storage/connection.py", line 324, in api_request
+      raise make_exception(response, content)
+  ...
 
 **Whoops!**
 
-It might be important to mention
-that bucket names are like domain names:
-it's one big namespace that we all share,
-so you have to pick a bucket name that isn't already taken.
-It's up to you to decide what a good name is,
-let's assume that you found a unique name
-and are ready to move on with your newly created bucket.
+It might be important to mention that bucket names are like domain names:
+it's one big namespace that we all share, so you have to pick a bucket name
+that isn't already taken.
+
+It's up to you to decide what a good name is; let's assume that you found a
+unique name and are ready to move on with your newly created bucket.
 
 Storing data
 ------------
 
-OK, so you have a bucket. Now what?
-Cloud Storage is just an arbitrary data container,
-so you can put whatever format of data you want.
-The naming of your files is also arbitrary,
-however the Cloud Storage online file browser
-tries to make it feel a bit like a file system
-by recognizing forward-slashes (``/``)
-so if you want to group data into "directories",
+OK, so you have a bucket. Now what? Cloud Storage is just an arbitrary
+data container, so you can put whatever format of data you want. The
+naming of your files is also arbitrary; however, the Cloud Storage online
+file browser tries to make it feel a bit like a file system by recognizing
+forward-slashes (``/``), so if you want to group data into "directories",
 you can do that.
 
-The fundamental container for a file in Cloud Storage
-is called an Object, however ``gcloud`` uses the term ``Blob``
-to avoid confusion with the Python built-in ``object``.
+The fundamental container for a file in Cloud Storage is called an Object;
+however, ``gcloud`` uses the term ``Blob`` to avoid confusion with the
+Python built-in ``object``.
 
-If you want to set some data,
-you just create a ``Blob`` inside your bucket
+If you want to set some data, you just create a ``Blob`` inside your bucket
 and store your data inside the blob::
 
   >>> blob = bucket.new_blob('greeting.txt')
   >>> blob.upload_from_string('Hello world!')
 
-:func:`new_blob <gcloud.storage.bucket.Bucket.new_blob>`
-creates a :class:`Blob <gcloud.storage.blob.Blob>` object locally
-and
+:func:`new_blob <gcloud.storage.bucket.Bucket.new_blob>` creates a
+:class:`Blob <gcloud.storage.blob.Blob>` object locally and
 :func:`upload_from_string <gcloud.storage.blob.Blob.upload_from_string>`
 allows you to put a string into the blob.
@@ -160,21 +129,19 @@ and check if they are the same in a terminal::
 
   $ diff kitten.jpg kitten2.jpg
 
-Notice that we're using
-:func:`get_blob <gcloud.storage.bucket.Bucket.get_blob>`
-to retrieve a blob we know exists remotely.
-If the blob doesn't exist, it will return ``None``.
+Notice that we're using :func:`get_blob
+<gcloud.storage.bucket.Bucket.get_blob>` to retrieve a blob we know exists
+remotely. If the blob doesn't exist, it will return ``None``.
 
 .. note:: ``get_blob`` is **not** retrieving the entire object's data.
 
-If you want to "get-or-create" the blob
-(that is, overwrite it if it already exists),
-you can use :func:`new_blob <gcloud.storage.bucket.Bucket.new_blob>`.
-However, keep in mind, the blob is not created
-until you store some data inside of it.
+If you want to "get-or-create" the blob (that is, overwrite it if it
+already exists), you can use :func:`new_blob
+<gcloud.storage.bucket.Bucket.new_blob>`. However, keep in mind, the blob
+is not created until you store some data inside of it.
 
-If you want to check whether a blob exists,
-you can use the ``in`` operator in Python::
+If you want to check whether a blob exists, you can use the ``in`` operator
+in Python::
 
   >>> print 'kitten.jpg' in bucket
   True
@@ -184,20 +151,19 @@ you can use the ``in`` operator in Python::
 Accessing a bucket
 ------------------
 
-If you already have a bucket,
-use :func:`get_bucket <gcloud.storage.connection.Connection.get_bucket>`
-to retrieve the bucket object::
+If you already have a bucket, use :func:`get_bucket
+<gcloud.storage.connection.Connection.get_bucket>` to retrieve the bucket
+object::
 
   >>> bucket = connection.get_bucket('my-bucket')
 
-If you want to get all the blobs in the bucket,
-you can use
+If you want to get all the blobs in the bucket, you can use
 :func:`get_all_blobs <gcloud.storage.bucket.Bucket.get_all_blobs>`::
 
   >>> blobs = bucket.get_all_blobs()
 
-However, if you're looking to iterate through the blobs,
-you can use the bucket itself as an iterator::
+However, if you're looking to iterate through the blobs, you can use the
+bucket itself as an iterator::
 
   >>> for blob in bucket:
   ...   print blob
@@ -205,14 +171,13 @@ you can use the bucket itself as an iterator::
 Deleting a bucket
 -----------------
 
-You can delete a bucket using the
-:func:`delete_bucket <gcloud.storage.connection.Connection.delete_bucket>`
-method::
+You can delete a bucket using the :func:`delete_bucket
+<gcloud.storage.connection.Connection.delete_bucket>` method::
 
   >>> connection.delete_bucket('my-bucket')
 
-Remember, the bucket you're deleting needs to be empty,
-otherwise you'll get an error.
+Remember, the bucket you're deleting needs to be empty; otherwise you'll
+get an error.
 
 If you have a full bucket, you can delete it this way::
 
@@ -221,9 +186,9 @@ If you have a full bucket, you can delete it this way::
 Listing available buckets
 -------------------------
 
-The :class:`Connection <gcloud.storage.connection.Connection>`
-object itself is iterable,
-so you can loop over it, or call ``list`` on it to get a list object::
+The :class:`Connection <gcloud.storage.connection.Connection>` object
+itself is iterable, so you can loop over it, or call ``list`` on it to get
+a list object::
 
   >>> for bucket in connection:
   ...   print bucket.name
@@ -232,19 +197,14 @@ so you can loop over it, or call ``list`` on it to get a list object::
 Managing access control
 -----------------------
 
-Cloud storage provides fine-grained access control
-for both buckets and blobs.
-`gcloud` tries to simplify access control
-by working with entities and "grants".
-On any ACL,
-you get a reference to an entity,
-and then either grant or revoke a specific access level.
-Additionally, we provide two default entities:
-all users, and all authenticated users.
+Cloud Storage provides fine-grained access control for both buckets and
+blobs. ``gcloud`` tries to simplify access control by working with entities
+and "grants". On any ACL, you get a reference to an entity, and then
+either grant or revoke a specific access level. Additionally, we provide
+two default entities: all users, and all authenticated users.
 
 For example, if you want to grant read access to all users on your bucket::
 
   >>> bucket.get_acl().all().grant_read()
 
-For more detail on access control,
-see :mod:`gcloud.storage.acl`.
+For more detail on access control, see :mod:`gcloud.storage.acl`.
diff --git a/docs/gcloud-api.rst b/docs/gcloud-api.rst
index 583794a0f334..feab47583b50 100644
--- a/docs/gcloud-api.rst
+++ b/docs/gcloud-api.rst
@@ -28,3 +28,11 @@ Credentials
   :members:
   :undoc-members:
   :show-inheritance:
+
+Exceptions
+~~~~~~~~~~
+
+.. automodule:: gcloud.exceptions
+  :members:
+  :undoc-members:
+  :show-inheritance:
diff --git a/docs/storage-api.rst b/docs/storage-api.rst
index edf609844b22..b3fc4cc661d5 100644
--- a/docs/storage-api.rst
+++ b/docs/storage-api.rst
@@ -30,11 +30,3 @@ Iterators
   :members:
   :undoc-members:
   :show-inheritance:
-
-Exceptions
-~~~~~~~~~~
-
-.. automodule:: gcloud.storage.exceptions
-  :members:
-  :undoc-members:
-  :show-inheritance:
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index 33de63e794be..d52f794b2f6f 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -14,9 +14,8 @@
 
 """Connections to gcloud datastore API servers."""
 
-import six
-
 from gcloud import connection
+from gcloud.exceptions import make_exception
 from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
 from gcloud.datastore import helpers
 
@@ -54,7 +53,7 @@ def _request(self, dataset_id, method, data):
 
         :rtype: string
         :returns: The string response content from the API call.
-        :raises: :class:`six.moves.http_client.HTTPException` if the response
+        :raises: :class:`gcloud.exceptions.GCloudError` if the response
                  code is not 200 OK.
         """
         headers = {
@@ -68,9 +67,7 @@ def _request(self, dataset_id, method, data):
 
         status = headers['status']
         if status != '200':
-            message = ('Request failed with status code %s. '
-                       'Error was: %s' % (status, content))
-            raise six.moves.http_client.HTTPException(message)
+            raise make_exception(headers, content)
 
         return content
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index ed241de8253b..278b2e9af1ea 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -100,17 +100,16 @@ def test__request_w_200(self):
         self.assertEqual(http._called_with['body'], DATA)
 
     def test__request_not_200(self):
-        import six
+        from gcloud.exceptions import BadRequest
 
         DATASET_ID = 'DATASET'
         METHOD = 'METHOD'
         DATA = 'DATA'
         conn = self._makeOne()
-        conn._http = Http({'status': '400'}, 'Bad Request')
-        with self.assertRaises(six.moves.http_client.HTTPException) as e:
+        conn._http = Http({'status': '400'}, '{"message": "Bad Request"}')
+        with self.assertRaises(BadRequest) as e:
             conn._request(DATASET_ID, METHOD, DATA)
-        expected_message = ('Request failed with status code 400. '
-                            'Error was: Bad Request')
+        expected_message = ('400 Bad Request')
         self.assertEqual(str(e.exception), expected_message)
 
     def test__rpc(self):
@@ -845,12 +844,13 @@ class Http(object):
     _called_with = None
 
     def __init__(self, headers, content):
-        self._headers = headers
+        from httplib2 import Response
+        self._response = Response(headers)
         self._content = content
 
     def request(self, **kw):
         self._called_with = kw
-        return self._headers, self._content
+        return self._response, self._content
 
 
 class HttpMultiple(object):
diff --git a/gcloud/storage/exceptions.py b/gcloud/exceptions.py
similarity index 92%
rename from gcloud/storage/exceptions.py
rename to gcloud/exceptions.py
index 05fa71983aa9..f56148afd33d 100644
--- a/gcloud/storage/exceptions.py
+++ b/gcloud/exceptions.py
@@ -22,7 +22,7 @@
 _HTTP_CODE_TO_EXCEPTION = {}  # populated at end of module
 
 
-class StorageError(Exception):
+class GCloudError(Exception):
     """Base error class for gcloud errors (abstract).
 
     Each subclass represents a single type of HTTP error response.
@@ -34,7 +34,7 @@ class StorageError(Exception):
     """
 
     def __init__(self, message, errors=()):
-        super(StorageError, self).__init__()
+        super(GCloudError, self).__init__()
         # suppress deprecation warning under 2.6.x
         self.message = message
         self._errors = [error.copy() for error in errors]
@@ -52,7 +52,7 @@ def errors(self):
         return [error.copy() for error in self._errors]
 
 
-class Redirection(StorageError):
+class Redirection(GCloudError):
     """Base for 3xx responses
 
     This class is abstract.
@@ -79,7 +79,7 @@ class ResumeIncomplete(Redirection):
     code = 308
 
 
-class ClientError(StorageError):
+class ClientError(GCloudError):
     """Base for 4xx responses
 
     This class is abstract
@@ -93,12 +93,12 @@ class BadRequest(ClientError):
 
 
 class Unauthorized(ClientError):
     """Exception mapping a '401 Unauthorized' response."""
-    code = 400
+    code = 401
 
 
 class Forbidden(ClientError):
     """Exception mapping a '403 Forbidden' response."""
-    code = 400
+    code = 403
 
 
 class NotFound(ClientError):
@@ -136,7 +136,7 @@ class TooManyRequests(ClientError):
     code = 429
 
 
-class ServerError(StorageError):
+class ServerError(GCloudError):
     """Base for 5xx responses: (abstract)"""
 
 
@@ -158,7 +158,7 @@ class ServiceUnavailable(ServerError):
 def make_exception(response, content):
     """Factory: create exception based on HTTP response code.
 
-    :rtype: instance of :class:`StorageError`, or a concrete subclass.
+    :rtype: instance of :class:`GCloudError`, or a concrete subclass.
     """
 
     if isinstance(content, str):
@@ -171,7 +171,7 @@ def make_exception(response, content):
     try:
         klass = _HTTP_CODE_TO_EXCEPTION[response.status]
     except KeyError:
-        error = StorageError(message, errors)
+        error = GCloudError(message, errors)
         error.code = response.status
     else:
         error = klass(message, errors)
@@ -187,7 +187,7 @@ def _walk_subclasses(klass):
 
 
 # Build the code->exception class mapping.
-for eklass in _walk_subclasses(StorageError):
+for eklass in _walk_subclasses(GCloudError):
     code = getattr(eklass, 'code', None)
     if code is not None:
         _HTTP_CODE_TO_EXCEPTION[code] = eklass
diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py
index fb16fe58fa9f..44f35056ae3c 100644
--- a/gcloud/storage/blob.py
+++ b/gcloud/storage/blob.py
@@ -195,7 +195,7 @@ def delete(self):
 
         :rtype: :class:`Blob`
         :returns: The blob that was just deleted.
-        :raises: :class:`gcloud.storage.exceptions.NotFound`
+        :raises: :class:`gcloud.exceptions.NotFound`
                  (propagated from
                  :meth:`gcloud.storage.bucket.Bucket.delete_blob`).
""" @@ -207,7 +207,7 @@ def download_to_file(self, file_obj): :type file_obj: file :param file_obj: A file handle to which to write the blob's data. - :raises: :class:`gcloud.storage.exceptions.NotFound` + :raises: :class:`gcloud.exceptions.NotFound` """ download_url = self.media_link @@ -232,7 +232,7 @@ def download_to_filename(self, filename): :type filename: string :param filename: A filename to be passed to ``open``. - :raises: :class:`gcloud.storage.exceptions.NotFound` + :raises: :class:`gcloud.exceptions.NotFound` """ with open(filename, 'wb') as file_obj: self.download_to_file(file_obj) @@ -249,7 +249,7 @@ def download_as_string(self): :rtype: string :returns: The data stored in this blob. - :raises: :class:`gcloud.storage.exceptions.NotFound` + :raises: :class:`gcloud.exceptions.NotFound` """ string_buffer = StringIO() self.download_to_file(string_buffer) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 077e423e763c..d1c2e95f8c97 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -17,9 +17,9 @@ import os import six +from gcloud.exceptions import NotFound from gcloud.storage._helpers import _PropertyMixin from gcloud.storage._helpers import _scalar_property -from gcloud.storage import exceptions from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL from gcloud.storage.iterator import Iterator @@ -159,7 +159,7 @@ def get_blob(self, blob): response = self.connection.api_request(method='GET', path=blob.path) return Blob(properties=response, bucket=self) - except exceptions.NotFound: + except NotFound: return None def get_all_blobs(self): @@ -239,7 +239,7 @@ def delete(self, force=False): The bucket **must** be empty in order to delete it. If the bucket doesn't exist, this will raise a - :class:`gcloud.storage.exceptions.NotFound`. If the bucket is + :class:`gcloud.exceptions.NotFound`. If the bucket is not empty, this will raise an Exception. If you want to delete a non-empty bucket you can pass in a force @@ -249,9 +249,9 @@ def delete(self, force=False): :type force: boolean :param force: If True, empties the bucket's objects then deletes it. - :raises: :class:`gcloud.storage.exceptions.NotFound` if the + :raises: :class:`gcloud.exceptions.NotFound` if the bucket does not exist, or - :class:`gcloud.storage.exceptions.Conflict` if the + :class:`gcloud.exceptions.Conflict` if the bucket has blobs and `force` is not passed. """ return self.connection.delete_bucket(self.name, force=force) @@ -260,12 +260,12 @@ def delete_blob(self, blob): """Deletes a blob from the current bucket. If the blob isn't found, raise a - :class:`gcloud.storage.exceptions.NotFound`. + :class:`gcloud.exceptions.NotFound`. For example:: + >>> from gcloud.exceptions import NotFound >>> from gcloud import storage - >>> from gcloud.storage import exceptions >>> connection = storage.get_connection(project) >>> bucket = connection.get_bucket('my-bucket') >>> print bucket.get_all_blobs() @@ -273,7 +273,7 @@ def delete_blob(self, blob): >>> bucket.delete_blob('my-file.txt') >>> try: ... bucket.delete_blob('doesnt-exist') - ... except exceptions.NotFound: + ... except NotFound: ... pass @@ -282,7 +282,7 @@ def delete_blob(self, blob): :rtype: :class:`gcloud.storage.blob.Blob` :returns: The blob that was just deleted. 
 
-        :raises: :class:`gcloud.storage.exceptions.NotFound` (to suppress
+        :raises: :class:`gcloud.exceptions.NotFound` (to suppress
                  the exception, call ``delete_blobs``, passing a no-op
                  ``on_error`` callback, e.g.::
@@ -302,16 +302,16 @@ def delete_blobs(self, blobs, on_error=None):
 
         :type on_error: a callable taking (blob)
         :param on_error: If not ``None``, called once for each blob raising
-                         :class:`gcloud.storage.exceptions.NotFound`;
+                         :class:`gcloud.exceptions.NotFound`;
                          otherwise, the exception is propagated.
 
-        :raises: :class:`gcloud.storage.exceptions.NotFound` (if
+        :raises: :class:`gcloud.exceptions.NotFound` (if
                  `on_error` is not passed).
         """
         for blob in blobs:
             try:
                 self.delete_blob(blob)
-            except exceptions.NotFound:
+            except NotFound:
                 if on_error is not None:
                     on_error(blob)
                 else:
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
index 4d7072cbd470..f473e5ebca43 100644
--- a/gcloud/storage/connection.py
+++ b/gcloud/storage/connection.py
@@ -29,7 +29,8 @@
 import pytz
 
 from gcloud.connection import Connection as _Base
-from gcloud.storage import exceptions
+from gcloud.exceptions import make_exception
+from gcloud.exceptions import NotFound
 from gcloud.storage.bucket import Bucket
 from gcloud.storage.iterator import Iterator
 import six
@@ -320,7 +321,7 @@ def api_request(self, method, path, query_params=None,
             method=method, url=url, data=data, content_type=content_type)
 
         if not 200 <= response.status < 300:
-            raise exceptions.make_exception(response, content)
+            raise make_exception(response, content)
 
         if content and expect_json:
             content_type = response.get('content-type', '')
@@ -364,11 +365,11 @@ def get_bucket(self, bucket_name):
         For example::
 
           >>> from gcloud import storage
-          >>> from gcloud.storage import exceptions
+          >>> from gcloud.exceptions import NotFound
           >>> connection = storage.get_connection(project)
           >>> try:
           >>>   bucket = connection.get_bucket('my-bucket')
-          >>> except exceptions.NotFound:
+          >>> except NotFound:
           >>>   print 'Sorry, that bucket does not exist!'
 
         :type bucket_name: string
@@ -405,7 +406,7 @@ def lookup(self, bucket_name):
         """
         try:
             return self.get_bucket(bucket_name)
-        except exceptions.NotFound:
+        except NotFound:
             return None
 
     def create_bucket(self, bucket):
@@ -452,10 +453,10 @@ def delete_bucket(self, bucket, force=False):
         If the bucket doesn't exist, this will raise a
         :class:`gcloud.storage.exceptions.NotFound`::
 
-          >>> from gcloud.storage import exceptions
+          >>> from gcloud.exceptions import NotFound
           >>> try:
           >>>   connection.delete_bucket('my-bucket')
-          >>> except exceptions.NotFound:
+          >>> except NotFound:
           >>>   print 'That bucket does not exist!'
 
         :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py
index 0028f853019b..0017c366e81b 100644
--- a/gcloud/storage/test_acl.py
+++ b/gcloud/storage/test_acl.py
@@ -815,7 +815,7 @@ def __init__(self, *responses):
         self._deleted = []
 
     def api_request(self, **kw):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
 
         self._requested.append(kw)
         try:
diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py
index 08f000667370..886aa6d5b4eb 100644
--- a/gcloud/storage/test_bucket.py
+++ b/gcloud/storage/test_bucket.py
@@ -277,7 +277,7 @@ def test_new_blob_invalid(self):
         self.assertRaises(TypeError, bucket.new_blob, object())
 
     def test_delete_default_miss(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         NAME = 'name'
         connection = _Connection()
         bucket = self._makeOne(connection, NAME)
@@ -293,7 +293,7 @@ def test_delete_explicit_hit(self):
         self.assertEqual(connection._deleted, [(NAME, True)])
 
     def test_delete_blob_miss(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         NAME = 'name'
         NONESUCH = 'nonesuch'
         connection = _Connection()
@@ -334,7 +334,7 @@ def test_delete_blobs_hit(self):
         self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
 
     def test_delete_blobs_miss_no_on_error(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         NAME = 'name'
         BLOB_NAME = 'blob-name'
         NONESUCH = 'nonesuch'
@@ -982,7 +982,7 @@ def __init__(self, *responses):
         self._deleted = []
 
     def api_request(self, **kw):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
 
         self._requested.append(kw)
         try:
@@ -993,7 +993,7 @@ def api_request(self, **kw):
         return response
 
     def delete_bucket(self, bucket, force=False):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         self._deleted.append((bucket, force))
         if not self._delete_ok:
             raise NotFound('miss')
diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py
index 2edd313aeb86..1e10f1928cd4 100644
--- a/gcloud/storage/test_connection.py
+++ b/gcloud/storage/test_connection.py
@@ -338,7 +338,7 @@ def test_api_request_w_data(self):
         self.assertEqual(http._called_with['headers'], expected_headers)
 
     def test_api_request_w_404(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         PROJECT = 'project'
         conn = self._makeOne(PROJECT)
         conn._http = Http(
@@ -348,7 +348,7 @@ def test_api_request_w_404(self):
         self.assertRaises(NotFound, conn.api_request, 'GET', '/')
 
     def test_api_request_w_500(self):
-        from gcloud.storage.exceptions import InternalServerError
+        from gcloud.exceptions import InternalServerError
         PROJECT = 'project'
         conn = self._makeOne(PROJECT)
         conn._http = Http(
@@ -396,7 +396,7 @@ def test_get_all_buckets_non_empty(self):
         self.assertEqual(http._called_with['uri'], URI)
 
     def test_get_bucket_miss(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         PROJECT = 'project'
         NONESUCH = 'nonesuch'
         conn = self._makeOne(PROJECT)
diff --git a/gcloud/storage/test_exceptions.py b/gcloud/test_exceptions.py
similarity index 87%
rename from gcloud/storage/test_exceptions.py
rename to gcloud/test_exceptions.py
index 70edca89c9a2..ad7f89798660 100644
--- a/gcloud/storage/test_exceptions.py
+++ b/gcloud/test_exceptions.py
@@ -15,11 +15,11 @@
 import unittest2
 
 
-class Test_StorageError(unittest2.TestCase):
+class Test_GCloudError(unittest2.TestCase):
 
     def _getTargetClass(self):
-        from gcloud.storage.exceptions import StorageError
-        return StorageError
+        from gcloud.exceptions import GCloudError
+        return GCloudError
 
     def _makeOne(self, *args):
         return self._getTargetClass()(*args)
@@ -49,11 +49,11 @@ def test_ctor_explicit(self):
 class Test_make_exception(unittest2.TestCase):
 
     def _callFUT(self, response, content):
-        from gcloud.storage.exceptions import make_exception
+        from gcloud.exceptions import make_exception
         return make_exception(response, content)
 
     def test_hit_w_content_as_str(self):
-        from gcloud.storage.exceptions import NotFound
+        from gcloud.exceptions import NotFound
         response = _Response(404)
         content = '{"message": "Not Found"}'
         exception = self._callFUT(response, content)
@@ -62,7 +62,7 @@ def test_hit_w_content_as_str(self):
         self.assertEqual(list(exception.errors), [])
 
     def test_miss_w_content_as_dict(self):
-        from gcloud.storage.exceptions import StorageError
+        from gcloud.exceptions import GCloudError
         ERROR = {
             'domain': 'global',
             'location': 'test',
@@ -73,7 +73,7 @@ def test_miss_w_content_as_dict(self):
         response = _Response(600)
         content = {"message": "Unknown Error", "error": {"errors": [ERROR]}}
         exception = self._callFUT(response, content)
-        self.assertTrue(isinstance(exception, StorageError))
+        self.assertTrue(isinstance(exception, GCloudError))
         self.assertEqual(exception.message, 'Unknown Error')
         self.assertEqual(list(exception.errors), [ERROR])
diff --git a/regression/storage.py b/regression/storage.py
index 18c25a90a911..4327522c7e61 100644
--- a/regression/storage.py
+++ b/regression/storage.py
@@ -19,6 +19,7 @@
 import time
 
 import unittest2
+from gcloud import exceptions
 
 from gcloud import storage
 from gcloud.storage import _implicit_environ
@@ -45,7 +46,7 @@ def safe_delete(bucket):
     for blob in bucket:
         try:
             blob.delete()
-        except storage.exceptions.NotFound:
+        except exceptions.NotFound:
             print('Delete failed with 404: %r' % (blob,))
 
     # Passing force=False does not try to delete the contained files.
@@ -68,7 +69,7 @@ def tearDown(self):
 
     def test_create_bucket(self):
         new_bucket_name = 'a-new-bucket'
-        self.assertRaises(storage.exceptions.NotFound,
+        self.assertRaises(exceptions.NotFound,
                           CONNECTION.get_bucket, new_bucket_name)
         created = CONNECTION.create_bucket(new_bucket_name)
         self.case_buckets_to_delete.append(created)
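
A quick usage sketch (not part of the patch): this is how the relocated
exception helpers are expected to be consumed once the change above is
applied, assuming the ``gcloud.exceptions`` layout from this diff;
``'my-project'`` and ``'my-bucket'`` are placeholder names::

    from httplib2 import Response

    from gcloud import storage
    from gcloud.exceptions import GCloudError, NotFound, make_exception

    # make_exception() maps the response's HTTP status code onto a
    # GCloudError subclass via the _HTTP_CODE_TO_EXCEPTION table that
    # _walk_subclasses() builds at import time.
    error = make_exception(Response({'status': 404}),
                           '{"message": "Not Found"}')
    assert isinstance(error, NotFound)
    assert error.message == 'Not Found'

    # The same classes are what callers now catch for storage (and, with
    # this change, datastore) connections.
    connection = storage.get_connection('my-project')
    try:
        bucket = connection.get_bucket('my-bucket')
    except NotFound:
        print('Sorry, that bucket does not exist!')
    except GCloudError as exc:
        # Any other non-2xx response surfaces as a GCloudError subclass.
        print('Request failed: %s' % (exc,))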