+
+
{%- block sidebarrel %}
- {%- include "relations.html" %}
+ {# include "relations.html" #}
{%- endblock %}
{%- block sidebarsourcelink %}
- {%- include "sourcelink.html" %}
+ {# include "sourcelink.html" #}
{%- endblock %}
{%- if customsidebar %}
{%- include customsidebar %}
{%- endif %}
{%- block sidebarsearch %}
- {%- include "searchbox.html" %}
+ {#- include "searchbox.html" #}
{%- endblock %}
{%- endif %}
diff --git a/docs/common-api.rst b/docs/common-api.rst
new file mode 100644
index 000000000000..f6d53322e803
--- /dev/null
+++ b/docs/common-api.rst
@@ -0,0 +1,18 @@
+Cloud Common
+============
+
+Connections
+-----------
+
+.. automodule:: gcloud.connection
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Credentials
+-----------
+
+.. automodule:: gcloud.credentials
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index 8bc7786bbd23..2c13b61fdc5a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -25,7 +25,8 @@
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.viewcode']
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary',
+ 'sphinx.ext.todo', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -247,3 +248,7 @@
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
+
+# This pulls class descriptions from the class docstring,
+# and parameter definitions from the __init__ docstring.
+autoclass_content = 'both'
diff --git a/docs/datastore-api.rst b/docs/datastore-api.rst
index a5f5f30d18db..157e9d7beba2 100644
--- a/docs/datastore-api.rst
+++ b/docs/datastore-api.rst
@@ -1,5 +1,5 @@
-Cloud Datastore API Documentation
-=================================
+Cloud Datastore
+===============
:mod:`gcloud.datastore`
-----------------------
diff --git a/docs/index.rst b/docs/index.rst
index 365434dd2a08..4f625b0e37d2 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,4 +1,11 @@
-:tocdepth: 2
+:maxdepth: 1
+
+.. toctree::
+ :hidden:
+
+ datastore-api
+ storage-api
+ common-api
Google Cloud Python API
=======================
@@ -14,21 +21,32 @@ If you've never used ``gcloud`` before,
you should probably take a look at
:doc:`getting-started`.
-Supported services
-------------------
+Cloud Datastore
+---------------
+
+- Google's `official documentation <https://developers.google.com/datastore/>`_
+- :doc:`datastore-quickstart`
+- :doc:`datastore-getting-started`
+- :doc:`Cloud Datastore API Documentation <datastore-api>`
-* **Cloud Datastore**
- (`official documentation `_)
+Cloud Storage
+-------------
- - :doc:`datastore-quickstart`
- - :doc:`datastore-getting-started`
- - :doc:`datastore-api`
+- Google's `official documentation <https://developers.google.com/storage/>`_
+- :doc:`storage-quickstart`
+- :doc:`Cloud Storage API Documentation <storage-api>`
-I found a bug!
+Common modules
--------------
-Awesome!
+- :doc:`Common Module API Documentation <common-api>`
+
+How to contribute
+-----------------
+
+Want to help out?
+That's awesome.
The library is open source
and `lives on GitHub `_.
-Open an issue,
+Open an issue
or fork the library and submit a pull request.
diff --git a/docs/storage-api.rst b/docs/storage-api.rst
new file mode 100644
index 000000000000..ed36c0870c3a
--- /dev/null
+++ b/docs/storage-api.rst
@@ -0,0 +1,58 @@
+Cloud Storage
+=============
+
+:mod:`gcloud.storage`
+-----------------------
+
+.. automodule:: gcloud.storage.__init__
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Connections
+-----------
+
+.. automodule:: gcloud.storage.connection
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Buckets
+-------
+
+.. automodule:: gcloud.storage.bucket
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Keys
+----
+
+.. automodule:: gcloud.storage.key
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Access Control
+--------------
+
+.. automodule:: gcloud.storage.acl
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Iterators
+---------
+
+.. automodule:: gcloud.storage.iterator
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+Exceptions
+----------
+
+.. automodule:: gcloud.storage.exceptions
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/storage-quickstart.rst b/docs/storage-quickstart.rst
new file mode 100644
index 000000000000..b28f22fa22c6
--- /dev/null
+++ b/docs/storage-quickstart.rst
@@ -0,0 +1,67 @@
+Cloud Storage in 10 seconds
+===========================
+
+Install the library
+-------------------
+
+The source code for the library
+(and demo code)
+lives on GitHub.
+You can install the library quickly with ``pip``::
+
+ $ pip install gcloud
+
+Run the
+`example script `_
+included in the package::
+
+ $ python -m gcloud.storage.demo
+
+And that's it!
+You should be walking through
+a demonstration of using ``gcloud.storage``
+to read and write data to Google Cloud Storage.
+
+Try it yourself
+---------------
+
+You can interact with a demo dataset
+in a Python interactive shell.
+
+Start by importing the demo module
+and instantiating the demo connection::
+
+ >>> from gcloud.storage import demo
+ >>> connection = demo.get_connection()
+
+Once you have the connection,
+you can create buckets and keys::
+
+ >>> connection.get_all_buckets()
+ [<Bucket: ...>, ...]
+ >>> bucket = connection.create_bucket('my-new-bucket')
+ >>> print bucket
+ <Bucket: my-new-bucket>
+ >>> key = bucket.new_key('my-test-file.txt')
+ >>> print key
+ <Key: my-new-bucket, my-test-file.txt>
+ >>> key = key.set_contents_from_string('this is test content!')
+ >>> print key.get_contents_as_string()
+ 'this is test content!'
+ >>> print bucket.get_all_keys()
+ [<Key: my-new-bucket, my-test-file.txt>]
+ >>> bucket.delete()
+
+.. note::
+ The ``get_connection`` method is just a shortcut for::
+
+ >>> from gcloud import storage
+ >>> from gcloud.storage import demo
+ >>> connection = storage.get_connection(
+ >>> demo.PROJECT_NAME, demo.CLIENT_EMAIL, demo.PRIVATE_KEY_PATH)
+
+OK, that's it!
+--------------
+
+And you can always check out
+the :doc:`storage-api`.
diff --git a/gcloud/connection.py b/gcloud/connection.py
new file mode 100644
index 000000000000..c70e7a3fb21b
--- /dev/null
+++ b/gcloud/connection.py
@@ -0,0 +1,40 @@
+import httplib2
+
+
+class Connection(object):
+ """A generic connection to Google Cloud Platform.
+
+ Subclasses should understand
+ only the basic types
+ in method arguments,
+ however they should be capable
+ of returning advanced types.
+ """
+
+ API_BASE_URL = 'https://www.googleapis.com'
+ """The base of the API call URL."""
+
+ _EMPTY = object()
+ """A pointer to represent an empty value for default arguments."""
+
+ def __init__(self, credentials=None):
+ """
+ :type credentials: :class:`gcloud.credentials.Credentials`
+ :param credentials: The OAuth2 Credentials to use for this connection.
+ """
+
+ self._credentials = credentials
+
+ @property
+ def http(self):
+ """A getter for the HTTP transport used in talking to the API.
+
+ :rtype: :class:`httplib2.Http`
+ :returns: A Http object used to transport data.
+ """
+ if not hasattr(self, '_http'):
+ self._http = httplib2.Http()
+ if self._credentials:
+ self._http = self._credentials.authorize(self._http)
+ return self._http
+
diff --git a/gcloud/datastore/credentials.py b/gcloud/credentials.py
similarity index 74%
rename from gcloud/datastore/credentials.py
rename to gcloud/credentials.py
index fb0c8b466b32..dd8a5bd619b4 100644
--- a/gcloud/datastore/credentials.py
+++ b/gcloud/credentials.py
@@ -15,12 +15,8 @@ class Credentials(object):
which use this class under the hood.
"""
- SCOPE = ('https://www.googleapis.com/auth/datastore '
- 'https://www.googleapis.com/auth/userinfo.email')
- """The scope required for authenticating as a Cloud Datastore consumer."""
-
@classmethod
- def get_for_service_account(cls, client_email, private_key_path):
+ def get_for_service_account(cls, client_email, private_key_path, scope=None):
"""Gets the credentials for a service account.
:type client_email: string
@@ -30,8 +26,15 @@ def get_for_service_account(cls, client_email, private_key_path):
:param private_key_path: The path to a private key file (this file was
given to you when you created the service
account).
+
+ :type scope: string or tuple of strings
+ :param scope: The scope against which to authenticate.
+ (Different services require different scopes,
+ check the documentation for which scope is required
+ for the different levels of access
+ to any particular API.)
"""
return client.SignedJwtAssertionCredentials(
service_account_name=client_email,
private_key=open(private_key_path).read(),
- scope=cls.SCOPE)
+ scope=scope)
diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py
index c43f3e3dea15..b7ee04221321 100644
--- a/gcloud/datastore/__init__.py
+++ b/gcloud/datastore/__init__.py
@@ -35,6 +35,10 @@
__version__ = '0.1.2'
+SCOPE = ('https://www.googleapis.com/auth/datastore ',
+ 'https://www.googleapis.com/auth/userinfo.email')
+"""The scope required for authenticating as a Cloud Datastore consumer."""
+
def get_connection(client_email, private_key_path):
"""Shortcut method to establish a connection to the Cloud Datastore.
@@ -58,11 +62,11 @@ def get_connection(client_email, private_key_path):
:rtype: :class:`gcloud.datastore.connection.Connection`
:returns: A connection defined with the proper credentials.
"""
- from connection import Connection
- from credentials import Credentials
+ from gcloud.credentials import Credentials
+ from gcloud.datastore.connection import Connection
credentials = Credentials.get_for_service_account(
- client_email, private_key_path)
+ client_email, private_key_path, scope=SCOPE)
return Connection(credentials=credentials)
def get_dataset(dataset_id, client_email, private_key_path):
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index dfa31aaa65c0..983ace032b89 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -12,7 +12,7 @@ class Connection(object):
This class should understand only the basic types (and protobufs)
in method arguments, however should be capable of returning advanced types.
- :type credentials: :class:`gcloud.datastore.credentials.Credentials`
+ :type credentials: :class:`gcloud.credentials.Credentials`
:param credentials: The OAuth2 Credentials to use for this connection.
"""
@@ -41,6 +41,7 @@ def http(self):
:rtype: :class:`httplib2.Http`
:returns: A Http object used to transport data.
"""
+
if not self._http:
self._http = httplib2.Http()
if self._credentials:
diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py
new file mode 100644
index 000000000000..e53c3df71981
--- /dev/null
+++ b/gcloud/storage/__init__.py
@@ -0,0 +1,104 @@
+"""Shortcut methods for getting set up with Google Cloud Storage.
+
+You'll typically use these to get started with the API:
+
+>>> import gcloud.storage
+>>> bucket = gcloud.storage.get_bucket('bucket-id-here',
+ 'long-email@googleapis.com',
+ '/path/to/private.key')
+>>> # Then do other things...
+>>> key = bucket.get_key('/remote/path/to/file.txt')
+>>> print key.get_contents_as_string()
+>>> key.set_contents_from_string('New contents!')
+>>> bucket.upload_file('/remote/path/storage.txt', '/local/path.txt')
+
+The main concepts with this API are:
+
+- :class:`gcloud.storage.connection.Connection`
+ which represents a connection between your machine
+ and the Cloud Storage API.
+
+- :class:`gcloud.storage.bucket.Bucket`
+ which represents a particular bucket
+ (akin to a mounted disk on a computer).
+
+- :class:`gcloud.storage.key.Key`
+ which represents a pointer
+ to a particular entity in Cloud Storage
+ (akin to a file path on a remote machine).
+"""
+
+
+__version__ = '0.1'
+
+# TODO: Allow specific scopes and authorization levels.
+SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
+ 'https://www.googleapis.com/auth/devstorage.read_only',
+ 'https://www.googleapis.com/auth/devstorage.read_write')
+
+
+def get_connection(project_name, client_email, private_key_path):
+ """Shortcut method to establish a connection to Cloud Storage.
+
+ Use this if you are going to access several buckets
+ with the same set of credentials:
+
+ >>> from gcloud import storage
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket1 = connection.get_bucket('bucket1')
+ >>> bucket2 = connection.get_bucket('bucket2')
+
+ :type project_name: string
+ :param project_name: The name of the project to connect to.
+
+ :type client_email: string
+ :param client_email: The e-mail attached to the service account.
+
+ :type private_key_path: string
+ :param private_key_path: The path to a private key file (this file was
+ given to you when you created the service
+ account).
+
+ :rtype: :class:`gcloud.storage.connection.Connection`
+ :returns: A connection defined with the proper credentials.
+ """
+
+ from gcloud.credentials import Credentials
+ from gcloud.storage.connection import Connection
+
+ credentials = Credentials.get_for_service_account(
+ client_email, private_key_path, scope=SCOPE)
+ return Connection(project_name=project_name, credentials=credentials)
+
+def get_bucket(bucket_name, project_name, client_email, private_key_path):
+ """Shortcut method to establish a connection to a particular bucket.
+
+ You'll generally use this as the first call to working with the API:
+
+ >>> from gcloud import storage
+ >>> bucket = storage.get_bucket(bucket_name, project_name, email, key_path)
+ >>> # Now you can do things with the bucket.
+ >>> bucket.exists('/path/to/file.txt')
+ False
+
+ :type bucket_name: string
+ :param bucket_name: The id of the bucket you want to use.
+ This is akin to a disk name on a file system.
+
+ :type project_name: string
+ :param project_name: The name of the project to connect to.
+
+ :type client_email: string
+ :param client_email: The e-mail attached to the service account.
+
+ :type private_key_path: string
+ :param private_key_path: The path to a private key file (this file was
+ given to you when you created the service
+ account).
+
+ :rtype: :class:`gcloud.storage.bucket.Bucket`
+ :returns: A bucket with a connection using the provided credentials.
+ """
+
+ connection = get_connection(project_name, client_email, private_key_path)
+ return connection.get_bucket(bucket_name)
diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py
new file mode 100644
index 000000000000..ac39081f704d
--- /dev/null
+++ b/gcloud/storage/acl.py
@@ -0,0 +1,403 @@
+"""
+This module makes it simple to interact
+with the access control lists that Cloud Storage provides.
+
+:class:`gcloud.storage.bucket.Bucket` has a getting method
+that creates an ACL object under the hood,
+and you can interact with that using
+:func:`gcloud.storage.bucket.Bucket.get_acl`::
+
+ >>> from gcloud import storage
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket = connection.get_bucket(bucket_name)
+ >>> acl = bucket.get_acl()
+
+Adding and removing permissions can be done with the following methods
+(in increasing order of granularity):
+
+ - :func:`ACL.all`
+ corresponds to access for all users.
+ - :func:`ACL.all_authenticated` corresponds
+ to access for all users that are signed into a Google account.
+ - :func:`ACL.domain` corresponds to access on a
+ per Google Apps domain (ie, ``example.com``).
+ - :func:`ACL.group` corresponds to access on a
+ per group basis (either by ID or e-mail address).
+ - :func:`ACL.user` corresponds to access on a
+ per user basis (either by ID or e-mail address).
+
+And you are able to ``grant`` and ``revoke`` the following roles::
+
+ - :func:`ACL.Entity.grant_read` and :func:`ACL.Entity.revoke_read`
+ - :func:`ACL.Entity.grant_write` and :func:`ACL.Entity.revoke_write`
+ - :func:`ACL.Entity.grant_owner` and :func:`ACL.Entity.revoke_owner`
+
+You can use any of these like any other factory method
+(these happen to be :class:`ACL.Entity` factories)::
+
+ >>> acl.user('me@example.org').grant_read()
+ >>> acl.all_authenticated().grant_write()
+
+You can also chain
+these ``grant_*`` and ``revoke_*`` methods
+together for brevity::
+
+ >>> acl.all().grant_read().revoke_write()
+
+After that,
+you can save any changes you make
+with the :func:`gcloud.storage.acl.ACL.save` method::
+
+ >>> acl.save()
+
+You can alternatively save any existing
+:class:`gcloud.storage.acl.ACL` object
+(whether it was created by a factory method or not)
+with the :func:`gcloud.storage.bucket.Bucket.save_acl` method::
+
+ >>> bucket.save_acl(acl)
+
+To get the list
+of ``entity`` and ``role``
+for each unique pair,
+the :class:`ACL` class is iterable::
+
+ >>> print list(ACL)
+ [{'role': 'OWNER', 'entity': 'allUsers'}, ...]
+
+This list of tuples can be used as the ``entity`` and ``role``
+fields when sending metadata for ACLs to the API.
+"""
+
+class ACL(object):
+ """Container class representing a list of access controls."""
+
+ class Role(object):
+ """Enum style class for role-type constants."""
+
+ Reader = 'READER'
+ Writer = 'WRITER'
+ Owner = 'OWNER'
+
+
+ class Entity(object):
+ """Class representing a set of roles for an entity.
+
+ This is a helper class that you likely won't ever construct
+ outside of using the factory methods on the :class:`ACL` object.
+ """
+
+ def __init__(self, type, identifier=None):
+ """
+ :type type: string
+ :param type: The type of entity (ie, 'group' or 'user').
+
+ :type identifier: string
+ :param identifier: The ID or e-mail of the entity.
+ For the special entity types (like 'allUsers') this
+ is optional.
+ """
+
+ # TODO: Add validation of types.
+ self.identifier = identifier
+ self.roles = set([])
+ self.type = type
+
+ def __str__(self):
+ if not self.identifier:
+ return str(self.type)
+ else:
+ return '{self.type}-{self.identifier}'.format(self=self)
+
+ def __repr__(self):
+ return '<ACL Entity: {self} ({roles})>'.format(
+ self=self, roles=', '.join(self.roles))
+
+ def get_roles(self):
+ """Get the list of roles permitted by this entity.
+
+ :rtype: list of strings
+ :returns: The list of roles associated with this entity.
+ """
+
+ return self.roles
+
+ def grant(self, role):
+ """Add a role to the entity.
+
+ :type role: string
+ :param role: The role to add to the entity.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: The entity class.
+ """
+
+ self.roles.add(role)
+ return self
+
+ def revoke(self, role):
+ """Remove a role from the entity.
+
+ :type role: string
+ :param role: The role to remove from the entity.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: The entity class.
+ """
+
+ if role in self.roles:
+ self.roles.remove(role)
+ return self
+
+ def grant_read(self):
+ """Grant read access to the current entity."""
+
+ return self.grant(ACL.Role.Reader)
+
+ def grant_write(self):
+ """Grant write access to the current entity."""
+
+ return self.grant(ACL.Role.Writer)
+
+ def grant_owner(self):
+ """Grant owner access to the current entity."""
+
+ return self.grant(ACL.Role.Owner)
+
+ def revoke_read(self):
+ """Revoke read access from the current entity."""
+
+ return self.revoke(ACL.Role.Reader)
+
+ def revoke_write(self):
+ """Revoke write access from the current entity."""
+
+ return self.revoke(ACL.Role.Writer)
+
+ def revoke_owner(self):
+ """Revoke owner access from the current entity."""
+
+ return self.revoke(ACL.Role.Owner)
+
+
+ def __init__(self):
+ self.entities = {}
+
+ def __iter__(self):
+ for entity in self.entities.itervalues():
+ for role in entity.get_roles():
+ if role:
+ yield {'entity': str(entity), 'role': role}
+
+ def entity_from_dict(self, entity_dict):
+ """Build an ACL.Entity object from a dictionary of data.
+
+ An entity is a mutable object
+ that represents a list of roles
+ belonging to either a user or group
+ or the special types
+ for all users
+ and all authenticated users.
+
+ :type entity_dict: dict
+ :param entity_dict: Dictionary full of data from an ACL lookup.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An Entity constructed from the dictionary.
+ """
+
+ entity = entity_dict['entity']
+ role = entity_dict['role']
+
+ if entity == 'allUsers':
+ entity = self.all()
+
+ elif entity == 'allAuthenticatedUsers':
+ entity = self.all_authenticated()
+
+ elif '-' in entity:
+ type, identifier = entity.split('-', 1)
+ entity = self.entity(type=type, identifier=identifier)
+
+ if not isinstance(entity, ACL.Entity):
+ raise ValueError('Invalid dictionary: %s' % acl_dict)
+
+ return entity.grant(role)
+
+ def has_entity(self, entity):
+ """Returns whether or not this ACL has any entries for an entity.
+
+ :type entity: :class:`ACL.Entity`
+ :param entity: The entity to check for existence in this ACL.
+
+ :rtype: bool
+ :returns: True if the entity exists in the ACL.
+ """
+
+ return str(entity) in self.entities
+
+ def get_entity(self, entity, default=None):
+ """Gets an entity object from the ACL.
+
+ :type entity: :class:`ACL.Entity` or string
+ :param entity: The entity to get lookup in the ACL.
+
+ :type default: anything
+ :param default: This value will be returned if the entity doesn't exist.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: The corresponding entity or the value provided to ``default``.
+ """
+
+ return self.entities.get(str(entity), default)
+
+ def add_entity(self, entity):
+ """Add an entity to the ACL.
+
+ :type entity: :class:`ACL.Entity`
+ :param entity: The entity to add to this ACL.
+ """
+
+ self.entities[str(entity)] = entity
+
+ def entity(self, type, identifier=None):
+ """Factory method for creating an Entity.
+
+ If an entity with the same type and identifier already exists,
+ this will return a reference to that entity.
+ If not, it will create a new one and add it to the list
+ of known entities for this ACL.
+
+ :type type: string
+ :param type: The type of entity to create (ie, ``user``, ``group``, etc)
+
+ :type identifier: string
+ :param identifier: The ID of the entity (if applicable).
+ This can be either an ID or an e-mail address.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: A new Entity or a reference to an existing identical entity.
+ """
+
+ entity = ACL.Entity(type=type, identifier=identifier)
+ if self.has_entity(entity):
+ entity = self.get_entity(entity)
+ else:
+ self.add_entity(entity)
+ return entity
+
+ def user(self, identifier):
+ """Factory method for a user Entity.
+
+ :type identifier: string
+ :param identifier: An id or e-mail for this particular user.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An Entity corresponding to this user.
+ """
+
+ return self.entity('user', identifier=identifier)
+
+ def group(self, identifier):
+ """Factory method for a group Entity.
+
+ :type identifier: string
+ :param identifier: An id or e-mail for this particular group.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An Entity corresponding to this group.
+ """
+
+ return self.entity('group', identifier=identifier)
+
+ def domain(self, domain):
+ """Factory method for a domain Entity.
+
+ :type domain: string
+ :param domain: The domain for this entity.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An entity corresponding to this domain.
+ """
+
+ return self.entity('domain', identifier=domain)
+
+ def all(self):
+ """Factory method for an Entity representing all users.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An entity representing all users.
+ """
+
+ return self.entity('allUsers')
+
+ def all_authenticated(self):
+ """Factory method for an Entity representing all authenticated users.
+
+ :rtype: :class:`ACL.Entity`
+ :returns: An entity representing all authenticated users.
+ """
+
+ return self.entity('allAuthenticatedUsers')
+
+ def get_entities(self):
+ """Get a list of all Entity objects.
+
+ :rtype: list of :class:`ACL.Entity` objects
+ :returns: A list of all Entity objects.
+ """
+
+ return self.entities.values()
+
+ def save(self):
+ """A method to be overridden by subclasses.
+
+ :raises: NotImplementedError
+ """
+
+ raise NotImplementedError
+
+
+class BucketACL(ACL):
+ """An ACL specifically for a bucket."""
+
+ def __init__(self, bucket):
+ """
+ :type bucket: :class:`gcloud.storage.bucket.Bucket`
+ :param bucket: The bucket to which this ACL relates.
+ """
+
+ super(BucketACL, self).__init__()
+ self.bucket = bucket
+
+ def save(self):
+ """Save this ACL for the current bucket."""
+
+ return self.bucket.save_acl(acl=self)
+
+
+class DefaultObjectACL(BucketACL):
+ """A subclass of BucketACL representing the default object ACL for a bucket."""
+
+ def save(self):
+ """Save this ACL as the default object ACL for the current bucket."""
+
+ return self.bucket.save_default_object_acl(acl=self)
+
+
+class ObjectACL(ACL):
+ """An ACL specifically for a key."""
+
+ def __init__(self, key):
+ """
+ :type key: :class:`gcloud.storage.key.Key`
+ :param key: The key that this ACL corresponds to.
+ """
+
+ super(ObjectACL, self).__init__()
+ self.key = key
+
+ def save(self):
+ """Save this ACL for the current key."""
+
+ return self.key.save_acl(acl=self)
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
new file mode 100644
index 000000000000..6c3cfe07ff00
--- /dev/null
+++ b/gcloud/storage/bucket.py
@@ -0,0 +1,553 @@
+from gcloud.storage import exceptions
+from gcloud.storage.acl import BucketACL
+from gcloud.storage.acl import DefaultObjectACL
+from gcloud.storage.iterator import KeyIterator
+from gcloud.storage.key import Key
+
+
+class Bucket(object):
+ """A class representing a Bucket on Cloud Storage.
+
+ :type connection: :class:`gcloud.storage.connection.Connection`
+ :param connection: The connection to use when sending requests.
+
+ :type name: string
+ :param name: The name of the bucket.
+ """
+
+ def __init__(self, connection=None, name=None, metadata=None):
+ self.connection = connection
+ self.name = name
+ self.metadata = metadata
+
+ # ACL rules are lazily retrieved.
+ self.acl = None
+ self.default_object_acl = None
+
+ @classmethod
+ def from_dict(cls, bucket_dict, connection=None):
+ """Construct a new bucket from a dictionary of data from Cloud Storage.
+
+ :type bucket_dict: dict
+ :param bucket_dict: The dictionary of data to construct a bucket from.
+
+ :rtype: :class:`Bucket`
+ :returns: A bucket constructed from the data provided.
+ """
+
+ return cls(connection=connection, name=bucket_dict['name'],
+ metadata=bucket_dict)
+
+ def __repr__(self):
+ return '<Bucket: %s>' % self.name
+
+ def __iter__(self):
+ return iter(KeyIterator(bucket=self))
+
+ def __contains__(self, key):
+ return self.get_key(key) is not None
+
+ @property
+ def path(self):
+ """The URL path to this bucket."""
+
+ if not self.name:
+ raise ValueError('Cannot determine path without bucket name.')
+
+ return '/b/' + self.name
+
+ def get_key(self, key):
+ """Get a key object by name.
+
+ This will return None if the key doesn't exist::
+
+ >>> from gcloud import storage
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket = connection.get_bucket('my-bucket')
+ >>> print bucket.get_key('/path/to/key.txt')
+ <Key: my-bucket, /path/to/key.txt>
+ >>> print bucket.get_key('/does-not-exist.txt')
+ None
+
+ :type key: string or :class:`gcloud.storage.key.Key`
+ :param key: The name of the key to retrieve.
+
+ :rtype: :class:`gcloud.storage.key.Key` or None
+ :returns: The key object if it exists, otherwise None.
+ """
+
+ # Coerce this to a key object (either from a Key or a string).
+ key = self.new_key(key)
+
+ try:
+ response = self.connection.api_request(method='GET', path=key.path)
+ return Key.from_dict(response, bucket=self)
+ except exceptions.NotFoundError:
+ return None
+
+ def get_all_keys(self):
+ """List all the keys in this bucket.
+
+ This will **not** retrieve all the data for all the keys,
+ it will only retrieve metadata about the keys.
+
+ This is equivalent to::
+
+ keys = [key for key in bucket]
+
+ :rtype: list of :class:`gcloud.storage.key.Key`
+ :returns: A list of all the Key objects in this bucket.
+ """
+
+ return list(self)
+
+ def new_key(self, key):
+ """Given a path name (or a Key), return a :class:`gcloud.storage.key.Key` object.
+
+ This is really useful when you're not sure
+ if you have a Key object or a string path name.
+ Given either of those types,
+ this returns the corresponding Key object.
+
+ :type key: string or :class:`gcloud.storage.key.Key`
+ :param key: A path name or actual key object.
+
+ :rtype: :class:`gcloud.storage.key.Key`
+ :returns: A Key object with the path provided.
+ """
+
+ if isinstance(key, Key):
+ return key
+
+ # Support Python 2 and 3.
+ try:
+ string_type = basestring
+ except NameError:
+ string_type = str
+
+ if isinstance(key, string_type):
+ return Key(bucket=self, name=key)
+
+ raise TypeError('Invalid key: %s' % key)
+
+ def delete(self):
+ """Delete this bucket.
+
+ The bucket **must** be empty in order to delete it.
+ If the bucket doesn't exist,
+ this will raise a :class:`gcloud.storage.exceptions.NotFoundError`.
+ If the bucket is not empty,
+ this will raise an Exception.
+
+ :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+ """
+
+ # TODO: Make sure the proper exceptions are raised.
+
+ return self.connection.delete_bucket(self.name)
+
+ def delete_key(self, key):
+ # TODO: Should we accept a 'silent' param here to not raise an exception?
+ """Deletes a key from the current bucket.
+
+ If the key isn't found,
+ this will throw a :class:`gcloud.storage.exceptions.NotFoundError`.
+
+ For example::
+
+ >>> from gcloud import storage
+ >>> from gcloud.storage import exceptions
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket = connection.get_bucket('my-bucket')
+ >>> print bucket.get_all_keys()
+ [<Key: my-bucket, my-file.txt>]
+ >>> bucket.delete_key('my-file.txt')
+ >>> try:
+ ... bucket.delete_key('doesnt-exist')
+ ... except exceptions.NotFoundError:
+ ... pass
+
+
+ :type key: string or :class:`gcloud.storage.key.Key`
+ :param key: A key name or Key object to delete.
+
+ :rtype: :class:`gcloud.storage.key.Key`
+ :returns: The key that was just deleted.
+ :raises: :class:`gcloud.storage.exceptions.NotFoundError`
+ """
+
+ key = self.new_key(key)
+ self.connection.api_request(method='DELETE', path=key.path)
+ return key
+
+ def delete_keys(self, keys):
+ # TODO: What should be the return value here?
+ # NOTE: boto returns a MultiDeleteResult instance.
+ for key in keys:
+ self.delete_key(key)
+
+ def copy_key(self):
+ raise NotImplementedError
+
+ def upload_file(self, filename, key=None):
+ # TODO: What do we do about overwriting data?
+ """Shortcut method to upload a file into this bucket.
+
+ Use this method to quickly put a local file in Cloud Storage.
+
+ For example::
+
+ >>> from gcloud import storage
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket = connection.get_bucket('my-bucket')
+ >>> bucket.upload_file('~/my-file.txt', 'remote-text-file.txt')
+ >>> print bucket.get_all_keys()
+ [<Key: my-bucket, remote-text-file.txt>]
+
+ If you don't provide a key value,
+ we will try to upload the file using the local filename
+ as the key
+ (**not** the complete path)::
+
+ >>> from gcloud import storage
+ >>> connection = storage.get_connection(project_name, email, key_path)
+ >>> bucket = connection.get_bucket('my-bucket')
+ >>> bucket.upload_file('~/my-file.txt')
+ >>> print bucket.get_all_keys()
+ [<Key: my-bucket, my-file.txt>]
+
+ :type filename: string
+ :param filename: Local path to the file you want to upload.
+
+ :type key: string or :class:`gcloud.storage.key.Key`
+ :param key: The key (either an object or a remote path)
+ of where to put the file.
+
+ If this is blank,
+ we will try to upload the file
+ to the root of the bucket
+ with the same name as on your local file system.
+ """
+ key = self.new_key(key)
+ return key.set_contents_from_filename(filename)
+
+ def has_metadata(self, field=None):
+ """Check if metadata is available locally.
+
+ :type field: string
+ :param field: (optional) the particular field to check for.
+
+ :rtype: bool
+ :returns: Whether metadata is available locally.
+ """
+
+ if not self.metadata:
+ return False
+ elif field and field not in self.metadata:
+ return False
+ else:
+ return True
+
def reload_metadata(self, full=False):
    """Reload metadata from Cloud Storage.

    :type full: bool
    :param full: If True, loads all data (including ACL data).

    :rtype: :class:`Bucket`
    :returns: The bucket you just reloaded data for.
    """
    # ACL data is only returned with the 'full' projection.
    if full:
        projection = 'full'
    else:
        projection = 'noAcl'

    self.metadata = self.connection.api_request(
        method='GET', path=self.path,
        query_params={'projection': projection})
    return self
+
def get_metadata(self, field=None, default=None):
    """Get all metadata or a specific field.

    If you request a field that isn't available,
    and that field can be retrieved by refreshing data
    from Cloud Storage,
    this method will reload the data using
    :func:`Bucket.reload_metadata`.

    :type field: string
    :param field: (optional) A particular field to retrieve from metadata.

    :type default: anything
    :param default: The value to return if the field provided wasn't found.

    :rtype: dict or anything
    :returns: All metadata or the value of the specific field.
    """
    if not self.has_metadata(field=field):
        # ACL fields only come back with a 'full' projection.
        needs_full = bool(field and field in ('acl', 'defaultObjectAcl'))
        self.reload_metadata(full=needs_full)

    if field:
        return self.metadata.get(field, default)
    return self.metadata
+
def patch_metadata(self, metadata):
    """Update particular fields of this bucket's metadata.

    This method will only update the fields provided
    and will not touch the other fields.
    It also refreshes the local metadata
    based on the server's response.

    :type metadata: dict
    :param metadata: The dictionary of values to update.

    :rtype: :class:`Bucket`
    :returns: The current bucket.
    """
    self.metadata = self.connection.api_request(
        method='PATCH', path=self.path,
        query_params={'projection': 'full'}, data=metadata)
    return self
+
def configure_website(self, main_page_suffix=None, not_found_page=None):
    """Configure website-related metadata.

    .. note::
      This (apparently) only works
      if your bucket name is a domain name
      (and to do that, you need to get approved somehow...).

      Check out the official documentation here:
      https://developers.google.com/storage/docs/website-configuration

    If you want this bucket to host a website,
    just provide the name of an index page
    and a page to use when a key isn't found::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project_name, email, private_key_path)
      >>> bucket = connection.get_bucket(bucket_name)
      >>> bucket.configure_website('index.html', '404.html')

    You probably should also make the whole bucket public::

      >>> bucket.make_public(recursive=True, future=True)

    This says:
    "Make the bucket public,
    and all the stuff already in the bucket,
    and anything else I add to the bucket.
    Just make it all public."

    :type main_page_suffix: string
    :param main_page_suffix: The page to use as the main page of a directory.
                             Typically something like index.html.

    :type not_found_page: string
    :param not_found_page: The file to use when a page isn't found.
    """
    website_config = {
        'mainPageSuffix': main_page_suffix,
        'notFoundPage': not_found_page,
    }
    return self.patch_metadata({'website': website_config})
+
def disable_website(self):
    """Disable the website configuration for this bucket.

    This is really just a shortcut for
    setting the website-related attributes to ``None``.
    """
    return self.configure_website(main_page_suffix=None, not_found_page=None)
+
def reload_acl(self):
    """Reload the ACL data from Cloud Storage.

    Replaces ``self.acl`` with a fresh :class:`BucketACL`
    built from the ``acl`` metadata field.

    :rtype: :class:`Bucket`
    :returns: The current bucket.
    """
    self.acl = BucketACL(bucket=self)
    for entry in self.get_metadata('acl', []):
        self.acl.add_entity(self.acl.entity_from_dict(entry))
    return self
+
def get_acl(self):
    """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object.

    :rtype: :class:`gcloud.storage.acl.BucketACL`
    :returns: An ACL object for the current bucket.
    """
    # TODO: This might be a VERY long list. Use the specific API endpoint.
    if self.acl:
        return self.acl
    # reload_acl() returns self, so chain straight to the fresh ACL.
    return self.reload_acl().acl
+
def save_acl(self, acl=None):
    """Save the ACL data for this bucket.

    If called without arguments,
    this will save the ACL currently stored on the Bucket object.
    For example,
    this will save the ACL stored in ``some_other_acl``::

      >>> bucket.acl = some_other_acl
      >>> bucket.save_acl()

    You can also provide a specific ACL to save
    instead of the one currently set on the Bucket object::

      >>> bucket.save_acl(acl=my_other_acl)

    You can use this to set access controls
    to be consistent from one bucket to another::

      >>> bucket1 = connection.get_bucket(bucket1_name)
      >>> bucket2 = connection.get_bucket(bucket2_name)
      >>> bucket2.save_acl(bucket1.get_acl())

    If you want to **clear** the ACL for the bucket,
    you must save an empty list (``[]``)
    rather than using ``None``
    (which is interpreted as wanting to save the current ACL)::

      >>> bucket.save_acl(None)  # Saves the current ACL (self.acl).
      >>> bucket.save_acl([])  # Clears the current ACL.

    :type acl: :class:`gcloud.storage.acl.ACL`
    :param acl: The ACL object to save.
                If left blank, this will save the ACL
                set locally on the bucket.
    """
    # [] and None both evaluate to False but mean very different
    # things: [] clears the ACL, None means "use the local ACL".
    if acl is None:
        acl = self.acl
    if acl is None:
        # Nothing set locally either -- nothing to do.
        return self
    return self.patch_metadata({'acl': list(acl)})
+
def clear_acl(self):
    """Remove all ACL rules from the bucket.

    Note that this won't actually remove *ALL* the rules,
    but it will remove all the non-default rules.
    In short,
    you'll still have access
    to a bucket that you created
    even after you clear ACL rules
    with this method.

    For example,
    imagine that you granted access to this bucket
    to a bunch of coworkers::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project_name, email, private_key_path)
      >>> bucket = connection.get_bucket(bucket_name)
      >>> acl = bucket.get_acl()
      >>> acl.user('coworker1@example.org').grant_read()
      >>> acl.user('coworker2@example.org').grant_read()
      >>> acl.save()

    Now they work in another part of the company
    and you want to 'start fresh' on who has access::

      >>> bucket.clear_acl()

    At this point all the custom rules you created have been removed.
    """

    return self.save_acl(acl=[])
+
def reload_default_object_acl(self):
    """Reload the Default Object ACL rules for this bucket.

    Replaces ``self.default_object_acl`` with a fresh
    :class:`DefaultObjectACL` built from the
    ``defaultObjectAcl`` metadata field.

    :rtype: :class:`Bucket`
    :returns: The current bucket.
    """
    self.default_object_acl = DefaultObjectACL(bucket=self)
    for entry in self.get_metadata('defaultObjectAcl', []):
        self.default_object_acl.add_entity(
            self.default_object_acl.entity_from_dict(entry))
    return self
+
def get_default_object_acl(self):
    """Get the current Default Object ACL rules.

    If the appropriate metadata isn't available locally,
    this method will reload it from Cloud Storage.

    :rtype: :class:`gcloud.storage.acl.DefaultObjectACL`
    :returns: A DefaultObjectACL object for this bucket.
    """
    if self.default_object_acl:
        return self.default_object_acl
    # reload_default_object_acl() returns self; chain to the fresh ACL.
    return self.reload_default_object_acl().default_object_acl
+
def save_default_object_acl(self, acl=None):
    """Save the Default Object ACL rules for this bucket.

    :type acl: :class:`gcloud.storage.acl.DefaultObjectACL`
    :param acl: The DefaultObjectACL object to save.
                If not provided, this will look at
                the ``default_object_acl`` property
                and save that.
    """

    # BUG FIX: the old ``acl = acl or self.default_object_acl`` treated
    # an explicit empty list the same as None, so passing ``[]`` (as
    # clear_default_object_acl does) silently re-saved the existing ACL
    # instead of clearing it.  Mirror save_acl: only ``None`` falls back.
    if acl is None:
        acl = self.default_object_acl

    if acl is None:
        # Nothing provided and nothing stored locally -- nothing to do.
        return self

    return self.patch_metadata({'defaultObjectAcl': list(acl)})
+
def clear_default_object_acl(self):
    """Remove the Default Object ACL from this bucket.

    Shortcut for saving an empty Default Object ACL rule list.
    """
    return self.save_default_object_acl(acl=[])
+
def make_public(self, recursive=False, future=False):
    """Make this bucket (and optionally its contents) publicly readable.

    :type recursive: bool
    :param recursive: If True, this will make all keys inside the bucket
                      public as well.

    :type future: bool
    :param future: If True, this will make all objects created in the future
                   public as well.
    """
    # Grant everyone read access on the bucket itself.
    bucket_acl = self.get_acl()
    bucket_acl.all().grant_read()
    self.save_acl()

    if future:
        # Everything created later should be world-readable too.
        default_acl = self.get_default_object_acl()
        default_acl.all().grant_read()
        self.save_default_object_acl()

    if recursive:
        # Walk the existing keys and open each one up as well.
        for existing_key in self:
            existing_key.get_acl().all().grant_read()
            existing_key.save_acl()
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
new file mode 100644
index 000000000000..4919d43281ba
--- /dev/null
+++ b/gcloud/storage/connection.py
@@ -0,0 +1,402 @@
+import httplib2
+import json
+import urllib
+
+
+from gcloud import connection
+from gcloud.storage import exceptions
+from gcloud.storage.bucket import Bucket
+from gcloud.storage.iterator import BucketIterator
+
+
class Connection(connection.Connection):
    """A connection to Google Cloud Storage via the JSON REST API.

    This class should understand only the basic types (and protobufs)
    in method arguments, however should be capable of returning advanced types.

    See :class:`gcloud.connection.Connection` for a full list of parameters.
    :class:`Connection` differs only in needing a project name
    (which you specify when creating a project in the Cloud Console).

    A typical use of this is to operate on
    :class:`gcloud.storage.bucket.Bucket` objects::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project_name, email, key_path)
      >>> bucket = connection.create_bucket('my-bucket-name')

    You can then delete this bucket::

      >>> bucket.delete()
      >>> # or
      >>> connection.delete_bucket(bucket)

    If you want to access an existing bucket::

      >>> bucket = connection.get_bucket('my-bucket-name')

    A :class:`Connection` is actually iterable
    and will return the :class:`gcloud.storage.bucket.Bucket` objects
    inside the project::

      >>> for bucket in connection:
      >>>   print bucket
      <Bucket: my-bucket-name>

    In that same way, you can check for whether a bucket exists
    inside the project using Python's ``in`` operator::

      >>> print 'my-bucket-name' in connection
      True
    """

    API_VERSION = 'v1beta2'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}'
    """A template used to craft the URL pointing toward a particular API call."""

    def __init__(self, project_name, *args, **kwargs):
        """
        :type project_name: string
        :param project_name: The project name to connect to.
        """
        super(Connection, self).__init__(*args, **kwargs)
        self.project_name = project_name

    def __iter__(self):
        """Iterate over every bucket in this connection's project."""
        return iter(BucketIterator(connection=self))

    def __contains__(self, bucket_name):
        """Support ``'bucket-name' in connection`` existence checks."""
        return self.lookup(bucket_name) is not None

    def build_api_url(self, path, query_params=None, api_base_url=None,
                      api_version=None):
        """Construct an API url given a few components, some optional.

        Typically, you shouldn't need to use this method.

        :type path: string
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).

        :type query_params: dict
        :param query_params: A dictionary of keys and values to insert into
                             the query string of the URL.

        :type api_base_url: string
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.

        :type api_version: string
        :param api_version: The version of the API to call.
                            Typically you shouldn't provide this and instead
                            use the default for the library.

        :rtype: string
        :returns: The URL assembled from the pieces provided.
        """
        url = self.API_URL_TEMPLATE.format(
            api_base_url=(api_base_url or self.API_BASE_URL),
            api_version=(api_version or self.API_VERSION),
            path=path)

        # Every request is scoped to this connection's project.
        query_params = query_params or {}
        query_params.update({'project': self.project_name})
        url += '?' + urllib.urlencode(query_params)

        return url

    def make_request(self, method, url, data=None, content_type=None,
                     headers=None):
        """A low level method to send a request to the API.

        Typically, you shouldn't need to use this method.

        :type method: string
        :param method: The HTTP method to use in the request.

        :type url: string
        :param url: The URL to send the request to.

        :type data: string
        :param data: The data to send as the body of the request.

        :type content_type: string
        :param content_type: The proper MIME type of the data provided.

        :type headers: dict
        :param headers: A dictionary of HTTP headers to send with the request.

        :rtype: tuple of ``response`` (a dictionary of sorts)
                and ``content`` (a string).
        :returns: The HTTP response object and the content of the response.
        """
        headers = headers or {}
        headers['Accept-Encoding'] = 'gzip'

        # NOTE(review): len(str(data)) assumes a byte-string body; it
        # undercounts multi-byte characters in unicode payloads -- confirm
        # callers only pass encoded data.
        if data:
            content_length = len(str(data))
        else:
            content_length = 0

        headers['Content-Length'] = content_length

        if content_type:
            headers['Content-Type'] = content_type

        return self.http.request(uri=url, method=method, headers=headers,
                                 body=data)

    def api_request(self, method, path=None, query_params=None,
                    data=None, content_type=None,
                    api_base_url=None, api_version=None,
                    expect_json=True):
        """Make a request over the HTTP transport to the Cloud Storage API.

        You shouldn't need to use this method,
        but if you plan to interact with the API using these primitives,
        this is the correct one to use...

        :type method: string
        :param method: The HTTP method name (ie, ``GET``, ``POST``, etc).

        :type path: string
        :param path: The path to the resource (ie, ``'/b/bucket-name'``).

        :type query_params: dict
        :param query_params: A dictionary of keys and values to insert into
                             the query string of the URL.

        :type data: string
        :param data: The data to send as the body of the request.

        :type content_type: string
        :param content_type: The proper MIME type of the data provided.

        :type api_base_url: string
        :param api_base_url: The base URL for the API endpoint.
                             Typically you won't have to provide this.

        :type api_version: string
        :param api_version: The version of the API to call.
                            Typically you shouldn't provide this and instead
                            use the default for the library.

        :type expect_json: bool
        :param expect_json: If True, this method will try to parse the response
                            as JSON and raise an exception if that cannot be done.

        :rtype: dict (when ``expect_json`` is True) or string
        :returns: The parsed JSON response, or the raw response body.
        :raises: Exception if the response code is not 200 OK.
        """
        url = self.build_api_url(path=path, query_params=query_params,
                                 api_base_url=api_base_url,
                                 api_version=api_version)

        # Making the executive decision that any dictionary
        # data will be sent properly as JSON.
        if data and isinstance(data, dict):
            data = json.dumps(data)
            content_type = 'application/json'

        response, content = self.make_request(
            method=method, url=url, data=data, content_type=content_type)

        # TODO: Add better error handling.
        if response.status == 404:
            raise exceptions.NotFoundError(response, content)
        elif not 200 <= response.status < 300:
            raise exceptions.ConnectionError(response, content)

        if content and expect_json:
            # TODO: Better checking on this header for JSON.
            content_type = response.get('content-type', '')
            if not content_type.startswith('application/json'):
                raise TypeError('Expected JSON, got %s' % content_type)
            return json.loads(content)

        return content

    def get_all_buckets(self, *args, **kwargs):
        """Get all buckets in the project.

        This will not populate the list of keys available
        in each bucket.

        You can also iterate over the connection object,
        so these two operations are identical::

          >>> from gcloud import storage
          >>> connection = storage.get_connection(project_name, email, key_path)
          >>> for bucket in connection.get_all_buckets():
          >>>   print bucket
          >>> # ... is the same as ...
          >>> for bucket in connection:
          >>>   print bucket

        :rtype: list of :class:`gcloud.storage.bucket.Bucket` objects.
        :returns: All buckets belonging to this project.
        """
        return list(self)

    def get_bucket(self, bucket_name, *args, **kwargs):
        """Get a bucket by name.

        If the bucket isn't found,
        this will raise a :class:`gcloud.storage.exceptions.NotFoundError`.
        If you would rather get a bucket by name,
        and return ``None`` if the bucket isn't found
        (like ``{}.get('...')``)
        then use :func:`Connection.lookup`.

        For example::

          >>> from gcloud import storage
          >>> from gcloud.storage import exceptions
          >>> connection = storage.get_connection(project_name, email, key_path)
          >>> try:
          >>>   bucket = connection.get_bucket('my-bucket')
          >>> except exceptions.NotFoundError:
          >>>   print 'Sorry, that bucket does not exist!'

        :type bucket_name: string
        :param bucket_name: The name of the bucket to get.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket matching the name provided.
        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
        """
        # TODO: URL-encode the bucket name to be safe?
        bucket = self.new_bucket(bucket_name)
        response = self.api_request(method='GET', path=bucket.path)
        return Bucket.from_dict(response, connection=self)

    def lookup(self, bucket_name):
        """Get a bucket by name, returning None if not found.

        You can use this if you would rather
        check for a None value
        than catch an exception::

          >>> from gcloud import storage
          >>> connection = storage.get_connection(project_name, email, key_path)
          >>> bucket = connection.lookup('doesnt-exist')
          >>> print bucket
          None
          >>> bucket = connection.lookup('my-bucket')
          >>> print bucket
          <Bucket: my-bucket>

        :type bucket_name: string
        :param bucket_name: The name of the bucket to get.

        :rtype: :class:`gcloud.storage.bucket.Bucket` or None
        :returns: The bucket matching the name provided or None if not found.
        """
        try:
            return self.get_bucket(bucket_name)
        except exceptions.NotFoundError:
            return None

    def create_bucket(self, bucket, *args, **kwargs):
        # TODO: Which exceptions will this raise?
        """Create a new bucket.

        For example::

          >>> from gcloud import storage
          >>> connection = storage.get_connection(project_name, client, key_path)
          >>> bucket = connection.create_bucket('my-bucket')
          >>> print bucket
          <Bucket: my-bucket>

        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
        :param bucket: The bucket name (or bucket object) to create.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The newly created bucket.
        """
        bucket = self.new_bucket(bucket)
        response = self.api_request(method='POST', path='/b',
                                    data={'name': bucket.name})
        return Bucket.from_dict(response, connection=self)

    def delete_bucket(self, bucket, *args, **kwargs):
        """Delete a bucket.

        You can use this method to delete a bucket by name,
        or to delete a bucket object::

          >>> from gcloud import storage
          >>> connection = storage.get_connection(project_name, email, key_path)
          >>> connection.delete_bucket('my-bucket')
          True

        You can also pass in the bucket object::

          >>> bucket = connection.get_bucket('other-bucket')
          >>> connection.delete_bucket(bucket)
          True

        If the bucket doesn't exist,
        this will raise a :class:`gcloud.storage.exceptions.NotFoundError`::

          >>> from gcloud.storage import exceptions
          >>> try:
          >>>   connection.delete_bucket('my-bucket')
          >>> except exceptions.NotFoundError:
          >>>   print 'That bucket does not exist!'

        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
        :param bucket: The bucket name (or bucket object) to delete.

        :rtype: bool
        :returns: True if the bucket was deleted.
        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
        """
        bucket = self.new_bucket(bucket)
        # Any non-2xx status raises inside api_request, so reaching the
        # return statement means the DELETE succeeded.
        self.api_request(method='DELETE', path=bucket.path)
        return True

    def new_bucket(self, bucket):
        """Factory method for creating a new (unsaved) bucket object.

        This method is really useful when you're not sure whether
        you have an actual :class:`gcloud.storage.bucket.Bucket` object
        or just a name of a bucket.
        It always returns the object::

          >>> bucket = connection.new_bucket('bucket')
          >>> print bucket
          <Bucket: bucket>
          >>> bucket = connection.new_bucket(bucket)
          >>> print bucket
          <Bucket: bucket>

        :type bucket: string or :class:`gcloud.storage.bucket.Bucket`
        :param bucket: A name of a bucket or an existing Bucket object.

        :rtype: :class:`gcloud.storage.bucket.Bucket`
        :returns: The bucket object (never saved to the API).
        :raises: TypeError if ``bucket`` is neither a string nor a Bucket.
        """
        if isinstance(bucket, Bucket):
            return bucket

        # Support Python 2 and 3: 'basestring' only exists in Python 2.
        try:
            string_type = basestring
        except NameError:
            string_type = str

        if isinstance(bucket, string_type):
            return Bucket(connection=self, name=bucket)

        raise TypeError('Invalid bucket: %s' % bucket)
diff --git a/gcloud/storage/demo.key b/gcloud/storage/demo.key
new file mode 100644
index 000000000000..434d999f6b6d
Binary files /dev/null and b/gcloud/storage/demo.key differ
diff --git a/gcloud/storage/demo.py b/gcloud/storage/demo.py
new file mode 100644
index 000000000000..f701d14d8060
--- /dev/null
+++ b/gcloud/storage/demo.py
@@ -0,0 +1,112 @@
+from code import interact
+import os.path
+import sys
+import time
+
+from gcloud import storage
+
+
# Public API of the demo module.
__all__ = ['CLIENT_EMAIL', 'PRIVATE_KEY_PATH', 'PROJECT_NAME',
           'get_connection', 'main']


# Demo service-account credentials; the private key ships as
# 'demo.key' alongside this module.
CLIENT_EMAIL = '606734090113-6ink7iugcv89da9sru7lii8bs3i0obqg@developer.gserviceaccount.com'
PRIVATE_KEY_PATH = os.path.join(os.path.dirname(storage.__file__), 'demo.key')
PROJECT_NAME = 'gcloud-storage-demo'

# Whether the next write() needs a separating blank line after a code() call.
extra_newline = False
# Shared namespaces so state persists across exec'd demo snippets.
code_globals, code_locals = globals(), locals()
+
+
def get_connection():
    """Return a storage connection authenticated with the demo credentials."""
    return storage.get_connection(PROJECT_NAME, CLIENT_EMAIL, PRIVATE_KEY_PATH)
+
+
def write(*strings):
    """Print each string, then pause until the user hits enter.

    (Python 2 script: uses ``print`` statements and ``raw_input``.)
    """
    # Add an extra newline if necessary.
    global extra_newline
    if extra_newline:
        print

    for string in strings:
        print string
    raw_input()

    # We don't need an extra newline after this.
    extra_newline = False
+
+
def code(string, comment=None):
    """Simulate typing ``string`` at a ``>>>`` prompt, then execute it.

    The snippet runs via ``exec`` in the module-level shared namespaces
    (``code_globals`` / ``code_locals``) so variables and imports persist
    between demo steps.
    """
    # Seconds to sleep between "keystrokes" for the typing effect.
    keypress_time = 0.05

    print '>>> ',
    for char in string:
        time.sleep(keypress_time)
        sys.stdout.write(char)
        sys.stdout.flush()

    if comment:
        sys.stdout.write(' # %s' % comment)

    # Wait for an enter key before continuing...
    raw_input()

    # Yes, this is crazy unsafe... but it's demo code.
    # Globalize these so our imports hang around...
    global code_globals
    global code_locals
    exec(string, code_globals, code_locals)

    # In the next non-code piece, we need an extra newline.
    global extra_newline
    extra_newline = True
+
+
def main():
    """Run the interactive Cloud Storage walkthrough.

    Alternates write() (narration) and code() (simulated typing that is
    exec'd for real), then drops into an interactive prompt sharing the
    same namespace.
    """
    write('Welcome to the gCloud Storage Demo! (hit enter)')
    write('We\'re going to walk through some of the basics...',
          'Don\'t worry though. You don\'t need to do anything, just keep hitting enter...')

    write('Let\'s start by importing the demo module and getting a connection:')
    code('from gcloud.storage import demo')
    code('connection = demo.get_connection()')

    write('OK, now let\'s look at all of the buckets...')
    code('print connection.get_all_buckets()',
         'This might take a second...')

    write('Now let\'s create a new bucket...')
    code('import time')
    code('bucket_name = ("bucket-%s" % time.time()).replace(".", "")',
         'Get rid of dots...')
    code('print bucket_name')
    code('bucket = connection.create_bucket(bucket_name)')
    code('print bucket')

    write('Let\'s look at all of the buckets again...')
    code('print connection.get_all_buckets()')

    write('How about we create a new key inside this bucket.')
    code('key = bucket.new_key("my-new-file.txt")')

    write('Now let\'s put some data in there.')
    code('key.set_contents_from_string("this is some data!")')

    write('... and we can read that data back again.')
    code('print key.get_contents_as_string()')

    write('Now let\'s delete that key.')
    code('print key.delete()')

    write('And now that we\'re done, let\'s delete that bucket...')
    code('print bucket.delete()')

    write('Alright! That\'s all!',
          'Here\'s an interactive prompt for you now...')

    # Hand the user a REPL that shares the namespace the demo built up.
    global code_locals
    interact('(Hit CTRL-D to exit...)', local=code_locals)
+
+
if __name__ == '__main__':
    # Allow running the demo directly: python -m gcloud.storage.demo
    main()
diff --git a/gcloud/storage/exceptions.py b/gcloud/storage/exceptions.py
new file mode 100644
index 000000000000..1d23a96cfdb8
--- /dev/null
+++ b/gcloud/storage/exceptions.py
@@ -0,0 +1,21 @@
+# TODO: Make these super useful.
+
class StorageError(Exception):
    """Base class for all Cloud Storage errors."""
+
+
class ConnectionError(StorageError):
    """Raised when an API request does not return a success status.

    NOTE(review): this shadows the Python 3 builtin ``ConnectionError``
    within this module -- confirm that is intentional.
    """

    def __init__(self, response, content):
        # Fold the response metadata and payload into a single message.
        super(ConnectionError, self).__init__(str(response) + content)
+
+
class NotFoundError(ConnectionError):
    """Raised when a request returns HTTP 404 (resource not found)."""

    def __init__(self, response, content):
        # Deliberately skips ConnectionError.__init__: the 404 body is
        # not useful in the message.  BUG FIX: the Exception base was
        # never initialized, so ``err.args`` was empty and ``str(err)``
        # returned '' -- initialize it with the message.
        # NOTE(review): assumes ``response`` has a ``url`` attribute --
        # confirm against the HTTP library's response object.
        self.message = 'GET %s returned a 404.' % (response.url)
        Exception.__init__(self, self.message)
+
+
class StorageDataError(StorageError):
    """Raised for problems with the data itself (as opposed to transport)."""
diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py
new file mode 100644
index 000000000000..f53729ea38ee
--- /dev/null
+++ b/gcloud/storage/iterator.py
@@ -0,0 +1,244 @@
+"""Iterators for paging through API responses.
+
+These iterators
+simplify the process
+of paging through API responses
+where the response
+is a list of results
+with a ``nextPageToken``.
+
+To make an iterator work,
+just override the ``get_items_from_response`` method
+so that given a response
+(containing a page of results)
+it parses those results
+into an iterable
+of the actual objects you want::
+
+ class MyIterator(Iterator):
+ def get_items_from_response(self, response):
+ items = response.get('items', [])
+ for item in items:
+ yield MyItemClass.from_dict(item, other_arg=True)
+
+You then can use this
+to get **all** the results
+from a resource::
+
+ >>> iterator = MyIterator(...)
+ >>> list(iterator) # Convert to a list (consumes all values).
+
+Or you can walk your way through items
+and call off the search early
+if you find what you're looking for
+(resulting in possibly fewer requests)::
+
+ >>> for item in MyIterator(...):
+ >>> print item.name
+ >>> if not item.is_valid:
+ >>> break
+"""
+
+
class Iterator(object):
    """A generic class for iterating through Cloud Storage list responses.

    Subclasses must implement :func:`Iterator.get_items_from_response`.

    :type connection: :class:`gcloud.storage.connection.Connection`
    :param connection: The connection to use to make requests.

    :type path: string
    :param path: The path to query for the list of items.
    """

    def __init__(self, connection, path):
        self.connection = connection
        self.path = path
        self.page_number = 0
        self.next_page_token = None

    def __iter__(self):
        """Yield every item from every page, fetching pages lazily."""
        while self.has_next_page():
            page = self.get_next_page_response()
            for item in self.get_items_from_response(page):
                yield item

    def has_next_page(self):
        """Determines whether or not this iterator has more pages.

        :rtype: bool
        :returns: Whether the iterator has more pages or not.
        """
        # Before the first request we always assume there is a page.
        if self.page_number == 0:
            return True
        return self.next_page_token is not None

    def get_query_params(self):
        """Getter for query parameters for the next request.

        :rtype: dict or None
        :returns: A dictionary of query parameters or None if there are none.
        """
        if not self.next_page_token:
            return None
        return {'pageToken': self.next_page_token}

    def get_next_page_response(self):
        """Requests the next page from the path provided.

        :rtype: dict
        :returns: The parsed JSON response of the next page's contents.
        """
        if not self.has_next_page():
            raise RuntimeError('No more pages. Try resetting the iterator.')

        response = self.connection.api_request(
            method='GET', path=self.path,
            query_params=self.get_query_params())

        # Track the paging state from the response.
        self.page_number += 1
        self.next_page_token = response.get('nextPageToken')
        return response

    def reset(self):
        """Resets the iterator to the beginning."""
        self.page_number = 0
        self.next_page_token = None

    def get_items_from_response(self, response):
        """Factory method called while iterating; subclasses must override.

        Given the API response for one page of results,
        it should return an iterable of the items
        (typically Buckets or Keys) to yield.

        :type response: dict
        :param response: The response of asking for the next page of items.

        :rtype: iterable
        :returns: Items that the iterator should yield.
        """
        raise NotImplementedError
+
+
class BucketIterator(Iterator):
    """An iterator listing all buckets.

    You shouldn't have to use this directly,
    but instead should use the helper methods
    on :class:`gcloud.storage.connection.Connection` objects.

    :type connection: :class:`gcloud.storage.connection.Connection`
    :param connection: The connection to use for querying the list of buckets.
    """

    def __init__(self, connection):
        # All buckets live under the top-level '/b' path.
        super(BucketIterator, self).__init__(connection=connection, path='/b')

    def get_items_from_response(self, response):
        """Yield a :class:`gcloud.storage.bucket.Bucket` for each response item.

        :type response: dict
        :param response: The JSON API response for a page of buckets.
        """
        # Imported here to avoid a circular import at module load time.
        from gcloud.storage.bucket import Bucket
        for bucket_data in response.get('items', []):
            yield Bucket.from_dict(bucket_data, connection=self.connection)
+
+
class KeyIterator(Iterator):
    """An iterator listing keys.

    You shouldn't have to use this directly,
    but instead should use the helper methods
    on :class:`gcloud.storage.key.Key` objects.

    :type bucket: :class:`gcloud.storage.bucket.Bucket`
    :param bucket: The bucket from which to list keys.
    """

    def __init__(self, bucket):
        self.bucket = bucket
        # Keys live under '<bucket path>/o' in the JSON API.
        super(KeyIterator, self).__init__(
            connection=bucket.connection, path=bucket.path + '/o')

    def get_items_from_response(self, response):
        """Yield a :class:`gcloud.storage.key.Key` for each response item.

        :type response: dict
        :param response: The JSON API response for a page of keys.
        """
        # Imported here to avoid a circular import at module load time.
        from gcloud.storage.key import Key
        for key_data in response.get('items', []):
            yield Key.from_dict(key_data, bucket=self.bucket)
+
+
class KeyDataIterator(object):
    """Streams a key's data in CHUNK_SIZE pieces via HTTP range requests.

    :type key: :class:`gcloud.storage.key.Key`
    :param key: The key whose data should be downloaded.
    """
    # TODO: Use an actual HTTP streaming library,
    # not multiple requests and the range header.

    def __init__(self, key):
        self.key = key
        self.reset()

    def __iter__(self):
        while self.has_more_data():
            yield self.get_next_chunk()

    def reset(self):
        """Rewind to the beginning of the key's data."""
        self._bytes_written = 0
        self._total_bytes = None

    def has_more_data(self):
        """Return True while there are bytes left to download."""
        if not self._bytes_written:
            return True
        if not self._total_bytes:
            # self._total_bytes **should** be set by this point.
            # If it isn't, something is wrong.
            raise ValueError('Size of object is unknown... This is bad.')
        return self._bytes_written < self._total_bytes

    def get_headers(self):
        """Build the Range header for the next chunk request.

        NOTE(review): the HTTP Range end position is inclusive, so
        'bytes=0-CHUNK_SIZE' asks for CHUNK_SIZE + 1 bytes; harmless
        (``_bytes_written`` tracks actual content length) but confirm
        the off-by-one is intended.
        """
        start = self._bytes_written
        end = start + self.key.CHUNK_SIZE
        if self._total_bytes and end > self._total_bytes:
            # Ask for the open-ended remainder instead of overshooting.
            end = ''
        return {'Range': 'bytes=%s-%s' % (start, end)}

    def get_url(self):
        """Return the media-download URL for this key."""
        return self.key.connection.build_api_url(
            path=self.key.path, query_params={'alt': 'media'})

    def get_next_chunk(self):
        """Fetch and return the next chunk of the key's data."""
        if not self.has_more_data():
            raise RuntimeError('No more data in this iterator. Try resetting.')

        response, content = self.key.connection.make_request(
            method='GET', url=self.get_url(), headers=self.get_headers())

        if response.status not in (200, 206):
            # Expected a 200 or a 206... Got something else, which is bad.
            raise Exception(response)

        self._bytes_written += len(content)
        if 'content-range' in response:
            # 'Content-Range: bytes X-Y/TOTAL' -- remember the total size.
            content_range = response['content-range']
            self._total_bytes = int(content_range.rsplit('/', 1)[1])
        return content
diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py
new file mode 100644
index 000000000000..87232c407e59
--- /dev/null
+++ b/gcloud/storage/key.py
@@ -0,0 +1,415 @@
+import errno
+import json
+import mimetypes
+import os
+from StringIO import StringIO
+
+from gcloud.storage.acl import ObjectACL
+from gcloud.storage.iterator import KeyDataIterator
+
+
class Key(object):
    """A wrapper around Cloud Storage's concept of an ``Object``."""

    CHUNK_SIZE = 1024 * 1024  # 1 MB.
    """The size of a chunk of data whenever iterating (1 MB).

    This must be a multiple of 256 KB per the API specification.
    """

    def __init__(self, bucket=None, name=None, metadata=None):
        """
        :type bucket: :class:`gcloud.storage.bucket.Bucket`
        :param bucket: The bucket to which this key belongs.

        :type name: string
        :param name: The name of the key.
                     This corresponds to the unique path of the object
                     in the bucket.

        :type metadata: dict
        :param metadata: All the other data provided by Cloud Storage.
        """

        self.bucket = bucket
        self.name = name
        self.metadata = metadata or {}

        # Lazily get the ACL information.
        self.acl = None

    @classmethod
    def from_dict(cls, key_dict, bucket=None):
        """Instantiate a :class:`Key` from data returned by the JSON API.

        :type key_dict: dict
        :param key_dict: A dictionary of data returned from
                         getting an Cloud Storage object.

        :type bucket: :class:`gcloud.storage.bucket.Bucket`
        :param bucket: The bucket to which this key belongs
                       (and by proxy, which connection to use).

        :rtype: :class:`Key`
        :returns: A key based on the data provided.
        """

        return cls(bucket=bucket, name=key_dict['name'], metadata=key_dict)

    def __repr__(self):
        if self.bucket:
            bucket_name = self.bucket.name
        else:
            bucket_name = None

        # FIX: the format string here was '' (the template text was lost),
        # which made repr() raise TypeError ("not all arguments converted").
        return '<Key: %s, %s>' % (bucket_name, self.name)

    @property
    def path(self):
        """Getter property for the URL path to this Key.

        :rtype: string
        :returns: The URL path to this Key.
        :raises: ValueError if the bucket or the key name is not set.
        """

        if not self.bucket:
            raise ValueError('Cannot determine path without a bucket defined.')
        elif not self.name:
            raise ValueError('Cannot determine path without a key name.')

        return self.bucket.path + '/o/' + self.name

    @property
    def public_url(self):
        """Getter property for the public URL of this key.

        :rtype: string
        :returns: The key's public URL (assumes public-read access).
        """

        return '{storage_base_url}/{self.bucket.name}/{self.name}'.format(
            storage_base_url='http://commondatastorage.googleapis.com',
            self=self)

    @property
    def connection(self):
        """Getter property for the connection to use with this Key.

        :rtype: :class:`gcloud.storage.connection.Connection` or None
        :returns: The connection to use, or None if no connection is set.
        """

        # TODO: If a bucket isn't defined, this is basically useless.
        # Where do we throw an error?
        if self.bucket and self.bucket.connection:
            return self.bucket.connection

    def exists(self):
        """Determines whether or not this key exists.

        :rtype: bool
        :returns: True if the key exists in Cloud Storage.
        """

        return self.bucket.get_key(self.name) is not None

    def delete(self):
        """Deletes a key from Cloud Storage.

        :rtype: :class:`Key`
        :returns: The key that was just deleted.
        """

        return self.bucket.delete_key(self)

    def get_contents_to_file(self, fh):
        """Gets the contents of this key to a file-like object.

        :type fh: file
        :param fh: A file handle to which to write the key's data.

        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
        """

        for chunk in KeyDataIterator(self):
            try:
                fh.write(chunk)
            except IOError as e:
                if e.errno == errno.ENOSPC:
                    raise Exception('No space left on device.')
                # FIX: previously any other IOError was silently swallowed
                # and iteration continued, truncating/corrupting the output.
                raise

    def get_contents_to_filename(self, filename):
        """Get the contents of this key to a file by name.

        :type filename: string
        :param filename: A filename to be passed to ``open``.

        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
        """

        # TODO: Add good error checking.
        # TODO: Add good exception handling.
        # TODO: Set timestamp? Make optional, default being to set it if possible?
        with open(filename, 'wb') as fh:
            self.get_contents_to_file(fh)

    def get_contents_as_string(self):
        """Gets the data stored on this Key as a string.

        :rtype: string
        :returns: The data stored in this key.
        :raises: :class:`gcloud.storage.exceptions.NotFoundError`
        """

        string_buffer = StringIO()
        self.get_contents_to_file(string_buffer)
        return string_buffer.getvalue()

    def set_contents_from_file(self, fh, rewind=False, size=None,
                               content_type=None):
        """Set the contents of this key to the contents of a file handle.

        Uploads via the resumable-upload protocol: one POST to create the
        session, then one request per ``CHUNK_SIZE`` slice of the file.

        :type fh: file
        :param fh: A file handle open for reading.

        :type rewind: bool
        :param rewind: If True, seek to the beginning of the file handle before
                       writing the file to Cloud Storage.

        :type size: int
        :param size: The number of bytes to read from the file handle.
                     If not provided, we'll try to guess the size using
                     :func:`os.fstat`

        :type content_type: string
        :param content_type: (optional) MIME type of the data being uploaded;
                             defaults to ``application/unknown``.
        """

        # Rewind the file if desired.
        if rewind:
            fh.seek(0, os.SEEK_SET)

        # Get the basic stats about the file.
        total_bytes = size or os.fstat(fh.fileno()).st_size
        bytes_uploaded = 0

        # Set up a resumable upload session.
        headers = {
            'X-Upload-Content-Type': content_type or 'application/unknown',
            'X-Upload-Content-Length': total_bytes
        }

        upload_url = self.connection.build_api_url(
            path=self.bucket.path + '/o',
            query_params={'uploadType': 'resumable', 'name': self.name},
            api_base_url=self.connection.API_BASE_URL + '/upload')

        response, content = self.connection.make_request(
            method='POST', url=upload_url,
            headers=headers)

        # Get the resumable upload URL from the session response.
        upload_url = response['location']

        while bytes_uploaded < total_bytes:
            # Construct the range header.
            data = fh.read(self.CHUNK_SIZE)
            chunk_size = len(data)

            start = bytes_uploaded
            end = bytes_uploaded + chunk_size - 1

            headers = {
                'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes),
            }

            # TODO: Error checking for response code.
            # TODO: Exponential backoff when errors come through.
            response, content = self.connection.make_request(
                content_type='text/plain', method='POST', url=upload_url,
                headers=headers, data=data)

            bytes_uploaded += chunk_size

    def set_contents_from_filename(self, filename):
        """Open a path and set this key's contents to the content of that file.

        :type filename: string
        :param filename: The path to the file.
        """

        # Guess the content type from the file extension (may be None).
        content_type, _ = mimetypes.guess_type(filename)

        with open(filename, 'rb') as fh:
            self.set_contents_from_file(fh, content_type=content_type)

    def set_contents_from_string(self, data, content_type='text/plain'):
        """Sets the contents of this key to the provided string.

        You can use this method to quickly set the value of a key::

          >>> from gcloud import storage
          >>> connection = storage.get_connection(project_name, email, key_path)
          >>> bucket = connection.get_bucket(bucket_name)
          >>> key = bucket.new_key('my_text_file.txt')
          >>> key.set_contents_from_string('This is the contents of my file!')

        Under the hood this is using a string buffer
        and calling :func:`gcloud.storage.key.Key.set_contents_from_file`.

        :type data: string
        :param data: The data to store in this key.

        :rtype: :class:`Key`
        :returns: The updated Key object.
        """

        # TODO: How do we handle NotFoundErrors?
        string_buffer = StringIO()
        string_buffer.write(data)
        # FIX: use len(data) rather than the StringIO-implementation-specific
        # ``.len`` attribute (not available on io.StringIO).
        self.set_contents_from_file(fh=string_buffer, rewind=True,
                                    size=len(data),
                                    content_type=content_type)
        return self

    def has_metadata(self, field=None):
        """Check if metadata is available locally.

        :type field: string
        :param field: (optional) the particular field to check for.

        :rtype: bool
        :returns: Whether metadata is available locally.
        """

        if not self.metadata:
            return False
        elif field and field not in self.metadata:
            return False
        else:
            return True

    def reload_metadata(self, full=False):
        """Reload metadata from Cloud Storage.

        :type full: bool
        :param full: If True, loads all data (include ACL data).

        :rtype: :class:`Key`
        :returns: The key you just reloaded data for.
        """

        projection = 'full' if full else 'noAcl'
        query_params = {'projection': projection}
        self.metadata = self.connection.api_request(
            method='GET', path=self.path, query_params=query_params)
        return self

    def get_metadata(self, field=None, default=None):
        """Get all metadata or a specific field.

        If you request a field that isn't available,
        and that field can be retrieved by refreshing data
        from Cloud Storage,
        this method will reload the data using
        :func:`Key.reload_metadata`.

        :type field: string
        :param field: (optional) A particular field to retrieve from metadata.

        :type default: anything
        :param default: The value to return if the field provided wasn't found.

        :rtype: dict or anything
        :returns: All metadata or the value of the specific field.
        """

        if not self.has_metadata(field=field):
            # ACL data is only present in the 'full' projection.
            full = (field and field == 'acl')
            self.reload_metadata(full=full)

        if field:
            return self.metadata.get(field, default)
        else:
            return self.metadata

    def patch_metadata(self, metadata):
        """Update particular fields of this key's metadata.

        This method will only update the fields provided
        and will not touch the other fields.

        It will also reload the metadata locally
        based on the servers response.

        :type metadata: dict
        :param metadata: The dictionary of values to update.

        :rtype: :class:`Key`
        :returns: The current key.
        """

        self.metadata = self.connection.api_request(
            method='PATCH', path=self.path, data=metadata,
            query_params={'projection': 'full'})
        return self

    def reload_acl(self):
        """Reload the ACL data from Cloud Storage.

        :rtype: :class:`Key`
        :returns: The current key.
        """

        self.acl = ObjectACL(key=self)

        for entry in self.get_metadata('acl', []):
            entity = self.acl.entity_from_dict(entry)
            self.acl.add_entity(entity)

        return self

    def get_acl(self):
        # TODO: This might be a VERY long list. Use the specific API endpoint.
        """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object.

        :rtype: :class:`gcloud.storage.acl.ObjectACL`
        :returns: An ACL object for the current key.
        """

        if not self.acl:
            self.reload_acl()
        return self.acl

    def save_acl(self, acl=None):
        """Save the ACL data for this key.

        :type acl: :class:`gcloud.storage.acl.ACL`
        :param acl: The ACL object to save.
                    If left blank, this will save the ACL
                    set locally on the key.
        """

        # We do things in this weird way because [] and None
        # both evaluate to False, but mean very different things.
        if acl is None:
            acl = self.acl

        if acl is None:
            return self

        return self.patch_metadata({'acl': list(acl)})

    def clear_acl(self):
        """Remove all ACL rules from the key.

        Note that this won't actually remove *ALL* the rules,
        but it will remove all the non-default rules.
        In short,
        you'll still have access
        to a key that you created
        even after you clear ACL rules
        with this method.
        """

        return self.save_acl(acl=[])

    def make_public(self):
        """Make this key public giving all users read access.

        :rtype: :class:`Key`
        :returns: The current key.
        """

        self.get_acl().all().grant_read()
        self.save_acl()
        return self
diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py
new file mode 100644
index 000000000000..d15c0e11484e
--- /dev/null
+++ b/gcloud/storage/test_connection.py
@@ -0,0 +1,10 @@
+import unittest2
+
+from gcloud.storage.connection import Connection
+
+
class TestConnection(unittest2.TestCase):
    """Unit tests for :class:`gcloud.storage.connection.Connection`."""

    def test_init(self):
        """The constructor should store the project name verbatim."""
        conn = Connection('project-name')
        self.assertEqual('project-name', conn.project_name)