Add generate_upload_policy (#2998)
Jon Wayne Parrott authored Feb 16, 2017
1 parent 635f439 commit 0a311f7
Showing 3 changed files with 206 additions and 2 deletions.
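The new Bucket.generate_upload_policy() method signs a policy document with the client's service-account credentials and returns the form fields a browser needs to POST an upload directly to Cloud Storage. As a quick orientation before the diff, here is a minimal sketch of calling it; the bucket name and conditions are illustrative only and are not part of this change:

from google.cloud import storage

client = storage.Client()  # requires service-account credentials with a private key
bucket = client.bucket('example-bucket')  # hypothetical bucket name

# Conditions constrain what the signed policy permits; see the policy
# documents reference linked in the docstring for the full grammar.
conditions = [
    ['starts-with', '$key', 'uploads/'],
    {'acl': 'public-read'},
]

fields = bucket.generate_upload_policy(conditions)
# `fields` maps form field names to values: 'bucket', 'GoogleAccessId',
# 'policy', and 'signature', ready to embed as hidden inputs in an HTML
# upload form.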
33 changes: 33 additions & 0 deletions docs/storage_snippets.py
@@ -220,6 +220,39 @@ def list_buckets(client, to_delete):
    to_delete.append(bucket)


@snippet
def policy_document(client, to_delete):
    # pylint: disable=unused-argument
    # [START policy_document]
    bucket = client.bucket('my-bucket')
    conditions = [
        ['starts-with', '$key', ''],
        {'acl': 'public-read'}]

    policy = bucket.generate_upload_policy(conditions)

    # Generate an upload form using the form fields.
    policy_fields = ''.join(
        '<input type="hidden" name="{key}" value="{value}">'.format(
            key=key, value=value)
        for key, value in policy.items()
    )

    upload_form = (
        '<form action="http://{bucket_name}.storage.googleapis.com"'
        ' method="post" enctype="multipart/form-data">'
        '<input type="text" name="key" value="">'
        '<input type="hidden" name="bucket" value="{bucket_name}">'
        '<input type="hidden" name="acl" value="public-read">'
        '<input name="file" type="file">'
        '<input type="submit" value="Upload">'
        '{policy_fields}'
        '</form>').format(bucket_name=bucket.name, policy_fields=policy_fields)

    print(upload_form)
    # [END policy_document]


def _line_no(func):
    code = getattr(func, '__code__', None) or getattr(func, 'func_code')
    return code.co_firstlineno
79 changes: 79 additions & 0 deletions storage/google/cloud/storage/bucket.py
@@ -14,10 +14,16 @@

"""Create / interact with Google Cloud Storage buckets."""

import base64
import copy
import datetime
import json

import google.auth.credentials
import six

from google.cloud._helpers import _datetime_to_rfc3339
from google.cloud._helpers import _NOW
from google.cloud._helpers import _rfc3339_to_datetime
from google.cloud.exceptions import NotFound
from google.cloud.iterator import HTTPIterator
@@ -829,3 +835,76 @@ def make_public(self, recursive=False, future=False, client=None):
            for blob in blobs:
                blob.acl.all().grant_read()
                blob.acl.save(client=client)

    def generate_upload_policy(
            self, conditions, expiration=None, client=None):
        """Create a signed upload policy for uploading objects.

        This method generates and signs a policy document. You can use
        `policy documents`_ to allow visitors to a website to upload files to
        Google Cloud Storage without giving them direct write access.

        For example:

        .. literalinclude:: storage_snippets.py
            :start-after: [START policy_document]
            :end-before: [END policy_document]

        .. _policy documents:
            https://cloud.google.com/storage/docs/xml-api\
            /post-object#policydocument

        :type expiration: datetime
        :param expiration: Optional expiration in UTC. If not specified, the
                           policy will expire in 1 hour.

        :type conditions: list
        :param conditions: A list of conditions as described in the
                           `policy documents`_ documentation.

        :type client: :class:`~google.cloud.storage.client.Client`
        :param client: Optional. The client to use. If not passed, falls back
                       to the ``client`` stored on the current bucket.

        :rtype: dict
        :returns: A dictionary of (form field name, form field value) of form
                  fields that should be added to your HTML upload form in
                  order to attach the signature.
        """
        client = self._require_client(client)
        credentials = client._base_connection.credentials

        if not isinstance(credentials, google.auth.credentials.Signing):
            auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/'
                        'google-cloud-auth.html#setting-up-a-service-account')
            raise AttributeError(
                'You need a private key to sign credentials. '
                'The credentials you are currently using (%s) '
                'only contain a token. See %s for more '
                'details.' % (type(credentials), auth_uri))

        if expiration is None:
            expiration = _NOW() + datetime.timedelta(hours=1)

        conditions = conditions + [
            {'bucket': self.name},
        ]

        policy_document = {
            'expiration': _datetime_to_rfc3339(expiration),
            'conditions': conditions,
        }

        encoded_policy_document = base64.b64encode(
            json.dumps(policy_document).encode('utf-8'))
        signature = base64.b64encode(
            credentials.sign_bytes(encoded_policy_document))

        fields = {
            'bucket': self.name,
            'GoogleAccessId': credentials.signer_email,
            'policy': encoded_policy_document.decode('utf-8'),
            'signature': signature.decode('utf-8'),
        }

        return fields
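For context, the returned fields can also be exercised without an HTML form. The sketch below is not part of this commit: it posts directly to the bucket's upload endpoint with the `requests` library, and the endpoint URL plus the extra `key` and `acl` fields are assumptions based on the XML API POST Object documentation.

import requests


def post_with_policy(bucket, policy_fields, object_name, data):
    """Upload `data` using fields from Bucket.generate_upload_policy()."""
    form = dict(policy_fields)
    # These per-request fields must satisfy the conditions the policy was
    # signed with (e.g. 'starts-with $key' and 'acl: public-read' above).
    form['key'] = object_name
    form['acl'] = 'public-read'

    # `requests` sends `data` fields before `files`, so the file part
    # arrives last, as the XML API expects (assumed endpoint URL).
    url = 'https://{}.storage.googleapis.com'.format(bucket.name)
    response = requests.post(url, data=form,
                             files={'file': (object_name, data)})
    response.raise_for_status()
    return response

# Illustrative usage:
#   post_with_policy(bucket, fields, 'uploads/hello.txt', b'hello world')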
96 changes: 94 additions & 2 deletions storage/unit_tests/test_bucket.py
@@ -12,8 +12,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import unittest

import mock


def _create_signing_credentials():
    import google.auth.credentials

    class _SigningCredentials(
            google.auth.credentials.Credentials,
            google.auth.credentials.Signing):
        pass

    credentials = mock.Mock(spec=_SigningCredentials)

    return credentials


class Test_Bucket(unittest.TestCase):

@@ -782,7 +798,6 @@ def test_storage_class_setter_DURABLE_REDUCED_AVAILABILITY(self):
        self.assertTrue('storageClass' in bucket._changes)

    def test_time_created(self):
        import datetime
        from google.cloud._helpers import _RFC3339_MICROS
        from google.cloud._helpers import UTC

@@ -903,7 +918,6 @@ def test_make_public_w_future_reload_default(self):
        self._make_public_w_future_helper(default_object_acl_loaded=False)

    def test_make_public_recursive(self):
        import mock
        from google.cloud.storage.acl import _ACLEntity

        _saved = []
@@ -1068,6 +1082,82 @@ def dummy_response():
        self.assertEqual(page2.num_items, 0)
        self.assertEqual(iterator.prefixes, set(['foo', 'bar']))

    def _test_generate_upload_policy_helper(self, **kwargs):
        import base64
        import json

        credentials = _create_signing_credentials()
        credentials.signer_email = mock.sentinel.signer_email
        credentials.sign_bytes.return_value = b'DEADBEEF'
        connection = _Connection()
        connection.credentials = credentials
        client = _Client(connection)
        name = 'name'
        bucket = self._make_one(client=client, name=name)

        conditions = [
            ['starts-with', '$key', '']]

        policy_fields = bucket.generate_upload_policy(conditions, **kwargs)

        self.assertEqual(policy_fields['bucket'], bucket.name)
        self.assertEqual(
            policy_fields['GoogleAccessId'], mock.sentinel.signer_email)
        self.assertEqual(
            policy_fields['signature'],
            base64.b64encode(b'DEADBEEF').decode('utf-8'))

        policy = json.loads(
            base64.b64decode(policy_fields['policy']).decode('utf-8'))

        policy_conditions = policy['conditions']
        expected_conditions = [{'bucket': bucket.name}] + conditions
        for expected_condition in expected_conditions:
            for condition in policy_conditions:
                if condition == expected_condition:
                    break
            else:  # pragma: NO COVER
                self.fail('Condition {} not found in {}'.format(
                    expected_condition, policy_conditions))

        return policy_fields, policy

    @mock.patch(
        'google.cloud.storage.bucket._NOW',
        return_value=datetime.datetime(1990, 1, 1))
    def test_generate_upload_policy(self, now):
        from google.cloud._helpers import _datetime_to_rfc3339

        _, policy = self._test_generate_upload_policy_helper()

        self.assertEqual(
            policy['expiration'],
            _datetime_to_rfc3339(
                now() + datetime.timedelta(hours=1)))

    def test_generate_upload_policy_args(self):
        from google.cloud._helpers import _datetime_to_rfc3339

        expiration = datetime.datetime(1990, 5, 29)

        _, policy = self._test_generate_upload_policy_helper(
            expiration=expiration)

        self.assertEqual(
            policy['expiration'],
            _datetime_to_rfc3339(expiration))

    def test_generate_upload_policy_bad_credentials(self):
        credentials = object()
        connection = _Connection()
        connection.credentials = credentials
        client = _Client(connection)
        name = 'name'
        bucket = self._make_one(client=client, name=name)

        with self.assertRaises(AttributeError):
            bucket.generate_upload_policy([])


class _Connection(object):
    _delete_bucket = False
@@ -1076,6 +1166,7 @@ def __init__(self, *responses):
        self._responses = responses
        self._requested = []
        self._deleted_buckets = []
        self.credentials = None

    @staticmethod
    def _is_bucket_path(path):
@@ -1108,4 +1199,5 @@ class _Client(object):

    def __init__(self, connection, project=None):
        self._connection = connection
        self._base_connection = connection
        self.project = project
