Merge pull request #488 from dhermes/remove-dataset-from-entity
Fix #484: Remove dataset from entity
dhermes committed Jan 6, 2015
2 parents a02a73e + 1dab892 commit 22c8b19
Showing 13 changed files with 103 additions and 302 deletions.
9 changes: 4 additions & 5 deletions gcloud/datastore/__init__.py
@@ -133,7 +133,7 @@ def get_dataset(dataset_id):
:returns: A dataset with a connection using the provided credentials.
"""
connection = get_connection()
return connection.dataset(dataset_id)
return Dataset(dataset_id, connection=connection)


def _require_dataset():
@@ -195,10 +195,9 @@ def get_entities(keys, missing=None, deferred=None,
missing=missing, deferred=deferred,
)

new_dataset = Dataset(dataset_id, connection=connection)
if missing is not None:
missing[:] = [
helpers.entity_from_protobuf(missed_pb, dataset=new_dataset)
helpers.entity_from_protobuf(missed_pb)
for missed_pb in missing]

if deferred is not None:
@@ -208,8 +207,8 @@ def get_entities(keys, missing=None, deferred=None,

entities = []
for entity_pb in entity_pbs:
entities.append(helpers.entity_from_protobuf(
entity_pb, dataset=new_dataset))
entities.append(helpers.entity_from_protobuf(entity_pb))

return entities


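For orientation, a minimal sketch of how the module-level helpers behave after this hunk: get_dataset() builds the Dataset itself, and get_entities() hands back entities that no longer carry a dataset. The 'my-dataset-id' value and the Person kind are placeholders, and the implicit connection and dataset ID are assumed to be configured in the environment.

    from gcloud import datastore
    from gcloud.datastore.key import Key

    # get_dataset() now constructs the Dataset directly instead of going
    # through connection.dataset() (placeholder dataset ID).
    dataset = datastore.get_dataset('my-dataset-id')

    # get_entities() builds entities straight from the protobufs; no
    # Dataset is threaded through entity_from_protobuf() any more.
    key = Key('Person', 1234, dataset_id='my-dataset-id')
    missing = []
    entities = datastore.get_entities([key], missing=missing)
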
14 changes: 0 additions & 14 deletions gcloud/datastore/connection.py
@@ -17,7 +17,6 @@
from gcloud import connection
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers
from gcloud.datastore.dataset import Dataset


class Connection(connection.Connection):
@@ -154,19 +153,6 @@ def mutation(self):
else:
return datastore_pb.Mutation()

def dataset(self, *args, **kwargs):
"""Factory method for Dataset objects.
:param args: All args and kwargs will be passed along to the
:class:`gcloud.datastore.dataset.Dataset` initializer.
:rtype: :class:`gcloud.datastore.dataset.Dataset`
:returns: A dataset object that will use this connection as
its transport.
"""
kwargs['connection'] = self
return Dataset(*args, **kwargs)

def lookup(self, dataset_id, key_pbs,
missing=None, deferred=None, eventual=False):
"""Lookup keys from a dataset in the Cloud Datastore.
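
With the dataset() factory removed from Connection, callers construct the Dataset themselves; a rough before/after sketch (the dataset ID is a placeholder):

    from gcloud import datastore
    from gcloud.datastore.dataset import Dataset

    connection = datastore.get_connection()

    # Previously: dataset = connection.dataset('my-dataset-id')
    dataset = Dataset('my-dataset-id', connection=connection)

This keeps Connection a pure transport and pushes the dataset association up to the caller.
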
138 changes: 28 additions & 110 deletions gcloud/datastore/entity.py
@@ -15,17 +15,12 @@
"""Class for representing a single entity in the Cloud Datastore."""

from gcloud.datastore import _implicit_environ
from gcloud.datastore.key import Key


class NoKey(RuntimeError):
"""Exception raised by Entity methods which require a key."""


class NoDataset(RuntimeError):
"""Exception raised by Entity methods which require a dataset."""


class Entity(dict):
"""Entities are akin to rows in a relational database
@@ -75,72 +70,21 @@ class Entity(dict):
Python3), will be saved using the 'blob_value' field, without
any decoding / encoding step.
:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: The dataset in which this entity belongs.
:type kind: string
:param kind: The kind of entity this is, akin to a table name in a
relational database.
:type dataset: :class:`gcloud.datastore.dataset.Dataset`, or None
:param dataset: the Dataset instance associated with this entity.
:type kind: str
:param kind: the "kind" of the entity (see
https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Kinds_and_identifiers)
:type key: :class:`gcloud.datastore.key.Key`
:param key: Optional key to be set on entity. Required for save() or
reload().
:type exclude_from_indexes: `tuple` of :class:`str`
:param exclude_from_indexes: names of fields whose values are not to be
indexed for this entity.
"""

def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
def __init__(self, key=None, exclude_from_indexes=()):
super(Entity, self).__init__()
# Does not inherit directly from object, so we don't use
# _implicit_environ._DatastoreBase to avoid split MRO.
self._dataset = dataset or _implicit_environ.DATASET
if kind:
self._key = Key(kind, dataset_id=self.dataset().id())
else:
self._key = None
self.key = key
self._exclude_from_indexes = set(exclude_from_indexes)

def dataset(self):
"""Get the :class:`.dataset.Dataset` in which this entity belongs.
.. note::
This is based on the :class:`gcloud.datastore.key.Key` set on the
entity. That means that if you have no key set, the dataset might
be `None`. It also means that if you change the key on the entity,
this will refer to that key's dataset.
:rtype: :class:`gcloud.datastore.dataset.Dataset`
:returns: The Dataset containing the entity if there is a key,
else None.
"""
return self._dataset

def key(self, key=None):
"""Get or set the :class:`.datastore.key.Key` on the current entity.
:type key: :class:`glcouddatastore.key.Key`
:param key: The key you want to set on the entity.
:rtype: :class:`gcloud.datastore.key.Key` or :class:`Entity`.
:returns: Either the current key (on get) or the current
object (on set).
>>> entity.key(my_other_key) # This returns the original entity.
<Entity[{'kind': 'OtherKeyKind', 'id': 1234}] {'property': 'value'}>
>>> entity.key() # This returns the key.
<Key[{'kind': 'OtherKeyKind', 'id': 1234}]>
"""

if key is not None:
self._key = key
return self
else:
return self._key

@property
def kind(self):
"""Get the kind of the current entity.
@@ -150,32 +94,17 @@ def kind(self):
of the entity at all, just the properties and a pointer to a
Key which knows its Kind.
"""
if self.key:
return self.key.kind

if self._key:
return self._key.kind

@property
def exclude_from_indexes(self):
"""Names of fields which are *not* to be indexed for this entity.
:rtype: sequence of field names
"""
return frozenset(self._exclude_from_indexes)

@classmethod
def from_key(cls, key, dataset=None):
"""Create entity based on :class:`.datastore.key.Key`.
.. note:: This is a factory method.
:type key: :class:`gcloud.datastore.key.Key`
:param key: The key for the entity.
:returns: The :class:`Entity` derived from the
:class:`gcloud.datastore.key.Key`.
"""

return cls(dataset).key(key)

@property
def _must_key(self):
"""Return our key, or raise NoKey if not set.
@@ -184,23 +113,11 @@ def _must_key(self):
:returns: our key
:raises: NoKey if key is None
"""
if self._key is None:
if self.key is None:
raise NoKey()
return self._key

@property
def _must_dataset(self):
"""Return our dataset, or raise NoDataset if not set.
:rtype: :class:`gcloud.datastore.key.Key`.
:returns: our key
:raises: NoDataset if key is None
"""
if self._dataset is None:
raise NoDataset()
return self._dataset
return self.key

def reload(self):
def reload(self, connection=None):
"""Reloads the contents of this entity from the datastore.
This method takes the :class:`gcloud.datastore.key.Key`, loads all
@@ -211,16 +128,19 @@ def reload(self):
This will override any existing properties if a different value
exists remotely, however it will *not* override any properties that
exist only locally.
:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional connection used to connect to datastore.
"""
connection = connection or _implicit_environ.CONNECTION

key = self._must_key
connection = self._must_dataset.connection()
entity = key.get(connection=connection)

if entity:
self.update(entity)
return self

def save(self):
def save(self, connection=None):
"""Save the entity in the Cloud Datastore.
.. note::
@@ -234,17 +154,17 @@ def save(self):
Python3) map to 'string_value' in the datastore; values which are
"bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
:rtype: :class:`gcloud.datastore.entity.Entity`
:returns: The entity with a possibly updated Key.
:type connection: :class:`gcloud.datastore.connection.Connection`
:param connection: Optional connection used to connect to datastore.
"""
connection = connection or _implicit_environ.CONNECTION

key = self._must_key
dataset = self._must_dataset
connection = dataset.connection()
assigned, new_id = connection.save_entity(
dataset_id=dataset.id(),
dataset_id=key.dataset_id,
key_pb=key.to_protobuf(),
properties=dict(self),
exclude_from_indexes=self.exclude_from_indexes())
exclude_from_indexes=self.exclude_from_indexes)

# If we are in a transaction and the current entity needs an
# automatically assigned ID, tell the transaction where to put that.
@@ -254,13 +174,11 @@ def save(self):

if assigned:
# Update the key (which may have been altered).
self.key(self.key().completed_key(new_id))

return self
self.key = self.key.completed_key(new_id)

def __repr__(self):
if self._key:
return '<Entity%s %s>' % (self._key.path,
if self.key:
return '<Entity%s %s>' % (self.key.path,
super(Entity, self).__repr__())
else:
return '<Entity %s>' % (super(Entity, self).__repr__())
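
Taken together, the entity.py changes replace the key()/dataset() accessors with a plain key attribute and move the connection into save() and reload(). A short sketch of the resulting usage, with placeholder kind, dataset ID and property name, assuming credentials are available:

    from gcloud import datastore
    from gcloud.datastore.entity import Entity
    from gcloud.datastore.key import Key

    connection = datastore.get_connection()

    # Entities are built from a Key (or no key at all), not a dataset/kind pair.
    key = Key('Person', dataset_id='my-dataset-id')
    entity = Entity(key=key)
    entity['name'] = 'Alice'

    # key is now a plain attribute rather than a getter/setter method,
    # so reading and re-assigning it is ordinary attribute access.
    print(entity.key)

    # save()/reload() take an explicit connection (or fall back to the
    # implicit one); the dataset ID comes from the key itself.
    entity.save(connection=connection)
    entity.reload(connection=connection)

Since save() no longer returns the entity, the old chaining style goes away; any newly assigned ID is simply written back onto entity.key.
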
6 changes: 3 additions & 3 deletions gcloud/datastore/helpers.py
@@ -32,7 +32,7 @@
INT_VALUE_CHECKER = Int64ValueChecker()


def entity_from_protobuf(pb, dataset=None):
def entity_from_protobuf(pb):
"""Factory method for creating an entity based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
@@ -45,7 +45,7 @@ def entity_from_protobuf(pb, dataset=None):
:returns: The entity derived from the protobuf.
"""
key = key_from_protobuf(pb.key)
entity = Entity.from_key(key, dataset)
entity = Entity(key=key)

for property_pb in pb.property:
value = _get_value_from_property_pb(property_pb)
@@ -246,7 +246,7 @@ def _set_protobuf_value(value_pb, val):
elif attr == 'entity_value':
e_pb = value_pb.entity_value
e_pb.Clear()
key = val.key()
key = val.key
if key is not None:
e_pb.key.CopyFrom(key.to_protobuf())
for item_key, value in val.items():
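
entity_from_protobuf() correspondingly loses its dataset argument. A minimal sketch of decoding a hand-built protobuf follows; the partition_id / path_element field names are assumed to match the v1beta2 datastore_v1_pb2 definitions that key_from_protobuf() already reads, and the kind, ID and dataset ID are placeholders.

    from gcloud.datastore import datastore_v1_pb2 as datastore_pb
    from gcloud.datastore import helpers

    # Build a bare Entity protobuf carrying only a key (placeholder values).
    entity_pb = datastore_pb.Entity()
    entity_pb.key.partition_id.dataset_id = 'my-dataset-id'
    path_element = entity_pb.key.path_element.add()
    path_element.kind = 'Person'
    path_element.id = 1234

    # No dataset argument any more; the key alone is enough.
    entity = helpers.entity_from_protobuf(entity_pb)
    print(entity.key.path)
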
2 changes: 1 addition & 1 deletion gcloud/datastore/key.py
@@ -248,7 +248,7 @@ def get(self, connection=None):
if entities:
result = entities[0]
# We assume that the backend has not changed the key.
result.key(self)
result.key = self
return result

def delete(self, connection=None):
3 changes: 1 addition & 2 deletions gcloud/datastore/query.py
@@ -390,9 +390,8 @@ def next_page(self):
else:
raise ValueError('Unexpected value returned for `more_results`.')

dataset = self._query.dataset
self._page = [
helpers.entity_from_protobuf(entity, dataset=dataset)
helpers.entity_from_protobuf(entity)
for entity in entity_pbs]
return self._page, self._more_results, self._start_cursor

6 changes: 3 additions & 3 deletions gcloud/datastore/test___init__.py
@@ -234,7 +234,7 @@ def test_get_entities_miss_w_missing(self):
entities = self._callFUT([key], connection=connection,
dataset_id=DATASET_ID, missing=missing)
self.assertEqual(entities, [])
self.assertEqual([missed.key().to_protobuf() for missed in missing],
self.assertEqual([missed.key.to_protobuf() for missed in missing],
[key.to_protobuf()])

def test_get_entities_miss_w_deferred(self):
@@ -288,7 +288,7 @@ def test_get_entities_hit(self):
key = Key(KIND, ID, dataset_id=DATASET_ID)
result, = self._callFUT([key], connection=connection,
dataset_id=DATASET_ID)
new_key = result.key()
new_key = result.key

# Check the returned value is as expected.
self.assertFalse(new_key is key)
@@ -328,7 +328,7 @@ def test_get_entities_implicit(self):
}
self.assertEqual(CUSTOM_CONNECTION._called_with, expected_called_with)

new_key = result.key()
new_key = result.key
# Check the returned value is as expected.
self.assertFalse(new_key is key)
self.assertEqual(new_key.dataset_id, DATASET_ID)
7 changes: 0 additions & 7 deletions gcloud/datastore/test_connection.py
@@ -210,13 +210,6 @@ def mutation(self):
found = conn.mutation()
self.assertTrue(isinstance(found, Mutation))

def test_dataset(self):
DATASET_ID = 'DATASET'
conn = self._makeOne()
dataset = conn.dataset(DATASET_ID)
self.assertTrue(dataset.connection() is conn)
self.assertEqual(dataset.id(), DATASET_ID)

def test_lookup_single_key_empty_response(self):
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
