Handle datastore renames on PartitionId #1359

Closed · wants to merge 2 commits
6 changes: 3 additions & 3 deletions gcloud/datastore/connection.py
@@ -264,7 +264,7 @@ def run_query(self, project, query_pb, namespace=None,
_set_read_options(request, eventual, transaction_id)

if namespace:
-request.partition_id.namespace = namespace
+request.partition_id.namespace_id = namespace

request.query.CopyFrom(query_pb)
response = self._rpc(project, 'runQuery', request,
@@ -402,10 +402,10 @@ def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers
:returns: A key which will be added to a request. It will be the
original if nothing needs to be changed.
"""
-if _has_field(key_pb.partition_id, 'dataset_id'):
+if _has_field(key_pb.partition_id, 'project_id'):
new_key_pb = _entity_pb2.Key()
new_key_pb.CopyFrom(key_pb)
-new_key_pb.partition_id.ClearField('dataset_id')
+new_key_pb.partition_id.ClearField('project_id')
key_pb = new_key_pb
return key_pb

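
A minimal usage sketch (not part of this diff) of the renamed field on the query path, assuming the v1beta3 generated module used in the tests below; the namespace value is hypothetical:

from gcloud.datastore._generated import datastore_pb2

request = datastore_pb2.RunQueryRequest()
namespace = 'my-namespace'  # hypothetical value
if namespace:
    # v1beta3 renames PartitionId.namespace to namespace_id
    request.partition_id.namespace_id = namespace
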
24 changes: 12 additions & 12 deletions gcloud/datastore/helpers.py
@@ -62,7 +62,7 @@ def find_true_project(project, connection):
# the project so the backend won't complain.
bogus_key_pb = Key('__MissingLookupKind', 1,
project=project).to_protobuf()
-bogus_key_pb.partition_id.ClearField('dataset_id')
+bogus_key_pb.partition_id.ClearField('project_id')

found_pbs, missing_pbs, _ = connection.lookup(project, [bogus_key_pb])
# By not passing in `deferred`, lookup will continue until
@@ -71,7 +71,7 @@ def find_true_project(project, connection):
# We only asked for one, so should only receive one.
returned_pb, = all_pbs

-return returned_pb.key.partition_id.dataset_id
+return returned_pb.key.partition_id.project_id


def _get_meaning(value_pb, is_list=False):
@@ -268,11 +268,11 @@ def key_from_protobuf(pb):
path_args.append(element.name)

project = None
-if _has_field(pb.partition_id, 'dataset_id'):
-project = pb.partition_id.dataset_id
+if _has_field(pb.partition_id, 'project_id'):
+project = pb.partition_id.project_id
namespace = None
-if _has_field(pb.partition_id, 'namespace'):
-namespace = pb.partition_id.namespace
+if _has_field(pb.partition_id, 'namespace_id'):
+namespace = pb.partition_id.namespace_id

return Key(*path_args, namespace=namespace, project=project)

@@ -429,18 +429,18 @@ def _prepare_key_for_request(key_pb):
:returns: A key which will be added to a request. It will be the
original if nothing needs to be changed.
"""
-if _has_field(key_pb.partition_id, 'dataset_id'):
-# We remove the dataset_id from the protobuf. This is because
+if _has_field(key_pb.partition_id, 'project_id'):
+# We remove the project_id from the protobuf. This is because
# the backend fails a request if the key contains un-prefixed
# project. The backend fails because requests to
-# /datastore/.../datasets/foo/...
+# /v1beta3/projects/foo:...
# and
-# /datastore/.../datasets/s~foo/...
+# /v1beta3/projects/s~foo:...
# both go to the datastore given by 's~foo'. So if the key
-# protobuf in the request body has dataset_id='foo', the
+# protobuf in the request body has project_id='foo', the
# backend will reject since 'foo' != 's~foo'.
new_key_pb = _entity_pb2.Key()
new_key_pb.CopyFrom(key_pb)
-new_key_pb.partition_id.ClearField('dataset_id')
+new_key_pb.partition_id.ClearField('project_id')
key_pb = new_key_pb
return key_pb
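
A short sketch (not part of this diff) of the behavior the comment above describes, assuming the generated entity_pb2 module and a hypothetical un-prefixed project 'foo':

from gcloud.datastore._generated import entity_pb2

key_pb = entity_pb2.Key()
key_pb.partition_id.project_id = 'foo'  # un-prefixed project
key_pb.path_element.add(kind='MyKind', id=1234)

# Copy the key and drop project_id so the backend does not compare
# 'foo' against the prefixed datastore name 's~foo'.
request_key = entity_pb2.Key()
request_key.CopyFrom(key_pb)
request_key.partition_id.ClearField('project_id')
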
4 changes: 2 additions & 2 deletions gcloud/datastore/key.py
@@ -240,10 +240,10 @@ def to_protobuf(self):
:returns: The protobuf representing the key.
"""
key = _entity_pb2.Key()
-key.partition_id.dataset_id = self.project
+key.partition_id.project_id = self.project

if self.namespace:
-key.partition_id.namespace = self.namespace
+key.partition_id.namespace_id = self.namespace

for item in self.path:
element = key.path_element.add()
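
A quick round-trip sketch (not part of this diff) showing the new attribute names produced by Key.to_protobuf(); the project and namespace strings are hypothetical:

from gcloud.datastore.key import Key

key = Key('Parent', 1234, project='my-project', namespace='my-namespace')
pb = key.to_protobuf()
assert pb.partition_id.project_id == 'my-project'      # formerly dataset_id
assert pb.partition_id.namespace_id == 'my-namespace'  # formerly namespace
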
2 changes: 1 addition & 1 deletion gcloud/datastore/test_batch.py
@@ -388,7 +388,7 @@ def to_protobuf(self):
from gcloud.datastore._generated import entity_pb2
key = self._key = entity_pb2.Key()
# Don't assign it, because it will just get ripped out
-# key.partition_id.dataset_id = self.project
+# key.partition_id.project_id = self.project

element = key.path_element.add()
element.kind = self._kind
4 changes: 2 additions & 2 deletions gcloud/datastore/test_client.py
@@ -20,7 +20,7 @@ def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
from gcloud.datastore.helpers import _new_value_pb

entity_pb = entity_pb2.Entity()
-entity_pb.key.partition_id.dataset_id = project
+entity_pb.key.partition_id.project_id = project
path_element = entity_pb.key.path_element.add()
path_element.kind = kind
path_element.id = integer_id
@@ -322,7 +322,7 @@ def test_get_multi_miss_w_missing(self):

# Make a missing entity pb to be returned from mock backend.
missed = entity_pb2.Entity()
-missed.key.partition_id.dataset_id = self.PROJECT
+missed.key.partition_id.project_id = self.PROJECT
path_element = missed.key.path_element.add()
path_element.kind = KIND
path_element.id = ID
14 changes: 7 additions & 7 deletions gcloud/datastore/test_connection.py
@@ -508,7 +508,7 @@ def test_run_query_w_eventual_no_transaction(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
-self.assertEqual(request.partition_id.namespace, '')
+self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)
self.assertEqual(request.read_options.read_consistency,
datastore_pb2.ReadOptions.EVENTUAL)
@@ -549,7 +549,7 @@ def test_run_query_wo_eventual_w_transaction(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
-self.assertEqual(request.partition_id.namespace, '')
+self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)
self.assertEqual(request.read_options.read_consistency,
datastore_pb2.ReadOptions.DEFAULT)
@@ -606,7 +606,7 @@ def test_run_query_wo_namespace_empty_result(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
-self.assertEqual(request.partition_id.namespace, '')
+self.assertEqual(request.partition_id.namespace_id, '')
self.assertEqual(request.query, q_pb)

def test_run_query_w_namespace_nonempty_result(self):
@@ -638,7 +638,7 @@ def test_run_query_w_namespace_nonempty_result(self):
rq_class = datastore_pb2.RunQueryRequest
request = rq_class()
request.ParseFromString(cw['body'])
-self.assertEqual(request.partition_id.namespace, 'NS')
+self.assertEqual(request.partition_id.namespace_id, 'NS')
self.assertEqual(request.query, q_pb)

def test_begin_transaction(self):
@@ -901,9 +901,9 @@ def request(self, **kw):

def _compare_key_pb_after_request(test, key_before, key_after):
from gcloud._helpers import _has_field
-test.assertFalse(_has_field(key_after.partition_id, 'dataset_id'))
-test.assertEqual(key_before.partition_id.namespace,
-key_after.partition_id.namespace)
+test.assertFalse(_has_field(key_after.partition_id, 'project_id'))
+test.assertEqual(key_before.partition_id.namespace_id,
+key_after.partition_id.namespace_id)
test.assertEqual(len(key_before.path_element),
len(key_after.path_element))
for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
22 changes: 11 additions & 11 deletions gcloud/datastore/test_helpers.py
@@ -70,7 +70,7 @@ def test_it(self):
_KIND = 'KIND'
_ID = 1234
entity_pb = entity_pb2.Entity()
-entity_pb.key.partition_id.dataset_id = _PROJECT
+entity_pb.key.partition_id.project_id = _PROJECT
entity_pb.key.path_element.add(kind=_KIND, id=_ID)

value_pb = _new_value_pb(entity_pb, 'foo')
@@ -117,7 +117,7 @@ def test_mismatched_value_indexed(self):
_KIND = 'KIND'
_ID = 1234
entity_pb = entity_pb2.Entity()
-entity_pb.key.partition_id.dataset_id = _PROJECT
+entity_pb.key.partition_id.project_id = _PROJECT
entity_pb.key.path_element.add(kind=_KIND, id=_ID)

list_val_pb = _new_value_pb(entity_pb, 'baz')
@@ -173,7 +173,7 @@ def test_nested_entity_no_key(self):
inside_val_pb.integer_value = INSIDE_VALUE

entity_pb = entity_pb2.Entity()
-entity_pb.key.partition_id.dataset_id = PROJECT
+entity_pb.key.partition_id.project_id = PROJECT
element = entity_pb.key.path_element.add()
element.kind = KIND

@@ -236,7 +236,7 @@ def test_key_only(self):
entity_pb = self._callFUT(entity)

expected_pb = entity_pb2.Entity()
-expected_pb.key.partition_id.dataset_id = project
+expected_pb.key.partition_id.project_id = project
path_elt = expected_pb.key.path_element.add()
path_elt.kind = kind
path_elt.name = name
@@ -280,7 +280,7 @@ def test_inverts_to_protobuf(self):

original_pb = entity_pb2.Entity()
# Add a key.
-original_pb.key.partition_id.dataset_id = project = 'PROJECT'
+original_pb.key.partition_id.project_id = project = 'PROJECT'
elem1 = original_pb.key.path_element.add()
elem1.kind = 'Family'
elem1.id = 1234
@@ -323,7 +323,7 @@ def test_inverts_to_protobuf(self):
new_pb = self._callFUT(entity)

# NOTE: entity_to_protobuf() strips the project so we "cheat".
-new_pb.key.partition_id.dataset_id = project
+new_pb.key.partition_id.project_id = project
self._compareEntityProto(original_pb, new_pb)

def test_meaning_with_change(self):
@@ -356,9 +356,9 @@ def _makePB(self, project=None, namespace=None, path=()):
from gcloud.datastore._generated import entity_pb2
pb = entity_pb2.Key()
if project is not None:
-pb.partition_id.dataset_id = project
+pb.partition_id.project_id = project
if namespace is not None:
-pb.partition_id.namespace = namespace
+pb.partition_id.namespace_id = namespace
for elem in path:
added = pb.path_element.add()
added.kind = elem['kind']
@@ -564,7 +564,7 @@ def test_entity(self):
pb = entity_pb2.Value()
entity_pb = pb.entity_value
entity_pb.key.path_element.add(kind='KIND')
-entity_pb.key.partition_id.dataset_id = 'PROJECT'
+entity_pb.key.partition_id.project_id = 'PROJECT'

value_pb = _new_value_pb(entity_pb, 'foo')
value_pb.string_value = 'Foo'
@@ -741,7 +741,7 @@ def _callFUT(self, key_pb):
def test_prepare_project_valid(self):
from gcloud.datastore._generated import entity_pb2
key = entity_pb2.Key()
-key.partition_id.dataset_id = 'foo'
+key.partition_id.project_id = 'foo'
new_key = self._callFUT(key)
self.assertFalse(new_key is key)

@@ -912,7 +912,7 @@ def lookup(self, project, key_pbs):

response = entity_pb2.Entity()
response.key.CopyFrom(key_pb)
-response.key.partition_id.dataset_id = self.prefix + project
+response.key.partition_id.project_id = self.prefix + project

missing = []
deferred = []
10 changes: 5 additions & 5 deletions gcloud/datastore/test_key.py
@@ -342,9 +342,9 @@ def test_to_protobuf_defaults(self):
self.assertTrue(isinstance(pb, entity_pb2.Key))

# Check partition ID.
-self.assertEqual(pb.partition_id.dataset_id, self._DEFAULT_PROJECT)
-self.assertEqual(pb.partition_id.namespace, '')
-self.assertFalse(_has_field(pb.partition_id, 'namespace'))
+self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT)
+self.assertEqual(pb.partition_id.namespace_id, '')
+self.assertFalse(_has_field(pb.partition_id, 'namespace_id'))

# Check the element PB matches the partial key and kind.
elem, = list(pb.path_element)
@@ -358,14 +358,14 @@ def test_to_protobuf_w_explicit_project(self):
_PROJECT = 'PROJECT-ALT'
key = self._makeOne('KIND', project=_PROJECT)
pb = key.to_protobuf()
-self.assertEqual(pb.partition_id.dataset_id, _PROJECT)
+self.assertEqual(pb.partition_id.project_id, _PROJECT)

def test_to_protobuf_w_explicit_namespace(self):
_NAMESPACE = 'NAMESPACE'
key = self._makeOne('KIND', namespace=_NAMESPACE,
project=self._DEFAULT_PROJECT)
pb = key.to_protobuf()
-self.assertEqual(pb.partition_id.namespace, _NAMESPACE)
+self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE)

def test_to_protobuf_w_explicit_path(self):
_PARENT = 'PARENT'
2 changes: 1 addition & 1 deletion gcloud/datastore/test_query.py
@@ -334,7 +334,7 @@ def _addQueryResults(self, connection, cursor=_END, more=False):
NO_MORE = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
_ID = 123
entity_pb = entity_pb2.Entity()
-entity_pb.key.partition_id.dataset_id = self._PROJECT
+entity_pb.key.partition_id.project_id = self._PROJECT
path_element = entity_pb.key.path_element.add()
path_element.kind = self._KIND
path_element.id = _ID
2 changes: 1 addition & 1 deletion gcloud/datastore/test_transaction.py
@@ -166,7 +166,7 @@ def _make_key(kind, id_, project):
from gcloud.datastore._generated import entity_pb2

key = entity_pb2.Key()
-key.partition_id.dataset_id = project
+key.partition_id.project_id = project
elem = key.path_element.add()
elem.kind = kind
elem.id = id_