diff --git a/.jshintignore b/.jshintignore
new file mode 100644
index 00000000000..f4368e6349f
--- /dev/null
+++ b/.jshintignore
@@ -0,0 +1,2 @@
+system-test/data/*
+test/testdata/*
diff --git a/README.md b/README.md
index 216fb4931ad..21af2d2c48b 100644
--- a/README.md
+++ b/README.md
@@ -136,25 +136,27 @@ var gcloud = require('gcloud');
// Authenticating on a per-API-basis. You don't need to do this if you auth on a
// global basis (see Authentication section above).
-var dataset = gcloud.datastore.dataset({
+var datastore = gcloud.datastore({
projectId: 'my-project',
keyFilename: '/path/to/keyfile.json'
});
-dataset.get(dataset.key(['Product', 'Computer']), function(err, entity) {
+var key = datastore.key(['Product', 'Computer']);
+
+datastore.get(key, function(err, entity) {
console.log(err || entity);
});
-// Save data to your dataset.
+// Save data to Datastore.
var blogPostData = {
title: 'How to make the perfect homemade pasta',
author: 'Andrew Chilton',
isDraft: true
};
-var blogPostKey = dataset.key('BlogPost');
+var blogPostKey = datastore.key('BlogPost');
-dataset.save({
+datastore.save({
key: blogPostKey,
data: blogPostData
}, function(err) {
@@ -162,7 +164,7 @@ dataset.save({
// with it, such as an update.
blogPostData.isDraft = false;
- dataset.save({
+ datastore.save({
key: blogPostKey,
data: blogPostData
}, function(err) {
diff --git a/docs/toc.json b/docs/toc.json
index c0b6c9e978f..507eb3a2431 100644
--- a/docs/toc.json
+++ b/docs/toc.json
@@ -100,9 +100,6 @@
"title": "Datastore",
"type": "datastore",
"nav": [{
- "title": "Dataset",
- "type": "datastore/dataset"
- }, {
"title": "Query",
"type": "datastore/query"
}, {
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
index e4fcb91ce84..4f1ecdda1c2 100644
--- a/docs/troubleshooting.md
+++ b/docs/troubleshooting.md
@@ -58,10 +58,3 @@ async.eachLimit(subscriptions, PARALLEL_LIMIT, deleteSubscription, function(err)
This will only allow 10 at a time to go through, making it easier on the API to keep up with your requests.
Reference Issue: [#1101](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1101)
-
-
-## I cannot connect to Datastore from a Compute Engine instance.
-
-Currently, the version of Datastore our library supports (v1beta2) requires not only the `cloud-platform` auth scope, but the `userinfo.email` scope as well. When you create a VM, be sure to select both of these scopes (possibly referred to as "Cloud Datastore" and "User info") in order to access the API from gcloud-node without receiving a 401 error.
-
-Reference Issue: [#1169](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1169#issuecomment-198428431)
diff --git a/lib/common/grpc-service.js b/lib/common/grpc-service.js
index c9fd8425d31..48e48b7b4e0 100644
--- a/lib/common/grpc-service.js
+++ b/lib/common/grpc-service.js
@@ -20,13 +20,11 @@
'use strict';
-var camelize = require('camelize');
var googleProtoFiles = require('google-proto-files');
var grpc = require('grpc');
var is = require('is');
var nodeutil = require('util');
var path = require('path');
-var snakeize = require('snakeize');
/**
* @type {module:common/service}
@@ -169,11 +167,15 @@ function GrpcService(config, options) {
for (var protoService in protoServices) {
var protoFilePath = protoServices[protoService];
+ var grpcOpts = {
+ binaryAsBase64: true,
+ convertFieldsToCamelCase: true
+ };
this.protos[protoService] = grpc.load({
root: rootDir,
file: path.relative(rootDir, protoFilePath)
- }).google[service][apiVersion];
+ }, 'proto', grpcOpts).google[service][apiVersion];
}
}
@@ -239,14 +241,7 @@ GrpcService.prototype.request = function(protoOpts, reqOpts, callback) {
grpcOpts.deadline = new Date(Date.now() + protoOpts.timeout);
}
- // snakeize and camelize are used to transform camelCase request options to
- // snake_case. This is what ProtoBuf.js (via gRPC) expects. Similarly, the
- // response is in snake_case, which is why we use camelize to return it to
- // camelCase.
- //
- // An option will be added to gRPC to allow us to skip this step:
- // https://github.com/grpc/grpc/issues/5005
- service[protoOpts.method](snakeize(reqOpts), function(err, resp) {
+ service[protoOpts.method](reqOpts, function(err, resp) {
if (err) {
if (HTTP_ERROR_CODE_MAP[err.code]) {
var httpError = HTTP_ERROR_CODE_MAP[err.code];
@@ -257,46 +252,10 @@ GrpcService.prototype.request = function(protoOpts, reqOpts, callback) {
return;
}
- callback(null, GrpcService.convertBuffers_(camelize(resp)));
+ callback(null, resp);
}, null, grpcOpts);
};
-/**
- * Iterate over an object, finding anything that resembles a Buffer, then
- * convert it to a base64 string representation.
- *
- * @todo Replace this function: https://github.com/grpc/grpc/issues/5006
- *
- * @private
- *
- * @param {*} data - An object or array to iterate over.
- * @return {*} - The converted object.
- */
-GrpcService.convertBuffers_ = function(data) {
- if (is.array(data)) {
- return data.map(GrpcService.convertBuffers_);
- }
-
- if (is.object(data)) {
- for (var prop in data) {
- if (data.hasOwnProperty(prop)) {
- var value = data[prop];
-
- if (Buffer.isBuffer(value)) {
- data[prop] = value.toString('base64');
- } else if (GrpcService.isBufferLike_(value)) {
- var arrayValue = GrpcService.objToArr_(value);
- data[prop] = new Buffer(arrayValue).toString('base64');
- } else {
- data[prop] = GrpcService.convertBuffers_(value);
- }
- }
- }
- }
-
- return data;
-};
-
/**
* Convert a raw value to a type-denoted protobuf message-friendly object.
*
diff --git a/lib/datastore/dataset.js b/lib/datastore/dataset.js
deleted file mode 100644
index c9e92cfdde7..00000000000
--- a/lib/datastore/dataset.js
+++ /dev/null
@@ -1,313 +0,0 @@
-/*!
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*!
- * @module datastore/dataset
- */
-
-'use strict';
-
-var arrify = require('arrify');
-var extend = require('extend');
-var is = require('is');
-var nodeutil = require('util');
-
-/**
- * @type {module:datastore/entity}
- * @private
- */
-var entity = require('./entity.js');
-
-/**
- * @type {module:datastore/query}
- * @private
- */
-var Query = require('./query.js');
-
-/**
- * @type {module:datastore/transaction}
- * @private
- */
-var Transaction = require('./transaction.js');
-
-/**
- * @type {module:common/util}
- * @private
- */
-var util = require('../common/util.js');
-
-/**
- * @type {module:datastore/request}
- * @private
- */
-var DatastoreRequest = require('./request.js');
-
-/**
- * Interact with a dataset from the
- * [Google Cloud Datastore](https://developers.google.com/datastore/).
- *
- * If a project ID is not specified, the `DATASTORE_DATASET` environment
- * variable from the gcloud SDK is used.
- *
- * @constructor
- * @alias module:datastore/dataset
- * @mixes module:datastore/request
- *
- * @param {object=} options - [Configuration object](#/docs).
- * @param {string=} options.apiEndpoint - Override the default API endpoint used
- * to reach Datastore. This is useful for connecting to your local Datastore
- * server (usually "http://localhost:8080").
- * @param {string} options.namespace - Namespace to isolate transactions to.
- *
- * @example
- * var datastore = gcloud.datastore;
- *
- * var dataset = datastore.dataset({
- * projectId: 'my-project',
- * keyFilename: '/path/to/keyfile.json'
- * });
- *
- * //-
- * // Connect to your local Datastore server.
- * //-
- * var dataset = datastore.dataset({
- * projectId: 'my-project',
- * apiEndpoint: 'http://localhost:8080'
- * });
- *
- * //-
- * // The `process.env.DATASTORE_HOST` environment variable is also recognized.
- * // If set, you may omit the `apiEndpoint` option.
- * //-
- */
-function Dataset(options) {
- if (!(this instanceof Dataset)) {
- options = util.normalizeArguments(this, options);
- return new Dataset(options);
- }
-
- options = options || {};
-
- this.datasetId = options.projectId;
-
- if (process.env.DATASTORE_DATASET) {
- this.datasetId = process.env.DATASTORE_DATASET;
- }
-
- if (!this.datasetId) {
- throw new Error('A project or dataset ID is required to use a Dataset.');
- }
-
- this.determineApiEndpoint_(options.apiEndpoint);
- this.namespace = options.namespace;
-
- var reqCfg = extend({}, options, {
- customEndpoint: this.customEndpoint,
- scopes: [
- 'https://www.googleapis.com/auth/datastore',
- 'https://www.googleapis.com/auth/userinfo.email'
- ]
- });
-
- this.makeAuthenticatedRequest_ = util.makeAuthenticatedRequestFactory(reqCfg);
-}
-
-nodeutil.inherits(Dataset, DatastoreRequest);
-
-/**
- * Determine the appropriate endpoint to use for API requests. If not explicitly
- * defined, check for the "DATASTORE_HOST" environment variable, used to connect
- * to a local Datastore server.
- *
- * @private
- *
- * @param {string} customApiEndpoint - Custom API endpoint.
- */
-Dataset.prototype.determineApiEndpoint_ = function(customApiEndpoint) {
- var apiEndpoint;
- var trailingSlashes = new RegExp('/*$');
-
- if (customApiEndpoint) {
- apiEndpoint = customApiEndpoint;
- this.customEndpoint = true;
- } else if (process.env.DATASTORE_HOST) {
- apiEndpoint = process.env.DATASTORE_HOST;
- this.customEndpoint = true;
- } else {
- apiEndpoint = 'https://www.googleapis.com';
- }
-
- if (apiEndpoint.indexOf('http') !== 0) {
- apiEndpoint = 'http://' + apiEndpoint;
- }
-
- this.apiEndpoint = apiEndpoint.replace(trailingSlashes, '');
-};
-
-/**
- * Helper to create a Key object, scoped to the dataset's namespace by default.
- *
- * You may also specify a configuration object to define a namespace and path.
- *
- * @param {...*=} options - Key path. To specify or override a namespace,
- * you must use an object here to explicitly state it.
- * @param {object=} options - Configuration object.
- * @param {...*=} options.path - Key path.
- * @param {string=} options.namespace - Optional namespace.
- * @return {Key} A newly created Key from the options given.
- *
- * @example
- * //-
- * // Create an incomplete key with a kind value of `Company`.
- * //-
- * var key = dataset.key('Company');
- *
- * //-
- * // Create a complete key with a kind value of `Company` and id `123`.
- * //-
- * var key = dataset.key(['Company', 123]);
- *
- * //-
- * // Create a complete key with a kind value of `Company` and name `Google`.
- * // Note: `id` is used for numeric identifiers and `name` is used otherwise.
- * //-
- * var key = dataset.key(['Company', 'Google']);
- *
- * //-
- * // Create a complete key from a provided namespace and path.
- * //-
- * var key = dataset.key({
- * namespace: 'My-NS',
- * path: ['Company', 123]
- * });
- *
- * //-
- * // All keys have helper properties that you can access or overwrite
- * // directly.
- * //-
- * var key = dataset.key(['Company', 123]);
- *
- * // key.path: ['Company', 123]
- * // key.kind: 'Company'
- * // key.id: 123
- *
- * // Change to a named identifier.
- * delete key.id;
- * key.name = 'Google';
- *
- * // key.path: ['Company', 'Google']
- * // key.name: 'Google'
- *
- * //-
- * // Ancestor keys are created as `Key` objects as well.
- * //-
- * var key = dataset.key(['Company', 'Google', 'Employee', 'Jennifer']);
- *
- * // key.path: ['Company', 'Google', 'Employee', 'Jennifer']
- * // key.parent.path: ['Company', 'Google']
- *
- * // Change the company Jennifer works for.
- * key.parent.name = 'Alphabet';
- *
- * // key.path: ['Company', 'Alphabet', 'Employee', 'Jennifer']
- * // key.parent.path: ['Company', 'Alphabet']
- */
-Dataset.prototype.key = function(options) {
- options = is.object(options) ? options : {
- namespace: this.namespace,
- path: arrify(options)
- };
-
- return new entity.Key(options);
-};
-
-/**
- * Create a query from the current dataset to query the specified kind, scoped
- * to the namespace provided at the initialization of the dataset.
- *
- * @resource [Datastore Queries]{@link http://goo.gl/Cag0r6}
- *
- * @borrows {module:datastore/query} as createQuery
- * @see {module:datastore/query}
- *
- * @param {string=} namespace - Optional namespace.
- * @param {string} kind - Kind to query.
- * @return {module:datastore/query}
- */
-Dataset.prototype.createQuery = function(namespace, kind) {
- if (arguments.length === 1) {
- kind = arrify(namespace);
- namespace = this.namespace;
- }
-
- return new Query(namespace, arrify(kind));
-};
-
-/**
- * Run a function in the context of a new transaction. Transactions allow you to
- * perform multiple operations, committing your changes atomically. When you are
- * finished making your changes within the transaction, run the done() function
- * provided in the callback function to commit your changes. See an example
- * below for more information.
- *
- * @resource [Datasets: beginTransaction API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/beginTransaction}
- *
- * @borrows {module:datastore/transaction#begin} as runInTransaction
- *
- * @param {function} fn - The function to run in the context of a transaction.
- * @param {module:datastore/transaction} fn.transaction - The Transaction.
- * @param {function} fn.done - Function used to commit changes.
- * @param {function} callback - The callback function.
- * @param {?error} callback.err - An error returned while making this request
- *
- * @example
- * dataset.runInTransaction(function(transaction, done) {
- * // From the `transaction` object, execute dataset methods as usual.
- * // Call `done` when you're ready to commit all of the changes.
- * transaction.get(dataset.key(['Company', 123]), function(err, entity) {
- * if (err) {
- * transaction.rollback(done);
- * return;
- * }
- *
- * done();
- * });
- * }, function(err, apiResponse) {});
- */
-Dataset.prototype.runInTransaction = function(fn, callback) {
- var newTransaction = this.createTransaction_();
-
- newTransaction.begin_(function(err, resp) {
- if (err) {
- callback(err, resp);
- return;
- }
-
- fn(newTransaction, newTransaction.commit_.bind(newTransaction, callback));
- });
-};
-
-/**
- * Create a new Transaction object using the existing connection and dataset.
- *
- * @return {module:datastore/transaction}
- * @private
- */
-Dataset.prototype.createTransaction_ = function() {
- return new Transaction(this, this.datasetId);
-};
-
-module.exports = Dataset;
diff --git a/lib/datastore/datastore_v1.proto b/lib/datastore/datastore_v1.proto
deleted file mode 100644
index bb4c199b116..00000000000
--- a/lib/datastore/datastore_v1.proto
+++ /dev/null
@@ -1,594 +0,0 @@
-// Copyright 2013 Google Inc. All Rights Reserved.
-//
-// The datastore v1 service proto definitions
-
-syntax = "proto2";
-
-package pb;
-option java_package = "com.google.api.services.datastore";
-
-
-// An identifier for a particular subset of entities.
-//
-// Entities are partitioned into various subsets, each used by different
-// datasets and different namespaces within a dataset and so forth.
-//
-// All input partition IDs are normalized before use.
-// A partition ID is normalized as follows:
-// If the partition ID is unset or is set to an empty partition ID, replace it
-// with the context partition ID.
-// Otherwise, if the partition ID has no dataset ID, assign it the context
-// partition ID's dataset ID.
-// Unless otherwise documented, the context partition ID has the dataset ID set
-// to the context dataset ID and no other partition dimension set.
-//
-// A partition ID is empty if all of its fields are unset.
-//
-// Partition dimension:
-// A dimension may be unset.
-// A dimension's value must never be "".
-// A dimension's value must match [A-Za-z\d\.\-_]{1,100}
-// If the value of any dimension matches regex "__.*__",
-// the partition is reserved/read-only.
-// A reserved/read-only partition ID is forbidden in certain documented contexts.
-//
-// Dataset ID:
-// A dataset id's value must never be "".
-// A dataset id's value must match
-// ([a-z\d\-]{1,100}~)?([a-z\d][a-z\d\-\.]{0,99}:)?([a-z\d][a-z\d\-]{0,99}
-message PartitionId {
- // The dataset ID.
- optional string dataset_id = 3;
- // The namespace.
- optional string namespace = 4;
-}
-
-// A unique identifier for an entity.
-// If a key's partition id or any of its path kinds or names are
-// reserved/read-only, the key is reserved/read-only.
-// A reserved/read-only key is forbidden in certain documented contexts.
-message Key {
- // Entities are partitioned into subsets, currently identified by a dataset
- // (usually implicitly specified by the project) and namespace ID.
- // Queries are scoped to a single partition.
- optional PartitionId partition_id = 1;
-
- // A (kind, ID/name) pair used to construct a key path.
- //
- // At most one of name or ID may be set.
- // If either is set, the element is complete.
- // If neither is set, the element is incomplete.
- message PathElement {
- // The kind of the entity.
- // A kind matching regex "__.*__" is reserved/read-only.
- // A kind must not contain more than 500 characters.
- // Cannot be "".
- required string kind = 1;
- // The ID of the entity.
- // Never equal to zero. Values less than zero are discouraged and will not
- // be supported in the future.
- optional int64 id = 2;
- // The name of the entity.
- // A name matching regex "__.*__" is reserved/read-only.
- // A name must not be more than 500 characters.
- // Cannot be "".
- optional string name = 3;
- }
-
- // The entity path.
- // An entity path consists of one or more elements composed of a kind and a
- // string or numerical identifier, which identify entities. The first
- // element identifies a root entity, the second element identifies
- // a child of the root entity, the third element a child of the
- // second entity, and so forth. The entities identified by all prefixes of
- // the path are called the element's ancestors.
- // An entity path is always fully complete: ALL of the entity's ancestors
- // are required to be in the path along with the entity identifier itself.
- // The only exception is that in some documented cases, the identifier in the
- // last path element (for the entity) itself may be omitted. A path can never
- // be empty.
- repeated PathElement path_element = 2;
-}
-
-// A message that can hold any of the supported value types and associated
-// metadata.
-//
-// At most one of the Value fields may be set.
-// If none are set the value is "null".
-//
-message Value {
- // A boolean value.
- optional bool boolean_value = 1;
- // An integer value.
- optional int64 integer_value = 2;
- // A double value.
- optional double double_value = 3;
- // A timestamp value.
- optional int64 timestamp_microseconds_value = 4;
- // A key value.
- optional Key key_value = 5;
- // A blob key value.
- optional string blob_key_value = 16;
- // A UTF-8 encoded string value.
- optional string string_value = 17;
- // A blob value.
- optional bytes blob_value = 18;
- // An entity value.
- // May have no key.
- // May have a key with an incomplete key path.
- // May have a reserved/read-only key.
- optional Entity entity_value = 6;
- // A list value.
- // Cannot contain another list value.
- // Cannot also have a meaning and indexing set.
- repeated Value list_value = 7;
-
- // The <code>meaning</code> field is reserved and should not be used.
- optional int32 meaning = 14;
-
- // If the value should be indexed.
- //
- // The <code>indexed</code> property may be set for a
- // <code>null</code> value.
- // When <code>indexed</code> is <code>true</code>, <code>stringValue</code>
- // is limited to 500 characters and the blob value is limited to 500 bytes.
- // Exception: If meaning is set to 2, string_value is limited to 2038
- // characters regardless of indexed.
- // When indexed is true, meaning 15 and 22 are not allowed, and meaning 16
- // will be ignored on input (and will never be set on output).
- // Input values by default have <code>indexed</code> set to
- // <code>true</code>; however, you can explicitly set <code>indexed</code> to
- // <code>true</code> if you want. (An output value never has
- // <code>indexed</code> explicitly set to <code>true</code>.) If a value is
- // itself an entity, it cannot have <code>indexed</code> set to
- // <code>true</code>.
- // Exception: An entity value with meaning 9, 20 or 21 may be indexed.
- optional bool indexed = 15 [default = true];
-}
-
-// An entity property.
-message Property {
- // The name of the property.
- // A property name matching regex "__.*__" is reserved.
- // A reserved property name is forbidden in certain documented contexts.
- // The name must not contain more than 500 characters.
- // Cannot be "".
- required string name = 1;
-
- // The value(s) of the property.
- // Each value can have only one value property populated. For example,
- // you cannot have a values list of <code>{ value: { integerValue: 22,
- // stringValue: "a" } }</code>, but you can have <code>{ value: { listValue:
- // [ { integerValue: 22 }, { stringValue: "a" } ] }</code>.
- required Value value = 4;
-}
-
-// An entity.
-//
-// An entity is limited to 1 megabyte when stored. That roughly
-// corresponds to a limit of 1 megabyte for the serialized form of this
-// message.
-message Entity {
- // The entity's key.
- //
- // An entity must have a key, unless otherwise documented (for example,
- // an entity in <code>Value.entityValue</code> may have no key).
- // An entity's kind is its key's path's last element's kind,
- // or null if it has no key.
- optional Key key = 1;
- // The entity's properties.
- // Each property's name must be unique for its entity.
- repeated Property property = 2;
-}
-
-// The result of fetching an entity from the datastore.
-message EntityResult {
- // Specifies what data the 'entity' field contains.
- // A ResultType is either implied (for example, in LookupResponse.found it
- // is always FULL) or specified by context (for example, in message
- // QueryResultBatch, field 'entity_result_type' specifies a ResultType
- // for all the values in field 'entity_result').
- enum ResultType {
- FULL = 1; // The entire entity.
- PROJECTION = 2; // A projected subset of properties.
- // The entity may have no key.
- // A property value may have meaning 18.
- KEY_ONLY = 3; // Only the key.
- }
-
- // The resulting entity.
- required Entity entity = 1;
-}
-
-// A query.
-message Query {
- // The projection to return. If not set the entire entity is returned.
- repeated PropertyExpression projection = 2;
-
- // The kinds to query (if empty, returns entities from all kinds).
- repeated KindExpression kind = 3;
-
- // The filter to apply (optional).
- optional Filter filter = 4;
-
- // The order to apply to the query results (if empty, order is unspecified).
- repeated PropertyOrder order = 5;
-
- // The properties to group by (if empty, no grouping is applied to the
- // result set).
- repeated PropertyReference group_by = 6;
-
- // A starting point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ start_cursor = 7;
-
- // An ending point for the query results. Optional. Query cursors are
- // returned in query result batches.
- optional bytes /* serialized QueryCursor */ end_cursor = 8;
-
- // The number of results to skip. Applies before limit, but after all other
- // constraints (optional, defaults to 0).
- optional int32 offset = 10 [default=0];
-
- // The maximum number of results to return. Applies after all other
- // constraints. Optional.
- optional int32 limit = 11;
-}
-
-// A representation of a kind.
-message KindExpression {
- // The name of the kind.
- required string name = 1;
-}
-
-// A reference to a property relative to the kind expressions.
-// exactly.
-message PropertyReference {
- // The name of the property.
- required string name = 2;
-}
-
-// A representation of a property in a projection.
-message PropertyExpression {
- enum AggregationFunction {
- FIRST = 1;
- }
- // The property to project.
- required PropertyReference property = 1;
- // The aggregation function to apply to the property. Optional.
- // Can only be used when grouping by at least one property. Must
- // then be set on all properties in the projection that are not
- // being grouped by.
- optional AggregationFunction aggregation_function = 2;
-}
-
-// The desired order for a specific property.
-message PropertyOrder {
- enum Direction {
- ASCENDING = 1;
- DESCENDING = 2;
- }
- // The property to order by.
- required PropertyReference property = 1;
- // The direction to order by.
- optional Direction direction = 2 [default=ASCENDING];
-}
-
-// A holder for any type of filter. Exactly one field should be specified.
-message Filter {
- // A composite filter.
- optional CompositeFilter composite_filter = 1;
- // A filter on a property.
- optional PropertyFilter property_filter = 2;
-}
-
-// A filter that merges the multiple other filters using the given operation.
-message CompositeFilter {
- enum Operator {
- AND = 1;
- }
-
- // The operator for combining multiple filters.
- required Operator operator = 1;
- // The list of filters to combine.
- // Must contain at least one filter.
- repeated Filter filter = 2;
-}
-
-// A filter on a specific property.
-message PropertyFilter {
- enum Operator {
- LESS_THAN = 1;
- LESS_THAN_OR_EQUAL = 2;
- GREATER_THAN = 3;
- GREATER_THAN_OR_EQUAL = 4;
- EQUAL = 5;
-
- HAS_ANCESTOR = 11;
- }
-
- // The property to filter by.
- required PropertyReference property = 1;
- // The operator to filter by.
- required Operator operator = 2;
- // The value to compare the property to.
- required Value value = 3;
-}
-
-// A GQL query.
-message GqlQuery {
- required string query_string = 1;
- // When false, the query string must not contain a literal.
- optional bool allow_literal = 2 [default = false];
- // A named argument must set field GqlQueryArg.name.
- // No two named arguments may have the same name.
- // For each non-reserved named binding site in the query string,
- // there must be a named argument with that name,
- // but not necessarily the inverse.
- repeated GqlQueryArg name_arg = 3;
- // Numbered binding site @1 references the first numbered argument,
- // effectively using 1-based indexing, rather than the usual 0.
- // A numbered argument must NOT set field GqlQueryArg.name.
- // For each binding site numbered i in query_string,
- // there must be an ith numbered argument.
- // The inverse must also be true.
- repeated GqlQueryArg number_arg = 4;
-}
-
-// A binding argument for a GQL query.
-// Exactly one of fields value and cursor must be set.
-message GqlQueryArg {
- // Must match regex "[A-Za-z_$][A-Za-z_$0-9]*".
- // Must not match regex "__.*__".
- // Must not be "".
- optional string name = 1;
- optional Value value = 2;
- optional bytes cursor = 3;
-}
-
-// A batch of results produced by a query.
-message QueryResultBatch {
- // The possible values for the 'more_results' field.
- enum MoreResultsType {
- NOT_FINISHED = 1; // There are additional batches to fetch from this query.
- MORE_RESULTS_AFTER_LIMIT = 2; // The query is finished, but there are more
- // results after the limit.
- NO_MORE_RESULTS = 3; // The query has been exhausted.
- }
-
- // The result type for every entity in entityResults.
- required EntityResult.ResultType entity_result_type = 1;
- // The results for this batch.
- repeated EntityResult entity_result = 2;
-
- // A cursor that points to the position after the last result in the batch.
- // May be absent.
- optional bytes /* serialized QueryCursor */ end_cursor = 4;
-
- // The state of the query after the current batch.
- required MoreResultsType more_results = 5;
-
- // The number of results skipped because of <code>Query.offset</code>.
- optional int32 skipped_results = 6;
-}
-
-// A set of changes to apply.
-//
-// No entity in this message may have a reserved property name,
-// not even a property in an entity in a value.
-// No value in this message may have meaning 18,
-// not even a value in an entity in another value.
-//
-// If entities with duplicate keys are present, an arbitrary choice will
-// be made as to which is written.
-message Mutation {
- // Entities to upsert.
- // Each upserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity upsert = 1;
- // Entities to update.
- // Each updated entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity update = 2;
- // Entities to insert.
- // Each inserted entity's key must have a complete path and
- // must not be reserved/read-only.
- repeated Entity insert = 3;
- // Insert entities with a newly allocated ID.
- // Each inserted entity's key must omit the final identifier in its path and
- // must not be reserved/read-only.
- repeated Entity insert_auto_id = 4;
- // Keys of entities to delete.
- // Each key must have a complete key path and must not be reserved/read-only.
- repeated Key delete = 5;
- // Ignore a user specified read-only period. Optional.
- optional bool force = 6;
-}
-
-// The result of applying a mutation.
-message MutationResult {
- // Number of index writes.
- required int32 index_updates = 1;
- // Keys for <code>insertAutoId</code> entities. One per entity from the
- // request, in the same order.
- repeated Key insert_auto_id_key = 2;
-}
-
-// Options shared by read requests.
-message ReadOptions {
- enum ReadConsistency {
- DEFAULT = 0;
- STRONG = 1;
- EVENTUAL = 2;
- }
-
- // The read consistency to use.
- // Cannot be set when transaction is set.
- // Lookup and ancestor queries default to STRONG, global queries default to
- // EVENTUAL and cannot be set to STRONG.
- optional ReadConsistency read_consistency = 1 [default=DEFAULT];
-
- // The transaction to use. Optional.
- optional bytes /* serialized Transaction */ transaction = 2;
-}
-
-// The request for Lookup.
-message LookupRequest {
-
- // Options for this lookup request. Optional.
- optional ReadOptions read_options = 1;
- // Keys of entities to look up from the datastore.
- repeated Key key = 3;
-}
-
-// The response for Lookup.
-message LookupResponse {
-
- // The order of results in these fields is undefined and has no relation to
- // the order of the keys in the input.
-
- // Entities found as ResultType.FULL entities.
- repeated EntityResult found = 1;
-
- // Entities not found as ResultType.KEY_ONLY entities.
- repeated EntityResult missing = 2;
-
- // A list of keys that were not looked up due to resource constraints.
- repeated Key deferred = 3;
-}
-
-
-// The request for RunQuery.
-message RunQueryRequest {
-
- // The options for this query.
- optional ReadOptions read_options = 1;
-
- // Entities are partitioned into subsets, identified by a dataset (usually
- // implicitly specified by the project) and namespace ID. Queries are scoped
- // to a single partition.
- // This partition ID is normalized with the standard default context
- // partition ID, but all other partition IDs in RunQueryRequest are
- // normalized with this partition ID as the context partition ID.
- optional PartitionId partition_id = 2;
-
- // The query to run.
- // Either this field or field gql_query must be set, but not both.
- optional Query query = 3;
- // The GQL query to run.
- // Either this field or field query must be set, but not both.
- optional GqlQuery gql_query = 7;
-}
-
-// The response for RunQuery.
-message RunQueryResponse {
-
- // A batch of query results (always present).
- optional QueryResultBatch batch = 1;
-
-}
-
-// The request for BeginTransaction.
-message BeginTransactionRequest {
-
- enum IsolationLevel {
- SNAPSHOT = 0; // Read from a consistent snapshot. Concurrent transactions
- // conflict if their mutations conflict. For example:
- // Read(A),Write(B) may not conflict with Read(B),Write(A),
- // but Read(B),Write(B) does conflict with Read(B),Write(B).
- SERIALIZABLE = 1; // Read from a consistent snapshot. Concurrent
- // transactions conflict if they cannot be serialized.
- // For example Read(A),Write(B) does conflict with
- // Read(B),Write(A) but Read(A) may not conflict with
- // Write(A).
- }
-
- // The transaction isolation level.
- optional IsolationLevel isolation_level = 1 [default=SNAPSHOT];
-}
-
-// The response for BeginTransaction.
-message BeginTransactionResponse {
-
- // The transaction identifier (always present).
- optional bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The request for Rollback.
-message RollbackRequest {
-
- // The transaction identifier, returned by a call to
- // <code>beginTransaction</code>.
- required bytes /* serialized Transaction */ transaction = 1;
-}
-
-// The response for Rollback.
-message RollbackResponse {
-// Empty
-}
-
-// The request for Commit.
-message CommitRequest {
-
- enum Mode {
- TRANSACTIONAL = 1;
- NON_TRANSACTIONAL = 2;
- }
-
- // The transaction identifier, returned by a call to
- // <code>beginTransaction</code>. Must be set when mode is TRANSACTIONAL.
- optional bytes /* serialized Transaction */ transaction = 1;
- // The mutation to perform. Optional.
- optional Mutation mutation = 2;
- // The type of commit to perform. Either TRANSACTIONAL or NON_TRANSACTIONAL.
- optional Mode mode = 5 [default=TRANSACTIONAL];
-}
-
-// The response for Commit.
-message CommitResponse {
-
- // The result of performing the mutation (if any).
- optional MutationResult mutation_result = 1;
-}
-
-// The request for AllocateIds.
-message AllocateIdsRequest {
-
- // A list of keys with incomplete key paths to allocate IDs for.
- // No key may be reserved/read-only.
- repeated Key key = 1;
-}
-
-// The response for AllocateIds.
-message AllocateIdsResponse {
-
- // The keys specified in the request (in the same order), each with
- // its key path completed with a newly allocated ID.
- repeated Key key = 1;
-}
-
-// Each rpc normalizes the partition IDs of the keys in its input entities,
-// and always returns entities with keys with normalized partition IDs.
-// (Note that applies to all entities, including entities in values.)
-service DatastoreService {
- // Look up some entities by key.
- rpc Lookup(LookupRequest) returns (LookupResponse) {
- };
- // Query for entities.
- rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
- };
- // Begin a new transaction.
- rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
- };
- // Commit a transaction, optionally creating, deleting or modifying some
- // entities.
- rpc Commit(CommitRequest) returns (CommitResponse) {
- };
- // Roll back a transaction.
- rpc Rollback(RollbackRequest) returns (RollbackResponse) {
- };
- // Allocate IDs for incomplete keys (useful for referencing an entity before
- // it is inserted).
- rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
- };
-}
diff --git a/lib/datastore/entity.js b/lib/datastore/entity.js
index 9468d85d36c..48fe83ad492 100644
--- a/lib/datastore/entity.js
+++ b/lib/datastore/entity.js
@@ -22,23 +22,71 @@
'use strict';
var arrify = require('arrify');
+var createErrorClass = require('create-error-class');
var is = require('is');
-/** @const {object} Map for query operation -> operation protocol value. */
-var OP_TO_OPERATOR = {
- '=': 'EQUAL',
- '>': 'GREATER_THAN',
- '>=': 'GREATER_THAN_OR_EQUAL',
- '<': 'LESS_THAN',
- '<=': 'LESS_THAN_OR_EQUAL',
- HAS_ANCESTOR: 'HAS_ANCESTOR'
-};
-
-/** @const {object} Conversion map for query sign -> order protocol value. */
-var SIGN_TO_ORDER = {
- '-': 'DESCENDING',
- '+': 'ASCENDING'
-};
+var entity = module.exports;
+
+var InvalidKeyError = createErrorClass('InvalidKey', function(opts) {
+ var errorMessages = {
+ MISSING_KIND: 'A key should contain at least a kind.',
+ MISSING_ANCESTOR_ID: 'Ancestor keys require an id or name.'
+ };
+
+ this.message = errorMessages[opts.code];
+});
+
+/**
+ * Build a Datastore Double object.
+ *
+ * @constructor
+ * @param {number} value - The double value.
+ *
+ * @example
+ * var aDouble = new Double(7.3);
+ */
+function Double(value) {
+ this.value = value;
+}
+
+entity.Double = Double;
+
+/**
+ * Build a Datastore Int object.
+ *
+ * @constructor
+ * @param {number} value - The integer value.
+ *
+ * @example
+ * var anInt = new Int(7);
+ */
+function Int(value) {
+ this.value = value;
+}
+
+entity.Int = Int;
+
+/**
+ * Build a Datastore Geo Point object.
+ *
+ * @constructor
+ * @param {object} coordinates - Coordinate value.
+ * @param {number} coordinates.latitude - Latitudinal value.
+ * @param {number} coordinates.longitude - Longitudinal value.
+ *
+ * @example
+ * var coordinates = {
+ * latitude: 40.6894,
+ * longitude: -74.0447
+ * };
+ *
+ * var geoPoint = new GeoPoint(coordinates);
+ */
+function GeoPoint(coordinates) {
+  this.value = coordinates;
+}
+
+entity.GeoPoint = GeoPoint;
/**
* Build a Datastore Key object.
@@ -84,186 +132,252 @@ function Key(options) {
});
}
-module.exports.Key = Key;
+entity.Key = Key;
/**
- * Build a Datastore Int object.
+ * Convert a protobuf Value message to its native value.
*
- * @constructor
- * @param {number} val - The integer value.
+ * @param {object} valueProto - The protobuf Value message to convert.
+ * @return {*}
*
* @example
- * var anInt = new Int(7);
- */
-function Int(val) {
- this.val_ = val;
-}
-
-/**
- * Retrieve the Integer value.
+ * decodeValueProto({
+ * booleanValue: false
+ * });
+ * // false
*
- * @return {number}
+ * decodeValueProto({
+ * stringValue: 'Hi'
+ * });
+ * // 'Hi'
+ *
+ * decodeValueProto({
+ * blobValue: new Buffer('68656c6c6f')
+ * });
+ * //
*/
-Int.prototype.get = function() {
- return this.val_;
-};
+function decodeValueProto(valueProto) {
+ var valueType = valueProto.value_type;
+ var value = valueProto[valueType];
-module.exports.Int = Int;
+ switch (valueType) {
+ case 'arrayValue': {
+ return value.values.map(entity.decodeValueProto);
+ }
+
+ case 'blobValue': {
+ return new Buffer(value, 'base64');
+ }
+
+ case 'nullValue': {
+ return null;
+ }
+
+ case 'doubleValue': {
+ return parseFloat(value, 10);
+ }
+
+ case 'integerValue': {
+ return parseInt(value, 10);
+ }
+
+ case 'entityValue': {
+ return entity.entityFromEntityProto(value);
+ }
+
+ case 'keyValue': {
+ return entity.keyFromKeyProto(value);
+ }
+
+ case 'timestampValue': {
+ return new Date(parseInt(value.seconds, 10) * 1000);
+ }
+
+ default: {
+ return value;
+ }
+ }
+}
+
+entity.decodeValueProto = decodeValueProto;
/**
- * Build a Datastore Double object.
+ * Convert any native value to a protobuf Value message object.
*
- * @constructor
- * @param {number} val - The double value.
+ * @param {*} value - Native value.
+ * @return {object}
*
* @example
- * var aDouble = new Double(7.3);
+ * encodeValue('Hi');
+ * // {
+ * // stringValue: 'Hi'
+ * // }
*/
-function Double(val) {
- this.val_ = val;
-}
+function encodeValue(value) {
+ var valueProto = {};
-/**
- * Retrieve the Double value.
- *
- * @return {number}
- */
-Double.prototype.get = function() {
- return this.val_;
-};
+ if (is.boolean(value)) {
+ valueProto.booleanValue = value;
+ return valueProto;
+ }
-module.exports.Double = Double;
+ if (is.nil(value)) {
+ valueProto.nullValue = 0;
+ return valueProto;
+ }
+
+ if (is.number(value)) {
+ if (value % 1 === 0) {
+ value = new entity.Int(value);
+ } else {
+ value = new entity.Double(value);
+ }
+ }
+
+ if (value instanceof entity.Int) {
+ valueProto.integerValue = value.value;
+ return valueProto;
+ }
+
+ if (value instanceof entity.Double) {
+ valueProto.doubleValue = value.value;
+ return valueProto;
+ }
+
+ if (value instanceof entity.GeoPoint) {
+ valueProto.geoPointValue = value.value;
+ return valueProto;
+ }
+
+ if (value instanceof Date) {
+ var seconds = value.getTime() / 1000;
+ var secondsRounded = Math.floor(seconds);
+
+ valueProto.timestampValue = {
+ seconds: secondsRounded,
+ nanos: Math.floor((seconds - secondsRounded) * 1e9)
+ };
+
+ return valueProto;
+ }
+
+ if (is.string(value)) {
+ valueProto.stringValue = value;
+ return valueProto;
+ }
+
+ if (value instanceof Buffer) {
+ valueProto.blobValue = value.toString('base64');
+ return valueProto;
+ }
+
+ if (is.array(value)) {
+ valueProto.arrayValue = {
+ values: value.map(entity.encodeValue)
+ };
+ return valueProto;
+ }
+
+ if (value instanceof entity.Key) {
+ valueProto.keyValue = entity.keyToKeyProto(value);
+ return valueProto;
+ }
+
+ if (is.object(value) && !is.empty(value)) {
+ var properties = Object.keys(value).map(function(key) {
+ return {
+ name: key,
+ value: entity.encodeValue(value[key])
+ };
+ });
+
+ valueProto.entityValue = {
+ properties: properties
+ };
+
+ return valueProto;
+ }
+
+ throw new Error('Unsupported field value, ' + value + ', was provided.');
+}
+
+entity.encodeValue = encodeValue;
/**
* Convert any entity protocol to a plain object.
*
* @todo Use registered metadata if provided.
*
- * @param {object} proto - The protocol entity object to convert.
+ * @param {object} entityProto - The protocol entity object to convert.
* @return {object}
*
* @example
- * var entity = entityFromEntityProto({
- * property: [
- * {
+ * entityFromEntityProto({
+ * properties: {
+ * map: {
* name: {
- * stringValue: 'Burcu Dogan'
+ * value: {
+ * value_type: 'stringValue',
+ * stringValue: 'Stephen'
+ * }
* }
* }
- * ]
+ * }
* });
- *
- * // entity:
* // {
- * // name: 'Burcu Dogan'
+ * // name: 'Stephen'
* // }
*/
-function entityFromEntityProto(proto) {
- var properties = proto.property || [];
- return Object.keys(properties).reduce(function(acc, key) {
- var property = properties[key];
- acc[property.name] = propertyToValue(property.value);
- return acc;
- }, {});
-}
+function entityFromEntityProto(entityProto) {
+ var entityObject = {};
-module.exports.entityFromEntityProto = entityFromEntityProto;
-
-/**
- * Convert a key protocol object to a Key object.
- *
- * @param {object} proto - The key protocol object to convert.
- * @return {Key}
- *
- * @example
- * var key = keyFromKeyProto({
- * partitionId: {
- * datasetId: 'project-id',
- * namespace: ''
- * },
- * path: [
- * {
- * kind: 'Kind',
- * id: '4790047639339008'
- * }
- * ]
- * });
- */
-function keyFromKeyProto(proto) {
- var keyOptions = {
- path: []
- };
+ var properties = entityProto.properties || {};
- if (proto.partition_id && proto.partition_id.namespace) {
- keyOptions.namespace = proto.partition_id.namespace;
+ for (var property in properties) {
+ var value = properties[property];
+ entityObject[property] = entity.decodeValueProto(value);
}
- proto.path_element.forEach(function(path, index) {
- var id = Number(path.id) || path.name;
- keyOptions.path.push(path.kind);
- if (id) {
- keyOptions.path.push(id);
- } else if (index < proto.path_element.length - 1) {
- throw new Error('Invalid key. Ancestor keys require an id or name.');
- }
- });
-
- return new Key(keyOptions);
+ return entityObject;
}
-module.exports.keyFromKeyProto = keyFromKeyProto;
+entity.entityFromEntityProto = entityFromEntityProto;
/**
- * Convert a Key object to a key protocol object.
+ * Convert an entity object to an entity protocol object.
*
- * @param {Key} key - The Key object to convert.
+ * @param {object} entityObject - The entity object to convert.
* @return {object}
*
* @example
- * var keyProto = keyToKeyProto(new Key(['Company', 1]));
- *
- * // keyProto:
+ * entityToEntityProto({
+ * name: 'Burcu',
+ * legit: true
+ * });
* // {
- * // path: [
- * // {
- * // kind: 'Company',
- * // id: 1
+ * // key: null,
+ * // properties: {
+ * // name: {
+ * // stringValue: 'Burcu'
+ * // },
+ * // legit: {
+ * // booleanValue: true
* // }
- * // ]
+ * // }
* // }
*/
-function keyToKeyProto(key) {
- var keyPath = key.path;
- if (!is.string(keyPath[0])) {
- throw new Error('A key should contain at least a kind.');
- }
- var path = [];
- for (var i = 0; i < keyPath.length; i += 2) {
- var p = { kind: keyPath[i] };
- var val = keyPath[i + 1];
- if (val) {
- if (is.number(val)) {
- p.id = val;
- } else {
- p.name = val;
- }
- } else if (i < keyPath.length - 2) { // i is second last path item
- throw new Error('Invalid key. Ancestor keys require an id or name.');
- }
- path.push(p);
- }
- var proto = {
- path_element: path
+function entityToEntityProto(entityObject) {
+ return {
+ key: null,
+
+ properties: Object.keys(entityObject).reduce(function(properties, key) {
+ properties[key] = entity.encodeValue(entityObject[key]);
+ return properties;
+ }, {})
};
- if (key.namespace) {
- proto.partition_id = {
- namespace: key.namespace
- };
- }
- return proto;
}
-module.exports.keyToKeyProto = keyToKeyProto;
+entity.entityToEntityProto = entityToEntityProto;
/**
* Convert an API response array to a qualified Key and data object.
@@ -274,10 +388,8 @@ module.exports.keyToKeyProto = keyToKeyProto;
* @return {object[]}
*
* @example
- * makeReq('runQuery', {}, function(err, response) {
+ * request_('runQuery', {}, function(err, response) {
* var entityObjects = formatArray(response.batch.entityResults);
- *
- * // entityObjects:
* // {
* // key: {},
* // data: {
@@ -290,13 +402,13 @@ module.exports.keyToKeyProto = keyToKeyProto;
function formatArray(results) {
return results.map(function(result) {
return {
- key: keyFromKeyProto(result.entity.key),
- data: entityFromEntityProto(result.entity)
+ key: entity.keyFromKeyProto(result.entity.key),
+ data: entity.entityFromEntityProto(result.entity)
};
});
}
-module.exports.formatArray = formatArray;
+entity.formatArray = formatArray;
/**
* Check if a key is complete.
@@ -308,197 +420,122 @@ module.exports.formatArray = formatArray;
* isKeyComplete(new Key(['Company', 'Google'])); // true
* isKeyComplete(new Key('Company')); // false
*/
-module.exports.isKeyComplete = function(key) {
- var proto = keyToKeyProto(key);
+function isKeyComplete(key) {
+ var lastPathElement = entity.keyToKeyProto(key).path.pop();
+ return !!(lastPathElement.id || lastPathElement.name);
+}
- for (var i = 0; i < proto.path_element.length; i++) {
- if (!proto.path_element[i].kind) {
- return false;
- }
- if (!proto.path_element[i].id && !proto.path_element[i].name) {
- return false;
- }
- }
- return true;
-};
+entity.isKeyComplete = isKeyComplete;
/**
- * Convert a protocol property to it's native value.
- *
- * @todo Do we need uint64s and keep Long.js support?
+ * Convert a key protocol object to a Key object.
*
- * @param {object} property - The property object to convert.
- * @return {*}
+ * @param {object} keyProto - The key protocol object to convert.
+ * @return {Key}
*
* @example
- * propertyToValue({
- * boolean_value: false
- * });
- * // false
- *
- * propertyToValue({
- * string_value: 'Hi'
- * });
- * // 'Hi'
- *
- * propertyToValue({
- * blob_value: new Buffer('68656c6c6f')
+ * var key = keyFromKeyProto({
+ * partitionId: {
+ * projectId: 'project-id',
+ * namespaceId: ''
+ * },
+ * path: [
+ * {
+ * kind: 'Kind',
+ * id: '4790047639339008'
+ * }
+ * ]
* });
- * //
*/
-function propertyToValue(property) {
- if (exists(property.integer_value)) {
- return parseInt(property.integer_value.toString(), 10);
- }
- if (exists(property.double_value)) {
- return property.double_value;
- }
- if (exists(property.string_value)) {
- return property.string_value;
- }
- if (exists(property.blob_value)) {
- return property.blob_value.toBuffer();
- }
- if (exists(property.timestamp_microseconds_value)) {
- var microSecs = parseInt(
- property.timestamp_microseconds_value.toString(), 10);
- return new Date(microSecs / 1000);
- }
- if (exists(property.key_value)) {
- return keyFromKeyProto(property.key_value);
- }
- if (exists(property.entity_value)) {
- return entityFromEntityProto(property.entity_value);
- }
- if (exists(property.boolean_value)) {
- return property.boolean_value;
+function keyFromKeyProto(keyProto) {
+ var keyOptions = {
+ path: []
+ };
+
+ if (keyProto.partitionId && keyProto.partitionId.namespaceId) {
+    keyOptions.namespace = keyProto.partitionId.namespaceId;
}
- if (exists(property.list_value)) {
- var list = [];
- for (var i = 0; i < property.list_value.length; i++) {
- list.push(propertyToValue(property.list_value[i]));
+
+ keyProto.path.forEach(function(path, index) {
+ var id = path.name || Number(path.id);
+
+ keyOptions.path.push(path.kind);
+
+ if (id) {
+ keyOptions.path.push(id);
+ } else if (index < keyProto.path.length - 1) {
+ throw new InvalidKeyError({
+ code: 'MISSING_ANCESTOR_ID'
+ });
}
- return list;
- }
+ });
+
+ return new entity.Key(keyOptions);
}
-module.exports.propertyToValue = propertyToValue;
+entity.keyFromKeyProto = keyFromKeyProto;
/**
- * Convert any native value to a property object.
+ * Convert a Key object to a key protocol object.
*
- * @param {*} v - Original value.
+ * @param {Key} key - The Key object to convert.
* @return {object}
*
* @example
- * valueToProperty('Hi');
+ * var keyProto = keyToKeyProto(new Key(['Company', 1]));
* // {
- * // stringValue: 'Hi'
+ * // path: [
+ * // {
+ * // kind: 'Company',
+ * // id: 1
+ * // }
+ * // ]
* // }
*/
-function valueToProperty(v) {
- var p = {};
- if (v instanceof Boolean || typeof v === 'boolean') {
- p.boolean_value = v;
- return p;
- }
- if (v instanceof Int) {
- p.integer_value = v.get();
- return p;
- }
- if (v instanceof Double) {
- p.double_value = v.get();
- return p;
- }
- if (v instanceof Number || typeof v === 'number') {
- if (v % 1 === 0) {
- p.integer_value = v;
- } else {
- p.double_value = v;
- }
- return p;
- }
- if (v instanceof Date) {
- p.timestamp_microseconds_value = v.getTime() * 1000;
- return p;
- }
- if (v instanceof String || typeof v === 'string') {
- p.string_value = v;
- return p;
- }
- if (v instanceof Buffer) {
- p.blob_value = v;
- return p;
- }
- if (Array.isArray(v)) {
- p.list_value = v.map(function(item) {
- return valueToProperty(item);
+function keyToKeyProto(key) {
+ if (!is.string(key.path[0])) {
+ throw new InvalidKeyError({
+ code: 'MISSING_KIND'
});
- return p;
}
- if (v instanceof Key) {
- p.key_value = keyToKeyProto(v);
- return p;
+
+ var keyProto = {
+ path: []
+ };
+
+ if (key.namespace) {
+ keyProto.partitionId = {
+ namespaceId: key.namespace
+ };
}
- if (v instanceof Object && Object.keys(v).length > 0) {
- var property = [];
- Object.keys(v).forEach(function(k) {
- property.push({
- name: k,
- value: valueToProperty(v[k])
+
+ for (var i = 0; i < key.path.length; i += 2) {
+ var pathElement = {
+ kind: key.path[i]
+ };
+
+ var value = key.path[i + 1];
+
+ if (value) {
+ if (is.number(value)) {
+ pathElement.id = value;
+ } else {
+ pathElement.name = value;
+ }
+ } else if (i < key.path.length - 2) {
+ // This isn't just an incomplete key. An ancestor key is incomplete.
+ throw new InvalidKeyError({
+ code: 'MISSING_ANCESTOR_ID'
});
- });
- p.entity_value = { property: property };
- p.indexed = false;
- return p;
- }
- throw new Error('Unsupported field value, ' + v + ', is provided.');
-}
+ }
-module.exports.valueToProperty = valueToProperty;
+ keyProto.path.push(pathElement);
+ }
-/**
- * Convert an entity object to an entity protocol object.
- *
- * @param {object} entity - The entity object to convert.
- * @return {object}
- *
- * @example
- * entityToEntityProto({
- * name: 'Burcu',
- * legit: true
- * });
- * // {
- * // key: null,
- * // property: [
- * // {
- * // name: 'name',
- * // value: {
- * // string_value: 'Burcu'
- * // }
- * // },
- * // {
- * // name: 'legit',
- * // value: {
- * // boolean_value: true
- * // }
- * // }
- * // }
- * // }
- */
-function entityToEntityProto(entity) {
- return {
- key: null,
- property: Object.keys(entity).map(function(key) {
- return {
- name: key,
- value: valueToProperty(entity[key])
- };
- })
- };
+ return keyProto;
}
-module.exports.entityToEntityProto = entityToEntityProto;
+entity.keyToKeyProto = keyToKeyProto;
/**
* Convert a query object to a query protocol object.
@@ -534,70 +571,108 @@ module.exports.entityToEntityProto = entityToEntityProto;
* // groupBy: []
* // }
*/
-function queryToQueryProto(q) {
- var query = {};
- query.projection = q.selectVal.map(function(v) {
- return { property: { name: v } };
- });
- query.kind = q.kinds.map(function(k) {
- return { name: k };
- });
- // filters
- if (q.filters.length > 0) {
- var filters = q.filters.map(function(f) {
- var val = {};
- if (f.name === '__key__') {
- val.key_value = keyToKeyProto(f.val);
- } else {
- val = valueToProperty(f.val);
- }
- var property = {
- property: { name: f.name },
- operator: OP_TO_OPERATOR[f.op],
- value: val
+function queryToQueryProto(query) {
+ var OP_TO_OPERATOR = {
+ '=': 'EQUAL',
+ '>': 'GREATER_THAN',
+ '>=': 'GREATER_THAN_OR_EQUAL',
+ '<': 'LESS_THAN',
+ '<=': 'LESS_THAN_OR_EQUAL',
+ HAS_ANCESTOR: 'HAS_ANCESTOR'
+ };
+
+ var SIGN_TO_ORDER = {
+ '-': 'DESCENDING',
+ '+': 'ASCENDING'
+ };
+
+ var queryProto = {
+ distinctOn: query.groupByVal.map(function(groupBy) {
+ return {
+ name: groupBy
};
- return { property_filter: property };
- });
- query.filter = {
- composite_filter: { filter: filters, operator: 'AND' }
- };
+ }),
+
+ kind: query.kinds.map(function(kind) {
+ return {
+ name: kind
+ };
+ }),
+
+ order: query.orders.map(function(order) {
+ return {
+ property: {
+ name: order.name
+ },
+ direction: SIGN_TO_ORDER[order.sign]
+ };
+ }),
+
+ projection: query.selectVal.map(function(select) {
+ return {
+ property: {
+ name: select
+ }
+ };
+ })
+ };
+
+ if (query.endVal) {
+ if (is.string(query.endVal)) {
+ queryProto.endCursor = query.endVal;
+ } else {
+ queryProto.endCursor = query.endVal.toString('base64');
+ }
}
- query.order = q.orders.map(function(o) {
- return {
- property: { name: o.name },
- direction: SIGN_TO_ORDER[o.sign]
+
+ if (query.limitVal > 0) {
+ queryProto.limit = {
+ value: query.limitVal
};
- });
- query.group_by = q.groupByVal.map(function(g) {
- return { name: g };
- });
- // pagination
- if (q.startVal) {
- query.start_cursor = new Buffer(q.startVal, 'base64');
}
- if (q.endVal) {
- query.end_cursor = new Buffer(q.endVal, 'base64');
- }
- if (q.offsetVal > 0) {
- query.offset = q.offsetVal;
+
+ if (query.offsetVal > 0) {
+ queryProto.offset = query.offsetVal;
}
- if (q.limitVal > 0) {
- query.limit = q.limitVal;
+
+ if (query.startVal) {
+ if (is.string(query.startVal)) {
+ queryProto.startCursor = query.startVal;
+ } else {
+ queryProto.startCursor = query.startVal.toString('base64');
+ }
}
- return query;
-}
-module.exports.queryToQueryProto = queryToQueryProto;
+ if (query.filters.length > 0) {
+ var filters = query.filters.map(function(filter) {
+ var value = {};
-/**
- * Does a value exist?
- *
- * @todo If protobufjs had hasFieldname support, we wouldn't need a utility.
- * Later address it on Protobuf.js.
- *
- * @param {*} value - Value.
- * @return {boolean}
- */
-function exists(value) {
- return (value !== null && value !== undefined);
+ if (filter.name === '__key__') {
+ value.keyValue = entity.keyToKeyProto(filter.val);
+ } else {
+ value = entity.encodeValue(filter.val);
+ }
+
+ return {
+ propertyFilter: {
+ property: {
+ name: filter.name
+ },
+ op: OP_TO_OPERATOR[filter.op],
+ value: value
+ }
+ };
+ });
+
+ queryProto.filter = {
+ compositeFilter: {
+ filters: filters,
+ op: 'AND'
+ }
+ };
+ }
+
+ return queryProto;
}
+
+entity.queryToQueryProto = queryToQueryProto;
diff --git a/lib/datastore/index.js b/lib/datastore/index.js
index 71d279c24a7..995ce85ffdf 100644
--- a/lib/datastore/index.js
+++ b/lib/datastore/index.js
@@ -20,6 +20,16 @@
'use strict';
+var arrify = require('arrify');
+var is = require('is');
+var modelo = require('modelo');
+
+/**
+ * @type {module:datastore/request}
+ * @private
+ */
+var DatastoreRequest = require('./request.js');
+
/**
* @type {module:datastore/entity}
* @private
@@ -27,52 +37,49 @@
var entity = require('./entity.js');
/**
- * @type {module:common/util}
+ * @type {module:datastore/query}
* @private
*/
-var util = require('../common/util.js');
+var Query = require('./query.js');
/**
- * @type {module:datastore/dataset}
+ * @type {module:common/grpcService}
* @private
*/
-var Dataset = require('./dataset.js');
+var GrpcService = require('../common/grpc-service.js');
-/*! Developer Documentation
- *
- * Invoking the Datastore class allows you to provide configuration up-front.
- * This configuration will be used for future invocations of the returned
- * `dataset` method.
- *
- * @example
- * var datastore = gcloud.datastore;
- *
- * // datastore.dataset();
- * //
- * // is equal to...
- * //
- * // datastore.dataset({
- * // projectId: 'grape-spaceship-123',
- * // keyFilename: '/path/to/keyfile.json'
- * // });
+/**
+ * @type {module:datastore/transaction}
+ * @private
*/
+var Transaction = require('./transaction.js');
+
/**
- * The example below will demonstrate the different usage patterns your app may
- * need to support to retrieve a datastore object.
+ * @type {module:common/util}
+ * @private
+ */
+var util = require('../common/util.js');
+
+/**
+ * Interact with the
+ * [Google Cloud Datastore](https://developers.google.com/datastore/).
*
- * @alias module:datastore
* @constructor
+ * @alias module:datastore
+ * @mixes module:datastore/request
*
* @classdesc
- * The `gcloud.datastore` object gives you some convenience methods, as well as
- * exposes a `dataset` function. This will allow you to create a `dataset`,
- * which is the object from which you will interact with the Google Cloud
+ * The `gcloud.datastore` object allows you to interact with Google Cloud
* Datastore.
*
* To learn more about Datastore, read the
* [Google Cloud Datastore Concepts Overview](https://cloud.google.com/datastore/docs/concepts/overview)
*
- * @param {object} options - [Configuration object](#/docs).
+ * @param {object=} options - [Configuration object](#/docs).
+ * @param {string=} options.apiEndpoint - Override the default API endpoint used
+ * to reach Datastore. This is useful for connecting to your local Datastore
+ * server (usually "http://localhost:8080").
+ * @param {string} options.namespace - Namespace to isolate transactions to.
*
* @example
* var gcloud = require('gcloud')({
@@ -80,59 +87,82 @@ var Dataset = require('./dataset.js');
* keyFilename: '/path/to/keyfile.json'
* });
*
- * var datastore = gcloud.datastore;
+ * var datastore = gcloud.datastore();
+ *
+ * //-
+ * // Connect to your local Datastore server.
+ * //-
+ * var datastore = gcloud.datastore({
+ * apiEndpoint: 'http://localhost:8080'
+ * });
+ *
+ * //-
+ * // The `DATASTORE_EMULATOR_HOST` environment variable is also recognized. If
+ * // set, you may omit the `apiEndpoint` option.
+ * //
+ * // Additionally, `DATASTORE_PROJECT_ID` is recognized. If you have this set,
+ * // you don't need to provide a `projectId`.
+ * //-
*/
function Datastore(options) {
- this.config = options || {};
+ if (!(this instanceof Datastore)) {
+ options = util.normalizeArguments(this, options, {
+ projectIdRequired: false
+ });
+ return new Datastore(options);
+ }
+
+ this.defaultBaseUrl_ = 'datastore.googleapis.com';
+ this.determineBaseUrl_(options.apiEndpoint);
+
+ this.namespace = options.namespace;
+ this.projectId = process.env.DATASTORE_PROJECT_ID || options.projectId;
+
+ var config = {
+ projectIdRequired: false,
+ baseUrl: this.baseUrl_,
+ customEndpoint: this.customEndpoint_,
+ service: 'datastore',
+ apiVersion: 'v1beta3',
+ scopes: ['https://www.googleapis.com/auth/datastore']
+ };
+
+ GrpcService.call(this, config, options);
}
-/*! Developer Documentation
+modelo.inherits(Datastore, DatastoreRequest, GrpcService);
+
+/**
+ * Helper function to get a Datastore Double object.
*
- * Use this static method to create a dataset without any pre-configured
- * options.
+ * @param {number} value - The double value.
+ * @return {object}
*
* @example
- * var datastore = gcloud.datastore;
- *
- * // Create a Dataset object.
- * var dataset = datastore.dataset({
- * projectId: 'grape-spaceship-123',
- * keyFilename: '/path/to/keyfile.json'
- * });
+ * var threeDouble = gcloud.datastore.double(3.0);
*/
-Datastore.dataset = Dataset;
+Datastore.double = function(value) {
+ return new entity.Double(value);
+};
-/*! Developer Documentation
- *
- * Create a dataset using the instance method when you want to use your
- * pre-configured options from the Datastore instance.
- *
- * @param {object=} options - Configuration object.
- * @return {module:datastore/dataset}
- */
/**
- * Create a Dataset object to reference an existing dataset.
+ * Helper function to get a Datastore Geo Point object.
*
- * @param {object=} options - [Configuration object](#/docs).
- * @param {string=} options.apiEndpoint - Override the default API endpoint used
- * to reach Datastore. This is useful for connecting to your local Datastore
- * server (usually "http://localhost:8080").
- * @param {string} options.namespace - Namespace to isolate transactions to.
- * @return {module:datastore/dataset}
+ * @param {object} coordinates - Coordinate value.
+ * @param {number} coordinates.latitude - Latitudinal value.
+ * @param {number} coordinates.longitude - Longitudinal value.
+ * @return {object}
*
* @example
- * var gcloud = require('gcloud')({
- * keyFilename: '/path/to/keyfile.json',
- * projectId: 'my-project'
- * });
+ * var coordinates = {
+ * latitude: 40.6894,
+ * longitude: -74.0447
+ * };
*
- * var datastore = gcloud.datastore;
- * var dataset = datastore.dataset();
+ * var geoPoint = gcloud.datastore.geoPoint(coordinates);
*/
-Datastore.prototype.dataset = function(options) {
- options = options || {};
- // Mix in global config data to the provided options.
- return new Dataset(util.extendGlobalConfig(this.config, options));
+Datastore.geoPoint = function(coordinates) {
+  return new entity.GeoPoint(coordinates);
};
/**
@@ -142,9 +172,6 @@ Datastore.prototype.dataset = function(options) {
* @return {object}
*
* @example
- * var gcloud = require('gcloud');
- *
- * // Create an Integer.
* var sevenInteger = gcloud.datastore.int(7);
*/
Datastore.int = function(value) {
@@ -152,19 +179,146 @@ Datastore.int = function(value) {
};
/**
- * Helper function to get a Datastore Double object.
+ * Create a query for the specified kind.
*
- * @param {number} value - The double value.
- * @return {object}
+ * @resource [Datastore Queries]{@link https://cloud.google.com/datastore/docs/concepts/queries}
+ *
+ * @see {module:datastore/query}
+ *
+ * @param {string=} namespace - Namespace.
+ * @param {string} kind - The kind to query.
+ * @return {module:datastore/query}
+ */
+Datastore.prototype.createQuery = function(namespace, kind) {
+ if (arguments.length === 1) {
+ kind = arrify(namespace);
+ namespace = this.namespace;
+ }
+
+ return new Query(namespace, arrify(kind));
+};
+
+/**
+ * Helper to create a Key object, scoped to the instance's namespace by default.
+ *
+ * You may also specify a configuration object to define a namespace and path.
+ *
+ * @param {...*=} options - Key path. To specify or override a namespace,
+ * you must use an object here to explicitly state it.
+ * @param {object=} options - Configuration object.
+ * @param {...*=} options.path - Key path.
+ * @param {string=} options.namespace - Optional namespace.
+ * @return {Key} A newly created Key from the options given.
*
* @example
- * var gcloud = require('gcloud');
+ * //-
+ * // Create an incomplete key with a kind value of `Company`.
+ * //-
+ * var key = datastore.key('Company');
*
- * // Create a Double.
- * var threeDouble = gcloud.datastore.double(3.0);
+ * //-
+ * // Create a complete key with a kind value of `Company` and id `123`.
+ * //-
+ * var key = datastore.key(['Company', 123]);
+ *
+ * //-
+ * // Create a complete key with a kind value of `Company` and name `Google`.
+ * // Note: `id` is used for numeric identifiers and `name` is used otherwise.
+ * //-
+ * var key = datastore.key(['Company', 'Google']);
+ *
+ * //-
+ * // Create a complete key from a provided namespace and path.
+ * //-
+ * var key = datastore.key({
+ * namespace: 'My-NS',
+ * path: ['Company', 123]
+ * });
*/
-Datastore.double = function(value) {
- return new entity.Double(value);
+Datastore.prototype.key = function(options) {
+ options = is.object(options) ? options : {
+ namespace: this.namespace,
+ path: arrify(options)
+ };
+
+ return new entity.Key(options);
+};
+
+/**
+ * Run a function in the context of a new transaction. Transactions allow you to
+ * perform multiple operations, committing your changes atomically. When you are
+ * finished making your changes within the transaction, run the done() function
+ * provided in the callback function to commit your changes. See an example
+ * below for more information.
+ *
+ * @param {function} fn - The function to run in the context of a transaction.
+ * @param {module:datastore/transaction} fn.transaction - The Transaction.
+ * @param {function} fn.done - Function used to commit changes.
+ * @param {function} callback - The callback function.
+ * @param {?error} callback.err - An error returned while making this request
+ *
+ * @example
+ * datastore.runInTransaction(function(transaction, done) {
+ * // From the `transaction` object, execute datastore methods as usual.
+ * transaction.get(datastore.key(['Company', 123]), function(err, entity) {
+ * if (err) {
+ * transaction.rollback(done);
+ * return;
+ * }
+ *
+ * // Call `done` when you're ready to commit your changes.
+ * done();
+ * });
+ * }, function(err, apiResponse) {});
+ */
+Datastore.prototype.runInTransaction = function(fn, callback) {
+ var newTransaction = this.createTransaction_();
+
+ newTransaction.begin_(function(err, resp) {
+ if (err) {
+ callback(err, resp);
+ return;
+ }
+
+ fn(newTransaction, newTransaction.commit_.bind(newTransaction, callback));
+ });
+};
+
+/**
+ * Create a new Transaction object.
+ *
+ * @return {module:datastore/transaction}
+ * @private
+ */
+Datastore.prototype.createTransaction_ = function() {
+ return new Transaction(this);
+};
+
+/**
+ * Determine the appropriate endpoint to use for API requests. If not explicitly
+ * defined, check for the "DATASTORE_EMULATOR_HOST" environment variable, used
+ * to connect to a local Datastore server.
+ *
+ * @private
+ *
+ * @param {string} customApiEndpoint - Custom API endpoint.
+ */
+Datastore.prototype.determineBaseUrl_ = function(customApiEndpoint) {
+ var baseUrl = this.defaultBaseUrl_;
+ var leadingProtocol = new RegExp('^https*://');
+ var trailingSlashes = new RegExp('/*$');
+
+ if (customApiEndpoint) {
+ baseUrl = customApiEndpoint;
+ this.customEndpoint_ = true;
+ } else if (process.env.DATASTORE_EMULATOR_HOST) {
+ baseUrl = process.env.DATASTORE_EMULATOR_HOST;
+ this.customEndpoint_ = true;
+ }
+
+ this.baseUrl_ = baseUrl
+ .replace(leadingProtocol, '')
+ .replace(trailingSlashes, '');
};
module.exports = Datastore;
diff --git a/lib/datastore/pb.js b/lib/datastore/pb.js
deleted file mode 100644
index 68ae909f868..00000000000
--- a/lib/datastore/pb.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @private
- * @module datastore/pb
- */
-
-'use strict';
-
-var path = require('path');
-var protobufjs = require('protobufjs');
-
-/** @const {string} Path to the proto file. */
-var PROTO_FILE = path.join(__dirname, 'datastore_v1.proto');
-
-/**
- * protobuf.
- *
- * @type {object}
- */
-module.exports = protobufjs.loadProtoFile(PROTO_FILE).build().pb;
diff --git a/lib/datastore/query.js b/lib/datastore/query.js
index 4541f90fc66..fc9c8565556 100644
--- a/lib/datastore/query.js
+++ b/lib/datastore/query.js
@@ -38,19 +38,13 @@ var arrify = require('arrify');
* @param {string} kind - Kind to query.
*
* @example
- * var dataset = gcloud.datastore.dataset({
+ * var gcloud = require('gcloud')({
+ * keyFilename: '/path/to/keyfile.json',
* projectId: 'grape-spaceship-123'
* });
*
- * // If your dataset was scoped to a namespace at initialization, your query
- * // will likewise be scoped to that namespace.
- * var query = dataset.createQuery('Lion');
- *
- * // However, you may override the namespace per query.
- * var query = dataset.createQuery('AnimalNamespace', 'Lion');
- *
- * // You may also remove the namespace altogether.
- * var query = dataset.createQuery(null, 'Lion');
+ * var datastore = gcloud.datastore();
+ * var query = datastore.createQuery('AnimalNamespace', 'Lion');
*/
function Query(namespace, kinds) {
if (!kinds) {
@@ -82,18 +76,18 @@ function Query(namespace, kinds) {
* @example
* // Retrieve a list of people related to person "1234",
* // disabling auto pagination
- * var query = dataset.createQuery('Person')
- * .hasAncestor(dataset.key(['Person', 1234]))
+ * var query = datastore.createQuery('Person')
+ * .hasAncestor(datastore.key(['Person', 1234]))
* .autoPaginate(false);
*
- * var callback = function(err, entities, nextQuery, apiResponse) {
+ * function callback(err, entities, nextQuery, apiResponse) {
* if (nextQuery) {
* // More results might exist, so we'll manually fetch them
- * dataset.runQuery(nextQuery, callback);
+ * datastore.runQuery(nextQuery, callback);
* }
- * };
+ * }
*
- * dataset.runQuery(query, callback);
+ * datastore.runQuery(query, callback);
*/
Query.prototype.autoPaginate = function(autoPaginateVal) {
this.autoPaginateVal = autoPaginateVal !== false;
@@ -124,15 +118,15 @@ Query.prototype.autoPaginate = function(autoPaginateVal) {
* // List all companies named Google that have less than 400 employees.
* //-
* var companyQuery = query
- * .filter('name', '=', 'Google')
+ * .filter('name', 'Google')
* .filter('size', '<', 400);
*
* //-
* // To filter by key, use `__key__` for the property name. Filter on keys
* // stored as properties is not currently supported.
* //-
- * var key = dataset.key(['Company', 'Google']);
- * var keyQuery = query.filter('__key__', '=', key);
+ * var key = datastore.key(['Company', 'Google']);
+ * var keyQuery = query.filter('__key__', key);
*/
Query.prototype.filter = function(property, operator, value) {
if (arguments.length === 2) {
@@ -158,7 +152,7 @@ Query.prototype.filter = function(property, operator, value) {
* @return {module:datastore/query}
*
* @example
- * var ancestoryQuery = query.hasAncestor(dataset.key(['Parent', 123]));
+ * var ancestryQuery = query.hasAncestor(datastore.key(['Parent', 123]));
*/
Query.prototype.hasAncestor = function(key) {
this.filters.push({ name: '__key__', op: 'HAS_ANCESTOR', val: key });
diff --git a/lib/datastore/request.js b/lib/datastore/request.js
index 5eb6fb77d76..2e36d2e92fb 100644
--- a/lib/datastore/request.js
+++ b/lib/datastore/request.js
@@ -22,17 +22,11 @@
var arrify = require('arrify');
var concat = require('concat-stream');
-var format = require('string-format-obj');
+var extend = require('extend');
var is = require('is');
var propAssign = require('prop-assign');
-var request = require('request').defaults({
- pool: {
- maxSockets: Infinity
- }
-});
var split = require('split-array-stream');
var through = require('through2');
-var extend = require('extend');
/**
* @type {module:datastore/entity}
@@ -40,17 +34,11 @@ var extend = require('extend');
*/
var entity = require('./entity.js');
-/**
- * @type {module:datastore/pb}
- * @private
- */
-var pb = require('./pb.js');
-
/**
* @type {module:datastore/query}
* @private
*/
-var Query = require('./query');
+var Query = require('./query.js');
/**
* @type {module:common/streamrouter}
@@ -64,35 +52,23 @@ var streamRouter = require('../common/stream-router.js');
*/
var util = require('../common/util.js');
-/**
- * @const {string} Non-transaction mode key.
- * @private
- */
-var MODE_NON_TRANSACTIONAL = 'NON_TRANSACTIONAL';
-
-/**
- * @const {string} Transaction mode key.
- * @private
- */
-var MODE_TRANSACTIONAL = 'TRANSACTIONAL';
-
/*! Developer Documentation
*
* Handles request logic for Datastore.
*
- * Creates requests to the Dataset endpoint. Designed to be inherited by
- * datastore.Dataset and datastore.Transaction objects.
+ * Creates requests to the Datastore endpoint. Designed to be inherited by
+ * {module:datastore} and {module:datastore/transaction} objects.
*
* @example
* // This is how to create a transaction object directly using this Transaction
* // class. The following transaction object is created for use in the examples
* // in this file below.
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
+ * var datastore = gcloud.datastore({ projectId: 'project-id' });
* var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
+ * var transaction = new Transaction(datastore, 'my-project-id');
* transaction.id = '1234'; // Give the transaction an ID.
*/
-/*
+/**
* Handle logic for Datastore API operations.
*
* @constructor
@@ -101,13 +77,138 @@ var MODE_TRANSACTIONAL = 'TRANSACTIONAL';
*/
function DatastoreRequest() {}
+/**
+ * Generate IDs without creating entities.
+ *
+ * @param {Key} incompleteKey - The key object to complete.
+ * @param {number} n - How many IDs to generate.
+ * @param {function} callback - The callback function.
+ * @param {?error} callback.err - An error returned while making this request
+ * @param {array} callback.keys - The generated IDs
+ * @param {object} callback.apiResponse - The full API response.
+ *
+ * @example
+ * var incompleteKey = datastore.key(['Company']);
+ *
+ * //-
+ * // The following call will create 100 new IDs from the Company kind, which
+ * // exists under the default namespace.
+ * //-
+ * datastore.allocateIds(incompleteKey, 100, function(err, keys) {});
+ *
+ * //-
+ * // Or, if you're using a transaction object.
+ * //-
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.allocateIds(incompleteKey, 100, function(err, keys) {
+ * done();
+ * });
+ * });
+ *
+ * //-
+ * // You may prefer to create IDs from a non-default namespace by providing an
+ * // incomplete key with a namespace. Similar to the previous example, the call
+ * // below will create 100 new IDs, but from the Company kind that exists under
+ * // the "ns-test" namespace.
+ * //-
+ * var incompleteKey = datastore.key({
+ * namespace: 'ns-test',
+ * path: ['Company']
+ * });
+ *
+ * function callback(err, keys, apiResponse) {}
+ *
+ * datastore.allocateIds(incompleteKey, 100, callback);
+ */
+DatastoreRequest.prototype.allocateIds = function(incompleteKey, n, callback) {
+ if (entity.isKeyComplete(incompleteKey)) {
+ throw new Error('An incomplete key should be provided.');
+ }
+
+ var incompleteKeys = [];
+ for (var i = 0; i < n; i++) {
+ incompleteKeys.push(entity.keyToKeyProto(incompleteKey));
+ }
+
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'allocateIds'
+ };
+
+ var reqOpts = {
+ keys: incompleteKeys
+ };
+
+ this.request_(protoOpts, reqOpts, function(err, resp) {
+ if (err) {
+ callback(err, null, resp);
+ return;
+ }
+
+ var keys = (resp.keys || []).map(entity.keyFromKeyProto);
+
+ callback(null, keys, resp);
+ });
+};
+
+/**
+ * Delete all entities identified with the specified key(s).
+ *
+ * @param {Key|Key[]} key - Datastore key object(s).
+ * @param {function} callback - The callback function.
+ * @param {?error} callback.err - An error returned while making this request
+ * @param {object} callback.apiResponse - The full API response.
+ *
+ * @example
+ * var key = datastore.key(['Company', 123]);
+ * datastore.delete(key, function(err, apiResp) {});
+ *
+ * //-
+ * // Or, if you're using a transaction object.
+ * //-
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.delete(key, function(err, apiResp) {
+ * done();
+ * });
+ * });
+ *
+ * //-
+ * // Delete multiple entities at once.
+ * //-
+ * datastore.delete([
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
+ * ], function(err, apiResponse) {});
+ */
+DatastoreRequest.prototype.delete = function(keys, callback) {
+ callback = callback || util.noop;
+
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'commit'
+ };
+
+ var reqOpts = {
+ mutations: arrify(keys).map(function(key) {
+ return {
+ delete: entity.keyToKeyProto(key)
+ };
+ })
+ };
+
+ if (this.id) {
+ this.requests_.push(reqOpts);
+ return;
+ }
+
+ this.request_(protoOpts, reqOpts, callback);
+};
+
/**
* Retrieve the entities identified with the specified key(s) in the current
* transaction. Get operations require a valid key to retrieve the
* key-identified entity from Datastore.
*
- * @resource [Datasets: lookup API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/lookup}
- *
* @throws {Error} If at least one Key object is not provided.
*
* @param {Key|Key[]} keys - Datastore key object(s).
@@ -120,14 +221,14 @@ function DatastoreRequest() {}
* //-
* // Get a single entity.
* //-
- * var key = dataset.key(['Company', 123]);
+ * var key = datastore.key(['Company', 123]);
*
- * dataset.get(key, function(err, entity) {});
+ * datastore.get(key, function(err, entity) {});
*
* //-
* // Or, if you're using a transaction object.
* //-
- * dataset.runInTransaction(function(transaction, done) {
+ * datastore.runInTransaction(function(transaction, done) {
* transaction.get(key, function(err, entity) {
* done();
* });
@@ -137,16 +238,16 @@ function DatastoreRequest() {}
* // Get multiple entities at once with a callback.
* //-
* var keys = [
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
* ];
*
- * dataset.get(keys, function(err, entities) {});
+ * datastore.get(keys, function(err, entities) {});
*
* //-
* // Or, get the entities as a readable object stream.
* //-
- * dataset.get(keys)
+ * datastore.get(keys)
* .on('error', function(err) {})
* .on('data', function(entity) {
* // entity is an entity object.
@@ -159,13 +260,13 @@ function DatastoreRequest() {}
* // Here's how you would update the value of an entity with the help of the
* // `save` method.
* //-
- * dataset.get(key, function(err, entity) {
+ * datastore.get(key, function(err, entity) {
* if (err) {
* // Error handling omitted.
* }
*
* entity.data.newValue = true;
- * dataset.save(entity, function(err) {});
+ * datastore.save(entity, function(err) {});
* });
*/
DatastoreRequest.prototype.get = function(keys, callback) {
@@ -212,19 +313,161 @@ DatastoreRequest.prototype.get = function(keys, callback) {
});
}
- this.makeReq_('lookup', { key: keys }, onApiResponse);
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'lookup'
+ };
+
+ var reqOpts = {
+ keys: keys
+ };
+
+ this.request_(protoOpts, reqOpts, onApiResponse);
return stream;
};
/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `insert`.
+ * Maps to {module:datastore#save}, forcing the method to be `insert`.
*/
DatastoreRequest.prototype.insert = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'insert'));
this.save(entities, callback);
};
+/**
+ * Datastore allows you to query entities by kind, filter them by property
+ * filters, and sort them by a property name. Projection and pagination are also
+ * supported.
+ *
+ * If you provide a callback, the query is run, and the results are returned as
+ * the second argument to your callback. A third argument may also exist, which
+ * is a query object that uses the end cursor from the previous query as the
+ * starting cursor for the next query. You can pass that object back to this
+ * method to see if more results exist.
+ *
+ * You may also omit the callback to this function to trigger streaming mode.
+ *
+ * See below for examples of both approaches.
+ *
+ * @param {module:datastore/query} q - Query object.
+ * @param {function=} callback - The callback function. If omitted, a readable
+ * stream instance is returned.
+ * @param {?error} callback.err - An error returned while making this request
+ *     (may be null).
+ * @param {module:datastore/entity[]} callback.entities - The list of entities
+ *     returned by this query. Note that this is a single page of entities,
+ *     not necessarily all of the entities.
+ * @param {?module:datastore/query} callback.nextQuery - If present, run
+ *     another query with this object to check for more results. It uses the
+ *     end cursor from this response as the starting cursor for the next
+ *     query, as described above. If absent (null), no further results
+ *     remain.
+ * @param {object} callback.apiResponse - The full API response, useful for
+ *     inspecting the raw query batch returned by the Datastore API (e.g.
+ *     `apiResponse.batch.moreResults`).
+ *
+ * @example
+ * //-
+ * // Where you see `transaction`, assume this is the context that's relevant to
+ * // your use, whether that be a Datastore or a Transaction object.
+ * //-
+ * var query = datastore.createQuery('Lion');
+ *
+ * datastore.runQuery(query, function(err, entities) {});
+ *
+ * //-
+ * // Or, if you're using a transaction object.
+ * //-
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.runQuery(query, function(err, entities) {
+ * done();
+ * });
+ * });
+ *
+ * //-
+ * // To control how many API requests are made and page through the results
+ * // manually, call `autoPaginate(false)` on your query.
+ * //-
+ * var manualPageQuery = datastore.createQuery('Lion').autoPaginate(false);
+ *
+ * function callback(err, entities, nextQuery, apiResponse) {
+ * if (nextQuery) {
+ * // More results might exist.
+ * datastore.runQuery(nextQuery, callback);
+ * }
+ * }
+ *
+ * datastore.runQuery(manualPageQuery, callback);
+ *
+ * //-
+ * // If you omit the callback, runQuery will automatically call subsequent
+ * // queries until no results remain. Entity objects will be pushed as they are
+ * // found.
+ * //-
+ * datastore.runQuery(query)
+ * .on('error', console.error)
+ * .on('data', function (entity) {})
+ * .on('end', function() {
+ * // All entities retrieved.
+ * });
+ *
+ * //-
+ * // A keys-only query returns just the keys of the result entities instead of
+ * // the entities themselves, at lower latency and cost.
+ * //-
+ * var keysOnlyQuery = datastore.createQuery('Lion').select('__key__');
+ *
+ * datastore.runQuery(keysOnlyQuery, function(err, entities) {
+ * // entities[].key = Key object
+ * // entities[].data = Empty object
+ * });
+ */
+DatastoreRequest.prototype.runQuery = function(query, callback) {
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'runQuery'
+ };
+
+ var reqOpts = {
+ readOptions: {},
+ query: entity.queryToQueryProto(query)
+ };
+
+ if (query.namespace) {
+ reqOpts.partitionId = {
+ namespaceId: query.namespace
+ };
+ }
+
+ this.request_(protoOpts, reqOpts, function(err, resp) {
+ if (err) {
+ callback(err, null, null, resp);
+ return;
+ }
+
+ var entities = [];
+ var nextQuery = null;
+
+ if (resp.batch.entityResults) {
+ entities = entity.formatArray(resp.batch.entityResults);
+ }
+
+ var NOT_FINISHED_CODE = 'MORE_RESULTS_AFTER_LIMIT';
+
+ if (resp.batch.moreResults === NOT_FINISHED_CODE) {
+ var endCursor = resp.batch.endCursor;
+ var offset = query.offsetVal === -1 ? 0 : query.offsetVal;
+ var nextOffset = offset - resp.batch.skippedResults;
+
+ nextQuery = extend(true, new Query(), query);
+ nextQuery.start(endCursor).offset(nextOffset);
+ }
+
+ callback(null, entities, nextQuery, resp);
+ });
+};
+
/**
* Insert or update the specified object(s). If a key is incomplete, its
* associated object is inserted and the original Key object is updated to
@@ -240,8 +483,6 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* included in *all* indexes, you must supply an entity's `data` property as an
* array. See below for an example.
*
- * @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit}
- *
* @borrows {module:datastore/transaction#save} as save
*
* @throws {Error} If an unrecognized method is provided.
@@ -249,7 +490,7 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* @param {object|object[]} entities - Datastore key object(s).
* @param {Key} entities.key - Datastore key object.
* @param {string=} entities.method - Optional method to explicity use for save.
- * The choices include 'insert', 'update', 'upsert' and 'insert_auto_id'.
+ * The choices include 'insert', 'update', and 'upsert'.
* @param {object|object[]} entities.data - Data to save with the provided key.
* If you provide an array of objects, you must use the explicit syntax:
* `name` for the name of the property and `value` for its value. You may
@@ -266,9 +507,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
- * var key = dataset.key('Company');
+ * var key = datastore.key('Company');
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* rating: '10'
@@ -285,9 +526,9 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // original Key object used to save will be updated to contain the path with
* // the name instead of a generated ID.
* //-
- * var key = dataset.key(['Company', 'donutshack']);
+ * var key = datastore.key(['Company', 'donutshack']);
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* name: 'DonutShack',
@@ -305,12 +546,12 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* //
* // Here we are providing a key with namespace.
* //-
- * var key = dataset.key({
+ * var key = datastore.key({
* namespace: 'my-namespace',
* path: ['Company', 'donutshack']
* });
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
* name: 'DonutShack',
@@ -329,44 +570,52 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
- * var key = dataset.key('Company');
+ * var key = datastore.key('Company');
*
- * dataset.save({
+ * datastore.save({
* key: key,
* data: {
- * name: 'DonutShack', // strings
- * rating: gcloud.datastore.int(8), // ints
- * worth: gcloud.datastore.double(123456.78), // doubles
- * numDonutsServed: 45, // detect number type (int or double)
- * founded: new Date('Tue May 12 2015 15:30:00 GMT-0400 (EDT)'), // dates
- * isStartup: true, // booleans
- * donutEmoji: new Buffer('\uD83C\uDF69'), // buffers
- * keywords: [ 'donut', 'coffee', 'yum' ] // lists of objects
+ * name: 'DonutShack',
+ * rating: gcloud.datastore.int(10),
+ * worth: gcloud.datastore.double(123456.78),
+ * location: gcloud.datastore.geoPoint({
+ * latitude: 40.6894,
+ * longitude: -74.0447
+ * }),
+ * numDonutsServed: 45,
+ * founded: new Date('Tue May 12 2015 15:30:00 GMT-0400 (EDT)'),
+ * isStartup: true,
+ * donutEmoji: new Buffer('\uD83C\uDF69'),
+ * keywords: [
+ * 'donut',
+ * 'coffee',
+ * 'yum'
+ * ]
* }
- * }, function(err) {});
+ * }, function(err, apiResponse) {});
*
* //-
* // To specify an `excludeFromIndexes` value for a Datastore entity, pass in
- * // an array for the key's data. The above example would then look like:
+ * // an array for the key's data.
* //-
- * dataset.save({
- * key: dataset.key('Company'),
+ * datastore.save({
+ * key: datastore.key('Company'),
* data: [
* {
* name: 'rating',
- * value: '10',
- * excludeFromIndexes: false
+ * value: 10,
+ * excludeFromIndexes: true
* }
* ]
- * }, function(err) {});
+ * }, function(err, apiResponse) {});
*
* //-
* // Save multiple entities at once.
* //-
- * var companyKey = dataset.key(['Company', 123]);
- * var productKey = dataset.key(['Product', 'Computer']);
+ * var companyKey = datastore.key(['Company', 123]);
+ * var productKey = datastore.key(['Product', 'Computer']);
*
- * dataset.save([
+ * datastore.save([
* {
* key: companyKey,
* data: {
@@ -379,33 +628,30 @@ DatastoreRequest.prototype.insert = function(entities, callback) {
* vendor: 'Dell'
* }
* }
- * ], function(err) {});
+ * ], function(err, apiResponse) {});
*
* //-
* // Explicitly attempt to 'insert' a specific entity.
* //-
- * var userKey = dataset.key(['User', 'chilts']);
+ * var userKey = datastore.key(['User', 'chilts']);
*
- * dataset.save([
- * {
- * key: userKey,
- * method: 'insert', // force the method to 'insert'
- * data: {
- * fullName: 'Andrew Chilton'
- * }
+ * datastore.save({
+ * key: userKey,
+ * method: 'insert',
+ * data: {
+ * fullName: 'Andrew Chilton'
* }
- * ], function(err, apiResponse) {});
+ * }, function(err, apiResponse) {});
*/
DatastoreRequest.prototype.save = function(entities, callback) {
entities = arrify(entities);
- var insertIndexes = [];
-
- var mutation = {
- insert: [],
- update: [],
- upsert: [],
- insert_auto_id: []
+ var insertIndexes = {};
+ var mutations = [];
+ var methods = {
+ insert: true,
+ update: true,
+ upsert: true
};
// Iterate over the entity objects, build a proto from all keys and values,
@@ -413,65 +659,59 @@ DatastoreRequest.prototype.save = function(entities, callback) {
entities.forEach(function(entityObject, index) {
entityObject = extend(true, {}, entityObject);
+ var mutation = {};
var entityProto = {};
- var method = entityObject.method;
+ var method = 'upsert';
+
+ if (entityObject.method) {
+ if (methods[entityObject.method]) {
+ method = entityObject.method;
+ } else {
+ throw new Error('Method ' + entityObject.method + ' not recognized.');
+ }
+ }
+
+ if (!entity.isKeyComplete(entityObject.key)) {
+ insertIndexes[index] = true;
+ }
if (is.array(entityObject.data)) {
- entityProto.property = entityObject.data.map(function(data) {
- data.value = entity.valueToProperty(data.value);
+ entityProto.properties = entityObject.data.reduce(function(acc, data) {
+ var value = entity.encodeValue(data.value);
if (is.boolean(data.excludeFromIndexes)) {
- var indexed = !data.excludeFromIndexes;
+ var excluded = data.excludeFromIndexes;
+ var values = value.arrayValue && value.arrayValue.values;
- if (is.array(data.value.list_value)) {
- data.value.list_value =
- data.value.list_value.map(propAssign('indexed', indexed));
+ if (values) {
+ values = values.map(propAssign('excludeFromIndexes', excluded));
} else {
- data.value.indexed = indexed;
+ value.excludeFromIndexes = data.excludeFromIndexes;
}
-
- delete data.excludeFromIndexes;
}
- return data;
- });
+ acc[data.name] = value;
+
+ return acc;
+ }, {});
} else {
entityProto = entity.entityToEntityProto(entityObject.data);
}
entityProto.key = entity.keyToKeyProto(entityObject.key);
- if (method) {
- if (mutation[method]) {
- mutation[method].push(entityProto);
-
- if (method === 'insert_auto_id') {
- insertIndexes.push(index);
- }
- } else {
- throw new Error('Method ' + method + ' not recognized.');
- }
- } else {
- if (entity.isKeyComplete(entityObject.key)) {
- mutation.upsert.push(entityProto);
- } else {
- insertIndexes.push(index);
- mutation.insert_auto_id.push(entityProto);
- }
- }
+ mutation[method] = entityProto;
+ mutations.push(mutation);
});
- var req = {
- mutation: mutation
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'commit'
};
- if (this.id) {
- this.requests_.push(req);
- this.requestCallbacks_.push(onCommit);
- return;
- }
-
- this.makeReq_('commit', req, onCommit);
+ var reqOpts = {
+ mutations: mutations
+ };
function onCommit(err, resp) {
if (err || !resp) {
@@ -479,246 +719,31 @@ DatastoreRequest.prototype.save = function(entities, callback) {
return;
}
- var autoInserted = (resp.mutation_result.insert_auto_id_key || []);
- autoInserted.forEach(function(key, index) {
- var id = entity.keyFromKeyProto(key).id;
- entities[insertIndexes[index]].key.id = id;
+ arrify(resp.mutationResults).forEach(function(result, index) {
+ if (!result.key) {
+ return;
+ }
+
+ if (insertIndexes[index]) {
+ var id = entity.keyFromKeyProto(result.key).id;
+ entities[index].key.id = id;
+ }
});
callback(null, resp);
}
-};
-
-/**
- * Delete all entities identified with the specified key(s).
- *
- * @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit#mutation.delete}
- *
- * @param {Key|Key[]} key - Datastore key object(s).
- * @param {function} callback - The callback function.
- * @param {?error} callback.err - An error returned while making this request
- * @param {object} callback.apiResponse - The full API response.
- *
- * @example
- * dataset.delete(dataset.key(['Company', 123]), function(err, apiResp) {});
- *
- * //-
- * // Or, if you're using a transaction object.
- * //-
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.delete(dataset.key(['Company', 123]), function(err, apiResp) {
- * done();
- * });
- * });
- *
- * //-
- * // Delete multiple entities at once.
- * //-
- * dataset.delete([
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
- * ], function(err, apiResponse) {});
- */
-DatastoreRequest.prototype.delete = function(keys, callback) {
- callback = callback || util.noop;
-
- var req = {
- mutation: {
- delete: arrify(keys).map(entity.keyToKeyProto)
- }
- };
if (this.id) {
- this.requests_.push(req);
+ this.requests_.push(reqOpts);
+ this.requestCallbacks_.push(onCommit);
return;
}
- this.makeReq_('commit', req, callback);
+ this.request_(protoOpts, reqOpts, onCommit);
};
/**
- * Datastore allows you to query entities by kind, filter them by property
- * filters, and sort them by a property name. Projection and pagination are also
- * supported.
- *
- * If you provide a callback, the query is run, and the results are returned as
- * the second argument to your callback. A third argument may also exist, which
- * is a query object that uses the end cursor from the previous query as the
- * starting cursor for the next query. You can pass that object back to this
- * method to see if more results exist.
- *
- * You may also omit the callback to this function to trigger streaming mode.
- *
- * See below for examples of both approaches.
- *
- * @resource [Datasets: runQuery API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/runQuery}
- *
- * @param {module:datastore/query} q - Query object.
- * @param {function=} callback - The callback function. If omitted, a readable
- * stream instance is returned.
- * @param {?error} callback.err - An error returned while making this request
- * (may be null).
- * @param {object[]} callback.entities - A list of entity objects which match
- * the provided keys.
- * @param {?module:datastore/query} callback.nextQuery - If present, run another
- * query with this object to check for more results.
- * @param {object} callback.apiResponse - The full API response.
- *
- * @example
- * //-
- * // Where you see `transaction`, assume this is the context that's relevant to
- * // your use, whether that be a Dataset or a Transaction object.
- * //-
- * var query = dataset.createQuery('Lion');
- *
- * dataset.runQuery(query, function(err, entities) {});
- *
- * //-
- * // Or, if you're using a transaction object.
- * //-
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.runQuery(query, function(err, entities) {
- * done();
- * });
- * })
- *
- * //-
- * // To control how many API requests are made and page through the results
- * // manually, call `autoPaginate(false)` on your query.
- * //-
- * var manualPageQuery = dataset.createQuery('Lion').autoPaginate(false);
- *
- * var callback = function(err, entities, nextQuery, apiResponse) {
- * if (nextQuery) {
- * // More results might exist.
- * transaction.runQuery(nextQuery, callback);
- * }
- * };
- *
- * dataset.runQuery(manualPageQuery, callback);
- *
- * //-
- * // If you omit the callback, runQuery will automatically call subsequent
- * // queries until no results remain. Entity objects will be pushed as they are
- * // found.
- * //-
- * dataset.runQuery(query)
- * .on('error', console.error)
- * .on('data', function (entity) {})
- * .on('end', function() {
- * // All entities retrieved.
- * });
- *
- * //-
- * // A keys-only query returns just the keys of the result entities instead of
- * // the entities themselves, at lower latency and cost.
- * //-
- * var keysOnlyQuery = dataset.createQuery('Lion').select('__key__');
- *
- * dataset.runQuery(keysOnlyQuery, function(err, entities) {
- * // entities[].key = Key object
- * // entities[].data = Empty object
- * });
- */
-DatastoreRequest.prototype.runQuery = function(query, callback) {
- var req = {
- read_options: {},
- query: entity.queryToQueryProto(query)
- };
-
- if (query.namespace) {
- req.partition_id = {
- namespace: query.namespace
- };
- }
-
- this.makeReq_('runQuery', req, function(err, resp) {
- if (err) {
- callback(err, null, null, resp);
- return;
- }
-
- var entities = entity.formatArray(resp.batch.entity_result);
- var nextQuery = null;
-
- if (resp.batch.end_cursor && entities.length > 0) {
- var endCursor = resp.batch.end_cursor.toBase64();
-
- nextQuery = extend(true, new Query(), query);
- nextQuery.start(endCursor).offset(0);
- }
-
- callback(null, entities, nextQuery, resp);
- });
-};
-
-/**
- * Generate IDs without creating entities.
- *
- * @resource [Datasets: allocateIds API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/allocateIds}
- *
- * @param {Key} incompleteKey - The key object to complete.
- * @param {number} n - How many IDs to generate.
- * @param {function} callback - The callback function.
- * @param {?error} callback.err - An error returned while making this request
- * @param {array} callback.keys - The generated IDs
- * @param {object} callback.apiResponse - The full API response.
- *
- * @example
- * var incompleteKey = dataset.key(['Company']);
- *
- * // The following call will create 100 new IDs from the Company kind, which
- * // exists under the default namespace.
- * dataset.allocateIds(incompleteKey, 100, function(err, keys) {});
- *
- * //-
- * // Or, if you're using a transaction object.
- * //-
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.allocateIds(incompleteKey, 100, function(err, keys) {
- * done();
- * });
- * });
- *
- * // You may prefer to create IDs from a non-default namespace by providing an
- * // incomplete key with a namespace. Similar to the previous example, the call
- * // below will create 100 new IDs, but from the Company kind that exists under
- * // the "ns-test" namespace.
- * var incompleteKey = dataset.key({
- * namespace: 'ns-test',
- * path: ['Company']
- * });
- * var callback = function(err, keys, apiResponse) {};
- * dataset.allocateIds(incompleteKey, 100, callback);
- */
-DatastoreRequest.prototype.allocateIds = function(incompleteKey, n, callback) {
- if (entity.isKeyComplete(incompleteKey)) {
- throw new Error('An incomplete key should be provided.');
- }
-
- var incompleteKeys = [];
- for (var i = 0; i < n; i++) {
- incompleteKeys.push(entity.keyToKeyProto(incompleteKey));
- }
-
- var req = {
- key: incompleteKeys
- };
-
- this.makeReq_('allocateIds', req, function(err, resp) {
- if (err) {
- callback(err, null, resp);
- return;
- }
-
- var keys = (resp.key || []).map(entity.keyFromKeyProto);
-
- callback(null, keys, resp);
- });
-};
-
-/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `update`.
+ * Maps to {module:datastore#save}, forcing the method to be `update`.
*/
DatastoreRequest.prototype.update = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'update'));
@@ -726,7 +751,7 @@ DatastoreRequest.prototype.update = function(entities, callback) {
};
/**
- * Maps to {module:datastore/dataset#save}, forcing the method to be `upsert`.
+ * Maps to {module:datastore#save}, forcing the method to be `upsert`.
*/
DatastoreRequest.prototype.upsert = function(entities, callback) {
entities = arrify(entities).map(propAssign('method', 'upsert'));
@@ -742,98 +767,40 @@ DatastoreRequest.prototype.upsert = function(entities, callback) {
* @param {function} callback - The callback function.
*
* @private
- *
- * @example
- * var deleteRequest = {
- * mutation: {
- * delete: [] // datastore key objects.
- * }
- * };
- *
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
- * var callback = function(err, result, apiResponse) {};
- * var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
- * transaction.makeReq_('commit', deleteRequest, callback);
*/
-DatastoreRequest.prototype.makeReq_ = function(method, body, callback) {
- // TODO: Handle non-HTTP 200 cases.
+DatastoreRequest.prototype.request_ = function(protoOpts, reqOpts, callback) {
if (!callback) {
- callback = body;
- body = {};
+ callback = reqOpts;
+ reqOpts = {};
}
callback = callback || util.noop;
+ var isTransaction = is.defined(this.id);
+ var method = protoOpts.method;
+
+ reqOpts.projectId = this.projectId;
+
// Set properties to indicate if we're in a transaction or not.
if (method === 'commit') {
- if (this.id) {
- body.mode = MODE_TRANSACTIONAL;
- body.transaction = this.id;
+ if (isTransaction) {
+ reqOpts.mode = 'TRANSACTIONAL';
+ reqOpts.transaction = this.id;
} else {
- body.mode = MODE_NON_TRANSACTIONAL;
+ reqOpts.mode = 'NON_TRANSACTIONAL';
}
}
if (method === 'rollback') {
- body.transaction = this.id;
+ reqOpts.transaction = this.id;
}
- if (this.id && (method === 'lookup' || method === 'runQuery')) {
- body.read_options = body.read_options || {};
- body.read_options.transaction = this.id;
+ if (isTransaction && (method === 'lookup' || method === 'runQuery')) {
+ reqOpts.readOptions = reqOpts.readOptions || {};
+ reqOpts.readOptions.transaction = this.id;
}
- var pbKey = method[0].toUpperCase() + method.substr(1);
- var pbRequest = new pb[pbKey + 'Request'](body).toBuffer();
- var pbResponse = pb[pbKey + 'Response'];
-
- var reqOpts = {
- method: 'POST',
- uri: format('{apiEndpoint}/{path}/{datasetId}/{method}', {
- apiEndpoint: this.apiEndpoint,
- path: 'datastore/v1beta2/datasets',
- datasetId: this.datasetId,
- method: method
- }),
- body: is.empty(body) ? '' : pbRequest,
- encoding: null,
- headers: {
- 'Content-Type': 'application/x-protobuf'
- }
- };
-
- this.makeAuthenticatedRequest_(reqOpts, {
- onAuthenticated: function(err, authenticatedReqOpts) {
- if (err) {
- callback(err, null);
- return;
- }
-
- request(authenticatedReqOpts, function(err, resp, body) {
- if (err) {
- callback(err, null);
- return;
- }
-
- var parsedResp = util.parseHttpRespMessage(resp);
-
- if (parsedResp.err) {
- callback(parsedResp.err, null, parsedResp.resp);
- return;
- }
-
- var parsedBody = util.parseHttpRespBody(pbResponse.decode(body));
-
- if (parsedBody.err) {
- callback(parsedBody.err, null, parsedResp.resp);
- return;
- }
-
- callback(null, parsedBody.body, resp);
- });
- }
- });
+ this.request(protoOpts, reqOpts, callback);
};
/*! Developer Documentation
diff --git a/lib/datastore/transaction.js b/lib/datastore/transaction.js
index 72e66983da0..f1613059e63 100644
--- a/lib/datastore/transaction.js
+++ b/lib/datastore/transaction.js
@@ -21,41 +21,38 @@
'use strict';
var arrify = require('arrify');
+var extend = require('extend');
var nodeutil = require('util');
/**
- * @type {module:common/util}
+ * @type {module:datastore/request}
* @private
*/
-var util = require('../common/util.js');
+var DatastoreRequest = require('./request.js');
/**
- * @type {module:datastore/request}
+ * @type {module:common/util}
* @private
*/
-var DatastoreRequest = require('./request.js');
-
-var extend = require('extend');
+var util = require('../common/util.js');
/*! Developer Documentation
*
- * @param {module:common/connection#Connection} connection - An authenticated
- * connection to Google Cloud Datastore.
- * @param {string} datasetId - Dataset ID.
+ * @param {module:datastore} datastore - A Datastore instance.
*
* @example
* // This is how to create a transaction object directly using this Transaction
* // class. The following transaction object is created for use in the examples
* // in this file below.
- * var dataset = gcloud.datastore.dataset({ projectId: 'project-id' });
+ * var datastore = gcloud.datastore({ projectId: 'project-id' });
* var Transaction = require('gcloud/lib/datastore/transaction');
- * var transaction = new Transaction(dataset, 'my-project-id');
+ * var transaction = new Transaction(datastore);
* transaction.id = '1234'; // Give the transaction an ID.
*/
/**
* Build a Transaction object. Transactions will be created for you by
- * {module:datastore/dataset}. When you need to run a transactional
- * operation, use {module:datastore/dataset#runInTransaction}.
+ * {module:datastore}. When you need to run a transactional operation, use
+ * {module:datastore#runInTransaction}.
*
* @resource [Transactions Reference]{@link https://cloud.google.com/datastore/docs/concepts/transactions}
*
@@ -64,22 +61,19 @@ var extend = require('extend');
* @mixes module:datastore/request
*
* @example
- * var datastore = gcloud.datastore;
- *
- * var dataset = datastore.dataset({
+ * var datastore = gcloud.datastore({
* projectId: 'my-project',
* keyFilename: '/path/to/keyfile.json'
* });
*
- * dataset.runInTransaction(function(transaction, done) {
+ * datastore.runInTransaction(function(transaction, done) {
* // `transaction` is a Transaction object.
* }, function(err) {});
*/
-function Transaction(dataset, datasetId) {
- this.id = null;
- this.apiEndpoint = dataset.apiEndpoint;
- this.makeAuthenticatedRequest_ = dataset.makeAuthenticatedRequest_;
- this.datasetId = datasetId;
+function Transaction(datastore) {
+ this.projectId = datastore.projectId;
+
+ this.request = datastore.request.bind(datastore);
// A queue for entity modifications made during the transaction.
this.modifiedEntities_ = [];
@@ -93,48 +87,58 @@ function Transaction(dataset, datasetId) {
nodeutil.inherits(Transaction, DatastoreRequest);
-/**
- * Begin a remote transaction and identify the current transaction instance with
- * the remote transaction's ID.
+/*! Developer Documentation
*
- * @param {function} callback - The function to execute within the context of
- * a transaction.
+ * Below, we override two methods that we inherit from DatastoreRequest:
+ * `delete` and `save`. This is done because:
*
- * @private
+ * A) the documentation needs to be different for a transactional save, and
+ * B) we build up a "modifiedEntities_" array on this object, used to build
+ * the final commit request with.
+ */
+/**
+ * Delete all entities identified with the specified key(s) in the current
+ * transaction.
+ *
+ * @param {Key|Key[]} key - Datastore key object(s).
*
* @example
- * transaction.begin_(function(err) {
- * // Perform Datastore operations as usual.
+ * datastore.runInTransaction(function(transaction, done) {
+ * // Delete a single entity.
+ * transaction.delete(datastore.key(['Company', 123]));
+ *
+ * // Delete multiple entities at once.
+ * transaction.delete([
+ * datastore.key(['Company', 123]),
+ * datastore.key(['Product', 'Computer'])
+ * ]);
+ *
+ * done();
* });
*/
-Transaction.prototype.begin_ = function(callback) {
+Transaction.prototype.delete = function(entities) {
var that = this;
- callback = callback || util.noop;
-
- this.makeReq_('beginTransaction', function(err, resp) {
- if (err) {
- callback(err, resp);
- return;
- }
-
- that.id = resp.transaction;
-
- callback(null, resp);
+ arrify(entities).forEach(function(ent) {
+ that.modifiedEntities_.push({
+ entity: {
+ key: ent
+ },
+ method: 'delete',
+ args: [ent]
+ });
});
};
/**
* Reverse a transaction remotely and finalize the current transaction instance.
*
- * @resource [Datasets: rollback API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/rollback}
- *
* @param {function} callback - The callback function.
* @param {?error} callback.err - An error returned while making this request
* @param {object} callback.apiResponse - The full API response.
*
* @example
- * dataset.runInTransaction(function(transaction, done) {
+ * datastore.runInTransaction(function(transaction, done) {
* transaction.rollback(function(err, apiResponse) {
* if (err) {
* // Transaction could not be rolled back.
@@ -148,17 +152,159 @@ Transaction.prototype.rollback = function(callback) {
callback = callback || util.noop;
- this.makeReq_('rollback', function(err, resp) {
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'rollback'
+ };
+
+ this.request_(protoOpts, {}, function(err, resp) {
that.skipCommit = true;
callback(err || null, resp);
});
};
+/**
+ * Insert or update the specified object(s) in the current transaction. If a key
+ * is incomplete, its associated object is inserted and the original Key object
+ * is updated to contain the generated ID.
+ *
+ * This method will determine the correct Datastore method to execute (`upsert`,
+ * `insert`, `update`, and `insertAutoId`) by using the key(s) provided. For
+ * example, if you provide an incomplete key (one without an ID), the request
+ * will create a new entity and have its ID automatically assigned. If you
+ * provide a complete key, the entity will be updated with the data specified.
+ *
+ * By default, all properties are indexed. To prevent a property from being
+ * included in *all* indexes, you must supply an entity's `data` property as an
+ * array. See below for an example.
+ *
+ * @param {object|object[]} entities - Datastore entity object(s).
+ * @param {Key} entities.key - Datastore key object.
+ * @param {object|object[]} entities.data - Data to save with the provided key.
+ * If you provide an array of objects, you must use the explicit syntax:
+ * `name` for the name of the property and `value` for its value. You may
+ * also specify an `excludeFromIndexes` property, set to `true` or `false`.
+ *
+ * @example
+ * //-
+ * // Save a single entity.
+ * //
+ * // Notice that we are providing an incomplete key. After the transaction is
+ * // committed, the Key object held by the `key` variable will be populated
+ * // with a path containing its generated ID.
+ * //-
+ * var key = datastore.key('Company');
+ *
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.save({
+ * key: key,
+ * data: {
+ * rating: '10'
+ * }
+ * });
+ *
+ * done();
+ * });
+ *
+ * //-
+ * // To specify an `excludeFromIndexes` value for a Datastore entity, pass in
+ * // an array for the key's data. The above example would then look like:
+ * //-
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.save({
+ * key: key,
+ * data: [
+ * {
+ * name: 'rating',
+ * value: '10',
+ * excludeFromIndexes: false
+ * }
+ * ]
+ * });
+ *
+ * done();
+ * });
+ *
+ * //-
+ * // Save multiple entities at once.
+ * //-
+ * var companyKey = datastore.key(['Company', 123]);
+ * var productKey = datastore.key(['Product', 'Computer']);
+ *
+ * datastore.runInTransaction(function(transaction, done) {
+ * transaction.save([
+ * {
+ * key: companyKey,
+ * data: {
+ * HQ: 'Dallas, TX'
+ * }
+ * },
+ * {
+ * key: productKey,
+ * data: {
+ * vendor: 'Dell'
+ * }
+ * }
+ * ]);
+ *
+ * done();
+ * });
+ */
+Transaction.prototype.save = function(entities) {
+ var that = this;
+
+ arrify(entities).forEach(function(ent) {
+ that.modifiedEntities_.push({
+ entity: {
+ key: ent.key
+ },
+ method: 'save',
+ args: [ent]
+ });
+ });
+};
+
+/**
+ * Begin a remote transaction and identify the current transaction instance with
+ * the remote transaction's ID.
+ *
+ * @param {function} callback - The callback function invoked once the
+ *     remote transaction has begun (or with an error if it could not).
+ *
+ * @private
+ *
+ * @example
+ * transaction.begin_(function(err) {
+ * // Perform Datastore operations as usual.
+ * });
+ */
+Transaction.prototype.begin_ = function(callback) {
+ var that = this;
+
+ callback = callback || util.noop;
+
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'beginTransaction'
+ };
+
+ this.request_(protoOpts, {}, function(err, resp) {
+ if (err) {
+ callback(err, resp);
+ return;
+ }
+
+ that.id = resp.transaction;
+
+ callback(null, resp);
+ });
+};
+
/**
* Commit the remote transaction and finalize the current transaction instance.
* This function is provided as the `done` function in the callback of
- * `dataset.runInTransaction(function(transaction, done) {});`
+ * `datastore.runInTransaction(function(transaction, done) {});`
*
* @param {function} callback - The callback function.
*
@@ -238,13 +384,19 @@ Transaction.prototype.commit_ = function(callback) {
DatastoreRequest.prototype[method].call(that, args, util.noop);
});
+ var protoOpts = {
+ service: 'Datastore',
+ method: 'commit'
+ };
+
// Take the `req` array built previously, and merge them into one request to
// send as the final transactional commit.
- var req = this.requests_.reduce(function(acc, req) {
+ var reqOpts = this.requests_.reduce(function(acc, req) {
return extend(true, acc, req);
}, {});
- this.makeReq_('commit', req, function(err, resp) {
+
+ this.request_(protoOpts, reqOpts, function(err, resp) {
if (err) {
callback(err, resp);
return;
@@ -261,152 +413,4 @@ Transaction.prototype.commit_ = function(callback) {
});
};
-/*! Developer Documentation
- *
- * Below, we override two methods that we inherit from DatastoreRequest:
- * `delete` and `save`. This is done because:
- *
- * A) the documentation needs to be different for a transactional save, and
- * B) we build up a "modifiedEntities_" array on this object, used to build
- * the final commit request with.
- */
-/**
- * Delete all entities identified with the specified key(s) in the current
- * transaction.
- *
- * @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit#mutation.delete}
- *
- * @param {Key|Key[]} key - Datastore key object(s).
- *
- * @example
- * dataset.runInTransaction(function(transaction, done) {
- * // Delete a single entity.
- * transaction.delete(dataset.key(['Company', 123]));
- *
- * // Delete multiple entities at once.
- * transaction.delete([
- * dataset.key(['Company', 123]),
- * dataset.key(['Product', 'Computer'])
- * ]);
- *
- * done();
- * });
- */
-Transaction.prototype.delete = function(entities) {
- var that = this;
-
- arrify(entities).forEach(function(ent) {
- that.modifiedEntities_.push({
- entity: {
- key: ent
- },
- method: 'delete',
- args: [ent]
- });
- });
-};
-
-/**
- * Insert or update the specified object(s) in the current transaction. If a key
- * is incomplete, its associated object is inserted and the original Key object
- * is updated to contain the generated ID.
- *
- * This method will determine the correct Datastore method to execute (`upsert`,
- * `insert`, `update`, and `insertAutoId`) by using the key(s) provided. For
- * example, if you provide an incomplete key (one without an ID), the request
- * will create a new entity and have its ID automatically assigned. If you
- * provide a complete key, the entity will be updated with the data specified.
- *
- * By default, all properties are indexed. To prevent a property from being
- * included in *all* indexes, you must supply an entity's `data` property as an
- * array. See below for an example.
- *
- * @resource [Datasets: commit API Documentation]{@link https://cloud.google.com/datastore/docs/apis/v1beta2/datasets/commit}
- *
- * @param {object|object[]} entities - Datastore key object(s).
- * @param {Key} entities.key - Datastore key object.
- * @param {object|object[]} entities.data - Data to save with the provided key.
- * If you provide an array of objects, you must use the explicit syntax:
- * `name` for the name of the property and `value` for its value. You may
- * also specify an `excludeFromIndexes` property, set to `true` or `false`.
- *
- * @example
- * //-
- * // Save a single entity.
- * //
- * // Notice that we are providing an incomplete key. After the transaction is
- * // committed, the Key object held by the `key` variable will be populated
- * // with a path containing its generated ID.
- * //-
- * var key = dataset.key('Company');
- *
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.save({
- * key: key,
- * data: {
- * rating: '10'
- * }
- * });
- *
- * done();
- * });
- *
- * //-
- * // To specify an `excludeFromIndexes` value for a Datastore entity, pass in
- * // an array for the key's data. The above example would then look like:
- * //-
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.save({
- * key: key,
- * data: [
- * {
- * name: 'rating',
- * value: '10',
- * excludeFromIndexes: false
- * }
- * ]
- * });
- *
- * done();
- * });
- *
- * //-
- * // Save multiple entities at once.
- * //-
- * var companyKey = dataset.key(['Company', 123]);
- * var productKey = dataset.key(['Product', 'Computer']);
- *
- * dataset.runInTransaction(function(transaction, done) {
- * transaction.save([
- * {
- * key: companyKey,
- * data: {
- * HQ: 'Dallas, TX'
- * }
- * },
- * {
- * key: productKey,
- * data: {
- * vendor: 'Dell'
- * }
- * }
- * ]);
- *
- * done();
- * });
- */
-Transaction.prototype.save = function(entities) {
- var that = this;
-
- arrify(entities).forEach(function(ent) {
- that.modifiedEntities_.push({
- entity: {
- key: ent.key
- },
- method: 'save',
- args: [ent]
- });
- });
-};
-
module.exports = Transaction;
diff --git a/lib/index.js b/lib/index.js
index 1da8a80291d..6e70f1aec02 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -79,8 +79,7 @@ var apis = {
*
* @example
* var gcloud = require('gcloud');
- * var datastore = gcloud.datastore;
- * var dataset = datastore.dataset({
+ * var datastore = gcloud.datastore({
* projectId: 'grape-spaceship-123',
* keyFilename: '/path/to/keyfile.json'
* });
@@ -309,18 +308,6 @@ var apis = {
vision: require('./vision')
};
-/**
- * Scoped APIs are "boxed in" APIs. The "outer" class (e.g. Datastore) is a
- * container for sub-classes that can be given separate authentication and
- * instantiation options.
- *
- * @type {object}
- * @private
- */
-var scopedApis = {
- datastore: true
-};
-
/*! Developer Documentation
*
* Previously we used gcloud to expose an object filled with factory patterns,
@@ -457,11 +444,7 @@ function gcloud(config) {
return Object.keys(apis).reduce(function(gcloudExposedApi, apiName) {
var Class = apis[apiName];
- if (scopedApis[apiName]) {
- gcloudExposedApi[apiName] = new Class(config);
- } else {
- gcloudExposedApi[apiName] = Class;
- }
+ gcloudExposedApi[apiName] = Class;
return gcloudExposedApi;
}, gcloudExposedApi);
diff --git a/package.json b/package.json
index bf1894c9029..544aa0b924b 100644
--- a/package.json
+++ b/package.json
@@ -86,7 +86,6 @@
"array-uniq": "^1.0.2",
"arrify": "^1.0.0",
"async": "^1.4.2",
- "camelize": "^1.0.0",
"concat-stream": "^1.5.0",
"create-error-class": "^2.0.1",
"dns-zonefile": "0.1.10",
@@ -97,7 +96,7 @@
"gcs-resumable-upload": "^0.4.0",
"google-auto-auth": "^0.2.0",
"google-proto-files": "^0.1.1",
- "grpc": "^0.13.0",
+ "grpc": "^0.13.1",
"hash-stream-validation": "^0.1.0",
"is": "^3.0.1",
"methmeth": "^1.0.0",
@@ -106,13 +105,11 @@
"once": "^1.3.1",
"prop-assign": "^1.0.0",
"propprop": "^0.3.0",
- "protobufjs": "^5.0.1",
"pumpify": "^1.3.3",
"request": "^2.53.0",
"retry-request": "^1.2.3",
"rgb-hex": "^1.0.0",
"snake": "0.0.1",
- "snakeize": "^0.1.0",
"split-array-stream": "^1.0.0",
"stream-events": "^1.0.1",
"string-format-obj": "^1.0.0",
diff --git a/scripts/docs.js b/scripts/docs.js
index 696146631cb..9db9394e1ae 100644
--- a/scripts/docs.js
+++ b/scripts/docs.js
@@ -29,7 +29,6 @@ var OUTPUT_FOLDER = './docs/json/master';
var IGNORE = [
'./lib/common/*',
'./lib/datastore/entity.js',
- './lib/datastore/pb.js',
'./lib/datastore/request.js',
'./lib/pubsub/iam.js',
'./lib/storage/acl.js'
diff --git a/system-test/datastore.js b/system-test/datastore.js
index dd81aaf720c..d80dc5fbbeb 100644
--- a/system-test/datastore.js
+++ b/system-test/datastore.js
@@ -16,20 +16,52 @@
'use strict';
-var env = require('./env.js');
-
var assert = require('assert');
var async = require('async');
-var datastore = require('../lib/datastore');
-var ds = datastore.dataset(env);
+
+var env = require('./env.js');
+var Datastore = require('../lib/datastore/index.js');
var entity = require('../lib/datastore/entity.js');
-describe('datastore', function() {
+describe('Datastore', function() {
+ var testKinds = [];
+ var datastore = new Datastore(env);
+
+ // Override the Key method so we can track what keys are created during the
+ // tests. They are then deleted in the `after` hook.
+ var key = datastore.key;
+ datastore.key = function() {
+ var keyObject = key.apply(this, arguments);
+ testKinds.push(keyObject.kind);
+ return keyObject;
+ };
+
+ after(function(done) {
+ function deleteEntities(kind, callback) {
+ var query = datastore.createQuery(kind);
+
+ datastore.runQuery(query, function(err, entities) {
+ if (err) {
+ callback(err);
+ return;
+ }
+
+ var keys = entities.map(function(entity) {
+ return entity.key;
+ });
+
+ datastore.delete(keys, callback);
+ });
+ }
+
+ async.each(testKinds, deleteEntities, done);
+ });
+
it('should allocate IDs', function(done) {
- ds.allocateIds(ds.key('Kind'), 10, function(err, keys) {
+ datastore.allocateIds(datastore.key('Kind'), 10, function(err, keys) {
assert.ifError(err);
- assert.equal(keys.length, 10);
- assert.equal(entity.isKeyComplete(keys[0]), true);
+ assert.strictEqual(keys.length, 10);
+ assert.strictEqual(entity.isKeyComplete(keys[0]), true);
done();
});
});
@@ -42,109 +74,119 @@ describe('datastore', function() {
author: 'Silvano',
isDraft: false,
wordCount: 400,
- rating: 5.0
+ rating: 5.0,
+ likes: null
};
it('should save/get/delete with a key name', function(done) {
- var postKey = ds.key(['Post', 'post1']);
- ds.save({ key: postKey, data: post }, function(err) {
+ var postKey = datastore.key(['Post', 'post1']);
+
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should save/get/delete with a numeric key id', function(done) {
- var postKey = ds.key(['Post', 123456789]);
+ var postKey = datastore.key(['Post', 123456789]);
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should save/get/delete a buffer', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key(['Post']);
var data = {
buf: new Buffer('010100000000000000000059400000000000006940', 'hex')
};
- ds.save({ key: postKey, data: data }, function(err) {
+ datastore.save({ key: postKey, data: data }, function(err) {
assert.ifError(err);
var assignedId = postKey.id;
assert(assignedId);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, data);
- ds.delete(ds.key(['Post', assignedId]), done);
+ datastore.delete(datastore.key(['Post', assignedId]), done);
});
});
});
it('should save/get/delete with a generated key id', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
// The key's path should now be complete.
assert(postKey.id);
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
it('should fail explicitly set second insert on save', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, data: post }, function(err) {
+ datastore.save({ key: postKey, data: post }, function(err) {
assert.ifError(err);
// The key's path should now be complete.
assert(postKey.id);
- ds.save({ key: postKey, method: 'insert', data: post }, function(err) {
- assert.notEqual(err, null); // should fail insert
+ datastore.save({
+ key: postKey,
+ method: 'insert',
+ data: post
+ }, function(err) {
+ assert.notStrictEqual(err, null); // should fail insert
- ds.get(postKey, function(err, entity) {
+ datastore.get(postKey, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, post);
- ds.delete(postKey, done);
+ datastore.delete(postKey, done);
});
});
});
});
it('should fail explicitly set first update on save', function(done) {
- var postKey = ds.key('Post');
+ var postKey = datastore.key('Post');
- ds.save({ key: postKey, method: 'update', data: post }, function(err) {
- assert.notEqual(err, null);
+ datastore.save({
+ key: postKey,
+ method: 'update',
+ data: post
+ }, function(err) {
+ assert.notStrictEqual(err, null);
done();
});
});
@@ -159,30 +201,30 @@ describe('datastore', function() {
wordCount: 450,
rating: 4.5,
};
- var key1 = ds.key('Post');
- var key2 = ds.key('Post');
- ds.save([
+ var key1 = datastore.key('Post');
+ var key2 = datastore.key('Post');
+
+ datastore.save([
{ key: key1, data: post },
{ key: key2, data: post2 }
], function(err) {
assert.ifError(err);
- ds.get([key1, key2], function(err, entities) {
+ datastore.get([key1, key2], function(err, entities) {
assert.ifError(err);
+ assert.strictEqual(entities.length, 2);
- assert.equal(entities.length, 2);
-
- ds.delete([key1, key2], done);
+ datastore.delete([key1, key2], done);
});
});
});
it('should get multiple entities in a stream', function(done) {
- var key1 = ds.key('Post');
- var key2 = ds.key('Post');
+ var key1 = datastore.key('Post');
+ var key2 = datastore.key('Post');
- ds.save([
+ datastore.save([
{ key: key1, data: post },
{ key: key2, data: post }
], function(err) {
@@ -190,7 +232,7 @@ describe('datastore', function() {
var numEntitiesEmitted = 0;
- ds.get([key1, key2])
+ datastore.get([key1, key2])
.on('error', done)
.on('data', function() {
numEntitiesEmitted++;
@@ -198,123 +240,207 @@ describe('datastore', function() {
.on('end', function() {
assert.strictEqual(numEntitiesEmitted, 2);
- ds.delete([key1, key2], done);
+ datastore.delete([key1, key2], done);
});
});
});
- });
- it('should save keys as a part of entity and query by key', function(done) {
- var personKey = ds.key(['Person', 'name']);
- ds.save({
- key: personKey,
- data: {
- fullName: 'Full name',
- linkedTo: personKey // himself
- }
- }, function(err) {
- assert.ifError(err);
- var q = ds.createQuery('Person')
- .filter('linkedTo', '=', personKey);
- ds.runQuery(q, function(err, results) {
+ it('should save keys as a part of entity and query by key', function(done) {
+ var personKey = datastore.key(['Person', 'name']);
+
+ datastore.save({
+ key: personKey,
+ data: {
+ fullName: 'Full name',
+ linkedTo: personKey // himself
+ }
+ }, function(err) {
assert.ifError(err);
- assert.strictEqual(results[0].data.fullName, 'Full name');
- assert.deepEqual(results[0].data.linkedTo, personKey);
- ds.delete(personKey, done);
+
+ var query = datastore.createQuery('Person')
+ .filter('linkedTo', personKey);
+
+ datastore.runQuery(query, function(err, results) {
+ assert.ifError(err);
+
+ assert.strictEqual(results[0].data.fullName, 'Full name');
+ assert.deepEqual(results[0].data.linkedTo, personKey);
+
+ datastore.delete(personKey, done);
+ });
+ });
+ });
+
+ describe('entity types', function() {
+ it('should save and decode an int', function(done) {
+ var integerValue = 2015;
+ var integerType = Datastore.int(integerValue);
+
+ var key = datastore.key('Person');
+
+ datastore.save({
+ key: key,
+ data: {
+ year: integerType
+ }
+ }, function(err) {
+ assert.ifError(err);
+
+ datastore.get(key, function(err, entity) {
+ assert.ifError(err);
+ assert.strictEqual(entity.data.year, integerValue);
+ done();
+ });
+ });
+ });
+
+ it('should save and decode a double', function(done) {
+ var doubleValue = 99.99;
+ var doubleType = Datastore.double(doubleValue);
+
+ var key = datastore.key('Person');
+
+ datastore.save({
+ key: key,
+ data: {
+ nines: doubleType
+ }
+ }, function(err) {
+ assert.ifError(err);
+
+ datastore.get(key, function(err, entity) {
+ assert.ifError(err);
+ assert.strictEqual(entity.data.nines, doubleValue);
+ done();
+ });
+ });
+ });
+
+ it('should save and decode a geo point', function(done) {
+ var geoPointValue = {
+ latitude: 40.6894,
+ longitude: -74.0447
+ };
+ var geoPointType = Datastore.geoPoint(geoPointValue);
+
+ var key = datastore.key('Person');
+
+ datastore.save({
+ key: key,
+ data: {
+ location: geoPointType
+ }
+ }, function(err) {
+ assert.ifError(err);
+
+ datastore.get(key, function(err, entity) {
+ assert.ifError(err);
+ assert.deepEqual(entity.data.location, geoPointValue);
+ done();
+ });
+ });
});
});
});
describe('querying the datastore', function() {
- var ancestor = ds.key(['Book', 'GoT']);
+ var ancestor = datastore.key(['Book', 'GoT']);
var keys = [
- ds.key(['Book', 'GoT', 'Character', 'Rickard']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard']),
- ds.key(['Book', 'GoT', 'Character', 'Catelyn']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Arya']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Sansa']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Robb']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Bran']),
- ds.key(['Book', 'GoT', 'Character', 'Rickard', 'Character', 'Eddard',
- 'Character', 'Jon Snow'])
- ];
+ // Paths:
+ ['Rickard'],
+ ['Rickard', 'Character', 'Eddard'],
+ ['Catelyn'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Arya'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Sansa'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Robb'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Bran'],
+ ['Rickard', 'Character', 'Eddard', 'Character', 'Jon Snow']
+ ].map(function(path) {
+ return datastore.key(['Book', 'GoT', 'Character'].concat(path));
+ });
- var characters = [{
- name: 'Rickard',
- family: 'Stark',
- appearances: 0,
- alive: false
- }, {
- name: 'Eddard',
- family: 'Stark',
- appearances: 9,
- alive: false
- }, {
- name: 'Catelyn',
- family: ['Stark', 'Tully'],
- appearances: 26,
- alive: false
- }, {
- name: 'Arya',
- family: 'Stark',
- appearances: 33,
- alive: true
- }, {
- name: 'Sansa',
- family: 'Stark',
- appearances: 31,
- alive: true
- }, {
- name: 'Robb',
- family: 'Stark',
- appearances: 22,
- alive: false
- }, {
- name: 'Bran',
- family: 'Stark',
- appearances: 25,
- alive: true
- }, {
- name: 'Jon Snow',
- family: 'Stark',
- appearances: 32,
- alive: true
- }];
+ var characters = [
+ {
+ name: 'Rickard',
+ family: 'Stark',
+ appearances: 9,
+ alive: false
+ },
+ {
+ name: 'Eddard',
+ family: 'Stark',
+ appearances: 9,
+ alive: false
+ },
+ {
+ name: 'Catelyn',
+ family: ['Stark', 'Tully'],
+ appearances: 26,
+ alive: false
+ },
+ {
+ name: 'Arya',
+ family: 'Stark',
+ appearances: 33,
+ alive: true
+ },
+ {
+ name: 'Sansa',
+ family: 'Stark',
+ appearances: 31,
+ alive: true
+ },
+ {
+ name: 'Robb',
+ family: 'Stark',
+ appearances: 22,
+ alive: false
+ },
+ {
+ name: 'Bran',
+ family: 'Stark',
+ appearances: 25,
+ alive: true
+ },
+ {
+ name: 'Jon Snow',
+ family: 'Stark',
+ appearances: 32,
+ alive: true
+ }
+ ];
before(function(done) {
- ds.save(keys.map(function(key, index) {
+ var keysToSave = keys.map(function(key, index) {
return {
key: key,
data: characters[index]
};
- }), function(err) {
- assert.ifError(err);
- done();
});
+
+ datastore.save(keysToSave, done);
+ });
+
+ after(function(done) {
+ datastore.delete(keys, done);
});
it('should limit queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor).limit(5)
- .autoPaginate(false);
+ var firstQ = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .limit(5)
+ .autoPaginate(false);
- ds.runQuery(q, function(err, firstEntities, secondQuery) {
+ datastore.runQuery(firstQ, function(err, firstEntities, secondQ) {
assert.ifError(err);
- assert.equal(firstEntities.length, 5);
+ assert.strictEqual(firstEntities.length, 5);
- ds.runQuery(secondQuery, function(err, secondEntities, thirdQuery) {
+ datastore.runQuery(secondQ, function(err, secondEntities, thirdQ) {
assert.ifError(err);
- assert.equal(secondEntities.length, 3);
-
- ds.runQuery(thirdQuery, function(err, thirdEntities) {
- assert.ifError(err);
- assert.equal(thirdEntities.length, 0);
- done();
- });
+ assert.strictEqual(secondEntities.length, 3);
+ assert.strictEqual(thirdQ, null);
+ done();
});
});
});
@@ -322,176 +448,192 @@ describe('datastore', function() {
it('should not go over a limit', function(done) {
var limit = 3;
- var q = ds.createQuery('Character')
- .hasAncestor(ancestor)
- .limit(limit);
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor).limit(limit);
- ds.runQuery(q, function(err, results) {
+ datastore.runQuery(q, function(err, results) {
assert.ifError(err);
- assert.equal(results.length, limit);
+ assert.strictEqual(results.length, limit);
done();
});
});
it('should run a query as a stream', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor);
+ var q = datastore.createQuery('Character').hasAncestor(ancestor);
var resultsReturned = 0;
- ds.runQuery(q)
+ datastore.runQuery(q)
.on('error', done)
.on('data', function() { resultsReturned++; })
.on('end', function() {
- assert.equal(resultsReturned, characters.length);
+ assert.strictEqual(resultsReturned, characters.length);
done();
});
});
it('should not go over a limit with a stream', function(done) {
var limit = 3;
- var q = ds.createQuery('Character').hasAncestor(ancestor).limit(limit);
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .limit(limit);
var resultsReturned = 0;
- ds.runQuery(q)
+ datastore.runQuery(q)
.on('error', done)
.on('data', function() { resultsReturned++; })
.on('end', function() {
- assert.equal(resultsReturned, limit);
+ assert.strictEqual(resultsReturned, limit);
done();
});
});
it('should filter queries with simple indexes', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('appearances', '>=', 20);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('appearances', '>=', 20);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 6);
+ assert.strictEqual(entities.length, 6);
done();
});
});
it('should filter queries with defined indexes', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('family', '=', 'Stark')
- .filter('appearances', '>=', 20);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('family', 'Stark')
+ .filter('appearances', '>=', 20);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 6);
+ assert.strictEqual(entities.length, 6);
done();
});
});
it('should filter by ancestor', function(done) {
- var q = ds.createQuery('Character')
- .hasAncestor(ancestor);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character').hasAncestor(ancestor);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 8);
+ assert.strictEqual(entities.length, characters.length);
done();
});
});
it('should filter by key', function(done) {
- var key = ds.key(['Book', 'GoT', 'Character', 'Rickard']);
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .filter('__key__', '=', key);
- ds.runQuery(q, function(err, entities) {
+ var key = datastore.key(['Book', 'GoT', 'Character', 'Rickard']);
+
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('__key__', key);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities.length, 1);
+ assert.strictEqual(entities.length, 1);
done();
});
});
it('should order queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .order('appearances');
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .order('appearances');
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
- assert.equal(entities[0].data.name, characters[0].name);
- assert.equal(entities[7].data.name, characters[3].name);
+
+ assert.strictEqual(entities[0].data.name, characters[0].name);
+ assert.strictEqual(entities[7].data.name, characters[3].name);
+
done();
});
});
it('should select projections', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .select(['name', 'family']);
- ds.runQuery(q, function(err, entities) {
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .select(['name', 'family']);
+
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
+
assert.deepEqual(entities[0].data, {
name: 'Arya',
family: 'Stark'
});
+
assert.deepEqual(entities[8].data, {
name: 'Sansa',
family: 'Stark'
});
+
done();
});
});
it('should paginate with offset and limit', function(done) {
- var q = ds.createQuery('Character')
+ var q = datastore.createQuery('Character')
.hasAncestor(ancestor)
.offset(2)
.limit(3)
.order('appearances')
.autoPaginate(false);
- ds.runQuery(q, function(err, entities, secondQuery) {
+ datastore.runQuery(q, function(err, entities, secondQuery) {
assert.ifError(err);
- assert.equal(entities.length, 3);
- assert.equal(entities[0].data.name, 'Robb');
- assert.equal(entities[2].data.name, 'Catelyn');
+ assert.strictEqual(entities.length, 3);
+ assert.strictEqual(entities[0].data.name, 'Robb');
+ assert.strictEqual(entities[2].data.name, 'Catelyn');
- ds.runQuery(secondQuery.offset(0), function(err, secondEntities) {
+ var offsetQuery = secondQuery.offset(0);
+ datastore.runQuery(offsetQuery, function(err, secondEntities) {
assert.ifError(err);
- assert.equal(secondEntities.length, 3);
- assert.equal(secondEntities[0].data.name, 'Sansa');
- assert.equal(secondEntities[2].data.name, 'Arya');
+ assert.strictEqual(secondEntities.length, 3);
+ assert.strictEqual(secondEntities[0].data.name, 'Sansa');
+ assert.strictEqual(secondEntities[2].data.name, 'Arya');
+
done();
});
});
});
it('should resume from a start cursor', function(done) {
- var q = ds.createQuery('Character')
+ var q = datastore.createQuery('Character')
.hasAncestor(ancestor)
.offset(2)
.limit(2)
.order('appearances')
.autoPaginate(false);
- ds.runQuery(q, function(err, entities, nextQuery) {
+ datastore.runQuery(q, function(err, entities, nextQuery) {
assert.ifError(err);
- ds.runQuery(nextQuery.limit(-1), function(err, secondEntities) {
+ datastore.runQuery(nextQuery.limit(-1), function(err, secondEntities) {
assert.ifError(err);
- assert.equal(secondEntities.length, 4);
- assert.equal(secondEntities[0].data.name, 'Catelyn');
- assert.equal(secondEntities[3].data.name, 'Arya');
+
+ assert.strictEqual(secondEntities.length, 4);
+ assert.strictEqual(secondEntities[0].data.name, 'Catelyn');
+ assert.strictEqual(secondEntities[3].data.name, 'Arya');
+
done();
});
});
});
it('should group queries', function(done) {
- var q = ds.createQuery('Character').hasAncestor(ancestor)
- .groupBy('alive');
- ds.runQuery(q, function(err, entities) {
- assert.ifError(err);
- assert.equal(entities.length, 2);
- done();
- });
- });
+ var q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .groupBy('appearances');
- after(function(done) {
- ds.delete(keys, function(err) {
+ datastore.runQuery(q, function(err, entities) {
assert.ifError(err);
+ assert.strictEqual(entities.length, characters.length - 1);
done();
});
});
@@ -499,12 +641,12 @@ describe('datastore', function() {
describe('transactions', function() {
it('should run in a transaction', function(done) {
- var key = ds.key(['Company', 'Google']);
+ var key = datastore.key(['Company', 'Google']);
var obj = {
url: 'www.google.com'
};
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.get(key, function(err) {
assert.ifError(err);
@@ -514,22 +656,22 @@ describe('datastore', function() {
}, function(err) {
assert.ifError(err);
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
assert.deepEqual(entity.data, obj);
- ds.delete(key, done);
+ datastore.delete(key, done);
});
});
});
it('should commit all saves and deletes at the end', function(done) {
- var deleteKey = ds.key(['Company', 'Subway']);
- var key = ds.key(['Company', 'Google']);
- var incompleteKey = ds.key('Company');
+ var deleteKey = datastore.key(['Company', 'Subway']);
+ var key = datastore.key(['Company', 'Google']);
+ var incompleteKey = datastore.key('Company');
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.delete(deleteKey);
t.save([
@@ -548,43 +690,50 @@ describe('datastore', function() {
assert.ifError(err);
// Incomplete key should have been given an ID.
- assert.equal(incompleteKey.path.length, 2);
+ assert.strictEqual(incompleteKey.path.length, 2);
async.parallel([
// The key queued for deletion should have been deleted.
function(done) {
- ds.get(deleteKey, function(err, entity) {
+ datastore.get(deleteKey, function(err, entity) {
assert.ifError(err);
- assert.equal(typeof entity, 'undefined');
+ assert.strictEqual(typeof entity, 'undefined');
done();
});
},
// Data should have been updated on the key.
function(done) {
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
- assert.equal(entity.data.rating, 10);
+ assert.strictEqual(entity.data.rating, 10);
done();
});
}
- ], done);
+ ], function(err) {
+ assert.ifError(err);
+ datastore.delete([key, incompleteKey], done);
+ });
});
});
it('should use the last modification to a key', function(done) {
- var incompleteKey = ds.key('Company');
- var key = ds.key(['Company', 'Google']);
+ var incompleteKey = datastore.key('Company');
+ var key = datastore.key(['Company', 'Google']);
- ds.runInTransaction(function(t, tDone) {
+ datastore.runInTransaction(function(t, tDone) {
t.save([
{
key: key,
- data: { rating: 10 }
+ data: {
+ rating: 10
+ }
},
{
key: incompleteKey,
- data: { rating: 100 }
+ data: {
+ rating: 100
+ }
}
]);
@@ -595,12 +744,12 @@ describe('datastore', function() {
assert.ifError(err);
// Should not return a result.
- ds.get(key, function(err, entity) {
+ datastore.get(key, function(err, entity) {
assert.ifError(err);
assert.strictEqual(entity, undefined);
// Incomplete key should have been given an id.
- assert.equal(incompleteKey.path.length, 2);
+ assert.strictEqual(incompleteKey.path.length, 2);
done();
});
});
diff --git a/system-test/pubsub.js b/system-test/pubsub.js
index 3bbd3d30250..cf7412e690a 100644
--- a/system-test/pubsub.js
+++ b/system-test/pubsub.js
@@ -360,11 +360,10 @@ describe('pubsub', function() {
topic.iam.getPolicy(function(err, policy) {
assert.ifError(err);
- assert.deepEqual(policy, {
- bindings: [],
- etag: 'ACAB',
- version: 0
- });
+ assert.deepEqual(policy.bindings, []);
+ assert.strictEqual(policy.etag.toString(), '\u0000 \u0001');
+ assert.strictEqual(policy.version, 0);
+
done();
});
});
diff --git a/test/bigquery/table.js b/test/bigquery/table.js
index 4d2794713f8..67ee325a8f9 100644
--- a/test/bigquery/table.js
+++ b/test/bigquery/table.js
@@ -772,7 +772,7 @@ describe('BigQuery/Table', function() {
});
describe('import', function() {
- var FILEPATH = require.resolve('../testdata/response_get.json');
+ var FILEPATH = require.resolve('../testdata/testfile.json');
var FILE = new FakeFile({
name: 'bucket-name',
makeReq_: util.noop
diff --git a/test/common/grpc-service.js b/test/common/grpc-service.js
index a2a5942f0a5..01ee4ab068a 100644
--- a/test/common/grpc-service.js
+++ b/test/common/grpc-service.js
@@ -161,12 +161,18 @@ describe('GrpcService', function() {
});
it('should call grpc.load correctly', function() {
- grpcLoadOverride = function(opts) {
+ grpcLoadOverride = function(opts, format, grpcOpts) {
assert.strictEqual(opts.root, ROOT_DIR);
var expectedFilePath = path.relative(ROOT_DIR, PROTO_FILE_PATH);
assert.strictEqual(opts.file, expectedFilePath);
+ assert.strictEqual(format, 'proto');
+ assert.deepEqual(grpcOpts, {
+ binaryAsBase64: true,
+ convertFieldsToCamelCase: true
+ });
+
return MOCK_GRPC_API;
};
@@ -210,7 +216,7 @@ describe('GrpcService', function() {
describe('request', function() {
var PROTO_OPTS = { service: 'service', method: 'method', timeout: 3000 };
- var REQ_OPTS = { camelOption: true };
+ var REQ_OPTS = {};
var GRPC_CREDENTIALS = {};
function ProtoService() {}
@@ -522,62 +528,15 @@ describe('GrpcService', function() {
});
it('should execute callback with response', function(done) {
- var expectedResponse = {};
-
- GrpcService.convertBuffers_ = function(response) {
- assert.strictEqual(response.snake_property, undefined);
- assert.strictEqual(response.snakeProperty, RESPONSE.snake_property);
- return expectedResponse;
- };
-
grpcService.request(PROTO_OPTS, REQ_OPTS, function(err, resp) {
assert.ifError(err);
- assert.strictEqual(resp, expectedResponse);
+ assert.strictEqual(resp, RESPONSE);
done();
});
});
});
});
- describe('convertBuffers_', function() {
- var DATA_OBJECT = { prop: {} };
- var DATA = [DATA_OBJECT];
-
- it('should check if data is buffer-like', function(done) {
- GrpcService.isBufferLike_ = function(data) {
- assert.strictEqual(data, DATA_OBJECT.prop);
- done();
- };
-
- GrpcService.convertBuffers_(DATA);
- });
-
- it('should convert buffer-like data into base64 strings', function() {
- var buffer = new Buffer([1, 2, 3]);
- var expectedString = buffer.toString('base64');
-
- GrpcService.isBufferLike_ = function() {
- return true;
- };
-
- GrpcService.objToArr_ = function(data) {
- assert.strictEqual(data, DATA_OBJECT.prop);
- return buffer;
- };
-
- var convertedData = GrpcService.convertBuffers_(DATA);
- assert.strictEqual(convertedData[0].prop, expectedString);
- });
-
- it('should convert buffers into base64 strings', function() {
- var buffer = new Buffer([1, 2, 3]);
- var expectedString = buffer.toString('base64');
-
- var convertedData = GrpcService.convertBuffers_([{ prop: buffer }]);
- assert.strictEqual(convertedData[0].prop, expectedString);
- });
- });
-
describe('convertValue_', function() {
it('should convert primitive values correctly', function() {
var convertedValues = extend(
diff --git a/test/datastore/dataset.js b/test/datastore/dataset.js
deleted file mode 100644
index e2594dde52e..00000000000
--- a/test/datastore/dataset.js
+++ /dev/null
@@ -1,400 +0,0 @@
-/**
- * Copyright 2014 Google Inc. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-'use strict';
-
-var assert = require('assert');
-var extend = require('extend');
-var mockery = require('mockery-next');
-var util = require('../../lib/common/util.js');
-
-var normalizeArgumentsCache = util.normalizeArguments;
-var normalizeArgumentsOverride;
-util.normalizeArguments = function() {
- return (normalizeArgumentsOverride || normalizeArgumentsCache)
- .apply(this, arguments);
-};
-
-var makeAuthenticatedRequestFactoryCache = util.makeAuthenticatedRequestFactory;
-var makeAuthenticatedRequestFactoryOverride;
-util.makeAuthenticatedRequestFactory = function() {
- if (makeAuthenticatedRequestFactoryOverride) {
- return makeAuthenticatedRequestFactoryOverride.apply(this, arguments);
- }
-
- return makeAuthenticatedRequestFactoryCache.apply(this, arguments);
-};
-
-function FakeTransaction() {
- this.calledWith_ = arguments;
-}
-
-describe('Dataset', function() {
- var Dataset;
- var dataset;
-
- var OPTIONS = {
- projectId: 'project-id',
- apiEndpoint: 'endpoint',
- credentials: {},
- keyFilename: 'key/file',
- email: 'email',
- namespace: 'namespace'
- };
-
- before(function() {
- mockery.registerMock('../../lib/common/util.js', util);
- mockery.registerMock('../../lib/datastore/transaction.js', FakeTransaction);
-
- mockery.enable({
- useCleanCache: true,
- warnOnUnregistered: false
- });
-
- Dataset = require('../../lib/datastore/dataset');
- });
-
- after(function() {
- mockery.deregisterAll();
- mockery.disable();
- });
-
- beforeEach(function() {
- delete process.env.DATASTORE_DATASET;
- makeAuthenticatedRequestFactoryOverride = null;
- normalizeArgumentsOverride = null;
- dataset = new Dataset(OPTIONS);
- });
-
- describe('instantiation', function() {
- var defaultProjectId;
-
- before(function() {
- defaultProjectId = process.env.GCLOUD_PROJECT;
- delete process.env.GCLOUD_PROJECT;
- });
-
- after(function() {
- process.env.GCLOUD_PROJECT = defaultProjectId;
- });
-
- it('should localize the dataset id', function() {
- assert.strictEqual(dataset.datasetId, OPTIONS.projectId);
- });
-
- it('should detect the dataset ID', function() {
- var datasetId = 'dataset-id';
- process.env.DATASTORE_DATASET = datasetId;
-
- var ds = new Dataset();
- assert.strictEqual(ds.datasetId, datasetId);
-
- delete process.env.DATASTORE_DATASET;
- });
-
- it('should throw if a datasetId can not be found', function() {
- assert.throws(function() {
- new Dataset();
- }, 'A project or dataset ID is required to use a Dataset.');
- });
-
- it('should set default API connection details', function(done) {
- var determineApiEndpoint_ = Dataset.prototype.determineApiEndpoint_;
-
- Dataset.prototype.determineApiEndpoint_ = function(customApiEndpoint) {
- Dataset.prototype.determineApiEndpoint_ = determineApiEndpoint_;
-
- assert.strictEqual(customApiEndpoint, OPTIONS.apiEndpoint);
- done();
- };
-
- new Dataset(OPTIONS);
- });
-
- it('should localize the namespace', function() {
- assert.strictEqual(dataset.namespace, OPTIONS.namespace);
- });
-
- it('should normalize the arguments', function() {
- var normalizeArgumentsCalled = false;
- var fakeOptions = { projectId: 'project-id' };
- var fakeContext = {};
-
- normalizeArgumentsOverride = function(context, options) {
- normalizeArgumentsCalled = true;
- assert.strictEqual(context, fakeContext);
- assert.strictEqual(options, fakeOptions);
- return options;
- };
-
- Dataset.call(fakeContext, fakeOptions);
- assert(normalizeArgumentsCalled);
- });
-
- it('should create an authenticated request factory', function() {
- var authenticatedRequest = {};
- var customEndpoint = 'custom-endpoint';
-
- var determineApiEndpoint_ = Dataset.prototype.determineApiEndpoint_;
- Dataset.prototype.determineApiEndpoint_ = function() {
- Dataset.prototype.determineApiEndpoint_ = determineApiEndpoint_;
- this.customEndpoint = customEndpoint;
- };
-
- makeAuthenticatedRequestFactoryOverride = function(config) {
- var expectedConfig = extend({}, OPTIONS, {
- customEndpoint: customEndpoint,
- scopes: [
- 'https://www.googleapis.com/auth/datastore',
- 'https://www.googleapis.com/auth/userinfo.email'
- ]
- });
-
- assert.deepEqual(config, expectedConfig);
-
- return authenticatedRequest;
- };
-
- var ds = new Dataset(OPTIONS);
- assert.strictEqual(ds.makeAuthenticatedRequest_, authenticatedRequest);
- });
- });
-
- describe('key', function() {
- it('should return key scoped by default namespace', function() {
- var ds = new Dataset({ projectId: 'test', namespace: 'my-ns' });
- var key = ds.key(['Company', 1]);
- assert.equal(key.namespace, 'my-ns');
- assert.deepEqual(key.path, ['Company', 1]);
- });
-
- it('should allow namespace specification', function() {
- var ds = new Dataset({ projectId: 'test', namespace: 'my-ns' });
- var key = ds.key({
- namespace: 'custom-ns',
- path: ['Company', 1]
- });
- assert.equal(key.namespace, 'custom-ns');
- assert.deepEqual(key.path, ['Company', 1]);
- });
-
- it('should create incomplete key from string', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key('hello');
- assert.deepEqual(key.path, ['hello', undefined]);
- });
-
- it('should create incomplete key from array in obj', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key({
- path: ['world']
- });
- assert.deepEqual(key.path, ['world', undefined]);
- });
-
- it('should create incomplete key from array', function() {
- var ds = new Dataset({ projectId: 'test' });
- var key = ds.key(['Company']);
- assert.deepEqual(key.path, ['Company', undefined]);
- });
- });
-
- describe('runInTransaction', function() {
- var ds;
-
- beforeEach(function() {
- ds = new Dataset({ projectId: 'test' });
- });
-
- it('should begin transaction', function(done) {
- ds.createTransaction_ = function() {
- return {
- begin_: function() {
- done();
- }
- };
- };
- ds.runInTransaction();
- });
-
- it('should execute callback with error if one occurred', function(done) {
- var error = new Error('Error.');
- var apiResponse = {};
-
- ds.createTransaction_ = function() {
- return {
- begin_: function(callback) {
- callback(error, apiResponse);
- }
- };
- };
-
- ds.runInTransaction(util.noop, function(err, apiResponse_) {
- assert.strictEqual(err, error);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- });
- });
-
- it('should return transaction object to the callback', function(done) {
- var transaction = {
- begin_: function(callback) {
- callback();
- },
- commit_: util.noop
- };
- ds.createTransaction_ = function() {
- return transaction;
- };
- ds.runInTransaction(function(t) {
- assert.deepEqual(t, transaction);
- done();
- }, assert.ifError);
- });
-
- it('should return correct done function to the callback', function(done) {
- ds.createTransaction_ = function() {
- return {
- begin_: function(callback) {
- callback();
- },
- commit_: function() {
- done();
- }
- };
- };
- ds.runInTransaction(function(t, tDone) {
- tDone();
- }, assert.ifError);
- });
- });
-
- describe('createQuery', function() {
- var ds;
- var dsWithNs;
-
- beforeEach(function() {
- ds = new Dataset({ projectId: 'test' });
- dsWithNs = new Dataset({
- projectId: 'test',
- namespace: 'my-ns'
- });
- });
-
- it('should not include a namespace on a ns-less dataset', function() {
- var query = ds.createQuery('Kind');
- assert.equal(query.namespace, undefined);
- });
-
- it('should scope query to namespace', function() {
- var query = dsWithNs.createQuery('Kind');
- assert.equal(query.namespace, 'my-ns');
- });
-
- it('should allow control over namespace and kinds', function() {
- var queryFromDs = ds.createQuery('my-ns', 'Kind');
- assert.equal(queryFromDs.namespace, 'my-ns');
-
- var queryFromDsWithNs = dsWithNs.createQuery('Kind');
- assert.equal(queryFromDsWithNs.namespace, 'my-ns');
- });
-
- it('should allow removal of namespace', function() {
- var query = dsWithNs.createQuery(null, 'Kind');
- assert.strictEqual(query.namespace, null);
- });
- });
-
- describe('determineApiEndpoint_', function() {
- it('should default to googleapis.com', function() {
- delete process.env.DATASTORE_HOST;
-
- dataset.determineApiEndpoint_();
-
- var expectedApiEndpoint = 'https://www.googleapis.com';
- assert.strictEqual(dataset.apiEndpoint, expectedApiEndpoint);
- });
-
- it('should remove slashes from the apiEndpoint', function() {
- var expectedApiEndpoint = 'http://localhost:8080';
-
- dataset.determineApiEndpoint_(expectedApiEndpoint);
- assert.strictEqual(dataset.apiEndpoint, expectedApiEndpoint);
-
- dataset.determineApiEndpoint_('http://localhost:8080/');
- assert.strictEqual(dataset.apiEndpoint, expectedApiEndpoint);
-
- dataset.determineApiEndpoint_('http://localhost:8080//');
- assert.strictEqual(dataset.apiEndpoint, expectedApiEndpoint);
- });
-
- it('should default to http if protocol is unspecified', function() {
- dataset.determineApiEndpoint_('localhost:8080');
- assert.strictEqual(dataset.apiEndpoint, 'http://localhost:8080');
- });
-
- it('should set customEndpoint when using explicit endpoint', function() {
- dataset.determineApiEndpoint_('http://localhost:8080');
- assert.strictEqual(dataset.customEndpoint, true);
- });
-
- it('should not set customEndpoint when using default endpoint', function() {
- var options = extend({}, OPTIONS);
- delete options.apiEndpoint;
-
- var dataset = new Dataset(options);
- dataset.determineApiEndpoint_();
- assert.strictEqual(dataset.customEndpoint, undefined);
- });
-
- describe('with DATASTORE_HOST environment variable', function() {
- var DATASTORE_HOST = 'http://localhost:9090';
-
- before(function() {
- process.env.DATASTORE_HOST = DATASTORE_HOST;
- });
-
- after(function() {
- delete process.env.DATASTORE_HOST;
- });
-
- it('should use the DATASTORE_HOST env var', function() {
- dataset.determineApiEndpoint_();
- assert.strictEqual(dataset.apiEndpoint, DATASTORE_HOST);
- });
-
- it('should favor an explicit apiEndpoint option', function() {
- var explicitApiEndpoint = 'http://apiendpointoverride';
- dataset.determineApiEndpoint_(explicitApiEndpoint);
- assert.strictEqual(dataset.apiEndpoint, explicitApiEndpoint);
- });
-
- it('should set customEndpoint', function() {
- dataset.determineApiEndpoint_();
- assert.strictEqual(dataset.customEndpoint, true);
- });
- });
- });
-
- describe('createTransaction_', function() {
- it('should create and return a Transaction', function() {
- var transaction = dataset.createTransaction_();
- assert(transaction instanceof FakeTransaction);
- assert.strictEqual(transaction.calledWith_[0], dataset);
- assert.strictEqual(transaction.calledWith_[1], dataset.datasetId);
- });
- });
-});
diff --git a/test/datastore/entity.js b/test/datastore/entity.js
index 5311cb3538d..05ebaf57c2a 100644
--- a/test/datastore/entity.js
+++ b/test/datastore/entity.js
@@ -17,467 +17,831 @@
'use strict';
var assert = require('assert');
-var entity = require('../../lib/datastore/entity.js');
-var datastore = require('../../lib/datastore');
-var ByteBuffer = require('bytebuffer');
-
-var entityProto = {
- property: [{
- name: 'linkedTo',
- value: {
- key_value: {
- path_element: [{
- kind: 'Kind',
- name: '123'
- }]
- }
- }
- }, {
- name: 'name',
- value: {
- string_value: 'Some name'
- }
- }, {
- name: 'flagged',
- value: {
- boolean_value: false
- }
- }, {
- name: 'count',
- value: {
- integer_value: 5
- }
- }, {
- name: 'total',
- value: {
- double_value: 7.8
- }
- }, {
- name: 'author',
- value: {
- entity_value: {
- property: [{
- name: 'name',
- value: {
- string_value: 'Burcu Dogan'
- }
- }]
- },
- indexed: false
- }
- }, {
- name: 'list',
- value: {
- list_value: [{
- integer_value: 6
- }, {
- boolean_value: false
- }]
- }
- }]
+var deepStrictEqual = require('deep-strict-equal');
+assert.deepStrictEqual = assert.deepStrictEqual || function() {
+ return assert(deepStrictEqual.apply(this, arguments));
};
-var queryFilterProto = {
- projection: [],
- kind: [{
- name: 'Kind1'
- }],
- filter: {
- composite_filter: {
- filter: [
- {
- property_filter: {
- property: { name: 'name' },
- operator: 'EQUAL',
- value: { string_value: 'John' }
- }
- },
- {
- property_filter: {
- property: { name: '__key__' },
- operator: 'HAS_ANCESTOR',
- value: {
- key_value: {
- path_element: [{ kind: 'Kind2', name: 'somename' }]
- }
- }
- }
- }
- ],
- operator: 'AND'
- }
- },
- end_cursor: new Buffer('end', 'base64'),
- order: [],
- group_by: []
-};
+var Datastore = require('../../lib/datastore/index.js');
-describe('Key', function() {
- it('should assign the namespace', function() {
- var namespace = 'NS';
- var key = new entity.Key({ namespace: namespace, path: [] });
- assert.strictEqual(key.namespace, namespace);
+describe('entity', function() {
+ var entity;
+
+ beforeEach(function() {
+ delete require.cache[require.resolve('../../lib/datastore/entity.js')];
+ entity = require('../../lib/datastore/entity.js');
});
- it('should assign the kind', function() {
- var kind = 'kind';
- var key = new entity.Key({ path: [kind] });
- assert.strictEqual(key.kind, kind);
+ describe('Double', function() {
+ it('should store the value', function() {
+ var value = 8.3;
+
+ var double = new entity.Double(value);
+ assert.strictEqual(double.value, value);
+ });
});
- it('should assign the ID', function() {
- var id = 11;
- var key = new entity.Key({ path: ['Kind', id] });
- assert.strictEqual(key.id, id);
+ describe('Double', function() {
+ it('should store the value', function() {
+ var value = 8.3;
+
+ var double = new entity.Double(value);
+ assert.strictEqual(double.value, value);
+ });
});
- it('should assign the name', function() {
- var name = 'name';
- var key = new entity.Key({ path: ['Kind', name] });
- assert.strictEqual(key.name, name);
+ describe('Int', function() {
+ it('should store the value', function() {
+ var value = 8;
+
+ var int = new entity.Int(value);
+ assert.strictEqual(int.value, value);
+ });
});
- it('should assign a parent', function() {
- var key = new entity.Key({ path: ['ParentKind', 1, 'Kind', 1] });
- assert(key.parent instanceof entity.Key);
+ describe('GeoPoint', function() {
+ it('should store the value', function() {
+ var value = {
+ latitude: 24,
+ longitude: 88
+ };
+
+ var geoPoint = new entity.GeoPoint(value);
+ assert.strictEqual(geoPoint.value, value);
+ });
});
- it('should always compute the correct path', function() {
- var key = new entity.Key({ path: ['ParentKind', 1, 'Kind', 1] });
- assert.deepEqual(key.path, ['ParentKind', 1, 'Kind', 1]);
+ describe('Key', function() {
+ it('should assign the namespace', function() {
+ var namespace = 'NS';
+ var key = new entity.Key({ namespace: namespace, path: [] });
+ assert.strictEqual(key.namespace, namespace);
+ });
- key.parent.kind = 'GrandParentKind';
- key.kind = 'ParentKind';
+ it('should assign the kind', function() {
+ var kind = 'kind';
+ var key = new entity.Key({ path: [kind] });
+ assert.strictEqual(key.kind, kind);
+ });
- assert.deepEqual(key.path, ['GrandParentKind', 1, 'ParentKind', 1]);
- });
-});
+ it('should assign the ID', function() {
+ var id = 11;
+ var key = new entity.Key({ path: ['Kind', id] });
+ assert.strictEqual(key.id, id);
+ });
-describe('keyFromKeyProto', function() {
- var proto = {
- partition_id: { namespace: '', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', name: 'Name' }]
- };
-
- var protoH = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', id: '111' }, { kind: 'Kind2', name: 'name' }]
- };
-
- var protoIncomplete = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind', id: '111' }, { kind: 'Kind2' }]
- };
-
- var protoInvalid = {
- partition_id: { namespace: 'Test', dataset_id: 'datasetId' },
- path_element: [{ kind: 'Kind' }, { kind: 'Kind2' }]
- };
-
- it('should handle keys hierarchically', function() {
- var key = entity.keyFromKeyProto(protoH);
- assert.deepEqual(key, new entity.Key({
- namespace: 'Test',
- path: [ 'Kind', 111, 'Kind2', 'name' ]
- }));
- });
+ it('should assign the name', function() {
+ var name = 'name';
+ var key = new entity.Key({ path: ['Kind', name] });
+ assert.strictEqual(key.name, name);
+ });
- it('should not set namespace if default', function() {
- var key = entity.keyFromKeyProto(proto);
- assert.deepEqual(key, new entity.Key({ path: [ 'Kind', 'Name' ] }));
- });
+ it('should assign a parent', function() {
+ var key = new entity.Key({ path: ['ParentKind', 1, 'Kind', 1] });
+ assert(key.parent instanceof entity.Key);
+ });
- it('should not inject null into path if no id set', function() {
- var key = entity.keyFromKeyProto(protoIncomplete);
- assert.deepEqual(key, new entity.Key({
- namespace: 'Test',
- path: [ 'Kind', 111, 'Kind2' ]
- }));
- });
+ it('should always compute the correct path', function() {
+ var key = new entity.Key({ path: ['ParentKind', 1, 'Kind', 1] });
+ assert.deepEqual(key.path, ['ParentKind', 1, 'Kind', 1]);
+
+ key.parent.kind = 'GrandParentKind';
+ key.kind = 'ParentKind';
- it('should throw if path is invalid', function() {
- assert.throws(function() {
- entity.keyFromKeyProto(protoInvalid);
- }, /Invalid key. Ancestor keys require an id or name./);
+ assert.deepEqual(key.path, ['GrandParentKind', 1, 'ParentKind', 1]);
+ });
});
-});
-describe('keyToKeyProto', function() {
- it('should handle hierarchical key definitions', function() {
- var key = new entity.Key({ path: [ 'Kind1', 1, 'Kind2', 'name' ] });
- var proto = entity.keyToKeyProto(key);
- assert.strictEqual(proto.partition_id, undefined);
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, 1);
- assert.strictEqual(proto.path_element[0].name, undefined);
- assert.strictEqual(proto.path_element[1].kind, 'Kind2');
- assert.strictEqual(proto.path_element[1].id, undefined);
- assert.strictEqual(proto.path_element[1].name, 'name');
+ describe('decodeValueProto', function() {
+ it('should decode arrays', function() {
+ var expectedValue = [{}];
+
+ var valueProto = {
+ value_type: 'arrayValue',
+ arrayValue: {
+ values: expectedValue
+ }
+ };
+
+ var run = false;
+
+ var decodeValueProto = entity.decodeValueProto;
+ entity.decodeValueProto = function(valueProto) {
+ if (!run) {
+ run = true;
+ return decodeValueProto.apply(null, arguments);
+ }
+
+ assert.strictEqual(valueProto, expectedValue[0]);
+ return valueProto;
+ };
+
+ assert.deepEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode blobs', function() {
+ var expectedValue = new Buffer('Hi');
+
+ var valueProto = {
+ value_type: 'blobValue',
+ blobValue: expectedValue.toString('base64')
+ };
+
+ assert.deepEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode null', function() {
+ var expectedValue = null;
+
+ var valueProto = {
+ value_type: 'nullValue',
+ nullValue: 0
+ };
+
+ var decodedValue = entity.decodeValueProto(valueProto);
+ assert.deepStrictEqual(decodedValue, expectedValue);
+ });
+
+ it('should decode doubles', function() {
+ var expectedValue = 8.3;
+
+ var valueProto = {
+ value_type: 'doubleValue',
+ doubleValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode ints', function() {
+ var expectedValue = 8;
+
+ var valueProto = {
+ value_type: 'integerValue',
+ integerValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode entities', function() {
+ var expectedValue = {};
+
+ var valueProto = {
+ value_type: 'entityValue',
+ entityValue: expectedValue
+ };
+
+ entity.entityFromEntityProto = function(entityProto) {
+ assert.strictEqual(entityProto, expectedValue);
+ return expectedValue;
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode keys', function() {
+ var expectedValue = {};
+
+ var valueProto = {
+ value_type: 'keyValue',
+ keyValue: expectedValue
+ };
+
+ entity.keyFromKeyProto = function(keyProto) {
+ assert.strictEqual(keyProto, expectedValue);
+ return expectedValue;
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should decode timestamps', function() {
+ var seconds = String(Date.now() / 1000);
+ var expectedValue = new Date(parseInt(seconds, 10) * 1000);
+
+ var valueProto = {
+ value_type: 'timestampValue',
+ timestampValue: {
+ seconds: seconds
+ }
+ };
+
+ assert.deepEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
+
+ it('should return the value if no conversions are necessary', function() {
+ var expectedValue = false;
+
+ var valueProto = {
+ value_type: 'booleanValue',
+ booleanValue: expectedValue
+ };
+
+ assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
+ });
});
- it('should detect the namespace of the hierarchical keys', function() {
- var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', 1, 'Kind2', 'name' ]
+ describe('encodeValue', function() {
+ it('should encode a boolean', function() {
+ var value = true;
+
+ var expectedValueProto = {
+ booleanValue: value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode null', function() {
+ var value = null;
+
+ var expectedValueProto = {
+ nullValue: 0
+ };
+
+ assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an int', function() {
+ var value = 8;
+
+ var expectedValueProto = {
+ integerValue: value
+ };
+
+ entity.Int = function(value_) {
+ assert.strictEqual(value_, value);
+ this.value = value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an Int object', function() {
+ var value = new entity.Int(3);
+
+ var expectedValueProto = {
+ integerValue: value.value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a double', function() {
+ var value = 8.3;
+
+ var expectedValueProto = {
+ doubleValue: value
+ };
+
+ entity.Double = function(value_) {
+ assert.strictEqual(value_, value);
+ this.value = value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a Double object', function() {
+ var value = new entity.Double(3);
+
+ var expectedValueProto = {
+ doubleValue: value.value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a date', function() {
+ var value = new Date();
+ var seconds = value.getTime() / 1000;
+ var secondsRounded = Math.floor(seconds);
+
+ var expectedValueProto = {
+ timestampValue: {
+ seconds: secondsRounded,
+ nanos: Math.floor((seconds - secondsRounded) * 1e9)
+ }
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a string', function() {
+ var value = 'Hi';
+
+ var expectedValueProto = {
+ stringValue: value
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a buffer', function() {
+ var value = new Buffer('Hi');
+
+ var expectedValueProto = {
+ blobValue: value.toString('base64')
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an array', function() {
+ var value = [{}];
+
+ var expectedValueProto = {
+ arrayValue: {
+ values: value
+ }
+ };
+
+ var run = false;
+
+ var encodeValue = entity.encodeValue;
+ entity.encodeValue = function(value_) {
+ if (!run) {
+ run = true;
+ return encodeValue.apply(null, arguments);
+ }
+
+ assert.strictEqual(value_, value[0]);
+ return value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode a Key', function() {
+ var value = new entity.Key({
+ namespace: 'ns',
+ path: ['Kind', 1]
});
- var proto = entity.keyToKeyProto(key);
- assert.strictEqual(proto.partition_id.namespace, 'Namespace');
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, 1);
- assert.strictEqual(proto.path_element[0].name, undefined);
- assert.strictEqual(proto.path_element[1].kind, 'Kind2');
- assert.strictEqual(proto.path_element[1].id, undefined);
- assert.strictEqual(proto.path_element[1].name, 'name');
- });
- it('should handle incomplete keys with & without namespaces', function() {
- var key = new entity.Key({ path: [ 'Kind1' ] });
- var keyWithNS = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1' ]
+ var expectedValueProto = {
+ keyValue: value
+ };
+
+ entity.keyToKeyProto = function(key) {
+ assert.strictEqual(key, value);
+ return value;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
+ });
+
+ it('should encode an object', function() {
+ var value = {
+ key: 'value'
+ };
+
+ var expectedValueProto = {
+ entityValue: {
+ properties: [
+ {
+ name: 'key',
+ value: value.key
+ }
+ ]
+ }
+ };
+
+ var run = false;
+
+ var encodeValue = entity.encodeValue;
+ entity.encodeValue = function(value_) {
+ if (!run) {
+ run = true;
+ return encodeValue.apply(null, arguments);
+ }
+
+ assert.strictEqual(value_, value.key);
+ return value_;
+ };
+
+ assert.deepEqual(entity.encodeValue(value), expectedValueProto);
});
- var keyWithNumericID = new entity.Key({
- path: [ 'Kind1', 234 ]
+
+ it('should throw if an invalid value was provided', function() {
+ assert.throws(function() {
+ entity.encodeValue({});
+ }, /Unsupported field value/);
});
- var keyWithStringID = new entity.Key({
- path: [ 'Kind1', 'StringId' ]
+ });
+
+ describe('entityFromEntityProto', function() {
+ it('should convert entity proto to entity', function() {
+ var expectedEntity = {
+ name: 'Stephen'
+ };
+
+ var entityProto = {
+ properties: {
+ name: {
+ value_type: 'stringValue',
+ stringValue: expectedEntity.name
+ }
+ }
+ };
+
+ assert.deepEqual(
+ entity.entityFromEntityProto(entityProto),
+ expectedEntity
+ );
});
+ });
- var proto = entity.keyToKeyProto(key);
- var protoWithNS = entity.keyToKeyProto(keyWithNS);
- var protoWithNumericID = entity.keyToKeyProto(keyWithNumericID);
- var protoWithStringID = entity.keyToKeyProto(keyWithStringID);
+ describe('entityToEntityProto', function() {
+ it('should format an entity', function() {
+ var value = 'Stephen';
- assert.strictEqual(proto.partition_id, undefined);
- assert.strictEqual(proto.path_element[0].kind, 'Kind1');
- assert.strictEqual(proto.path_element[0].id, undefined);
- assert.strictEqual(proto.path_element[0].name, undefined);
+ var entityObject = {
+ name: value
+ };
- assert.strictEqual(protoWithNS.partition_id.namespace, 'Namespace');
- assert.strictEqual(protoWithNS.path_element[0].kind, 'Kind1');
- assert.strictEqual(protoWithNS.path_element[0].id, undefined);
- assert.strictEqual(protoWithNS.path_element[0].name, undefined);
+ var expectedEntityProto = {
+ key: null,
+ properties: entityObject
+ };
- assert.strictEqual(protoWithNumericID.path_element[0].id, 234);
- assert.strictEqual(protoWithStringID.path_element[0].name, 'StringId');
+ entity.encodeValue = function(value_) {
+ assert.strictEqual(value_, value);
+ return value;
+ };
+
+ assert.deepEqual(
+ entity.entityToEntityProto(entityObject),
+ expectedEntityProto
+ );
+ });
});
- it('should throw if key contains 0 items', function() {
- assert.throws(function() {
- var key = new entity.Key({ path: [] });
- entity.keyToKeyProto(key);
- }, /A key should contain at least a kind/);
+ describe('formatArray', function() {
+ it('should convert protos to key/data entity array', function() {
+ var key = {};
+
+ var entityProto = {
+ key: key
+ };
+
+ var results = [
+ {
+ entity: entityProto
+ }
+ ];
+
+ var expectedResults = [
+ {
+ key: key,
+ data: entityProto
+ }
+ ];
+
+ entity.keyFromKeyProto = function(key_) {
+ assert.strictEqual(key_, key);
+ return key;
+ };
+
+ entity.entityFromEntityProto = function(entityProto_) {
+ assert.strictEqual(entityProto_, entityProto);
+ return entityProto;
+ };
+
+ assert.deepEqual(entity.formatArray(results), expectedResults);
+ });
});
- it('should throw if key path contains null ids', function() {
- assert.throws(function() {
+ describe('isKeyComplete', function() {
+ it('should convert key to key proto', function(done) {
var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', null, 'Company' ]
+ path: ['Kind', 123]
});
- entity.keyToKeyProto(key);
- }, /Invalid key. Ancestor keys require an id or name./);
- });
- it('should not throw if last key path item is null', function() {
- assert.doesNotThrow(function() {
+ entity.keyToKeyProto = function(key_) {
+ assert.strictEqual(key_, key);
+ setImmediate(done);
+ return key;
+ };
+
+ entity.isKeyComplete(key);
+ });
+
+ it('should return true if key has id', function() {
var key = new entity.Key({
- namespace: 'Namespace',
- path: [ 'Kind1', 123, 'Company', null ]
+ path: ['Kind', 123]
});
- entity.keyToKeyProto(key);
+
+ assert.strictEqual(entity.isKeyComplete(key), true);
});
- });
-});
-describe('isKeyComplete', function() {
- it('should ret true if kind and an identifier have !0 vals', function() {
- [
- {
- key: new entity.Key({ path: [ 'Kind1' ] }),
- expected: false
- },
- {
- key: new entity.Key({ path: [ 'Kind1', 3 ] }),
- expected: true
- },
- {
- key: new entity.Key({ namespace: 'NS', path: [ 'Kind1' ] }),
- expected: false
- },
- {
- key: new entity.Key({ namespace: 'NS', path: [ 'Kind1', 'name' ] }),
- expected: true
- }
- ].forEach(function(test) {
- assert.strictEqual(entity.isKeyComplete(test.key), test.expected);
+ it('should return true if key has name', function() {
+ var key = new entity.Key({
+ path: ['Kind', 'name']
+ });
+
+ assert.strictEqual(entity.isKeyComplete(key), true);
});
- });
- it('should return false if there is no identifier', function() {
- var key = new entity.Key({ path: [ 'Kind' ] });
+ it('should return false if key does not have name or ID', function() {
+ var key = new entity.Key({
+ path: ['Kind']
+ });
- assert.strictEqual(entity.isKeyComplete(key), false);
+ assert.strictEqual(entity.isKeyComplete(key), false);
+ });
});
-});
-describe('entityFromEntityProto', function() {
- it('should support bool, int, double, str, entity & list values', function() {
- var obj = entity.entityFromEntityProto(entityProto);
- assert.deepEqual(obj.linkedTo, new entity.Key({ path: [ 'Kind', '123' ]}));
- assert.strictEqual(obj.name, 'Some name');
- assert.strictEqual(obj.flagged, false);
- assert.strictEqual(obj.count, 5);
- assert.strictEqual(obj.total, 7.8);
- assert.strictEqual(obj.author.name, 'Burcu Dogan');
- assert.strictEqual(obj.list[0], 6);
- assert.strictEqual(obj.list[1], false);
- });
-});
+ describe('keyFromKeyProto', function() {
+ var NAMESPACE = 'Namespace';
-describe('entityToEntityProto', function() {
- it('should format an entity', function() {
- var val = entity.entityToEntityProto({
- name: 'name'
- });
- var expected = {
- key: null,
- property: [
+ var keyProto = {
+ partitionId: {
+ namespaceId: NAMESPACE,
+ projectId: 'project-id'
+ },
+ path: [
{
- name: 'name',
- value: {
- string_value: 'name'
- }
+ kind: 'Kind',
+ id: '111'
+ },
+ {
+ kind: 'Kind2',
+ name: 'name'
}
]
};
- assert.deepEqual(val, expected);
- });
-});
-describe('queryToQueryProto', function() {
- it('should support filters and ancestory filtering', function() {
- var ds = datastore.dataset({ projectId: 'project-id' });
- var q = ds.createQuery('Kind1')
- .filter('name', '=', 'John')
- .end('end')
- .hasAncestor(new entity.Key({ path: [ 'Kind2', 'somename' ] }));
- var proto = entity.queryToQueryProto(q);
- assert.deepEqual(proto, queryFilterProto);
- });
-});
+ it('should set the namespace', function(done) {
+ entity.Key = function(keyOptions) {
+ assert.strictEqual(keyOptions.namespaceId, NAMESPACE);
+ done();
+ };
-describe('propertyToValue', function() {
- it('should translate a buffer', function() {
- var buffer = new Buffer('010159406940');
- var property = {
- blob_value: ByteBuffer.wrap(buffer)
- };
- var returnedbuffer = entity.propertyToValue(property);
- assert.deepEqual(buffer, returnedbuffer);
- });
-});
+ entity.keyFromKeyProto(keyProto);
+ });
-describe('valueToProperty', function() {
- it('should translate a boolean', function() {
- var val = entity.valueToProperty(true);
- assert.deepEqual(val, {
- boolean_value: true
+ it('should create a proper Key', function(done) {
+ entity.Key = function(keyOptions) {
+ assert.deepEqual(keyOptions, {
+ namespaceId: NAMESPACE,
+ path: [
+ 'Kind',
+ 111,
+ 'Kind2',
+ 'name'
+ ]
+ });
+
+ done();
+ };
+
+ entity.keyFromKeyProto(keyProto);
});
- });
- it('should translate an int', function() {
- var val1 = entity.valueToProperty(new entity.Int(3));
- var val2 = entity.valueToProperty(3);
- var expected = { integer_value: 3 };
- assert.deepEqual(val1, expected);
- assert.deepEqual(val2, expected);
- });
+ it('should return the created Key', function() {
+ var expectedValue = {};
- it('should translate a double', function() {
- var val1 = entity.valueToProperty(new entity.Double(3.1));
- var val2 = entity.valueToProperty(3.1);
- var expected = { double_value: 3.1 };
- assert.deepEqual(val1, expected);
- assert.deepEqual(val2, expected);
- });
+ entity.Key = function() {
+ return expectedValue;
+ };
- it('should translate a date', function() {
- var date = new Date();
- var val = entity.valueToProperty(date);
- var expected = {
- timestamp_microseconds_value: date.getTime() * 1000
- };
- assert.deepEqual(val, expected);
- });
+ assert.strictEqual(entity.keyFromKeyProto(keyProto), expectedValue);
+ });
- it('should translate a string', function() {
- var val = entity.valueToProperty('Hi');
- var expected = {
- string_value: 'Hi'
- };
- assert.deepEqual(val, expected);
- });
+ it('should throw if path is invalid', function(done) {
+ var keyProtoInvalid = {
+ partitionId: {
+ namespaceId: 'Namespace',
+ projectId: 'project-id'
+ },
+ path: [
+ {
+ kind: 'Kind'
+ },
+ {
+ kind: 'Kind2'
+ }
+ ]
+ };
- it('should translate a buffer', function() {
- var buffer = new Buffer('Hi');
- var val = entity.valueToProperty(buffer);
- var expected = {
- blob_value: buffer
- };
- assert.deepEqual(val, expected);
+ try {
+ entity.keyFromKeyProto(keyProtoInvalid);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'Ancestor keys require an id or name.');
+ done();
+ }
+ });
});
- it('should translate an array', function() {
- var array = [1, '2', true];
- var val = entity.valueToProperty(array);
- var expected = {
- list_value: [
- { integer_value: 1 },
- { string_value: '2' },
- { boolean_value: true }
- ]
- };
- assert.deepEqual(val, expected);
- });
+ describe('keyToKeyProto', function() {
+ it('should handle hierarchical key definitions', function() {
+ var key = new entity.Key({
+ path: ['Kind1', 1, 'Kind2', 'name']
+ });
- it('should translate a Key', function() {
- var key = new entity.Key({
- namespace: 'ns',
- path: ['Kind', 3]
+ var keyProto = entity.keyToKeyProto(key);
+
+ assert.strictEqual(keyProto.partitionId, undefined);
+
+ assert.strictEqual(keyProto.path[0].kind, 'Kind1');
+ assert.strictEqual(keyProto.path[0].id, 1);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProto.path[1].kind, 'Kind2');
+ assert.strictEqual(keyProto.path[1].id, undefined);
+ assert.strictEqual(keyProto.path[1].name, 'name');
});
- var val = entity.valueToProperty(key);
- var expected = {
- key_value: entity.keyToKeyProto(key)
- };
- assert.deepEqual(val, expected);
- });
- describe('objects', function() {
- it('should translate an object', function() {
- var val = entity.valueToProperty({
- name: 'value'
+ it('should detect the namespace of the hierarchical keys', function() {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', 1, 'Kind2', 'name']
});
- var expected = {
- entity_value: {
- property: [
+
+ var keyProto = entity.keyToKeyProto(key);
+
+ assert.strictEqual(keyProto.partitionId.namespaceId, 'Namespace');
+
+ assert.strictEqual(keyProto.path[0].kind, 'Kind1');
+ assert.strictEqual(keyProto.path[0].id, 1);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProto.path[1].kind, 'Kind2');
+ assert.strictEqual(keyProto.path[1].id, undefined);
+ assert.strictEqual(keyProto.path[1].name, 'name');
+ });
+
+ it('should handle incomplete keys with & without namespaces', function() {
+ var incompleteKey = new entity.Key({
+ path: ['Kind']
+ });
+
+ var incompleteKeyWithNs = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind']
+ });
+
+ var keyProto = entity.keyToKeyProto(incompleteKey);
+ var keyProtoWithNs = entity.keyToKeyProto(incompleteKeyWithNs);
+
+ assert.strictEqual(keyProto.partitionId, undefined);
+ assert.strictEqual(keyProto.path[0].kind, 'Kind');
+ assert.strictEqual(keyProto.path[0].id, undefined);
+ assert.strictEqual(keyProto.path[0].name, undefined);
+
+ assert.strictEqual(keyProtoWithNs.partitionId.namespaceId, 'Namespace');
+ assert.strictEqual(keyProtoWithNs.path[0].kind, 'Kind');
+ assert.strictEqual(keyProtoWithNs.path[0].id, undefined);
+ assert.strictEqual(keyProtoWithNs.path[0].name, undefined);
+ });
+
+ it('should throw if key contains 0 items', function(done) {
+ var key = new entity.Key({
+ path: []
+ });
+
+ try {
+ entity.keyToKeyProto(key);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'A key should contain at least a kind.');
+ done();
+ }
+ });
+
+ it('should throw if key path contains null ids', function(done) {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', null, 'Company']
+ });
+
+ try {
+ entity.keyToKeyProto(key);
+ } catch(e) {
+ assert.strictEqual(e.name, 'InvalidKey');
+ assert.strictEqual(e.message, 'Ancestor keys require an id or name.');
+ done();
+ }
+ });
+
+ it('should not throw if key is incomplete', function() {
+ var key = new entity.Key({
+ namespace: 'Namespace',
+ path: ['Kind1', 123, 'Company', null]
+ });
+
+ assert.doesNotThrow(function() {
+ entity.keyToKeyProto(key);
+ });
+ });
+ });
+
+ describe('queryToQueryProto', function() {
+ var queryProto = {
+ distinctOn: [
+ {
+ name: 'name'
+ }
+ ],
+ kind: [
+ {
+ name: 'Kind1'
+ }
+ ],
+ order: [
+ {
+ property: {
+ name: 'name'
+ },
+ direction: 'ASCENDING'
+ }
+ ],
+ projection: [
+ {
+ property: {
+ name: 'name'
+ }
+ }
+ ],
+ endCursor: 'end',
+ limit: {
+ value: 1
+ },
+ offset: 1,
+ startCursor: 'start',
+ filter: {
+ compositeFilter: {
+ filters: [
+ {
+ propertyFilter: {
+ property: {
+ name: 'name'
+ },
+ op: 'EQUAL',
+ value: {
+ stringValue: 'John'
+ }
+ }
+ },
{
- name: 'name',
- value: {
- string_value: 'value',
+ propertyFilter: {
+ property: {
+ name: '__key__'
+ },
+ op: 'HAS_ANCESTOR',
+ value: {
+ keyValue: {
+ path: [
+ {
+ kind: 'Kind2',
+ name: 'somename'
+ }
+ ]
+ }
+ }
}
}
- ]
- },
- indexed: false
- };
- assert.deepEqual(val, expected);
+ ],
+ op: 'AND'
+ }
+ }
+ };
+
+ it('should support all configurations of a query', function() {
+ var ancestorKey = new entity.Key({
+ path: ['Kind2', 'somename']
+ });
+
+ var ds = new Datastore({ projectId: 'project-id' });
+
+ var query = ds.createQuery('Kind1')
+ .filter('name', 'John')
+ .start('start')
+ .end('end')
+ .groupBy(['name'])
+ .order('name')
+ .select('name')
+ .limit(1)
+ .offset(1)
+ .hasAncestor(ancestorKey);
+
+ assert.deepEqual(entity.queryToQueryProto(query), queryProto);
});
- it('should not translate a key-less object', function() {
- assert.throws(function() {
- entity.valueToProperty({});
- }, /Unsupported field value/);
+ it('should handle buffer start and end values', function() {
+ var ds = new Datastore({ projectId: 'project-id' });
+ var startVal = new Buffer('start');
+ var endVal = new Buffer('end');
+
+ var query = ds.createQuery('Kind1')
+ .start(startVal)
+ .end(endVal);
+
+ var queryProto = entity.queryToQueryProto(query);
+ assert.strictEqual(queryProto.endCursor, endVal.toString('base64'));
+ assert.strictEqual(queryProto.startCursor, startVal.toString('base64'));
});
});
});
diff --git a/test/datastore/index.js b/test/datastore/index.js
index aa4ca08d4d4..9ab44c93265 100644
--- a/test/datastore/index.js
+++ b/test/datastore/index.js
@@ -17,29 +17,75 @@
'use strict';
var entity = {
- Int: function(val) {
- entity.intCalledWith = val;
+ Int: function(value) {
+ this.value = value;
},
- Double: function(val) {
- entity.doubleCalledWith = val;
+ Double: function(value) {
+ this.value = value;
},
- intCalledWith: null,
- doubleCalledWith: null
+ GeoPoint: function(value) {
+ this.value = value;
+ },
+ Key: function() {
+ this.calledWith_ = arguments;
+ }
};
var assert = require('assert');
+var extend = require('extend');
var mockery = require('mockery-next');
+var util = require('../../lib/common/util.js');
+
+var fakeUtil = extend({}, util);
+
+function FakeQuery() {
+ this.calledWith_ = arguments;
+}
+
+function FakeTransaction() {
+ this.calledWith_ = arguments;
+}
+
+function FakeGrpcService() {
+ this.calledWith_ = arguments;
+}
+
describe('Datastore', function() {
+ var Datastore;
var datastore;
+ var PROJECT_ID = 'project-id';
+ var NAMESPACE = 'namespace';
+
+ var OPTIONS = {
+ projectId: PROJECT_ID,
+ apiEndpoint: 'http://endpoint',
+ credentials: {},
+ keyFilename: 'key/file',
+ email: 'email',
+ namespace: NAMESPACE
+ };
+
before(function() {
+ mockery.registerMock('../../lib/common/grpc-service.js', FakeGrpcService);
+ mockery.registerMock('../../lib/common/util.js', fakeUtil);
mockery.registerMock('../../lib/datastore/entity.js', entity);
+ mockery.registerMock('../../lib/datastore/query.js', FakeQuery);
+ mockery.registerMock('../../lib/datastore/transaction.js', FakeTransaction);
mockery.enable({
useCleanCache: true,
warnOnUnregistered: false
});
- datastore = require('../../lib/datastore/index.js');
+
+ Datastore = require('../../lib/datastore/index.js');
+ });
+
+ beforeEach(function() {
+ datastore = new Datastore({
+ projectId: PROJECT_ID,
+ namespace: NAMESPACE
+ });
});
after(function() {
@@ -47,19 +93,300 @@ describe('Datastore', function() {
mockery.disable();
});
- it('should expose Dataset class', function() {
- assert.equal(typeof datastore.dataset, 'function');
+ describe('instantiation', function() {
+ it('should normalize the arguments', function() {
+ var normalizeArguments = fakeUtil.normalizeArguments;
+ var normalizeArgumentsCalled = false;
+ var fakeContext = {};
+
+ fakeUtil.normalizeArguments = function(context, options_, config) {
+ normalizeArgumentsCalled = true;
+ assert.strictEqual(context, fakeContext);
+ assert.strictEqual(OPTIONS, options_);
+ assert.strictEqual(config.projectIdRequired, false);
+ return options_;
+ };
+
+ Datastore.call(fakeContext, OPTIONS);
+ assert(normalizeArgumentsCalled);
+
+ fakeUtil.normalizeArguments = normalizeArguments;
+ });
+
+ it('should set the default base URL', function() {
+ assert.strictEqual(datastore.defaultBaseUrl_, 'datastore.googleapis.com');
+ });
+
+ it('should set default API connection details', function(done) {
+ var determineBaseUrl_ = Datastore.prototype.determineBaseUrl_;
+
+ Datastore.prototype.determineBaseUrl_ = function(customApiEndpoint) {
+ Datastore.prototype.determineBaseUrl_ = determineBaseUrl_;
+
+ assert.strictEqual(customApiEndpoint, OPTIONS.apiEndpoint);
+ done();
+ };
+
+ new Datastore(OPTIONS);
+ });
+
+ it('should localize the namespace', function() {
+ assert.strictEqual(datastore.namespace, NAMESPACE);
+ });
+
+ it('should localize the projectId', function() {
+ assert.strictEqual(datastore.projectId, PROJECT_ID);
+ });
+
+ it('should use DATASTORE_PROJECT_ID', function() {
+ var datastoreProjectIdCached = process.env.DATASTORE_PROJECT_ID;
+ var projectId = 'overridden-project-id';
+
+ process.env.DATASTORE_PROJECT_ID = projectId;
+
+ var datastore = new Datastore(OPTIONS);
+ process.env.DATASTORE_PROJECT_ID = datastoreProjectIdCached;
+
+ assert.strictEqual(datastore.projectId, projectId);
+ });
+
+ it('should inherit from GrpcService', function() {
+ var datastore = new Datastore(OPTIONS);
+
+ var calledWith = datastore.calledWith_[0];
+
+ assert.strictEqual(calledWith.projectIdRequired, false);
+ assert.strictEqual(calledWith.baseUrl, datastore.baseUrl_);
+ assert.strictEqual(calledWith.customEndpoint, datastore.customEndpoint_);
+ assert.strictEqual(calledWith.service, 'datastore');
+ assert.strictEqual(calledWith.apiVersion, 'v1beta3');
+ assert.deepEqual(calledWith.scopes, [
+ 'https://www.googleapis.com/auth/datastore'
+ ]);
+ });
+ });
+
+ describe('double', function() {
+ it('should expose Double builder', function() {
+ var aDouble = 7.0;
+ var double = Datastore.double(aDouble);
+ assert.strictEqual(double.value, aDouble);
+ });
+ });
+
+ describe('geoPoint', function() {
+ it('should expose GeoPoint builder', function() {
+ var aGeoPoint = { latitude: 24, longitude: 88 };
+ var geoPoint = Datastore.geoPoint(aGeoPoint);
+ assert.strictEqual(geoPoint.value, aGeoPoint);
+ });
});
- it('should expose Int builder', function() {
- var anInt = 7;
- datastore.int(anInt);
- assert.equal(entity.intCalledWith, anInt);
+ describe('int', function() {
+ it('should expose Int builder', function() {
+ var anInt = 7;
+ var int = Datastore.int(anInt);
+ assert.strictEqual(int.value, anInt);
+ });
});
- it('should expose Double builder', function() {
- var aDouble = 7.0;
- datastore.double(aDouble);
- assert.equal(entity.doubleCalledWith, aDouble);
+ describe('createQuery', function() {
+ var dsWithoutNamespace;
+
+ beforeEach(function() {
+ dsWithoutNamespace = new Datastore({
+ projectId: 'test',
+ namespace: 'my-ns'
+ });
+ });
+
+ it('should return a Query object', function() {
+ var namespace = 'namespace';
+ var kind = ['Kind'];
+
+ var query = datastore.createQuery(namespace, kind);
+ assert(query instanceof FakeQuery);
+
+ assert.strictEqual(query.calledWith_[0], namespace);
+ assert.deepEqual(query.calledWith_[1], kind);
+ });
+
+ it('should include the default namespace', function() {
+ var kind = ['Kind'];
+ var query = datastore.createQuery(kind);
+
+ assert.strictEqual(query.calledWith_[0], datastore.namespace);
+ assert.deepEqual(query.calledWith_[1], kind);
+ });
+ });
+
+ describe('key', function() {
+ it('should return a Key object', function() {
+ var options = {};
+ var key = datastore.key(options);
+
+ assert.strictEqual(key.calledWith_[0], options);
+ });
+
+ it('should use a non-object argument as the path', function() {
+ var options = 'path';
+ var key = datastore.key(options);
+
+ assert.strictEqual(key.calledWith_[0].namespace, datastore.namespace);
+ assert.deepEqual(key.calledWith_[0].path, [options]);
+ });
+ });
+
+ describe('runInTransaction', function() {
+ it('should begin transaction', function(done) {
+ datastore.createTransaction_ = function() {
+ return {
+ begin_: function() {
+ done();
+ }
+ };
+ };
+
+ datastore.runInTransaction();
+ });
+
+ it('should execute callback with error if one occurred', function(done) {
+ var error = new Error('Error.');
+ var apiResponse = {};
+
+ datastore.createTransaction_ = function() {
+ return {
+ begin_: function(callback) {
+ callback(error, apiResponse);
+ }
+ };
+ };
+
+ datastore.runInTransaction(util.noop, function(err, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ });
+ });
+
+ it('should return transaction object to the callback', function(done) {
+ var transaction = {
+ begin_: function(callback) {
+ callback();
+ },
+ commit_: util.noop
+ };
+
+ datastore.createTransaction_ = function() {
+ return transaction;
+ };
+
+ datastore.runInTransaction(function(t) {
+ assert.deepEqual(t, transaction);
+ done();
+ }, assert.ifError);
+ });
+
+ it('should return correct done function to the callback', function(done) {
+ datastore.createTransaction_ = function() {
+ return {
+ begin_: function(callback) {
+ callback();
+ },
+ commit_: function() {
+ done();
+ }
+ };
+ };
+
+ datastore.runInTransaction(function(t, tDone) {
+ tDone();
+ }, assert.ifError);
+ });
+ });
+
+ describe('createTransaction_', function() {
+ it('should return a Transaction object', function() {
+ var transaction = datastore.createTransaction_();
+ assert.strictEqual(transaction.calledWith_[0], datastore);
+ });
+ });
+
+ describe('determineBaseUrl_', function() {
+ function setHost(host) {
+ process.env.DATASTORE_EMULATOR_HOST = host;
+ }
+
+ beforeEach(function() {
+ delete process.env.DATASTORE_EMULATOR_HOST;
+ });
+
+ it('should default to defaultBaseUrl_', function() {
+ var defaultBaseUrl_ = 'defaulturl';
+ datastore.defaultBaseUrl_ = defaultBaseUrl_;
+
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, defaultBaseUrl_);
+ });
+
+ it('should remove slashes from the baseUrl', function() {
+ var expectedBaseUrl = 'localhost:8080';
+
+ setHost('localhost:8080/');
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, expectedBaseUrl);
+
+ setHost('localhost:8080//');
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, expectedBaseUrl);
+ });
+
+ it('should remove the protocol if specified', function() {
+ setHost('http://localhost:8080');
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, 'localhost:8080');
+
+ setHost('https://localhost:8080');
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, 'localhost:8080');
+ });
+
+ it('should not set customEndpoint_ when using default baseurl', function() {
+ var datastore = new Datastore({ projectId: PROJECT_ID });
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.customEndpoint_, undefined);
+ });
+
+ it('should set customEndpoint_ when using custom API endpoint', function() {
+ datastore.determineBaseUrl_('apiEndpoint');
+ assert.strictEqual(datastore.customEndpoint_, true);
+ });
+
+ it('should set baseUrl when using custom API endpoint', function() {
+ datastore.determineBaseUrl_('apiEndpoint');
+ assert.strictEqual(datastore.baseUrl_, 'apiEndpoint');
+ });
+
+ describe('with DATASTORE_EMULATOR_HOST environment variable', function() {
+ var DATASTORE_EMULATOR_HOST = 'localhost:9090';
+
+ beforeEach(function() {
+ setHost(DATASTORE_EMULATOR_HOST);
+ });
+
+ after(function() {
+ delete process.env.DATASTORE_EMULATOR_HOST;
+ });
+
+ it('should use the DATASTORE_EMULATOR_HOST env var', function() {
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.baseUrl_, DATASTORE_EMULATOR_HOST);
+ });
+
+ it('should set customEndpoint_', function() {
+ datastore.determineBaseUrl_();
+ assert.strictEqual(datastore.customEndpoint_, true);
+ });
+ });
});
});
diff --git a/test/datastore/query.js b/test/datastore/query.js
index 5f19c0227c5..14832c9ec75 100644
--- a/test/datastore/query.js
+++ b/test/datastore/query.js
@@ -17,10 +17,7 @@
'use strict';
var assert = require('assert');
-var datastore = require('../../lib/datastore');
-var entity = require('../../lib/datastore/entity.js');
var Query = require('../../lib/datastore/query.js');
-var queryProto = require('../testdata/proto_query.json');
describe('Query', function() {
describe('instantiation', function() {
@@ -304,19 +301,4 @@ describe('Query', function() {
assert.strictEqual(query, nextQuery);
});
});
-
- describe('proto conversion', function() {
- it('should be converted to a query proto successfully', function() {
- var query = new Query(['Kind'])
- .select(['name', 'count'])
- .filter('count', '>=', datastore.int(5))
- .filter('name', '=', 'Burcu')
- .order('count', { descending: true })
- .groupBy(['count'])
- .offset(5)
- .limit(10);
-
- assert.deepEqual(entity.queryToQueryProto(query), queryProto);
- });
- });
});
diff --git a/test/datastore/request.js b/test/datastore/request.js
index 3a245c08214..82b84f98179 100644
--- a/test/datastore/request.js
+++ b/test/datastore/request.js
@@ -18,45 +18,14 @@
var arrify = require('arrify');
var assert = require('assert');
-var ByteBuffer = require('bytebuffer');
-var entity = require('../../lib/datastore/entity.js');
var extend = require('extend');
-var format = require('string-format-obj');
var is = require('is');
var mockery = require('mockery-next');
-var mockRespGet = require('../testdata/response_get.json');
-var pb = require('../../lib/datastore/pb.js');
-var Query = require('../../lib/datastore/query.js');
-var requestModule = require('request');
var stream = require('stream');
-var util = require('../../lib/common/util.js');
-
-var REQUEST_DEFAULT_CONF;
-var requestOverride;
-function fakeRequest() {
- return (requestOverride || requestModule).apply(null, arguments);
-}
-fakeRequest.defaults = function(defaultConfiguration) {
- // Ignore the default values, so we don't have to test for them in every API
- // call.
- REQUEST_DEFAULT_CONF = defaultConfiguration;
- return fakeRequest;
-};
-// Create a protobuf "FakeMethod" request & response.
-pb.FakeMethodRequest = function() {
- this.toBuffer = function() {
- return new Buffer('');
- };
-};
-var pbFakeMethodResponseDecode = util.noop;
-pb.FakeMethodResponse = {
- decode: function() {
- var decodeFn = pbFakeMethodResponseDecode;
- pbFakeMethodResponseDecode = util.noop;
- return decodeFn.apply(this, arguments);
- }
-};
+var util = require('../../lib/common/util.js');
+var entity = require('../../lib/datastore/entity.js');
+var Query = require('../../lib/datastore/query.js');
var entityOverrides = {};
var fakeEntity;
@@ -78,6 +47,10 @@ fakeUtil = Object.keys(util).reduce(function(fakeUtil, methodName) {
return fakeUtil;
}, {});
+function FakeQuery() {
+ this.calledWith_ = arguments;
+}
+
var extended = false;
var fakeStreamRouter = {
extend: function(Class, methods) {
@@ -94,20 +67,21 @@ var fakeStreamRouter = {
describe('Request', function() {
var Request;
- var key;
var request;
- var CUSTOM_ENDPOINT = 'http://localhost:8080';
+
+ var key;
before(function() {
- mockery.registerMock('../../lib/datastore/entity.js', fakeEntity);
- mockery.registerMock('../../lib/common/util.js', fakeUtil);
- mockery.registerMock('../../lib/datastore/pb.js', pb);
mockery.registerMock('../../lib/common/stream-router.js', fakeStreamRouter);
- mockery.registerMock('request', fakeRequest);
+ mockery.registerMock('../../lib/common/util.js', fakeUtil);
+ mockery.registerMock('../../lib/datastore/entity.js', fakeEntity);
+ mockery.registerMock('../../lib/datastore/query.js', FakeQuery);
+
mockery.enable({
useCleanCache: true,
warnOnUnregistered: false
});
+
Request = require('../../lib/datastore/request.js');
});
@@ -121,33 +95,131 @@ describe('Request', function() {
namespace: 'namespace',
path: ['Company', 123]
});
+ FakeQuery.prototype = new Query();
entityOverrides = {};
utilOverrides = {};
- requestOverride = null;
request = new Request();
- request.apiEndpoint = CUSTOM_ENDPOINT;
- request.makeAuthenticatedRequest_ = function(req, callback) {
- (callback.onAuthenticated || callback)(null, req);
- };
});
describe('instantiation', function() {
it('should extend the correct methods', function() {
assert(extended); // See `fakeStreamRouter.extend`
});
+ });
- it('should have set correct defaults on Request', function() {
- assert.deepEqual(REQUEST_DEFAULT_CONF, {
- pool: {
- maxSockets: Infinity
- }
+ describe('allocateIds', function() {
+ var incompleteKey;
+ var apiResponse = {
+ keys: [
+ { path: [{ kind: 'Kind', id: 123 }] }
+ ]
+ };
+
+ beforeEach(function() {
+ incompleteKey = new entity.Key({ namespace: null, path: ['Kind'] });
+ });
+
+ it('should produce proper allocate IDs req protos', function(done) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.strictEqual(protoOpts.method, 'allocateIds');
+
+ assert.equal(reqOpts.keys.length, 1);
+
+ callback(null, apiResponse);
+ };
+
+ request.allocateIds(incompleteKey, 1, function(err, keys) {
+ assert.ifError(err);
+ var generatedKey = keys[0];
+ assert.strictEqual(generatedKey.path.pop(), 123);
+ done();
+ });
+ });
+
+ it('should exec callback with error & API response', function(done) {
+ var error = new Error('Error.');
+
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ callback(error, apiResponse);
+ };
+
+ request.allocateIds(incompleteKey, 1, function(err, keys, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(keys, null);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ });
+ });
+
+ it('should return apiResponse in callback', function(done) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ callback(null, apiResponse);
+ };
+
+ request.allocateIds(incompleteKey, 1, function(err, keys, apiResponse_) {
+ assert.ifError(err);
+ assert.strictEqual(apiResponse_, apiResponse);
+ done();
+ });
+ });
+
+ it('should throw if trying to allocate IDs with complete keys', function() {
+ assert.throws(function() {
+ request.allocateIds(key);
+ });
+ });
+ });
+
+ describe('delete', function() {
+ it('should delete by key', function(done) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.strictEqual(protoOpts.method, 'commit');
+ assert(is.object(reqOpts.mutations[0].delete));
+ callback();
+ };
+ request.delete(key, done);
+ });
+
+ it('should return apiResponse in callback', function(done) {
+ var resp = { success: true };
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ callback(null, resp);
+ };
+ request.delete(key, function(err, apiResponse) {
+ assert.ifError(err);
+ assert.deepEqual(resp, apiResponse);
+ done();
+ });
+ });
+
+ it('should multi delete by keys', function(done) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ assert.equal(reqOpts.mutations.length, 2);
+ callback();
+ };
+ request.delete([ key, key ], done);
+ });
+
+ describe('transactions', function() {
+ beforeEach(function() {
+ // Trigger transaction mode.
+ request.id = 'transaction-id';
+ request.requests_ = [];
+ });
+
+ it('should queue request', function() {
+ request.delete(key);
+
+ assert(is.object(request.requests_[0].mutations[0].delete));
});
});
});
describe('get', function() {
beforeEach(function() {
- request.makeReq_ = function() {};
+ request.request_ = function() {};
});
it('should throw if no keys are provided', function() {
@@ -170,9 +242,11 @@ describe('Request', function() {
});
it('should make correct request', function(done) {
- request.makeReq_ = function(method, req) {
- assert.equal(method, 'lookup');
- assert.deepEqual(req.key[0], entity.keyToKeyProto(key));
+ request.request_ = function(protoOpts, reqOpts) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.strictEqual(protoOpts.method, 'lookup');
+
+ assert.deepEqual(reqOpts.keys[0], entity.keyToKeyProto(key));
done();
};
@@ -185,7 +259,7 @@ describe('Request', function() {
var apiResponse = { a: 'b', c: 'd' };
beforeEach(function() {
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
setImmediate(function() {
callback(error, apiResponse);
});
@@ -224,7 +298,55 @@ describe('Request', function() {
});
describe('success', function() {
- var apiResponse = extend(true, {}, mockRespGet);
+ var apiResponse = {
+ found: [
+ {
+ entity: {
+ key: {
+ partitionId: {
+ projectId: 'grape-spaceship-123'
+ },
+ path: [
+ {
+ kind: 'Post',
+ name: 'post1'
+ }
+ ]
+ },
+ properties: {
+ title: {
+ stringValue: 'How to make the perfect pizza in your grill'
+ },
+ tags: {
+ arrayValue: {
+ values: [
+ {
+ stringValue: 'pizza'
+ },
+ {
+ stringValue: 'grill'
+ }
+ ]
+ }
+ },
+ rating: {
+ integerValue: '5'
+ },
+ author: {
+ stringValue: 'Silvano'
+ },
+ wordCount: {
+ integerValue: '400'
+ },
+ isDraft: {
+ booleanValue: false
+ }
+ }
+ }
+ }
+ ]
+ };
+
var expectedResult = entity.formatArray(apiResponse.found)[0];
var apiResponseWithMultiEntities = extend(true, {}, apiResponse);
@@ -238,7 +360,7 @@ describe('Request', function() {
];
beforeEach(function() {
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
callback(null, apiResponse);
};
});
@@ -254,7 +376,7 @@ describe('Request', function() {
});
it('should continue looking for deferred results', function(done) {
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
callback(null, apiResponseWithDeferred);
};
@@ -279,7 +401,7 @@ describe('Request', function() {
});
it('should exec callback w/ array from multiple keys', function(done) {
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
callback(null, apiResponseWithMultiEntities);
};
@@ -307,7 +429,7 @@ describe('Request', function() {
it('should not push more results if stream was ended', function(done) {
var entitiesEmitted = 0;
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
setImmediate(function() {
callback(null, apiResponseWithMultiEntities);
});
@@ -327,7 +449,7 @@ describe('Request', function() {
it('should not get more results if stream was ended', function(done) {
var lookupCount = 0;
- request.makeReq_ = function(method, req, callback) {
+ request.request_ = function(protoOpts, reqOpts, callback) {
lookupCount++;
setImmediate(function() {
callback(null, apiResponseWithDeferred);
@@ -369,61 +491,171 @@ describe('Request', function() {
});
});
- describe('save', function() {
- it('should save with incomplete key', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'commit');
- assert.equal(req.mutation.insert_auto_id.length, 1);
- callback();
+ describe('runQuery', function() {
+ it('should make correct request', function(done) {
+ var query = { namespace: 'namespace' };
+ var queryProto = {};
+
+ entityOverrides.queryToQueryProto = function(query_) {
+ assert.strictEqual(query_, query);
+ return queryProto;
};
- var key = new entity.Key({ namespace: 'ns', path: ['Company'] });
- request.save({ key: key, data: {} }, done);
+
+ request.request_ = function(protoOpts, reqOpts) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.strictEqual(protoOpts.method, 'runQuery');
+ assert(is.empty(reqOpts.readOptions));
+ assert.strictEqual(reqOpts.query, queryProto);
+ assert.strictEqual(reqOpts.partitionId.namespaceId, query.namespace);
+
+ done();
+ };
+
+ request.runQuery(query, assert.ifError);
});
- it('should set the ID on incomplete key objects', function(done) {
- var key = new entity.Key({ namespace: 'ns', path: ['Company'] });
- var id = 50714372;
-
- var mockCommitResponse = {
- mutation_result: {
- insert_auto_id_key: [
- {
- partition_id: {
- dataset_id: 's~project-id',
- namespace: 'ns'
- },
- path_element: [
- {
- kind: 'Company',
- id: id,
- name: null
- }
- ]
- }
- ]
+ describe('error', function() {
+ var error = new Error('Error.');
+ var apiResponse = {};
+
+ beforeEach(function() {
+ entity.queryToQueryProto = util.noop;
+
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ callback(error, apiResponse);
+ };
+ });
+
+ it('should execute callback with error & API response', function(done) {
+ request.runQuery({}, function(err, results, nextQuery, apiResponse_) {
+ assert.strictEqual(err, error);
+ assert.strictEqual(results, null);
+ assert.strictEqual(nextQuery, null);
+ assert.strictEqual(apiResponse_, apiResponse);
+
+ done();
+ });
+ });
+ });
+
+ describe('success', function() {
+ var entityResults = ['a', 'b', 'c'];
+ var endCursor = 'endCursor';
+
+ var apiResponse = {
+ batch: {
+ entityResults: entityResults,
+ endCursor: endCursor,
+ moreResults: 'MORE_RESULTS_AFTER_LIMIT',
+ skippedResults: 0
}
};
- request.makeReq_ = function(method, req, callback) {
- callback(null, mockCommitResponse);
- };
+ beforeEach(function() {
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ callback(null, apiResponse);
+ };
+ });
- request.save({ key: key, data: {} }, function(err) {
- assert.ifError(err);
+ it('should format results', function(done) {
+ entityOverrides.formatArray = function(array) {
+ assert.strictEqual(array, entityResults);
+ return entityResults;
+ };
- assert.equal(key.path[1], id);
+ request.runQuery({}, function(err, entities) {
+ assert.ifError(err);
+ assert.strictEqual(entities, entityResults);
+ done();
+ });
+ });
- done();
+ it('should return nextQuery', function(done) {
+ entityOverrides.formatArray = util.noop;
+
+ var query = {
+ offsetVal: 8
+ };
+
+ var startCalled = false;
+ var offsetCalled = false;
+
+ FakeQuery.prototype.start = function(endCursor_) {
+ assert.strictEqual(endCursor_, endCursor);
+ startCalled = true;
+ return this;
+ };
+
+ FakeQuery.prototype.offset = function(offset_) {
+ var offset = query.offsetVal - apiResponse.batch.skippedResults;
+ assert.strictEqual(offset_, offset);
+ offsetCalled = true;
+ return this;
+ };
+
+ request.runQuery(query, function(err) {
+ assert.ifError(err);
+ assert.strictEqual(startCalled, true);
+ assert.strictEqual(offsetCalled, true);
+ done();
+ });
});
});
+ });
+ describe('save', function() {
it('should save with keys', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'commit');
- assert.equal(req.mutation.upsert.length, 2);
- assert.equal(req.mutation.upsert[0].property[0].name, 'k');
- assert.equal(
- req.mutation.upsert[0].property[0].value.string_value, 'v');
+ var expectedReq = {
+ mutations: [
+ {
+ upsert: {
+ key: {
+ partitionId: {
+ namespaceId: 'namespace'
+ },
+ path: [
+ {
+ kind: 'Company',
+ id: 123
+ }
+ ]
+ },
+ properties: {
+ k: {
+ stringValue: 'v'
+ }
+ }
+ }
+ },
+ {
+ upsert: {
+ key: {
+ partitionId: {
+ namespaceId: 'namespace'
+ },
+ path: [
+ {
+ kind: 'Company',
+ id: 123
+ }
+ ]
+ },
+ properties: {
+ k: {
+ stringValue: 'v'
+ }
+ }
+ }
+ }
+ ]
+ };
+
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.strictEqual(protoOpts.method, 'commit');
+
+ assert.deepEqual(reqOpts, expectedReq);
+
callback();
};
request.save([
@@ -433,29 +665,20 @@ describe('Request', function() {
});
it('should save with specific method', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'commit');
+ request.request_ = function(protoOpts, reqOpts, callback) {
+ assert.equal(reqOpts.mutations.length, 3);
+ assert(is.object(reqOpts.mutations[0].insert));
+ assert(is.object(reqOpts.mutations[1].update));
+ assert(is.object(reqOpts.mutations[2].upsert));
- assert.equal(req.mutation.insert.length, 1);
- assert.equal(req.mutation.update.length, 1);
- assert.equal(req.mutation.upsert.length, 1);
- assert.equal(req.mutation.insert_auto_id.length, 1);
+ var insert = reqOpts.mutations[0].insert;
+ assert.deepEqual(insert.properties.k, { stringValue: 'v' });
- var insert = req.mutation.insert[0];
- assert.strictEqual(insert.property[0].name, 'k');
- assert.strictEqual(insert.property[0].value.string_value, 'v');
+ var update = reqOpts.mutations[1].update;
+ assert.deepEqual(update.properties.k2, { stringValue: 'v2' });
- var update = req.mutation.update[0];
- assert.strictEqual(update.property[0].name, 'k2');
- assert.strictEqual(update.property[0].value.string_value, 'v2');
-
- var upsert = req.mutation.upsert[0];
- assert.strictEqual(upsert.property[0].name, 'k3');
- assert.strictEqual(upsert.property[0].value.string_value, 'v3');
-
- var insertAutoId = req.mutation.insert_auto_id[0];
- assert.strictEqual(insertAutoId.property[0].name, 'k4');
- assert.strictEqual(insertAutoId.property[0].value.string_value, 'v4');
+ var upsert = reqOpts.mutations[2].upsert;
+ assert.deepEqual(upsert.properties.k3, { stringValue: 'v3' });
callback();
};
@@ -463,8 +686,7 @@ describe('Request', function() {
request.save([
{ key: key, method: 'insert', data: { k: 'v' } },
{ key: key, method: 'update', data: { k2: 'v2' } },
- { key: key, method: 'upsert', data: { k3: 'v3' } },
- { key: key, method: 'insert_auto_id', data: { k4: 'v4' } }
+ { key: key, method: 'upsert', data: { k3: 'v3' } }
], done);
});
@@ -496,7 +718,7 @@ describe('Request', function() {
];
var expectedEntities = extend(true, {}, entities);
- request.makeReq_ = function() {
+ request.request_ = function() {
// By the time the request is made, the original object has already been
// transformed into a raw request.
assert.deepEqual(entities, expectedEntities);
@@ -508,69 +730,43 @@ describe('Request', function() {
it('should return apiResponse in callback', function(done) {
var key = new entity.Key({ namespace: 'ns', path: ['Company'] });
- var mockCommitResponse = {
- mutation_result: {
- insert_auto_id_key: [
- {
- partition_id: {
- dataset_id: 's~project-id',
- namespace: 'ns'
- },
- path_element: [
- {
- kind: 'Company',
- id: 123,
- name: null
- }
- ]
- }
- ]
- }
- };
- request.makeReq_ = function(method, req, callback) {
+ var mockCommitResponse = {};
+ request.request_ = function(protoOpts, reqOpts, callback) {
callback(null, mockCommitResponse);
};
request.save({ key: key, data: {} }, function(err, apiResponse) {
assert.ifError(err);
- assert.deepEqual(mockCommitResponse, apiResponse);
+ assert.strictEqual(mockCommitResponse, apiResponse);
done();
});
});
- it('should not set an indexed value by default', function(done) {
- request.makeReq_ = function(method, req) {
- var property = req.mutation.upsert[0].property[0];
- assert.equal(property.name, 'name');
- assert.equal(property.value.string_value, 'value');
- assert.strictEqual(property.value.indexed, undefined);
+ it('should allow setting the indexed value of a property', function(done) {
+ request.request_ = function(protoOpts, reqOpts) {
+ var property = reqOpts.mutations[0].upsert.properties.name;
+ assert.strictEqual(property.stringValue, 'value');
+ assert.strictEqual(property.excludeFromIndexes, true);
done();
};
- request.save({
- key: key,
- data: [{ name: 'name', value: 'value' }]
- }, assert.ifError);
- });
- it('should allow setting the indexed value of property', function(done) {
- request.makeReq_ = function(method, req) {
- var property = req.mutation.upsert[0].property[0];
- assert.equal(property.name, 'name');
- assert.equal(property.value.string_value, 'value');
- assert.strictEqual(property.value.indexed, false);
- done();
- };
request.save({
key: key,
- data: [{ name: 'name', value: 'value', excludeFromIndexes: true }]
+ data: [
+ {
+ name: 'name',
+ value: 'value',
+ excludeFromIndexes: true
+ }
+ ]
}, assert.ifError);
});
it('should allow setting the indexed value on arrays', function(done) {
- request.makeReq_ = function(method, req) {
- var property = req.mutation.upsert[0].property[0];
+ request.request_ = function(protoOpts, reqOpts) {
+ var property = reqOpts.mutations[0].upsert.properties.name;
- property.value.list_value.forEach(function(value) {
- assert.strictEqual(value.indexed, false);
+ property.arrayValue.values.forEach(function(value) {
+ assert.strictEqual(value.excludeFromIndexes, true);
});
done();
@@ -578,11 +774,13 @@ describe('Request', function() {
request.save({
key: key,
- data: [{
- name: 'name',
- value: ['one', 'two', 'three'],
- excludeFromIndexes: true
- }]
+ data: [
+ {
+ name: 'name',
+ value: ['one', 'two', 'three'],
+ excludeFromIndexes: true
+ }
+ ]
}, assert.ifError);
});
@@ -606,151 +804,6 @@ describe('Request', function() {
});
});
- describe('delete', function() {
- it('should delete by key', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'commit');
- assert.equal(!!req.mutation.delete, true);
- callback();
- };
- request.delete(key, done);
- });
-
- it('should return apiResponse in callback', function(done) {
- var resp = { success: true };
- request.makeReq_ = function(method, req, callback) {
- callback(null, resp);
- };
- request.delete(key, function(err, apiResponse) {
- assert.ifError(err);
- assert.deepEqual(resp, apiResponse);
- done();
- });
- });
-
- it('should multi delete by keys', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'commit');
- assert.equal(req.mutation.delete.length, 2);
- callback();
- };
- request.delete([ key, key ], done);
- });
-
- describe('transactions', function() {
- beforeEach(function() {
- // Trigger transaction mode.
- request.id = 'transaction-id';
- request.requests_ = [];
- });
-
- it('should queue request', function() {
- request.delete(key);
-
- assert.equal(typeof request.requests_[0].mutation.delete, 'object');
- });
- });
- });
-
- describe('runQuery', function() {
- var query;
- var mockResponse = {
- withResults: {
- batch: { entity_result: mockRespGet.found }
- },
- withResultsAndEndCursor: {
- batch: {
- entity_result: mockRespGet.found,
- end_cursor: new ByteBuffer().writeIString('cursor').flip()
- }
- }
- };
-
- beforeEach(function() {
- query = new Query('namespace', ['Kind']);
- });
-
- describe('errors', function() {
- it('should handle upstream errors', function() {
- var error = new Error('Error.');
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'runQuery');
- callback(error);
- };
-
- request.runQuery(query, function(err) {
- assert.equal(err, error);
- });
- });
- });
-
- it('should execute callback with results', function() {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'runQuery');
- callback(null, mockResponse.withResults);
- };
-
- request.runQuery(query, function(err, entities) {
- assert.ifError(err);
- assert.deepEqual(entities[0].key.path, ['Kind', 5732568548769792]);
-
- var data = entities[0].data;
- assert.strictEqual(data.author, 'Silvano');
- assert.strictEqual(data.isDraft, false);
- assert.deepEqual(data.publishedAt, new Date(978336000000));
- });
- });
-
- it('should execute callback with apiResponse', function(done) {
- request.makeReq_ = function(method, req, callback) {
- callback(null, mockResponse.withResults);
- };
-
- request.runQuery(query, function(err, entities, nextQuery, apiResponse) {
- assert.ifError(err);
- assert.deepEqual(mockResponse.withResults, apiResponse);
- done();
- });
- });
-
- it('should return null nextQuery if no end cursor exists', function(done) {
- request.makeReq_ = function(method, req, callback) {
- callback(null, mockResponse.withResults);
- };
-
- request.runQuery(query, function(err, entities, nextQuery) {
- assert.ifError(err);
- assert.strictEqual(nextQuery, null);
- done();
- });
- });
-
- it('should return a nextQuery', function(done) {
- var response = mockResponse.withResultsAndEndCursor;
-
- request.makeReq_ = function(method, req, callback) {
- callback(null, response);
- };
-
- request.runQuery(query, function(err, entities, nextQuery) {
- assert.ifError(err);
- assert.equal(nextQuery.startVal, response.batch.end_cursor.toBase64());
- done();
- });
- });
-
- it('should set a partition_id from a namespace', function(done) {
- var namespace = 'namespace';
-
- request.makeReq_ = function(method, req) {
- assert.strictEqual(req.partition_id.namespace, namespace);
- done();
- };
-
- request.runQuery(query, assert.ifError);
- });
- });
-
describe('update', function() {
it('should pass the correct arguments to save', function(done) {
request.save = function(entities, callback) {
@@ -793,347 +846,105 @@ describe('Request', function() {
});
});
- describe('allocateIds', function() {
- var incompleteKey;
- var apiResponse = {
- key: [
- { path_element: [{ kind: 'Kind', id: 123 }] }
- ]
- };
+ describe('request_', function() {
+ var PROJECT_ID = 'project-id';
+ var PROTO_OPTS = {};
beforeEach(function() {
- incompleteKey = new entity.Key({ namespace: null, path: ['Kind'] });
+ request.projectId = PROJECT_ID;
});
- it('should produce proper allocate IDs req protos', function(done) {
- request.makeReq_ = function(method, req, callback) {
- assert.equal(method, 'allocateIds');
- assert.equal(req.key.length, 1);
-
- callback(null, apiResponse);
- };
+ it('should make the correct request', function(done) {
+ var reqOpts = {};
- request.allocateIds(incompleteKey, 1, function(err, keys) {
- assert.ifError(err);
- var generatedKey = keys[0];
- assert.strictEqual(generatedKey.path.pop(), 123);
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(protoOpts, PROTO_OPTS);
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.projectId, PROJECT_ID);
done();
- });
- });
-
- it('should exec callback with error & API response', function(done) {
- var error = new Error('Error.');
-
- request.makeReq_ = function(method, req, callback) {
- callback(error, apiResponse);
- };
-
- request.allocateIds(incompleteKey, 1, function(err, keys, apiResponse_) {
- assert.strictEqual(err, error);
- assert.strictEqual(keys, null);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- });
- });
-
- it('should return apiResponse in callback', function(done) {
- request.makeReq_ = function(method, req, callback) {
- callback(null, apiResponse);
};
- request.allocateIds(incompleteKey, 1, function(err, keys, apiResponse_) {
- assert.ifError(err);
- assert.strictEqual(apiResponse_, apiResponse);
- done();
- });
+ request.request_(PROTO_OPTS, reqOpts, assert.ifError);
});
- it('should throw if trying to allocate IDs with complete keys', function() {
- assert.throws(function() {
- request.allocateIds(key);
- });
- });
- });
+ describe('commit', function() {
+ it('should set the mode', function(done) {
+ var reqOpts = {};
- describe('makeReq_', function() {
- beforeEach(function() {
- request.connection = {
- createAuthenticatedReq: util.noop
- };
- });
-
- it('should assemble correct request', function(done) {
- var method = 'commit';
- var datasetId = 'dataset-id';
- var expectedUri =
- format('{apiEndpoint}/datastore/v1beta2/datasets/{dId}/{method}', {
- apiEndpoint: CUSTOM_ENDPOINT,
- dId: datasetId,
- method: method
- });
-
- request.datasetId = datasetId;
- request.makeAuthenticatedRequest_ = function(opts) {
- assert.equal(opts.method, 'POST');
- assert.equal(opts.uri, expectedUri);
- assert.equal(opts.headers['Content-Type'], 'application/x-protobuf');
- done();
- };
- request.makeReq_(method, {}, util.noop);
- });
-
- it('should make API request', function(done) {
- var mockRequest = { mock: 'request' };
- requestOverride = function(req) {
- assert.deepEqual(req, mockRequest);
- done();
- return new stream.Writable();
- };
- request.makeAuthenticatedRequest_ = function(opts, callback) {
- (callback.onAuthenticated || callback)(null, mockRequest);
- };
- request.makeReq_('commit', {}, util.noop);
- });
-
- it('should execute onAuthenticated with error', function(done) {
- var error = new Error('Error.');
-
- request.makeAuthenticatedRequest_ = function(opts, callback) {
- (callback.onAuthenticated || callback)(error);
- };
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.mode, 'NON_TRANSACTIONAL');
+ done();
+ };
- request.makeReq_('commit', {}, function(err) {
- assert.strictEqual(err, error);
- done();
+ request.request_({ method: 'commit' }, reqOpts, assert.ifError);
});
});
- it('should send protobuf request', function(done) {
- var requestOptions = { mode: 'NON_TRANSACTIONAL' };
- var decoded = new pb.CommitRequest(requestOptions).toBuffer();
- requestOverride = function(req) {
- assert.equal(String(req.body), String(decoded));
- done();
- };
- request.makeReq_('commit', requestOptions, util.noop);
- });
-
- it('should respect API host and port configuration', function(done) {
- request.apiEndpoint = CUSTOM_ENDPOINT;
+ describe('transaction', function() {
+ var TRANSACTION_ID = 'transaction';
- requestOverride = function(req) {
- assert.equal(req.uri.indexOf(CUSTOM_ENDPOINT), 0);
- done();
- };
-
- request.makeReq_('fakeMethod', util.noop);
- });
-
- it('should execute callback with error from request', function(done) {
- var error = new Error('Error.');
-
- requestOverride = function(req, callback) {
- callback(error);
- };
-
- request.makeReq_('fakeMethod', function(err) {
- assert.strictEqual(err, error);
- done();
+ beforeEach(function() {
+ request.id = TRANSACTION_ID;
});
- });
-
- it('should parse response', function(done) {
- var resp = {};
-
- requestOverride = function(req, callback) {
- callback(null, resp);
- };
-
- utilOverrides.parseHttpRespMessage = function(resp_) {
- assert.strictEqual(resp_, resp);
- setImmediate(done);
- return resp;
- };
-
- request.makeReq_('fakeMethod', util.noop);
- });
- it('should return error from parsed response', function(done) {
- var error = new Error('Error.');
- var resp = {};
-
- requestOverride = function(req, callback) {
- callback(null, resp);
- };
+ it('should set the commit transaction info', function(done) {
+ var reqOpts = {};
- utilOverrides.parseHttpRespMessage = function() {
- return {
- err: error,
- resp: resp
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.mode, 'TRANSACTIONAL');
+ assert.strictEqual(reqOpts_.transaction, request.id);
+ done();
};
- };
- request.makeReq_('fakeMethod', function(err, results, apiResponse) {
- assert.strictEqual(err, error);
- assert.strictEqual(results, null);
- assert.strictEqual(apiResponse, resp);
- done();
+ request.id = 'transaction-id';
+ request.request_({ method: 'commit' }, reqOpts, assert.ifError);
});
- });
-
- it('should parse body', function(done) {
- var resp = {};
- var body = {};
- requestOverride = function(req, callback) {
- callback(null, resp, body);
- };
+ it('should set the rollback transaction info', function(done) {
+ var reqOpts = {};
- utilOverrides.parseHttpRespBody = function() {
- return {
- body: body
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.transaction, request.id);
+ done();
};
- };
- request.makeReq_('fakeMethod', function(err, results, apiResponse) {
- assert.strictEqual(err, null);
- assert.strictEqual(results, body);
- assert.strictEqual(apiResponse, resp);
- done();
+ request.id = 'transaction-id';
+ request.request_({ method: 'rollback' }, reqOpts, assert.ifError);
});
- });
-
- it('should return error from parsed body', function(done) {
- var error = new Error('Error.');
- var resp = {};
- var body = {};
- requestOverride = function(req, callback) {
- callback(null, resp, body);
- };
-
- utilOverrides.parseHttpRespBody = function() {
- return {
- err: error,
- body: body
+ it('should set the lookup transaction info', function(done) {
+ var reqOpts = {
+ readOptions: {}
};
- };
- request.makeReq_('fakeMethod', function(err, results, apiResponse) {
- assert.strictEqual(err, error);
- assert.strictEqual(results, null);
- assert.strictEqual(apiResponse, resp);
- done();
- });
- });
-
- it('should decode the protobuf response', function(done) {
- pbFakeMethodResponseDecode = function() {
- done();
- };
- requestOverride = function(req, callback) {
- callback(null, {}, new Buffer(''));
- };
- request.makeReq_('fakeMethod', util.noop);
- });
-
- describe('transactional and non-transactional properties', function() {
- beforeEach(function() {
- request.createAuthenticatedRequest_ = function(opts, callback) {
- (callback.onAuthenticated || callback)();
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.readOptions, reqOpts.readOptions);
+ assert.strictEqual(reqOpts_.readOptions.transaction, request.id);
+ done();
};
- });
-
- describe('rollback', function() {
- it('should attach transacational properties', function(done) {
- request.id = 'EeMXCSGvwcSWGkkABRmGMTWdbi_pa66VflNhQAGblQFMXf9HrmNGa' +
- 'GugEsO1M2_2x7wZvLencG51uwaDOTZCjTkkRh7bw_oyKUgTmtJ0iWJwath7';
- var expected = new pb.RollbackRequest({
- transaction: request.id
- }).toBuffer();
- requestOverride = function(req) {
- assert.deepEqual(req.body, expected);
- done();
- };
- request.makeReq_('rollback', util.noop);
- });
- });
- describe('commit', function() {
- it('should attach transactional properties', function(done) {
- request.id = 'EeMXCSGvwcSWGkkABRmGMTWdbi_pa66VflNhQAGblQFMXf9HrmNGa' +
- 'GugEsO1M2_2x7wZvLencG51uwaDOTZCjTkkRh7bw_oyKUgTmtJ0iWJwath7';
- var expected = new pb.CommitRequest({
- mode: 'TRANSACTIONAL',
- transaction: request.id
- }).toBuffer();
- requestOverride = function(req) {
- assert.deepEqual(req.body, expected);
- done();
- };
- request.makeReq_('commit', util.noop);
- });
-
- it('should attach non-transactional properties', function(done) {
- var expected = new pb.CommitRequest({
- mode: 'NON_TRANSACTIONAL'
- }).toBuffer();
- requestOverride = function(req) {
- assert.deepEqual(req.body, expected);
- done();
- };
- request.makeReq_('commit', util.noop);
- });
+ request.id = 'transaction-id';
+ request.request_({ method: 'lookup' }, reqOpts, assert.ifError);
});
- describe('lookup', function() {
- it('should attach transactional properties', function(done) {
- request.id = 'EeMXCSGvwcSWGkkABRmGMTWdbi_pa66VflNhQAGblQFMXf9HrmNGa' +
- 'GugEsO1M2_2x7wZvLencG51uwaDOTZCjTkkRh7bw_oyKUgTmtJ0iWJwath7';
- var expected = new pb.LookupRequest({
- read_options: {
- transaction: request.id
- }
- }).toBuffer();
- requestOverride = function(req) {
- assert.deepEqual(req.body, expected);
- done();
- };
- request.makeReq_('lookup', util.noop);
- });
-
- it('should not attach transactional properties', function(done) {
- requestOverride = function(req) {
- assert.strictEqual(req.body, '');
- done();
- };
- request.makeReq_('lookup', util.noop);
- });
- });
+ it('should set the runQuery transaction info', function(done) {
+ var reqOpts = {
+ readOptions: {}
+ };
- describe('runQuery', function() {
- it('should attach transactional properties', function(done) {
- request.id = 'EeMXCSGvwcSWGkkABRmGMTWdbi_pa66VflNhQAGblQFMXf9HrmNGa' +
- 'GugEsO1M2_2x7wZvLencG51uwaDOTZCjTkkRh7bw_oyKUgTmtJ0iWJwath7';
- var expected = new pb.RunQueryRequest({
- read_options: {
- transaction: request.id
- }
- }).toBuffer();
- requestOverride = function(req) {
- assert.deepEqual(req.body, expected);
- done();
- };
- request.makeReq_('runQuery', util.noop);
- });
+ request.request = function(protoOpts, reqOpts_) {
+ assert.strictEqual(reqOpts_, reqOpts);
+ assert.strictEqual(reqOpts_.readOptions, reqOpts.readOptions);
+ assert.strictEqual(reqOpts_.readOptions.transaction, request.id);
+ done();
+ };
- it('should not attach transactional properties', function(done) {
- requestOverride = function(req) {
- assert.strictEqual(req.body, '');
- done();
- };
- request.makeReq_('runQuery', util.noop);
- });
+ request.id = 'transaction-id';
+ request.request_({ method: 'runQuery' }, reqOpts, assert.ifError);
});
});
});
diff --git a/test/datastore/transaction.js b/test/datastore/transaction.js
index 5a13f075747..41a90b1069b 100644
--- a/test/datastore/transaction.js
+++ b/test/datastore/transaction.js
@@ -50,6 +50,7 @@ describe('Transaction', function() {
var Transaction;
var transaction;
var TRANSACTION_ID = 'transaction-id';
+ var PROJECT_ID = 'project-id';
function key(path) {
return new entity.Key({ path: arrify(path) });
@@ -64,6 +65,7 @@ describe('Transaction', function() {
useCleanCache: true,
warnOnUnregistered: false
});
+
Transaction = require('../../lib/datastore/transaction.js');
});
@@ -74,45 +76,59 @@ describe('Transaction', function() {
beforeEach(function() {
transaction = new Transaction({
- authenticateReq_: function(req, callback) {
- return callback(null, req);
- }
- }, 'project-id');
+ request: function() {},
+ projectId: PROJECT_ID
+ });
});
describe('instantiation', function() {
- it('should assign default properties', function() {
- var datasetId = 'abc';
+ it('should localize the project ID', function() {
+ assert.strictEqual(transaction.projectId, PROJECT_ID);
+ });
+
+ it('should localize request function', function(done) {
+ var transaction;
+
var fakeDataset = {
- apiEndpoint: 'http://localhost:8080',
- makeAuthenticatedRequest_: function fakeMakeAuthenticatedRequest_() {}
+ request: {
+ bind: function(context) {
+ assert.strictEqual(context, fakeDataset);
+
+ setImmediate(function() {
+ assert.strictEqual(transaction.request, fakeDataset.request);
+ done();
+ });
+
+ return fakeDataset.request;
+ }
+ }
};
- var transaction = new Transaction(fakeDataset, datasetId);
+ transaction = new Transaction(fakeDataset);
+ });
- assert.strictEqual(transaction.id, null);
- assert.deepEqual(transaction.apiEndpoint, fakeDataset.apiEndpoint);
- assert.equal(
- transaction.makeAuthenticatedRequest_,
- fakeDataset.makeAuthenticatedRequest_
- );
- assert.equal(transaction.datasetId, datasetId);
+ it('should localize default properties', function() {
+ assert.deepEqual(transaction.modifiedEntities_, []);
+ assert.deepEqual(transaction.requestCallbacks_, []);
+ assert.deepEqual(transaction.requests_, []);
});
});
describe('begin_', function() {
it('should begin', function(done) {
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
- assert.equal(method, 'beginTransaction');
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.equal(protoOpts.method, 'beginTransaction');
done();
};
+
transaction.begin_();
});
it('should set transaction id', function(done) {
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(null, { transaction: TRANSACTION_ID });
};
transaction.begin_(function(err) {
@@ -124,8 +140,8 @@ describe('Transaction', function() {
it('should pass error to callback', function(done) {
var error = new Error('Error.');
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(error);
};
transaction.begin_(function(err) {
@@ -136,8 +152,8 @@ describe('Transaction', function() {
it('should pass apiResponse to callback', function(done) {
var resp = { success: true };
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(null, resp);
};
transaction.begin_(function(err, apiResponse) {
@@ -154,8 +170,9 @@ describe('Transaction', function() {
});
it('should rollback', function(done) {
- transaction.makeReq_ = function(method) {
- assert.equal(method, 'rollback');
+ transaction.request_ = function(protoOpts) {
+ assert.strictEqual(protoOpts.service, 'Datastore');
+ assert.equal(protoOpts.method, 'rollback');
done();
};
transaction.rollback();
@@ -163,8 +180,8 @@ describe('Transaction', function() {
it('should pass error to callback', function(done) {
var error = new Error('Error.');
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(error);
};
transaction.rollback(function(err) {
@@ -175,8 +192,8 @@ describe('Transaction', function() {
it('should pass apiResponse to callback', function(done) {
var resp = { success: true };
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(null, resp);
};
transaction.rollback(function(err, apiResponse) {
@@ -187,8 +204,8 @@ describe('Transaction', function() {
});
it('should set skipCommit', function(done) {
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback();
};
transaction.rollback(function() {
@@ -198,8 +215,8 @@ describe('Transaction', function() {
});
it('should set skipCommit when rollback errors', function(done) {
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(new Error('Error.'));
};
transaction.rollback(function() {
@@ -215,8 +232,9 @@ describe('Transaction', function() {
});
it('should commit', function(done) {
- transaction.makeReq_ = function(method) {
- assert.equal(method, 'commit');
+ transaction.request_ = function(protoOpts) {
+ assert.equal(protoOpts.service, 'Datastore');
+ assert.equal(protoOpts.method, 'commit');
done();
};
transaction.commit_();
@@ -226,15 +244,15 @@ describe('Transaction', function() {
transaction.skipCommit = true;
// If called, the test will blow up.
- transaction.makeReq_ = done;
+ transaction.request_ = done;
transaction.commit_(done);
});
it('should pass error to callback', function(done) {
var error = new Error('Error.');
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(error);
};
transaction.commit_(function(err) {
@@ -245,8 +263,8 @@ describe('Transaction', function() {
it('should pass apiResponse to callback', function(done) {
var resp = { success: true };
- transaction.makeReq_ = function(method, req, callback) {
- callback = callback || req;
+ transaction.request_ = function(protoOpts, reqOpts, callback) {
+ callback = callback || reqOpts;
callback(null, resp);
};
transaction.commit_(function(err, apiResponse) {
@@ -283,7 +301,7 @@ describe('Transaction', function() {
saveCalled++;
};
- transaction.makeReq_ = util.noop;
+ transaction.request_ = util.noop;
transaction.commit_();
@@ -312,7 +330,7 @@ describe('Transaction', function() {
saveCalled++;
};
- transaction.makeReq_ = util.noop;
+ transaction.request_ = util.noop;
transaction.commit_();
assert.equal(deleteCalled, 0);
@@ -325,10 +343,10 @@ describe('Transaction', function() {
{ e: 'f', g: 'h' }
];
- transaction.makeReq_ = function(method, req) {
+ transaction.request_ = function(protoOpts, reqOpts) {
var req1 = transaction.requests_[0];
var req2 = transaction.requests_[1];
- assert.deepEqual(req, extend(req1, req2));
+ assert.deepEqual(reqOpts, extend(req1, req2));
done();
};
@@ -344,7 +362,7 @@ describe('Transaction', function() {
function() { cb2Called = true; }
];
- transaction.makeReq_ = function(method, req, cb) {
+ transaction.request_ = function(protoOpts, reqOpts, cb) {
cb();
};
diff --git a/test/index.js b/test/index.js
index 7b508f8eae6..7dcee5761d7 100644
--- a/test/index.js
+++ b/test/index.js
@@ -163,11 +163,11 @@ describe('gcloud', function() {
});
describe('datastore', function() {
- it('should create a single Datastore', function() {
- var datastore = localGcloud.datastore;
+ it('should create a new Datastore', function() {
+ var datastore = localGcloud.datastore(options);
assert(datastore instanceof FakeDatastore);
- assert.deepEqual(datastore.calledWith_[0], expectedConfig);
+ assert.strictEqual(datastore.calledWith_[0], options);
});
});
diff --git a/test/storage/bucket.js b/test/storage/bucket.js
index c3232e4e6be..89838c7f143 100644
--- a/test/storage/bucket.js
+++ b/test/storage/bucket.js
@@ -878,7 +878,7 @@ describe('Bucket', function() {
});
describe('upload', function() {
- var basename = 'proto_query.json';
+ var basename = 'testfile.json';
var filepath = 'test/testdata/' + basename;
var textFilepath = 'test/testdata/textfile.txt';
var metadata = { a: 'b', c: 'd' };
diff --git a/test/testdata/proto_query.json b/test/testdata/proto_query.json
deleted file mode 100644
index d6fc6019ac7..00000000000
--- a/test/testdata/proto_query.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
- "projection": [{
- "property": {
- "name": "name"
- }
- }, {
- "property": {
- "name": "count"
- }
- }],
- "kind": [{
- "name": "Kind"
- }],
- "filter": {
- "composite_filter": {
- "filter": [{
- "property_filter": {
- "property": {
- "name": "count"
- },
- "operator": "GREATER_THAN_OR_EQUAL",
- "value": {
- "integer_value": 5
- }
- }
- }, {
- "property_filter": {
- "property": {
- "name": "name"
- },
- "operator": "EQUAL",
- "value": {
- "string_value": "Burcu"
- }
- }
- }],
- "operator": "AND"
- }
- },
- "order": [{
- "property": {
- "name": "count"
- },
- "direction": "DESCENDING"
- }],
- "group_by": [{
- "name": "count"
- }],
- "offset": 5,
- "limit": 10
-}
\ No newline at end of file
diff --git a/test/testdata/response_get.json b/test/testdata/response_get.json
deleted file mode 100644
index 6137f87a639..00000000000
--- a/test/testdata/response_get.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "found": [{
- "entity": {
- "key": {
- "partition_id": {
- "dataset_id": "s~bamboo-shift-504",
- "namespace": null
- },
- "path_element": [{
- "kind": "Kind",
- "id": 5732568548769792,
- "name": null
- }]
- },
- "property": [{
- "name": "publishedAt",
- "value": {
- "boolean_value": null,
- "integer_value": null,
- "double_value": null,
- "timestamp_microseconds_value": 978336000000000,
- "key_value": null,
- "blob_key_value": null,
- "string_value": null,
- "blob_value": null,
- "entity_value": null,
- "list_value": [],
- "meaning": null,
- "indexed": true
- }
- }, {
- "name": "author",
- "value": {
- "boolean_value": null,
- "integer_value": null,
- "double_value": null,
- "timestamp_microseconds_value": null,
- "key_value": null,
- "blob_key_value": null,
- "string_value": "Silvano",
- "blob_value": null,
- "entity_value": null,
- "list_value": [],
- "meaning": null,
- "indexed": true
- }
- }, {
- "name": "isDraft",
- "value": {
- "boolean_value": false,
- "integer_value": null,
- "double_value": null,
- "timestamp_microseconds_value": null,
- "key_value": null,
- "blob_key_value": null,
- "string_value": null,
- "blob_value": null,
- "entity_value": null,
- "list_value": [],
- "meaning": null,
- "indexed": true
- }
- }]
- }
- }],
- "missing": [],
- "deferred": []
-}
\ No newline at end of file