diff --git a/package.json b/package.json
index 0d1991a1a..e91262964 100644
--- a/package.json
+++ b/package.json
@@ -31,14 +31,14 @@
"scripts": {
"docs": "jsdoc -c .jsdoc.js",
"generate-scaffolding": "repo-tools generate all && repo-tools generate lib_samples_readme -l samples/ --config ../.cloud-repo-tools.json",
- "lint": "eslint 'samples/*.js' 'samples/**/*.js'",
+ "lint": "gts check && eslint '**/*.js'",
"cover": "nyc --reporter=lcov mocha build/test && nyc report",
"samples-test": "cd samples/ && npm link ../ && npm test && cd ../",
"test-no-cover": "mocha build/test",
"test": "npm run cover",
"presystem-test": "npm run compile",
"system-test": "mocha build/system-test --timeout 600000",
- "fix": "eslint 'samples/*.js' 'samples/**/*.js' --fix",
+ "fix": "gts fix && eslint '**/*.js' --fix",
"clean": "gts clean",
"compile": "tsc -p . && cp -r src/v1/ build/src/v1/ && cp -r protos build/ && cp test/*.js build/test",
"prepare": "npm run compile",
diff --git a/src/.eslintrc.yml b/src/.eslintrc.yml
new file mode 100644
index 000000000..a04a47e41
--- /dev/null
+++ b/src/.eslintrc.yml
@@ -0,0 +1,4 @@
+---
+rules:
+ node/no-unpublished-require: off
+ node/no-missing-require: off
diff --git a/src/entity.ts b/src/entity.ts
index ed289258a..eb9e7283c 100644
--- a/src/entity.ts
+++ b/src/entity.ts
@@ -184,7 +184,6 @@ entity.isDsGeoPoint = isDsGeoPoint;
* });
*/
class Key {
-
namespace: string;
id?: string;
name?: string;
@@ -294,7 +293,7 @@ function decodeValueProto(valueProto) {
}
case 'integerValue': {
- return parseInt(value, 10);
+ return Number(value);
}
case 'entityValue': {
@@ -306,13 +305,11 @@ function decodeValueProto(valueProto) {
}
case 'timestampValue': {
- const milliseconds = parseInt(value.nanos, 10) / 1e6;
- return new Date(parseInt(value.seconds, 10) * 1000 + milliseconds);
+ const milliseconds = Number(value.nanos) / 1e6;
+ return new Date(Number(value.seconds) * 1000 + milliseconds);
}
- default: {
- return value;
- }
+ default: { return value; }
}
}
@@ -455,6 +452,7 @@ function entityFromEntityProto(entityProto) {
const properties = entityProto.properties || {};
+ // tslint:disable-next-line forin
for (const property in properties) {
const value = properties[property];
entityObject[property] = entity.decodeValueProto(value);
@@ -502,10 +500,13 @@ function entityToEntityProto(entityObject) {
const entityProto = {
key: null,
- properties: Object.keys(properties).reduce((encoded, key) => {
- encoded[key] = entity.encodeValue(properties[key]);
- return encoded;
- }, {}),
+ properties: Object.keys(properties)
+ .reduce(
+ (encoded, key) => {
+ encoded[key] = entity.encodeValue(properties[key]);
+ return encoded;
+ },
+ {}),
};
if (excludeFromIndexes && excludeFromIndexes.length > 0) {
@@ -526,11 +527,9 @@ function entityToEntityProto(entityObject) {
if (!hasArrayPath && !hasEntityPath) {
// This is the path end node. Traversal ends here in either case.
if (entity.properties) {
- if (
- entity.properties[path] &&
- // array properties should be excluded with [] syntax:
- !entity.properties[path].arrayValue
- ) {
+ if (entity.properties[path] &&
+ // array properties should be excluded with [] syntax:
+ !entity.properties[path].arrayValue) {
// This is the property to exclude!
entity.properties[path].excludeFromIndexes = true;
}
@@ -561,11 +560,9 @@ function entityToEntityProto(entityObject) {
return;
}
- if (
- firstPathPartIsArray &&
- // check also if the property in question is actually an array value.
- entity.properties[firstPathPart].arrayValue
- ) {
+ if (firstPathPartIsArray &&
+ // check also if the property in question is actually an array value.
+ entity.properties[firstPathPart].arrayValue) {
const array = entity.properties[firstPathPart].arrayValue;
array.values.forEach(value => {
if (remainderPath === '') {
@@ -573,15 +570,15 @@ function entityToEntityProto(entityObject) {
// equivalent with excluding all its values
// (including entity values at their roots):
excludePathFromEntity(
- value,
- remainderPath // === ''
+ value,
+ remainderPath // === ''
);
} else {
// Path traversal continues at value.entityValue,
// if it is an entity, or must end at value.
excludePathFromEntity(
- value.entityValue || value,
- remainderPath // !== ''
+ value.entityValue || value,
+ remainderPath // !== ''
);
}
});
@@ -758,6 +755,7 @@ function keyToKeyProto(key) {
}
keyProto.path.unshift(pathElement);
+ // tslint:disable-next-line no-conditional-assignment
} while ((key = key.parent) && ++numKeysWalked);
return keyProto;
diff --git a/src/index.ts b/src/index.ts
index f8fd84aba..6acf4e6be 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -37,13 +37,13 @@
*/
import * as arrify from 'arrify';
-import {GrpcClient, GrpcClientOptions} from 'google-gax';
import {GoogleAuth} from 'google-auth-library';
+import {GrpcClient, GrpcClientOptions} from 'google-gax';
import * as is from 'is';
-import {DatastoreRequest} from './request';
import {entity} from './entity';
import {Query} from './query';
+import {DatastoreRequest} from './request';
import {Transaction} from './transaction';
const {grpc} = new GrpcClient({} as GrpcClientOptions);
@@ -88,7 +88,7 @@ const gapic = Object.freeze({
*
* Additionally, `DATASTORE_PROJECT_ID` is recognized. If you have this set,
* you don't need to provide a `projectId`.
- *-
+ *
*
* @class
* @extends {DatastoreRequest}
@@ -104,13 +104,14 @@ const gapic = Object.freeze({
* @example
Import the client library
* const {Datastore} = require('@google-cloud/datastore');
*
- * @example Create a client that uses Application Default Credentials (ADC):
- * const datastore = new Datastore();
+ * @example Create a client that uses Application Default Credentials (ADC):
+ * const datastore = new Datastore();
*
- * @example Create a client with explicit credentials:
- * const datastore = new Datastore({
- * projectId: 'your-project-id',
- * keyFilename: '/path/to/keyfile.json'
+ * @example Create a client with explicit credentials:
+ * const datastore = new Datastore({ projectId: 'your-project-id',
+ *   keyFilename: '/path/to/keyfile.json'
* });
*
* @example Retrieving Records
@@ -312,9 +313,11 @@ const gapic = Object.freeze({
* const customerId2 = 4993882;
* const customerKey1 = datastore.key(['Customer', customerId1]);
* const customerKey2 = datastore.key(['Customer', customerId2]);
- * const cookieKey1 = datastore.key(['Customer', customerId1, 'Cookie', 'cookie28839']); // child entity
- * const cookieKey2 = datastore.key(['Customer', customerId1, 'Cookie', 'cookie78984']); // child entity
- * const cookieKey3 = datastore.key(['Customer', customerId2, 'Cookie', 'cookie93911']); // child entity
+ * // Child entity keys:
+ * const cookieKey1 = datastore.key(['Customer', customerId1, 'Cookie', 'cookie28839']);
+ * const cookieKey2 = datastore.key(['Customer', customerId1, 'Cookie', 'cookie78984']);
+ * const cookieKey3 = datastore.key(['Customer', customerId2, 'Cookie', 'cookie93911']);
+ * // (each is a child of its Customer key)
*
* const entities = [];
*
@@ -400,7 +403,7 @@ class Datastore extends DatastoreRequest {
this.namespace = options.namespace;
const userProvidedProjectId =
- options.projectId || process.env.DATASTORE_PROJECT_ID;
+ options.projectId || process.env.DATASTORE_PROJECT_ID;
const defaultProjectId = '{{projectId}}';
/**
@@ -413,16 +416,15 @@ class Datastore extends DatastoreRequest {
this.determineBaseUrl_(options.apiEndpoint);
this.options = Object.assign(
- {
- libName: 'gccl',
- libVersion: require('../../package.json').version,
- scopes: gapic.v1.DatastoreClient.scopes,
- servicePath: this.baseUrl_,
- port: is.number(this.port_) ? this.port_ : 443,
- projectId: userProvidedProjectId,
- },
- options
- );
+ {
+ libName: 'gccl',
+ libVersion: require('../../package.json').version,
+ scopes: gapic.v1.DatastoreClient.scopes,
+ servicePath: this.baseUrl_,
+ port: is.number(this.port_) ? this.port_ : 443,
+ projectId: userProvidedProjectId,
+ },
+ options);
if (this.customEndpoint_) {
this.options.sslCreds = grpc.credentials.createInsecure();
}
@@ -647,7 +649,8 @@ class Datastore extends DatastoreRequest {
}
/**
- * Helper to create a Key object, scoped to the instance's namespace by default.
+ * Helper to create a Key object, scoped to the instance's namespace by
+ * default.
*
* You may also specify a configuration object to define a namespace and path.
*
@@ -664,15 +667,15 @@ class Datastore extends DatastoreRequest {
* const key = datastore.key('Company');
*
* @example
- * Create a complete key with a kind value of `Company` and id `123`.
- * const {Datastore} = require('@google-cloud/datastore');
+ * Create a complete key with a kind value of `Company` and id `123`.
+ * const {Datastore} = require('@google-cloud/datastore');
* const datastore = new Datastore();
* const key = datastore.key(['Company', 123]);
*
* @example
- * If the ID integer is outside the bounds of a JavaScript Number object, create an Int.
- * const {Datastore} = require('@google-cloud/datastore');
- * const datastore = new Datastore();
+ * If the ID integer is outside the bounds of a JavaScript Number object, create an Int.
+ * const {Datastore} = require('@google-cloud/datastore');
+ * const datastore = new Datastore();
* const key = datastore.key([
* 'Company',
* datastore.int('100000000000001234')
@@ -686,8 +689,8 @@ class Datastore extends DatastoreRequest {
* const key = datastore.key(['Company', 'Google']);
*
* @example
- * Create a complete key from a provided namespace and path.
- * const {Datastore} = require('@google-cloud/datastore');
+ * Create a complete key from a provided namespace and path.
+ * const {Datastore} = require('@google-cloud/datastore');
* const datastore = new Datastore();
* const key = datastore.key({
* namespace: 'My-NS',
@@ -695,12 +698,10 @@ class Datastore extends DatastoreRequest {
* });
*/
key(options) {
- options = is.object(options)
- ? options
- : {
- namespace: this.namespace,
- path: arrify(options),
- };
+ options = is.object(options) ? options : {
+ namespace: this.namespace,
+ path: arrify(options),
+ };
return new entity.Key(options);
}
@@ -742,9 +743,9 @@ class Datastore extends DatastoreRequest {
}
/**
- * Determine the appropriate endpoint to use for API requests. If not explicitly
- * defined, check for the "DATASTORE_EMULATOR_HOST" environment variable, used
- * to connect to a local Datastore server.
+ * Determine the appropriate endpoint to use for API requests. If not
+ * explicitly defined, check for the "DATASTORE_EMULATOR_HOST" environment
+ * variable, used to connect to a local Datastore server.
*
* @private
*
@@ -768,10 +769,9 @@ class Datastore extends DatastoreRequest {
this.port_ = Number(baseUrl.match(port)![1]);
}
- this.baseUrl_ = baseUrl
- .replace(leadingProtocol, '')
- .replace(port, '')
- .replace(trailingSlashes, '');
+ this.baseUrl_ = baseUrl.replace(leadingProtocol, '')
+ .replace(port, '')
+ .replace(trailingSlashes, '');
}
/**
@@ -813,19 +813,21 @@ export {Datastore};
* @module {Datastore} @google-cloud/datastore
* @alias nodejs-datastore
*
- * @example Install the client library with npm:
- * npm install --save @google-cloud/datastore
+ * @example Install the client library with npm:
+ * npm install --save @google-cloud/datastore
*
* @example Import the client library
* const {Datastore} = require('@google-cloud/datastore');
*
- * @example Create a client that uses Application Default Credentials (ADC):
- * const datastore = new Datastore();
+ * @example Create a client that uses Application Default Credentials (ADC):
+ * const datastore = new Datastore();
*
- * @example Create a client with explicit credentials:
- * const datastore = new Datastore({
- * projectId: 'your-project-id',
- * keyFilename: '/path/to/keyfile.json'
+ * @example Create a client with explicit credentials:
+ * const datastore = new Datastore({ projectId: 'your-project-id',
+ *   keyFilename: '/path/to/keyfile.json'
* });
*
* @example include:samples/quickstart.js
diff --git a/src/query.ts b/src/query.ts
index 2079b3076..27d2f136e 100644
--- a/src/query.ts
+++ b/src/query.ts
@@ -384,7 +384,8 @@ class Query {
* });
*
* //-
- * // A keys-only query returns just the keys of the result entities instead of
+ * // A keys-only query returns just the keys of the result entities
+ * // instead of
* // the entities themselves, at lower latency and cost.
* //-
* query.select('__key__');
diff --git a/src/request.ts b/src/request.ts
index 6d5f89806..42a8a971c 100644
--- a/src/request.ts
+++ b/src/request.ts
@@ -14,9 +14,10 @@
* limitations under the License.
*/
-import * as arrify from 'arrify';
import {replaceProjectIdToken} from '@google-cloud/projectify';
import {promisifyAll} from '@google-cloud/promisify';
+import * as arrify from 'arrify';
+
const concat = require('concat-stream');
import * as extend from 'extend';
import * as is from 'is';
@@ -31,7 +32,7 @@ const gapic = Object.freeze({
import {entity} from './entity';
import {Query} from './query';
-import { Datastore } from '.';
+import {Datastore} from '.';
/**
* A map of read consistency values to proto codes.
@@ -54,7 +55,6 @@ const CONSISTENCY_PROTO_CODE = {
* @class
*/
class DatastoreRequest {
-
id;
requests_;
requestCallbacks_;
@@ -143,9 +143,12 @@ class DatastoreRequest {
* });
*
* //-
- * // You may prefer to create IDs from a non-default namespace by providing an
- * // incomplete key with a namespace. Similar to the previous example, the call
- * // below will create 100 new IDs, but from the Company kind that exists under
+ * // You may prefer to create IDs from a
+ * // non-default namespace by providing an
+ * // incomplete key with a namespace. Similar
+ * // to the previous example, the call below
+ * // will create 100 new IDs, but from the
+ * // Company kind that exists under
* // the "ns-test" namespace.
* //-
* const incompleteKey = datastore.key({
@@ -177,23 +180,23 @@ class DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'allocateIds',
- reqOpts: {
- keys: new Array(options.allocations).fill(entity.keyToKeyProto(key)),
+ {
+ client: 'DatastoreClient',
+ method: 'allocateIds',
+ reqOpts: {
+ keys:
+ new Array(options.allocations).fill(entity.keyToKeyProto(key)),
+ },
+ gaxOpts: options.gaxOptions,
},
- gaxOpts: options.gaxOptions,
- },
- (err, resp) => {
- if (err) {
- callback(err, null, resp);
- return;
- }
- const keys = arrify(resp.keys).map(entity.keyFromKeyProto);
- callback(null, keys, resp);
- }
- );
+ (err, resp) => {
+ if (err) {
+ callback(err, null, resp);
+ return;
+ }
+ const keys = arrify(resp.keys).map(entity.keyFromKeyProto);
+ callback(null, keys, resp);
+ });
}
/**
@@ -242,37 +245,36 @@ class DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'lookup',
- reqOpts,
- gaxOpts: options.gaxOptions,
- },
- (err, resp) => {
- if (err) {
- stream.destroy(err);
- return;
- }
-
- const entities = entity.formatArray(resp.found);
- const nextKeys = (resp.deferred || [])
- .map(entity.keyFromKeyProto)
- .map(entity.keyToKeyProto);
-
- split(entities, stream).then(streamEnded => {
- if (streamEnded) {
+ {
+ client: 'DatastoreClient',
+ method: 'lookup',
+ reqOpts,
+ gaxOpts: options.gaxOptions,
+ },
+ (err, resp) => {
+ if (err) {
+ stream.destroy(err);
return;
}
- if (nextKeys.length > 0) {
- makeRequest(nextKeys);
- return;
- }
+ const entities = entity.formatArray(resp.found);
+ const nextKeys = (resp.deferred || [])
+ .map(entity.keyFromKeyProto)
+ .map(entity.keyToKeyProto);
+
+ split(entities, stream).then(streamEnded => {
+ if (streamEnded) {
+ return;
+ }
- stream.push(null);
+ if (nextKeys.length > 0) {
+ makeRequest(nextKeys);
+ return;
+ }
+
+ stream.push(null);
+ });
});
- }
- );
};
const stream = streamEvents(through.obj());
@@ -352,14 +354,13 @@ class DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'commit',
- reqOpts,
- gaxOpts: gaxOptions,
- },
- callback
- );
+ {
+ client: 'DatastoreClient',
+ method: 'commit',
+ reqOpts,
+ gaxOpts: gaxOptions,
+ },
+ callback);
}
/**
@@ -456,13 +457,11 @@ class DatastoreRequest {
options = options || {};
this.createReadStream(keys, options)
- .on('error', callback)
- .pipe(
- concat(results => {
+ .on('error', callback)
+ .pipe(concat(results => {
const isSingleLookup = !is.array(keys);
callback(null, isSingleLookup ? results[0] : results);
- })
- );
+ }));
}
/**
@@ -480,26 +479,25 @@ class DatastoreRequest {
* @param {object} callback.apiResponse The full API response.
*/
insert(entities, callback) {
- entities = arrify(entities)
- .map(DatastoreRequest.prepareEntityObject_)
- .map(x => {
- x.method = 'insert';
- return x;
- });
+ entities =
+ arrify(entities).map(DatastoreRequest.prepareEntityObject_).map(x => {
+ x.method = 'insert';
+ return x;
+ });
this.save(entities, callback);
}
/**
* Datastore allows you to query entities by kind, filter them by property
- * filters, and sort them by a property name. Projection and pagination are also
- * supported.
+ * filters, and sort them by a property name. Projection and pagination are
+ * also supported.
*
- * The query is run, and the results are returned as the second argument to your
- * callback. A third argument may also exist, which is a query object that uses
- * the end cursor from the previous query as the starting cursor for the next
- * query. You can pass that object back to this method to see if more results
- * exist.
+ * The query is run, and the results are returned as the second argument to
+ * your callback. A third argument may also exist, which is a query object
+ * that uses the end cursor from the previous query as the starting cursor for
+ * the next query. You can pass that object back to this method to see if more
+ * results exist.
*
* @param {Query} query Query object.
* @param {object} [options] Optional configuration.
@@ -526,7 +524,8 @@ class DatastoreRequest {
*
* @example
* //-
- * // Where you see `transaction`, assume this is the context that's relevant to
+ * // Where you see `transaction`, assume this is the context that's
+ * // relevant to
* // your use, whether that be a Datastore or a Transaction object.
* //-
* const query = datastore.createQuery('Lion');
@@ -562,7 +561,8 @@ class DatastoreRequest {
* });
*
* //-
- * // A keys-only query returns just the keys of the result entities instead of
+ * // A keys-only query returns just the keys of the result entities
+ * // instead of
* // the entities themselves, at lower latency and cost.
* //-
* const keysOnlyQuery = datastore.createQuery('Lion').select('__key__');
@@ -591,15 +591,14 @@ class DatastoreRequest {
let info;
this.runQueryStream(query, options)
- .on('error', callback)
- .on('info', info_ => {
- info = info_;
- })
- .pipe(
- concat(results => {
+ .on('error', callback)
+ .on('info',
+ info_ => {
+ info = info_;
+ })
+ .pipe(concat(results => {
callback(null, results, info);
- })
- );
+ }));
}
/**
@@ -657,14 +656,13 @@ class DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'runQuery',
- reqOpts,
- gaxOpts: options.gaxOptions,
- },
- onResultSet
- );
+ {
+ client: 'DatastoreClient',
+ method: 'runQuery',
+ reqOpts,
+ gaxOpts: options.gaxOptions,
+ },
+ onResultSet);
};
function onResultSet(err, resp) {
@@ -726,15 +724,15 @@ class DatastoreRequest {
* associated object is inserted and the original Key object is updated to
* contain the generated ID.
*
- * This method will determine the correct Datastore method to execute (`upsert`,
- * `insert`, or `update`) by using the key(s) provided. For example, if you
- * provide an incomplete key (one without an ID), the request will create a new
- * entity and have its ID automatically assigned. If you provide a complete key,
- * the entity will be updated with the data specified.
+ * This method will determine the correct Datastore method to execute
+ * (`upsert`, `insert`, or `update`) by using the key(s) provided. For
+ * example, if you provide an incomplete key (one without an ID), the request
+ * will create a new entity and have its ID automatically assigned. If you
+ * provide a complete key, the entity will be updated with the data specified.
*
* By default, all properties are indexed. To prevent a property from being
- * included in *all* indexes, you must supply an `excludeFromIndexes` array. See
- * below for an example.
+ * included in *all* indexes, you must supply an `excludeFromIndexes` array.
+ * See below for an example.
*
* @borrows {@link Transaction#save} as save
*
@@ -743,8 +741,8 @@ class DatastoreRequest {
* @param {object|object[]} entities Datastore key object(s).
* @param {Key} entities.key Datastore key object.
* @param {string[]} [entities.excludeFromIndexes] Exclude properties from
- * indexing using a simple JSON path notation. See the example below to see
- * how to target properties at different levels of nesting within your
+ * indexing using a simple JSON path notation. See the example below to
+ * see how to target properties at different levels of nesting within your
* @param {string} [entities.method] Explicit method to use, either 'insert',
* 'update', or 'upsert'.
* @param {object} entities.data Data to save with the provided key.
@@ -759,7 +757,8 @@ class DatastoreRequest {
* //-
* // Save a single entity.
* //
- * // Notice that we are providing an incomplete key. After saving, the original
+ * // Notice that we are providing an incomplete key. After saving,
+ * // the original
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
@@ -779,8 +778,10 @@ class DatastoreRequest {
* //-
* // Save a single entity using a provided name instead of auto-generated ID.
* //
- * // Here we are providing a key with name instead of an ID. After saving, the
- * // original Key object used to save will be updated to contain the path with
+ * // Here we are providing a key with name instead of an ID.
+ * // After saving, the original Key object used to save will
+ * // be updated to contain the path
+ * // with
* // the name instead of a generated ID.
* //-
* const key = datastore.key(['Company', 'donutshack']);
@@ -826,7 +827,8 @@ class DatastoreRequest {
* // Save different types of data, including ints, doubles, dates, booleans,
* // blobs, and lists.
* //
- * // Notice that we are providing an incomplete key. After saving, the original
+ * // Notice that we are providing an incomplete key. After saving,
+ * // the original
* // Key object used to save will be updated to contain the path with its
* // generated ID.
* //-
@@ -944,62 +946,60 @@ class DatastoreRequest {
// Iterate over the entity objects, build a proto from all keys and values,
// then place in the correct mutation array (insert, update, etc).
- entities
- .map(DatastoreRequest.prepareEntityObject_)
- .forEach((entityObject, index) => {
- const mutation = {};
- // tslint:disable-next-line no-any
- let entityProto: any = {};
- let method = 'upsert';
-
- if (entityObject.method) {
- if (methods[entityObject.method]) {
- method = entityObject.method;
- } else {
- throw new Error(
- 'Method ' + entityObject.method + ' not recognized.'
- );
+ entities.map(DatastoreRequest.prepareEntityObject_)
+ .forEach((entityObject, index) => {
+ const mutation = {};
+ // tslint:disable-next-line no-any
+ let entityProto: any = {};
+ let method = 'upsert';
+
+ if (entityObject.method) {
+ if (methods[entityObject.method]) {
+ method = entityObject.method;
+ } else {
+ throw new Error(
+ 'Method ' + entityObject.method + ' not recognized.');
+ }
}
- }
- if (!entity.isKeyComplete(entityObject.key)) {
- insertIndexes[index] = true;
- }
+ if (!entity.isKeyComplete(entityObject.key)) {
+ insertIndexes[index] = true;
+ }
- // @TODO remove in @google-cloud/datastore@2.0.0
- // This was replaced with a more efficient mechanism in the top-level
- // `excludeFromIndexes` option.
- if (is.array(entityObject.data)) {
- entityProto.properties = entityObject.data.reduce((acc, data) => {
- const value = entity.encodeValue(data.value);
-
- if (is.boolean(data.excludeFromIndexes)) {
- const excluded = data.excludeFromIndexes;
- let values = value.arrayValue && value.arrayValue.values;
-
- if (values) {
- values = values.map(x => {
- x.excludeFromIndexes = excluded;
- return x;
- });
- } else {
- value.excludeFromIndexes = data.excludeFromIndexes;
+ // @TODO remove in @google-cloud/datastore@2.0.0
+ // This was replaced with a more efficient mechanism in the top-level
+ // `excludeFromIndexes` option.
+ if (is.array(entityObject.data)) {
+ entityProto.properties = entityObject.data.reduce((acc, data) => {
+ const value = entity.encodeValue(data.value);
+
+ if (is.boolean(data.excludeFromIndexes)) {
+ const excluded = data.excludeFromIndexes;
+ let values = value.arrayValue && value.arrayValue.values;
+
+ if (values) {
+ values = values.map(x => {
+ x.excludeFromIndexes = excluded;
+ return x;
+ });
+ } else {
+ value.excludeFromIndexes = data.excludeFromIndexes;
+ }
}
- }
- acc[data.name] = value;
+ acc[data.name] = value;
- return acc;
- }, {});
- } else {
- entityProto = entity.entityToEntityProto(entityObject);
- }
+ return acc;
+ }, {});
+ } else {
+ entityProto = entity.entityToEntityProto(entityObject);
+ }
- entityProto.key = entity.keyToKeyProto(entityObject.key);
+ entityProto.key = entity.keyToKeyProto(entityObject.key);
- mutation[method] = entityProto;
- mutations.push(mutation);
- });
+ mutation[method] = entityProto;
+ mutations.push(mutation);
+ });
const reqOpts = {
mutations,
@@ -1032,14 +1032,13 @@ class DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'commit',
- reqOpts,
- gaxOpts: gaxOptions,
- },
- onCommit
- );
+ {
+ client: 'DatastoreClient',
+ method: 'commit',
+ reqOpts,
+ gaxOpts: gaxOptions,
+ },
+ onCommit);
}
/**
@@ -1057,12 +1056,11 @@ class DatastoreRequest {
* @param {object} callback.apiResponse The full API response.
*/
update(entities, callback) {
- entities = arrify(entities)
- .map(DatastoreRequest.prepareEntityObject_)
- .map(x => {
- x.method = 'update';
- return x;
- });
+ entities =
+ arrify(entities).map(DatastoreRequest.prepareEntityObject_).map(x => {
+ x.method = 'update';
+ return x;
+ });
this.save(entities, callback);
}
@@ -1082,19 +1080,18 @@ class DatastoreRequest {
* @param {object} callback.apiResponse The full API response.
*/
upsert(entities, callback) {
- entities = arrify(entities)
- .map(DatastoreRequest.prepareEntityObject_)
- .map(x => {
- x.method = 'upsert';
- return x;
- });
+ entities =
+ arrify(entities).map(DatastoreRequest.prepareEntityObject_).map(x => {
+ x.method = 'upsert';
+ return x;
+ });
this.save(entities, callback);
}
/**
- * Make a request to the API endpoint. Properties to indicate a transactional or
- * non-transactional operation are added automatically.
+ * Make a request to the API endpoint. Properties to indicate a transactional
+ * or non-transactional operation are added automatically.
*
* @param {object} config Configuration object.
* @param {object} config.gaxOpts GAX options.
@@ -1132,8 +1129,7 @@ class DatastoreRequest {
if (isTransaction && (method === 'lookup' || method === 'runQuery')) {
if (reqOpts.readOptions && reqOpts.readOptions.readConsistency) {
throw new Error(
- 'Read consistency cannot be specified in a transaction.'
- );
+ 'Read consistency cannot be specified in a transaction.');
}
reqOpts.readOptions = {
@@ -1151,9 +1147,7 @@ class DatastoreRequest {
if (!datastore.clients_.has(clientName)) {
datastore.clients_.set(
- clientName,
- new gapic.v1[clientName](datastore.options)
- );
+ clientName, new gapic.v1[clientName](datastore.options));
}
const gaxClient = datastore.clients_.get(clientName);
reqOpts = replaceProjectIdToken(reqOpts, projectId!);
diff --git a/src/transaction.ts b/src/transaction.ts
index ab34c4991..9dab1b610 100644
--- a/src/transaction.ts
+++ b/src/transaction.ts
@@ -14,8 +14,8 @@
* limitations under the License.
*/
-import * as arrify from 'arrify';
import {promisifyAll} from '@google-cloud/promisify';
+import * as arrify from 'arrify';
import * as is from 'is';
import {entity} from './entity';
@@ -83,26 +83,28 @@ class Transaction extends DatastoreRequest {
}
/*! Developer Documentation
- *
- * Below, we override two methods that we inherit from DatastoreRequest:
- * `delete` and `save`. This is done because:
- *
- * A) the documentation needs to be different for a transactional save, and
- * B) we build up a "modifiedEntities_" array on this object, used to build
- * the final commit request with.
- */
+ *
+ * Below, we override two methods that we inherit from DatastoreRequest:
+ * `delete` and `save`. This is done because:
+ *
+ * A) the documentation needs to be different for a transactional save, and
+ * B) we build up a "modifiedEntities_" array on this object, used to build
+ * the final commit request with.
+ */
/**
- * Commit the remote transaction and finalize the current transaction instance.
+ * Commit the remote transaction and finalize the current transaction
+ * instance.
*
- * If the commit request fails, we will automatically rollback the transaction.
+ * If the commit request fails, we will automatically rollback the
+ * transaction.
*
* @param {object} [gaxOptions] Request configuration options, outlined here:
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions.
* @param {function} callback The callback function.
* @param {?error} callback.err An error returned while making this request.
- * If the commit fails, we automatically try to rollback the transaction (see
- * {module:datastore/transaction#rollback}).
+ * If the commit fails, we automatically try to rollback the transaction
+ * (see {module:datastore/transaction#rollback}).
* @param {object} callback.apiResponse The full API response.
*
* @example
@@ -139,100 +141,102 @@ class Transaction extends DatastoreRequest {
const keys = {};
this.modifiedEntities_
- // Reverse the order of the queue to respect the "last queued request wins"
- // behavior.
- .reverse()
- // Limit the operations we're going to send through to only the most
- // recently queued operations. E.g., if a user tries to save with the same
- // key they just asked to be deleted, the delete request will be ignored,
- // giving preference to the save operation.
- .filter(modifiedEntity => {
- const key = modifiedEntity.entity.key;
-
- if (!entity.isKeyComplete(key)) {
- return true;
- }
-
- const stringifiedKey = JSON.stringify(modifiedEntity.entity.key);
-
- if (!keys[stringifiedKey]) {
- keys[stringifiedKey] = true;
- return true;
- }
-
- return false;
- })
- // Group entities together by method: `save` mutations, then `delete`. Note:
- // `save` mutations being first is required to maintain order when assigning
- // IDs to incomplete keys.
- .sort((a, b) => {
- return a.method < b.method ? 1 : a.method > b.method ? -1 : 0;
- })
- // Group arguments together so that we only make one call to each method.
- // This is important for `DatastoreRequest.save`, especially, as that method
- // handles assigning auto-generated IDs to the original keys passed in. When
- // we eventually execute the `save` method's API callback, having all the
- // keys together is necessary to maintain order.
- .reduce((acc, entityObject) => {
- const lastEntityObject = acc[acc.length - 1];
- const sameMethod =
- lastEntityObject && entityObject.method === lastEntityObject.method;
-
- if (!lastEntityObject || !sameMethod) {
- acc.push(entityObject);
- } else {
- lastEntityObject.args = lastEntityObject.args.concat(
- entityObject.args
- );
- }
-
- return acc;
- }, [])
- // Call each of the mutational methods (DatastoreRequest[save,delete]) to
- // build up a `req` array on this instance. This will also build up a
- // `callbacks` array, that is the same callback that would run if we were
- // using `save` and `delete` outside of a transaction, to process the
- // response from the API.
- .forEach(modifiedEntity => {
- const method = modifiedEntity.method;
- const args = modifiedEntity.args.reverse();
- DatastoreRequest.prototype[method].call(this, args, () => {});
- });
+ // Reverse the order of the queue to respect the "last queued request
+ // wins" behavior.
+ .reverse()
+ // Limit the operations we're going to send through to only the most
+ // recently queued operations. E.g., if a user tries to save with the
+ // same key they just asked to be deleted, the delete request will be
+ // ignored, giving preference to the save operation.
+ .filter(modifiedEntity => {
+ const key = modifiedEntity.entity.key;
+
+ if (!entity.isKeyComplete(key)) {
+ return true;
+ }
+
+ const stringifiedKey = JSON.stringify(modifiedEntity.entity.key);
+
+ if (!keys[stringifiedKey]) {
+ keys[stringifiedKey] = true;
+ return true;
+ }
+
+ return false;
+ })
+ // Group entities together by method: `save` mutations, then `delete`.
+ // Note: `save` mutations being first is required to maintain order when
+ // assigning IDs to incomplete keys.
+ .sort((a, b) => {
+ return a.method < b.method ? 1 : a.method > b.method ? -1 : 0;
+ })
+ // Group arguments together so that we only make one call to each
+ // method. This is important for `DatastoreRequest.save`, especially, as
+ // that method handles assigning auto-generated IDs to the original keys
+ // passed in. When we eventually execute the `save` method's API
+ // callback, having all the keys together is necessary to maintain
+ // order.
+ .reduce(
+ (acc, entityObject) => {
+ const lastEntityObject = acc[acc.length - 1];
+ const sameMethod = lastEntityObject &&
+ entityObject.method === lastEntityObject.method;
+
+ if (!lastEntityObject || !sameMethod) {
+ acc.push(entityObject);
+ } else {
+ lastEntityObject.args =
+ lastEntityObject.args.concat(entityObject.args);
+ }
+
+ return acc;
+ },
+ [])
+ // Call each of the mutational methods (DatastoreRequest[save,delete])
+ // to build up a `req` array on this instance. This will also build up a
+ // `callbacks` array, that is the same callback that would run if we
+ // were using `save` and `delete` outside of a transaction, to process
+ // the response from the API.
+ .forEach(modifiedEntity => {
+ const method = modifiedEntity.method;
+ const args = modifiedEntity.args.reverse();
+ DatastoreRequest.prototype[method].call(this, args, () => {});
+ });
// Take the `req` array built previously, and merge them into one request to
// send as the final transactional commit.
const reqOpts = {
- mutations: this.requests_.map(x => x.mutations).reduce((a, b) => a.concat(b), []),
+ mutations: this.requests_.map(x => x.mutations)
+ .reduce((a, b) => a.concat(b), []),
};
this.request_(
- {
- client: 'DatastoreClient',
- method: 'commit',
- reqOpts,
- gaxOpts: gaxOptions,
- },
- (err, resp) => {
- if (err) {
- // Rollback automatically for the user.
- this.rollback(() => {
- // Provide the error & API response from the failed commit to the user.
- // Even a failed rollback should be transparent.
- // RE: https://github.com/GoogleCloudPlatform/google-cloud-node/pull/1369#discussion_r66833976
- callback(err, resp);
+ {
+ client: 'DatastoreClient',
+ method: 'commit',
+ reqOpts,
+ gaxOpts: gaxOptions,
+ },
+ (err, resp) => {
+ if (err) {
+ // Rollback automatically for the user.
+ this.rollback(() => {
+ // Provide the error & API response from the failed commit to the
+ // user. Even a failed rollback should be transparent. RE:
+ // https://github.com/GoogleCloudPlatform/google-cloud-node/pull/1369#discussion_r66833976
+ callback(err, resp);
+ });
+ return;
+ }
+
+ // The `callbacks` array was built previously. These are the callbacks
+ // that handle the API response normally when using the
+ // DatastoreRequest.save and .delete methods.
+ this.requestCallbacks_.forEach(cb => {
+ cb(null, resp);
});
- return;
- }
-
- // The `callbacks` array was built previously. These are the callbacks that
- // handle the API response normally when using the DatastoreRequest.save and
- // .delete methods.
- this.requestCallbacks_.forEach(cb => {
- cb(null, resp);
+ callback(null, resp);
});
- callback(null, resp);
- }
- );
}
/**
@@ -322,7 +326,8 @@ class Transaction extends DatastoreRequest {
}
/**
- * Reverse a transaction remotely and finalize the current transaction instance.
+ * Reverse a transaction remotely and finalize the current transaction
+ * instance.
*
* @param {object} [gaxOptions] Request configuration options, outlined here:
* https://googleapis.github.io/gax-nodejs/global.html#CallOptions.
@@ -363,21 +368,20 @@ class Transaction extends DatastoreRequest {
callback = callback || (() => {});
this.request_(
- {
- client: 'DatastoreClient',
- method: 'rollback',
- gaxOpts: gaxOptions,
- },
- (err, resp) => {
- this.skipCommit = true;
- callback(err || null, resp);
- }
- );
+ {
+ client: 'DatastoreClient',
+ method: 'rollback',
+ gaxOpts: gaxOptions,
+ },
+ (err, resp) => {
+ this.skipCommit = true;
+ callback(err || null, resp);
+ });
}
/**
- * Begin a remote transaction. In the callback provided, run your transactional
- * commands.
+ * Begin a remote transaction. In the callback provided, run your
+ * transactional commands.
*
* @param {object} [options] Configuration object.
* @param {object} [options.gaxOptions] Request configuration options, outlined
@@ -454,43 +458,42 @@ class Transaction extends DatastoreRequest {
}
this.request_(
- {
- client: 'DatastoreClient',
- method: 'beginTransaction',
- reqOpts,
- gaxOpts: options.gaxOptions,
- },
- (err, resp) => {
- if (err) {
- callback(err, null, resp);
- return;
- }
- this.id = resp.transaction;
- callback(null, this, resp);
- }
- );
+ {
+ client: 'DatastoreClient',
+ method: 'beginTransaction',
+ reqOpts,
+ gaxOpts: options.gaxOptions,
+ },
+ (err, resp) => {
+ if (err) {
+ callback(err, null, resp);
+ return;
+ }
+ this.id = resp.transaction;
+ callback(null, this, resp);
+ });
}
/**
- * Insert or update the specified object(s) in the current transaction. If a key
- * is incomplete, its associated object is inserted and the original Key object
- * is updated to contain the generated ID.
+ * Insert or update the specified object(s) in the current transaction. If a
+ * key is incomplete, its associated object is inserted and the original Key
+ * object is updated to contain the generated ID.
*
- * This method will determine the correct Datastore method to execute (`upsert`,
- * `insert`, or `update`) by using the key(s) provided. For example, if you
- * provide an incomplete key (one without an ID), the request will create a new
- * entity and have its ID automatically assigned. If you provide a complete key,
- * the entity will be updated with the data specified.
+ * This method will determine the correct Datastore method to execute
+ * (`upsert`, `insert`, or `update`) by using the key(s) provided. For
+ * example, if you provide an incomplete key (one without an ID), the request
+ * will create a new entity and have its ID automatically assigned. If you
+ * provide a complete key, the entity will be updated with the data specified.
*
* By default, all properties are indexed. To prevent a property from being
- * included in *all* indexes, you must supply an `excludeFromIndexes` array. See
- * below for an example.
+ * included in *all* indexes, you must supply an `excludeFromIndexes` array.
+ * See below for an example.
*
* @param {object|object[]} entities Datastore key object(s).
* @param {Key} entities.key Datastore key object.
* @param {string[]} [entities.excludeFromIndexes] Exclude properties from
- * indexing using a simple JSON path notation. See the example below to see
- * how to target properties at different levels of nesting within your
+ * indexing using a simple JSON path notation. See the example below to
+ * see how to target properties at different levels of nesting within your
* entity.
* @param {object} entities.data Data to save with the provided key.
*
diff --git a/src/v1/datastore_client.js b/src/v1/datastore_client.js
index e5d2319cd..c76ba04d3 100644
--- a/src/v1/datastore_client.js
+++ b/src/v1/datastore_client.js
@@ -613,5 +613,4 @@ class DatastoreClient {
}
}
-
module.exports = DatastoreClient;
diff --git a/system-test/datastore.ts b/system-test/datastore.ts
index eaca9431d..b54029533 100644
--- a/system-test/datastore.ts
+++ b/system-test/datastore.ts
@@ -23,7 +23,7 @@ describe('Datastore', () => {
const testKinds: Array<{}> = [];
const datastore = new Datastore();
// Override the Key method so we can track what keys are created during the
- // tests. They are then deleted in the `after` hook.
+ // tests. They are then deleted in the `after` hook.
const key = datastore.key;
datastore.key = function(options) {
const keyObject = key.call(this, options);
@@ -121,33 +121,32 @@ describe('Datastore', () => {
};
datastore.save(
- {
- key: postKey,
- data,
- excludeFromIndexes: [
- 'longString',
- 'longStringArray[]',
- 'metadata.obj.longString',
- 'metadata.obj.longStringArray[].longString',
- 'metadata.obj.longStringArray[].nestedLongStringArray[].longString',
- 'metadata.longString',
- 'metadata.longStringArray[].longString',
- 'metadata.longStringArray[].nestedLongStringArray[].longString',
- ],
- },
- err => {
- assert.ifError(err);
-
- datastore.get(postKey, (err, entity) => {
+ {
+ key: postKey,
+ data,
+ excludeFromIndexes: [
+ 'longString',
+ 'longStringArray[]',
+ 'metadata.obj.longString',
+ 'metadata.obj.longStringArray[].longString',
+ 'metadata.obj.longStringArray[].nestedLongStringArray[].longString',
+ 'metadata.longString',
+ 'metadata.longStringArray[].longString',
+ 'metadata.longStringArray[].nestedLongStringArray[].longString',
+ ],
+ },
+ err => {
assert.ifError(err);
- assert.deepStrictEqual(entity, data);
- assert.deepStrictEqual(entity[datastore.KEY], postKey);
+ datastore.get(postKey, (err, entity) => {
+ assert.ifError(err);
+
+ assert.deepStrictEqual(entity, data);
+ assert.deepStrictEqual(entity[datastore.KEY], postKey);
- datastore.delete(postKey, done);
+ datastore.delete(postKey, done);
+ });
});
- }
- );
});
it('should save/get/delete with a key name', done => {
@@ -255,22 +254,21 @@ describe('Datastore', () => {
]);
datastore.save(
- {
- key: longIdKey,
- data: {
- test: true,
+ {
+ key: longIdKey,
+ data: {
+ test: true,
+ },
},
- },
- err => {
- assert.ifError(err);
-
- datastore.get(longIdKey, (err, entity) => {
+ err => {
assert.ifError(err);
- assert.strictEqual(entity.test, true);
- done();
+
+ datastore.get(longIdKey, (err, entity) => {
+ assert.ifError(err);
+ assert.strictEqual(entity.test, true);
+ done();
+ });
});
- }
- );
});
it('should fail explicitly set second insert on save', done => {
@@ -283,20 +281,19 @@ describe('Datastore', () => {
assert(postKey.id);
datastore.save(
- {
- key: postKey,
- method: 'insert',
- data: post,
- },
- err => {
- assert.notStrictEqual(err, null); // should fail insert
- datastore.get(postKey, (err, entity) => {
- assert.ifError(err);
- assert.deepStrictEqual(entity, post);
- datastore.delete(postKey, done);
+ {
+ key: postKey,
+ method: 'insert',
+ data: post,
+ },
+ err => {
+ assert.notStrictEqual(err, null); // should fail insert
+ datastore.get(postKey, (err, entity) => {
+ assert.ifError(err);
+ assert.deepStrictEqual(entity, post);
+ datastore.delete(postKey, done);
+ });
});
- }
- );
});
});
@@ -304,16 +301,15 @@ describe('Datastore', () => {
const postKey = datastore.key('Post');
datastore.save(
- {
- key: postKey,
- method: 'update',
- data: post,
- },
- err => {
- assert.notStrictEqual(err, null);
- done();
- }
- );
+ {
+ key: postKey,
+ method: 'update',
+ data: post,
+ },
+ err => {
+ assert.notStrictEqual(err, null);
+ done();
+ });
});
it('should save/get/delete multiple entities at once', done => {
@@ -331,16 +327,14 @@ describe('Datastore', () => {
const key2 = datastore.key('Post');
datastore.save(
- [{key: key1, data: post}, {key: key2, data: post2}],
- err => {
- assert.ifError(err);
- datastore.get([key1, key2], (err, entities) => {
+ [{key: key1, data: post}, {key: key2, data: post2}], err => {
assert.ifError(err);
- assert.strictEqual(entities.length, 2);
- datastore.delete([key1, key2], done);
+ datastore.get([key1, key2], (err, entities) => {
+ assert.ifError(err);
+ assert.strictEqual(entities.length, 2);
+ datastore.delete([key1, key2], done);
+ });
});
- }
- );
});
it('should get multiple entities in a stream', done => {
@@ -348,53 +342,49 @@ describe('Datastore', () => {
const key2 = datastore.key('Post');
datastore.save(
- [{key: key1, data: post}, {key: key2, data: post}],
- err => {
- assert.ifError(err);
-
- let numEntitiesEmitted = 0;
+ [{key: key1, data: post}, {key: key2, data: post}], err => {
+ assert.ifError(err);
- datastore
- .createReadStream([key1, key2])
- .on('error', done)
- .on('data', () => {
- numEntitiesEmitted++;
- })
- .on('end', () => {
- assert.strictEqual(numEntitiesEmitted, 2);
- datastore.delete([key1, key2], done);
- });
- }
- );
+ let numEntitiesEmitted = 0;
+
+ datastore.createReadStream([key1, key2])
+ .on('error', done)
+ .on('data',
+ () => {
+ numEntitiesEmitted++;
+ })
+ .on('end', () => {
+ assert.strictEqual(numEntitiesEmitted, 2);
+ datastore.delete([key1, key2], done);
+ });
+ });
});
it('should save keys as a part of entity and query by key', done => {
const personKey = datastore.key(['People', 'US', 'Person', 'name']);
datastore.save(
- {
- key: personKey,
- data: {
- fullName: 'Full name',
- linkedTo: personKey, // himself
+ {
+ key: personKey,
+ data: {
+ fullName: 'Full name',
+ linkedTo: personKey, // himself
+ },
},
- },
- err => {
- assert.ifError(err);
+ err => {
+ assert.ifError(err);
- const query = datastore
- .createQuery('Person')
- .hasAncestor(datastore.key(['People', 'US']))
- .filter('linkedTo', personKey);
+ const query = datastore.createQuery('Person')
+ .hasAncestor(datastore.key(['People', 'US']))
+ .filter('linkedTo', personKey);
- datastore.runQuery(query, (err, results) => {
- assert.ifError(err);
- assert.strictEqual(results[0].fullName, 'Full name');
- assert.deepStrictEqual(results[0].linkedTo, personKey);
- datastore.delete(personKey, done);
+ datastore.runQuery(query, (err, results) => {
+ assert.ifError(err);
+ assert.strictEqual(results[0].fullName, 'Full name');
+ assert.deepStrictEqual(results[0].linkedTo, personKey);
+ datastore.delete(personKey, done);
+ });
});
- }
- );
});
describe('entity types', () => {
@@ -405,22 +395,21 @@ describe('Datastore', () => {
const key = datastore.key('Person');
datastore.save(
- {
- key,
- data: {
- year: integerType,
+ {
+ key,
+ data: {
+ year: integerType,
+ },
},
- },
- err => {
- assert.ifError(err);
-
- datastore.get(key, (err, entity) => {
+ err => {
assert.ifError(err);
- assert.strictEqual(entity.year, integerValue);
- done();
+
+ datastore.get(key, (err, entity) => {
+ assert.ifError(err);
+ assert.strictEqual(entity.year, integerValue);
+ done();
+ });
});
- }
- );
});
it('should save and decode a double', done => {
@@ -430,22 +419,21 @@ describe('Datastore', () => {
const key = datastore.key('Person');
datastore.save(
- {
- key,
- data: {
- nines: doubleType,
+ {
+ key,
+ data: {
+ nines: doubleType,
+ },
},
- },
- err => {
- assert.ifError(err);
-
- datastore.get(key, (err, entity) => {
+ err => {
assert.ifError(err);
- assert.strictEqual(entity.nines, doubleValue);
- done();
+
+ datastore.get(key, (err, entity) => {
+ assert.ifError(err);
+ assert.strictEqual(entity.nines, doubleValue);
+ done();
+ });
});
- }
- );
});
it('should save and decode a geo point', done => {
@@ -458,22 +446,21 @@ describe('Datastore', () => {
const key = datastore.key('Person');
datastore.save(
- {
- key,
- data: {
- location: geoPointType,
+ {
+ key,
+ data: {
+ location: geoPointType,
+ },
},
- },
- err => {
- assert.ifError(err);
-
- datastore.get(key, (err, entity) => {
+ err => {
assert.ifError(err);
- assert.deepStrictEqual(entity.location, geoPointValue);
- done();
+
+ datastore.get(key, (err, entity) => {
+ assert.ifError(err);
+ assert.deepStrictEqual(entity.location, geoPointValue);
+ done();
+ });
});
- }
- );
});
});
});
@@ -562,19 +549,16 @@ describe('Datastore', () => {
});
it('should limit queries', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .limit(5);
+ const q =
+ datastore.createQuery('Character').hasAncestor(ancestor).limit(5);
datastore.runQuery(q, (err, firstEntities, info) => {
assert.ifError(err);
assert.strictEqual(firstEntities.length, 5);
- const secondQ = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .start(info.endCursor);
+ const secondQ = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .start(info.endCursor);
datastore.runQuery(secondQ, (err, secondEntities) => {
assert.ifError(err);
@@ -587,10 +571,8 @@ describe('Datastore', () => {
it('should not go over a limit', done => {
const limit = 3;
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .limit(limit);
+ const q =
+ datastore.createQuery('Character').hasAncestor(ancestor).limit(limit);
datastore.runQuery(q, (err, results) => {
assert.ifError(err);
@@ -602,44 +584,41 @@ describe('Datastore', () => {
it('should run a query as a stream', done => {
const q = datastore.createQuery('Character').hasAncestor(ancestor);
let resultsReturned = 0;
- datastore
- .runQueryStream(q)
- .on('error', done)
- .on('data', () => {
- resultsReturned++;
- })
- .on('end', () => {
- assert.strictEqual(resultsReturned, characters.length);
- done();
- });
+ datastore.runQueryStream(q)
+ .on('error', done)
+ .on('data',
+ () => {
+ resultsReturned++;
+ })
+ .on('end', () => {
+ assert.strictEqual(resultsReturned, characters.length);
+ done();
+ });
});
it('should not go over a limit with a stream', done => {
const limit = 3;
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .limit(limit);
+ const q =
+ datastore.createQuery('Character').hasAncestor(ancestor).limit(limit);
let resultsReturned = 0;
- datastore
- .runQueryStream(q)
- .on('error', done)
- .on('data', () => {
- resultsReturned++;
- })
- .on('end', () => {
- assert.strictEqual(resultsReturned, limit);
- done();
- });
+ datastore.runQueryStream(q)
+ .on('error', done)
+ .on('data',
+ () => {
+ resultsReturned++;
+ })
+ .on('end', () => {
+ assert.strictEqual(resultsReturned, limit);
+ done();
+ });
});
it('should filter queries with simple indexes', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .filter('appearances', '>=', 20);
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('appearances', '>=', 20);
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -649,11 +628,10 @@ describe('Datastore', () => {
});
it('should filter queries with defined indexes', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .filter('family', 'Stark')
- .filter('appearances', '>=', 20);
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('family', 'Stark')
+ .filter('appearances', '>=', 20);
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -675,10 +653,9 @@ describe('Datastore', () => {
it('should filter by key', done => {
const key = datastore.key(['Book', 'GoT', 'Character', 'Rickard']);
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .filter('__key__', key);
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .filter('__key__', key);
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -688,10 +665,9 @@ describe('Datastore', () => {
});
it('should order queries', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .order('appearances');
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .order('appearances');
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -704,10 +680,10 @@ describe('Datastore', () => {
});
it('should select projections', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .select(['name', 'family']);
+ const q =
+ datastore.createQuery('Character').hasAncestor(ancestor).select([
+ 'name', 'family'
+ ]);
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -727,12 +703,11 @@ describe('Datastore', () => {
});
it('should paginate with offset and limit', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .offset(2)
- .limit(3)
- .order('appearances');
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .offset(2)
+ .limit(3)
+ .order('appearances');
datastore.runQuery(q, (err, entities, info) => {
assert.ifError(err);
@@ -741,11 +716,10 @@ describe('Datastore', () => {
assert.strictEqual(entities[0].name, 'Robb');
assert.strictEqual(entities[2].name, 'Catelyn');
- const secondQ = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .order('appearances')
- .start(info.endCursor);
+ const secondQ = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .order('appearances')
+ .start(info.endCursor);
datastore.runQuery(secondQ, (err, secondEntities) => {
assert.ifError(err);
@@ -758,21 +732,19 @@ describe('Datastore', () => {
});
it('should resume from a start cursor', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .offset(2)
- .limit(2)
- .order('appearances');
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .offset(2)
+ .limit(2)
+ .order('appearances');
datastore.runQuery(q, (err, entities, info) => {
assert.ifError(err);
- const secondQ = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .order('appearances')
- .start(info.endCursor);
+ const secondQ = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .order('appearances')
+ .start(info.endCursor);
datastore.runQuery(secondQ, (err, secondEntities) => {
assert.ifError(err);
@@ -787,10 +759,9 @@ describe('Datastore', () => {
});
it('should group queries', done => {
- const q = datastore
- .createQuery('Character')
- .hasAncestor(ancestor)
- .groupBy('appearances');
+ const q = datastore.createQuery('Character')
+ .hasAncestor(ancestor)
+ .groupBy('appearances');
datastore.runQuery(q, (err, entities) => {
assert.ifError(err);
@@ -836,63 +807,61 @@ describe('Datastore', () => {
const incompleteKey = datastore.key('Company');
datastore.save(
- {
- key: deleteKey,
- data: {},
- },
- err => {
- assert.ifError(err);
-
- const transaction = datastore.transaction();
-
- transaction.run(err => {
+ {
+ key: deleteKey,
+ data: {},
+ },
+ err => {
assert.ifError(err);
- transaction.delete(deleteKey);
-
- transaction.save([
- {
- key,
- data: {rating: 10},
- },
- {
- key: incompleteKey,
- data: {rating: 100},
- },
- ]);
+ const transaction = datastore.transaction();
- transaction.commit(err => {
+ transaction.run(err => {
assert.ifError(err);
- // Incomplete key should have been given an ID.
- assert.strictEqual(incompleteKey.path.length, 2);
-
- async.parallel(
- [
- // The key queued for deletion should have been deleted.
- callback => {
- datastore.get(deleteKey, (err, entity) => {
- assert.ifError(err);
- assert.strictEqual(typeof entity, 'undefined');
- callback();
- });
- },
+ transaction.delete(deleteKey);
- // Data should have been updated on the key.
- callback => {
- datastore.get(key, (err, entity) => {
- assert.ifError(err);
- assert.strictEqual(entity.rating, 10);
- callback();
- });
- },
- ],
- done
- );
+ transaction.save([
+ {
+ key,
+ data: {rating: 10},
+ },
+ {
+ key: incompleteKey,
+ data: {rating: 100},
+ },
+ ]);
+
+ transaction.commit(err => {
+ assert.ifError(err);
+
+ // Incomplete key should have been given an ID.
+ assert.strictEqual(incompleteKey.path.length, 2);
+
+ async.parallel(
+ [
+ // The key queued for deletion should have been deleted.
+ callback => {
+ datastore.get(deleteKey, (err, entity) => {
+ assert.ifError(err);
+ assert.strictEqual(typeof entity, 'undefined');
+ callback();
+ });
+ },
+
+ // Data should have been updated on the key.
+ callback => {
+ datastore.get(key, (err, entity) => {
+ assert.ifError(err);
+ assert.strictEqual(entity.rating, 10);
+ callback();
+ });
+ },
+ ],
+ done);
+ });
});
});
- }
- );
});
it('should use the last modification to a key', done => {
diff --git a/test/.eslintrc.yml b/test/.eslintrc.yml
index 73f7bbc94..2eb32898b 100644
--- a/test/.eslintrc.yml
+++ b/test/.eslintrc.yml
@@ -3,3 +3,4 @@ env:
mocha: true
rules:
node/no-unpublished-require: off
+ node/no-missing-require: off
diff --git a/test/entity.ts b/test/entity.ts
index fb521f6d8..d42941f47 100644
--- a/test/entity.ts
+++ b/test/entity.ts
@@ -18,22 +18,22 @@ import * as assert from 'assert';
import * as extend from 'extend';
import {Datastore} from '../src';
-describe('entity', function() {
+describe('entity', () => {
let entity;
- beforeEach(function() {
+ beforeEach(() => {
delete require.cache[require.resolve('../src/entity.js')];
entity = require('../src/entity.js').entity;
});
- describe('KEY_SYMBOL', function() {
- it('should export the symbol', function() {
+ describe('KEY_SYMBOL', () => {
+ it('should export the symbol', () => {
assert.strictEqual(entity.KEY_SYMBOL.toString(), 'Symbol(KEY)');
});
});
- describe('Double', function() {
- it('should store the value', function() {
+ describe('Double', () => {
+ it('should store the value', () => {
const value = 8.3;
const double = new entity.Double(value);
@@ -41,25 +41,25 @@ describe('entity', function() {
});
});
- describe('isDsDouble', function() {
- it('should correctly identify a Double', function() {
+ describe('isDsDouble', () => {
+ it('should correctly identify a Double', () => {
const double = new entity.Double(0.42);
assert.strictEqual(entity.isDsDouble(double), true);
});
- it('should correctly identify a homomorphic non-Double', function() {
+ it('should correctly identify a homomorphic non-Double', () => {
const nonDouble = Object.assign({}, new entity.Double(42));
assert.strictEqual(entity.isDsDouble(nonDouble), false);
});
- it('should correctly identify a primitive', function() {
+ it('should correctly identify a primitive', () => {
const primitiveDouble = 0.42;
assert.strictEqual(entity.isDsDouble(primitiveDouble), false);
});
});
- describe('Int', function() {
- it('should store the stringified value', function() {
+ describe('Int', () => {
+ it('should store the stringified value', () => {
const value = 8;
const int = new entity.Int(value);
@@ -67,25 +67,25 @@ describe('entity', function() {
});
});
- describe('isDsInt', function() {
- it('should correctly identify an Int', function() {
+ describe('isDsInt', () => {
+ it('should correctly identify an Int', () => {
const int = new entity.Int(42);
assert.strictEqual(entity.isDsInt(int), true);
});
- it('should correctly identify homomorphic non-Int', function() {
+ it('should correctly identify homomorphic non-Int', () => {
const nonInt = Object.assign({}, new entity.Int(42));
assert.strictEqual(entity.isDsInt(nonInt), false);
});
- it('should correctly identify a primitive', function() {
+ it('should correctly identify a primitive', () => {
const primitiveInt = 42;
assert.strictEqual(entity.isDsInt(primitiveInt), false);
});
});
- describe('GeoPoint', function() {
- it('should store the value', function() {
+ describe('GeoPoint', () => {
+ it('should store the value', () => {
const value = {
latitude: 24,
longitude: 88,
@@ -96,62 +96,62 @@ describe('entity', function() {
});
});
- describe('isDsGeoPoint', function() {
- it('should correctly identify a GeoPoint', function() {
+ describe('isDsGeoPoint', () => {
+ it('should correctly identify a GeoPoint', () => {
const geoPoint = new entity.GeoPoint({latitude: 24, longitude: 88});
assert.strictEqual(entity.isDsGeoPoint(geoPoint), true);
});
- it('should correctly identify a homomorphic non-GeoPoint', function() {
+ it('should correctly identify a homomorphic non-GeoPoint', () => {
const geoPoint = new entity.GeoPoint({latitude: 24, longitude: 88});
const nonGeoPoint = Object.assign({}, geoPoint);
assert.strictEqual(entity.isDsGeoPoint(nonGeoPoint), false);
});
});
- describe('Key', function() {
- it('should assign the namespace', function() {
+ describe('Key', () => {
+ it('should assign the namespace', () => {
const namespace = 'NS';
const key = new entity.Key({namespace, path: []});
assert.strictEqual(key.namespace, namespace);
});
- it('should assign the kind', function() {
+ it('should assign the kind', () => {
const kind = 'kind';
const key = new entity.Key({path: [kind]});
assert.strictEqual(key.kind, kind);
});
- it('should assign the ID', function() {
+ it('should assign the ID', () => {
const id = 11;
const key = new entity.Key({path: ['Kind', id]});
assert.strictEqual(key.id, id);
});
- it('should assign the ID from an Int', function() {
+ it('should assign the ID from an Int', () => {
const id = new entity.Int(11);
const key = new entity.Key({path: ['Kind', id]});
assert.strictEqual(key.id, id.value);
});
- it('should assign the name', function() {
+ it('should assign the name', () => {
const name = 'name';
const key = new entity.Key({path: ['Kind', name]});
assert.strictEqual(key.name, name);
});
- it('should assign a parent', function() {
+ it('should assign a parent', () => {
const key = new entity.Key({path: ['ParentKind', 1, 'Kind', 1]});
assert(key.parent instanceof entity.Key);
});
- it('should not modify input path', function() {
+ it('should not modify input path', () => {
const inputPath = ['ParentKind', 1, 'Kind', 1];
- new entity.Key({path: inputPath});
+ const key = new entity.Key({path: inputPath});
assert.deepStrictEqual(inputPath, ['ParentKind', 1, 'Kind', 1]);
});
- it('should always compute the correct path', function() {
+ it('should always compute the correct path', () => {
const key = new entity.Key({path: ['ParentKind', 1, 'Kind', 1]});
assert.deepStrictEqual(key.path, ['ParentKind', 1, 'Kind', 1]);
@@ -162,20 +162,20 @@ describe('entity', function() {
});
});
- describe('isDsKey', function() {
- it('should correctly identify a Key', function() {
+ describe('isDsKey', () => {
+ it('should correctly identify a Key', () => {
const key = new entity.Key({path: ['Kind', 1]});
assert.strictEqual(entity.isDsKey(key), true);
});
- it('should correctly identify a homomorphic non-Key', function() {
+ it('should correctly identify a homomorphic non-Key', () => {
const notKey = Object.assign({}, new entity.Key({path: ['Kind', 1]}));
assert.strictEqual(entity.isDsKey(notKey), false);
});
});
- describe('decodeValueProto', function() {
- it('should decode arrays', function() {
+ describe('decodeValueProto', () => {
+ it('should decode arrays', () => {
const expectedValue = [{}];
const valueProto = {
@@ -188,10 +188,10 @@ describe('entity', function() {
let run = false;
const decodeValueProto = entity.decodeValueProto;
- entity.decodeValueProto = function(valueProto) {
+ entity.decodeValueProto = (valueProto) => {
if (!run) {
run = true;
- return decodeValueProto.apply(null, arguments);
+ return decodeValueProto(valueProto);
}
assert.strictEqual(valueProto, expectedValue[0]);
@@ -199,12 +199,10 @@ describe('entity', function() {
};
assert.deepStrictEqual(
- entity.decodeValueProto(valueProto),
- expectedValue
- );
+ entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode blobs', function() {
+ it('should decode blobs', () => {
const expectedValue = Buffer.from('Hi');
const valueProto = {
@@ -213,12 +211,10 @@ describe('entity', function() {
};
assert.deepStrictEqual(
- entity.decodeValueProto(valueProto),
- expectedValue
- );
+ entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode null', function() {
+ it('should decode null', () => {
const expectedValue = null;
const valueProto = {
@@ -230,7 +226,7 @@ describe('entity', function() {
assert.deepStrictEqual(decodedValue, expectedValue);
});
- it('should decode doubles', function() {
+ it('should decode doubles', () => {
const expectedValue = 8.3;
const valueProto = {
@@ -241,7 +237,7 @@ describe('entity', function() {
assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode ints', function() {
+ it('should decode ints', () => {
const expectedValue = 8;
const valueProto = {
@@ -252,7 +248,7 @@ describe('entity', function() {
assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode entities', function() {
+ it('should decode entities', () => {
const expectedValue = {};
const valueProto = {
@@ -260,7 +256,7 @@ describe('entity', function() {
entityValue: expectedValue,
};
- entity.entityFromEntityProto = function(entityProto) {
+ entity.entityFromEntityProto = (entityProto) => {
assert.strictEqual(entityProto, expectedValue);
return expectedValue;
};
@@ -268,7 +264,7 @@ describe('entity', function() {
assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode keys', function() {
+ it('should decode keys', () => {
const expectedValue = {};
const valueProto = {
@@ -276,7 +272,7 @@ describe('entity', function() {
keyValue: expectedValue,
};
- entity.keyFromKeyProto = function(keyProto) {
+ entity.keyFromKeyProto = (keyProto) => {
assert.strictEqual(keyProto, expectedValue);
return expectedValue;
};
@@ -284,7 +280,7 @@ describe('entity', function() {
assert.strictEqual(entity.decodeValueProto(valueProto), expectedValue);
});
- it('should decode timestamps', function() {
+ it('should decode timestamps', () => {
const date = new Date();
const seconds = Math.floor(date.getTime() / 1000);
@@ -301,12 +297,10 @@ describe('entity', function() {
};
assert.deepStrictEqual(
- entity.decodeValueProto(valueProto),
- expectedValue
- );
+ entity.decodeValueProto(valueProto), expectedValue);
});
- it('should return the value if no conversions are necessary', function() {
+ it('should return the value if no conversions are necessary', () => {
const expectedValue = false;
const valueProto = {
@@ -318,8 +312,8 @@ describe('entity', function() {
});
});
- describe('encodeValue', function() {
- it('should encode a boolean', function() {
+ describe('encodeValue', () => {
+ it('should encode a boolean', () => {
const value = true;
const expectedValueProto = {
@@ -329,7 +323,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode null', function() {
+ it('should encode null', () => {
const value = null;
const expectedValueProto = {
@@ -339,7 +333,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode an int', function() {
+ it('should encode an int', () => {
const value = 8;
const expectedValueProto = {
@@ -354,7 +348,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode an Int object', function() {
+ it('should encode an Int object', () => {
const value = new entity.Int(3);
const expectedValueProto = {
@@ -364,7 +358,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a double', function() {
+ it('should encode a double', () => {
const value = 8.3;
const expectedValueProto = {
@@ -379,7 +373,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a Double object', function() {
+ it('should encode a Double object', () => {
const value = new entity.Double(3);
const expectedValueProto = {
@@ -389,7 +383,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a GeoPoint object', function() {
+ it('should encode a GeoPoint object', () => {
const value = new entity.GeoPoint();
const expectedValueProto = {
@@ -399,7 +393,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a date', function() {
+ it('should encode a date', () => {
const value = new Date();
const seconds = value.getTime() / 1000;
@@ -413,7 +407,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a string', function() {
+ it('should encode a string', () => {
const value = 'Hi';
const expectedValueProto = {
@@ -423,7 +417,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a buffer', function() {
+ it('should encode a buffer', () => {
const value = Buffer.from('Hi');
const expectedValueProto = {
@@ -433,7 +427,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode an array', function() {
+ it('should encode an array', () => {
const value = [{}];
const expectedValueProto = {
@@ -445,10 +439,10 @@ describe('entity', function() {
let run = false;
const encodeValue = entity.encodeValue;
- entity.encodeValue = function(value_) {
+ entity.encodeValue = (value_) => {
if (!run) {
run = true;
- return encodeValue.apply(null, arguments);
+ return encodeValue(value_);
}
assert.strictEqual(value_, value[0]);
@@ -458,7 +452,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode a Key', function() {
+ it('should encode a Key', () => {
const value = new entity.Key({
namespace: 'ns',
path: ['Kind', 1],
@@ -468,7 +462,7 @@ describe('entity', function() {
keyValue: value,
};
- entity.keyToKeyProto = function(key) {
+ entity.keyToKeyProto = (key) => {
assert.strictEqual(key, value);
return value;
};
@@ -476,7 +470,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should encode an object', function() {
+ it('should encode an object', () => {
const value = {
key: 'value',
};
@@ -492,10 +486,10 @@ describe('entity', function() {
let run = false;
const encodeValue = entity.encodeValue;
- entity.encodeValue = function(value_) {
+ entity.encodeValue = (value_) => {
if (!run) {
run = true;
- return encodeValue.apply(null, arguments);
+ return encodeValue(value_);
}
assert.strictEqual(value_, value.key);
@@ -505,7 +499,7 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should clone an object', function() {
+ it('should clone an object', () => {
const value = {
a: {
b: {
@@ -522,7 +516,7 @@ describe('entity', function() {
assert.notStrictEqual(value, encodedValue);
});
- it('should encode an empty object', function() {
+ it('should encode an empty object', () => {
const value = {};
const expectedValueProto = {
@@ -534,15 +528,15 @@ describe('entity', function() {
assert.deepStrictEqual(entity.encodeValue(value), expectedValueProto);
});
- it('should throw if an invalid value was provided', function() {
- assert.throws(function() {
+ it('should throw if an invalid value was provided', () => {
+ assert.throws(() => {
entity.encodeValue();
}, /Unsupported field value/);
});
});
- describe('entityFromEntityProto', function() {
- it('should convert entity proto to entity', function() {
+ describe('entityFromEntityProto', () => {
+ it('should convert entity proto to entity', () => {
const expectedEntity = {
name: 'Stephen',
};
@@ -557,14 +551,12 @@ describe('entity', function() {
};
assert.deepStrictEqual(
- entity.entityFromEntityProto(entityProto),
- expectedEntity
- );
+ entity.entityFromEntityProto(entityProto), expectedEntity);
});
});
- describe('entityToEntityProto', function() {
- it('should format an entity', function() {
+ describe('entityToEntityProto', () => {
+ it('should format an entity', () => {
const value = 'Stephen';
const entityObject = {
@@ -578,18 +570,16 @@ describe('entity', function() {
properties: entityObject.data,
};
- entity.encodeValue = function(value_) {
+ entity.encodeValue = (value_) => {
assert.strictEqual(value_, value);
return value;
};
assert.deepStrictEqual(
- entity.entityToEntityProto(entityObject),
- expectedEntityProto
- );
+ entity.entityToEntityProto(entityObject), expectedEntityProto);
});
- it('should respect excludeFromIndexes', function() {
+ it('should respect excludeFromIndexes', () => {
const value1 = 'Stephen';
const value2 = 'Stephen2';
const value3 = 'Stephen3';
@@ -948,8 +938,12 @@ describe('entity', function() {
entityValue: {
properties: {
b: {
- // excludeFromIndexes: ['nestedArrayVariants[].a[].b'] does not apply here,
- // To exclude this array (= all its elements), we would use ['nestedArrayVariants[].a[].b[]']
+ // excludeFromIndexes:
+ // ['nestedArrayVariants[].a[].b'] does not
+ // apply here,
+ // To exclude this array (= all its
+ // elements), we would use
+ // ['nestedArrayVariants[].a[].b[]']
arrayValue: {
values: [
{
@@ -974,7 +968,8 @@ describe('entity', function() {
},
},
alpha: {
- // `excludeFromIndexes: ['alpha[]']` results in exclusion of all array elements
+ // `excludeFromIndexes: ['alpha[]']` results in exclusion of all
+ // array elements
arrayValue: {
values: [
{
@@ -989,7 +984,8 @@ describe('entity', function() {
},
},
omega: {
- // `excludeFromIndexes: ['omega']` is not applied, because 'omega' is an array.
+ // `excludeFromIndexes: ['omega']` is not applied, because 'omega'
+ // is an array.
arrayValue: {
values: [
{
@@ -1005,14 +1001,12 @@ describe('entity', function() {
};
assert.deepStrictEqual(
- entity.entityToEntityProto(entityObject),
- expectedEntityProto
- );
+ entity.entityToEntityProto(entityObject), expectedEntityProto);
});
});
- describe('formatArray', function() {
- it('should convert protos to key/data entity array', function() {
+ describe('formatArray', () => {
+ it('should convert protos to key/data entity array', () => {
const key = {};
const entityProto = {
@@ -1027,12 +1021,12 @@ describe('entity', function() {
const expectedResults = entityProto;
- entity.keyFromKeyProto = function(key_) {
+ entity.keyFromKeyProto = (key_) => {
assert.strictEqual(key_, key);
return key;
};
- entity.entityFromEntityProto = function(entityProto_) {
+ entity.entityFromEntityProto = (entityProto_) => {
assert.strictEqual(entityProto_, entityProto);
return entityProto;
};
@@ -1044,13 +1038,13 @@ describe('entity', function() {
});
});
- describe('isKeyComplete', function() {
- it('should convert key to key proto', function(done) {
+ describe('isKeyComplete', () => {
+ it('should convert key to key proto', (done) => {
const key = new entity.Key({
path: ['Kind', 123],
});
- entity.keyToKeyProto = function(key_) {
+ entity.keyToKeyProto = (key_) => {
assert.strictEqual(key_, key);
setImmediate(done);
return key;
@@ -1059,7 +1053,7 @@ describe('entity', function() {
entity.isKeyComplete(key);
});
- it('should return true if key has id', function() {
+ it('should return true if key has id', () => {
const key = new entity.Key({
path: ['Kind', 123],
});
@@ -1067,7 +1061,7 @@ describe('entity', function() {
assert.strictEqual(entity.isKeyComplete(key), true);
});
- it('should return true if key has name', function() {
+ it('should return true if key has name', () => {
const key = new entity.Key({
path: ['Kind', 'name'],
});
@@ -1075,7 +1069,7 @@ describe('entity', function() {
assert.strictEqual(entity.isKeyComplete(key), true);
});
- it('should return false if key does not have name or ID', function() {
+ it('should return false if key does not have name or ID', () => {
const key = new entity.Key({
path: ['Kind'],
});
@@ -1084,7 +1078,7 @@ describe('entity', function() {
});
});
- describe('keyFromKeyProto', function() {
+ describe('keyFromKeyProto', () => {
const NAMESPACE = 'Namespace';
const keyProto = {
@@ -1106,39 +1100,42 @@ describe('entity', function() {
],
};
- it('should set the namespace', function(done) {
- entity.Key = function(keyOptions) {
- assert.strictEqual(keyOptions.namespace, NAMESPACE);
- done();
+ it('should set the namespace', (done) => {
+ entity.Key = class {
+ constructor(keyOptions) {
+ assert.strictEqual(keyOptions.namespace, NAMESPACE);
+ done();
+ }
};
-
entity.keyFromKeyProto(keyProto);
});
- it('should create a proper Key', function(done) {
- entity.Key = function(keyOptions) {
- assert.deepStrictEqual(keyOptions, {
- namespace: NAMESPACE,
- path: ['Kind', new entity.Int(111), 'Kind2', 'name'],
- });
-
- done();
+ it('should create a proper Key', (done) => {
+ entity.Key = class {
+ constructor(keyOptions) {
+ assert.deepStrictEqual(keyOptions, {
+ namespace: NAMESPACE,
+ path: ['Kind', new entity.Int(111), 'Kind2', 'name'],
+ });
+ done();
+ }
};
-
entity.keyFromKeyProto(keyProto);
});
- it('should return the created Key', function() {
+ it('should return the created Key', () => {
const expectedValue = {};
- entity.Key = function() {
- return expectedValue;
+ entity.Key = class {
+ constructor() {
+ return expectedValue;
+ }
};
assert.strictEqual(entity.keyFromKeyProto(keyProto), expectedValue);
});
- it('should throw if path is invalid', function(done) {
+ it('should throw if path is invalid', (done) => {
const keyProtoInvalid = {
partitionId: {
namespaceId: 'Namespace',
@@ -1164,8 +1161,8 @@ describe('entity', function() {
});
});
- describe('keyToKeyProto', function() {
- it('should handle hierarchical key definitions', function() {
+ describe('keyToKeyProto', () => {
+ it('should handle hierarchical key definitions', () => {
const key = new entity.Key({
path: ['Kind1', 1, 'Kind2', 'name', 'Kind3', new entity.Int(3)],
});
@@ -1187,7 +1184,7 @@ describe('entity', function() {
assert.strictEqual(keyProto.path[2].name, undefined);
});
- it('should detect the namespace of the hierarchical keys', function() {
+ it('should detect the namespace of the hierarchical keys', () => {
const key = new entity.Key({
namespace: 'Namespace',
path: ['Kind1', 1, 'Kind2', 'name'],
@@ -1206,7 +1203,7 @@ describe('entity', function() {
assert.strictEqual(keyProto.path[1].name, 'name');
});
- it('should handle incomplete keys with & without namespaces', function() {
+ it('should handle incomplete keys with & without namespaces', () => {
const incompleteKey = new entity.Key({
path: ['Kind'],
});
@@ -1230,7 +1227,7 @@ describe('entity', function() {
assert.strictEqual(keyProtoWithNs.path[0].name, undefined);
});
- it('should throw if key contains 0 items', function(done) {
+ it('should throw if key contains 0 items', (done) => {
const key = new entity.Key({
path: [],
});
@@ -1244,7 +1241,7 @@ describe('entity', function() {
}
});
- it('should throw if key path contains null ids', function(done) {
+ it('should throw if key path contains null ids', (done) => {
const key = new entity.Key({
namespace: 'Namespace',
path: ['Kind1', null, 'Company'],
@@ -1259,19 +1256,19 @@ describe('entity', function() {
}
});
- it('should not throw if key is incomplete', function() {
+ it('should not throw if key is incomplete', () => {
const key = new entity.Key({
namespace: 'Namespace',
path: ['Kind1', 123, 'Company', null],
});
- assert.doesNotThrow(function() {
+ assert.doesNotThrow(() => {
entity.keyToKeyProto(key);
});
});
});
- describe('queryToQueryProto', function() {
+ describe('queryToQueryProto', () => {
const queryProto = {
distinctOn: [
{
@@ -1342,37 +1339,33 @@ describe('entity', function() {
},
};
- it('should support all configurations of a query', function() {
+ it('should support all configurations of a query', () => {
const ancestorKey = new entity.Key({
path: ['Kind2', 'somename'],
});
const ds = new Datastore({projectId: 'project-id'});
- const query = ds
- .createQuery('Kind1')
- .filter('name', 'John')
- .start('start')
- .end('end')
- .groupBy(['name'])
- .order('name')
- .select('name')
- .limit(1)
- .offset(1)
- .hasAncestor(ancestorKey);
+ const query = ds.createQuery('Kind1')
+ .filter('name', 'John')
+ .start('start')
+ .end('end')
+ .groupBy(['name'])
+ .order('name')
+ .select('name')
+ .limit(1)
+ .offset(1)
+ .hasAncestor(ancestorKey);
assert.deepStrictEqual(entity.queryToQueryProto(query), queryProto);
});
- it('should handle buffer start and end values', function() {
+ it('should handle buffer start and end values', () => {
const ds = new Datastore({projectId: 'project-id'});
const startVal = Buffer.from('start');
const endVal = Buffer.from('end');
- const query = ds
- .createQuery('Kind1')
- .start(startVal)
- .end(endVal);
+ const query = ds.createQuery('Kind1').start(startVal).end(endVal);
const queryProto = entity.queryToQueryProto(query);
assert.strictEqual(queryProto.endCursor, endVal);
diff --git a/test/gapic-v1.js b/test/gapic-v1.js
index a2b2746ee..dc700ab76 100644
--- a/test/gapic-v1.js
+++ b/test/gapic-v1.js
@@ -69,11 +69,7 @@ describe('DatastoreClient', () => {
};
// Mock Grpc layer
- client._innerApiCalls.lookup = mockSimpleGrpcMethod(
- request,
- null,
- error
- );
+ client._innerApiCalls.lookup = mockSimpleGrpcMethod(request, null, error);
client.lookup(request, (err, response) => {
assert(err instanceof Error);
@@ -258,11 +254,7 @@ describe('DatastoreClient', () => {
};
// Mock Grpc layer
- client._innerApiCalls.commit = mockSimpleGrpcMethod(
- request,
- null,
- error
- );
+ client._innerApiCalls.commit = mockSimpleGrpcMethod(request, null, error);
client.commit(request, (err, response) => {
assert(err instanceof Error);
@@ -455,7 +447,6 @@ describe('DatastoreClient', () => {
});
});
});
-
});
function mockSimpleGrpcMethod(expectedRequest, response, error) {
diff --git a/test/index.ts b/test/index.ts
index 89d5e7b05..e8543f167 100644
--- a/test/index.ts
+++ b/test/index.ts
@@ -61,25 +61,23 @@ const fakeEntity: any = {
},
};
-let GoogleAuthOverride;
+let googleAuthOverride;
function fakeGoogleAuth() {
- return (GoogleAuthOverride || function() {}).apply(null, arguments);
+ return (googleAuthOverride || (() => {})).apply(null, arguments);
}
let createInsecureOverride;
const fakeGoogleGax = {
- GrpcClient: class extends gax.GrpcClient {
+ GrpcClient: class extends gax.GrpcClient{
constructor(opts) {
// super constructor must be called first!
super(opts);
this.grpc = {
credentials: {
createInsecure() {
- return (createInsecureOverride || function() {}).apply(
- null,
- arguments
- );
+ return (createInsecureOverride || (() => {}))
+ .apply(null, arguments);
},
},
} as gax.GrpcModule;
@@ -103,7 +101,8 @@ class FakeTransaction {
function FakeV1() {}
-describe('Datastore', function() {
+describe('Datastore', () => {
+ // tslint:disable-next-line variable-name
let Datastore;
let datastore;
@@ -121,22 +120,22 @@ describe('Datastore', function() {
namespace: NAMESPACE,
};
- before(function() {
+ before(() => {
Datastore = proxyquire('../src', {
- './entity.js': {entity: fakeEntity},
- './query.js': {Query: FakeQuery},
- './transaction.js': {Transaction: FakeTransaction},
- './v1': FakeV1,
- 'google-auth-library': {
- GoogleAuth: fakeGoogleAuth,
- },
- 'google-gax': fakeGoogleGax,
- }).Datastore;
+ './entity.js': {entity: fakeEntity},
+ './query.js': {Query: FakeQuery},
+ './transaction.js': {Transaction: FakeTransaction},
+ './v1': FakeV1,
+ 'google-auth-library': {
+ GoogleAuth: fakeGoogleAuth,
+ },
+ 'google-gax': fakeGoogleGax,
+ }).Datastore;
});
- beforeEach(function() {
+ beforeEach(() => {
createInsecureOverride = null;
- GoogleAuthOverride = null;
+ googleAuthOverride = null;
datastore = new Datastore({
projectId: PROJECT_ID,
@@ -144,7 +143,7 @@ describe('Datastore', function() {
});
});
- afterEach(function() {
+ afterEach(() => {
if (typeof DATASTORE_PROJECT_ID_CACHED === 'string') {
process.env.DATASTORE_PROJECT_ID = DATASTORE_PROJECT_ID_CACHED;
} else {
@@ -152,45 +151,45 @@ describe('Datastore', function() {
}
});
- after(function() {
+ after(() => {
createInsecureOverride = null;
- GoogleAuthOverride = null;
+ googleAuthOverride = null;
});
- it('should export GAX client', function() {
+ it('should export GAX client', () => {
assert.ok(require('../src').v1);
});
- describe('instantiation', function() {
- it('should initialize an empty Client map', function() {
+ describe('instantiation', () => {
+ it('should initialize an empty Client map', () => {
assert(datastore.clients_ instanceof Map);
assert.strictEqual(datastore.clients_.size, 0);
});
- it('should alias itself to the datastore property', function() {
+ it('should alias itself to the datastore property', () => {
assert.strictEqual(datastore.datastore, datastore);
});
- it('should localize the namespace', function() {
+ it('should localize the namespace', () => {
assert.strictEqual(datastore.namespace, NAMESPACE);
});
- it('should localize the projectId', function() {
+ it('should localize the projectId', () => {
assert.strictEqual(datastore.projectId, PROJECT_ID);
assert.strictEqual(datastore.options.projectId, PROJECT_ID);
});
- it('should default project ID to placeholder', function() {
+ it('should default project ID to placeholder', () => {
const datastore = new Datastore({});
assert.strictEqual(datastore.projectId, '{{projectId}}');
});
- it('should not default options.projectId to placeholder', function() {
+ it('should not default options.projectId to placeholder', () => {
const datastore = new Datastore({});
assert.strictEqual(datastore.options.projectId, undefined);
});
- it('should use DATASTORE_PROJECT_ID', function() {
+ it('should use DATASTORE_PROJECT_ID', () => {
const projectId = 'overridden-project-id';
process.env.DATASTORE_PROJECT_ID = projectId;
@@ -201,24 +200,24 @@ describe('Datastore', function() {
assert.strictEqual(datastore.options.projectId, projectId);
});
- it('should set the default base URL', function() {
+ it('should set the default base URL', () => {
assert.strictEqual(datastore.defaultBaseUrl_, 'datastore.googleapis.com');
});
- it('should set default API connection details', function(done) {
+ it('should set default API connection details', (done) => {
const determineBaseUrl_ = Datastore.prototype.determineBaseUrl_;
- Datastore.prototype.determineBaseUrl_ = function(customApiEndpoint) {
+ Datastore.prototype.determineBaseUrl_ = (customApiEndpoint) => {
Datastore.prototype.determineBaseUrl_ = determineBaseUrl_;
assert.strictEqual(customApiEndpoint, OPTIONS.apiEndpoint);
done();
};
- new Datastore(OPTIONS);
+ const d = new Datastore(OPTIONS);
});
- it('should localize the options', function() {
+ it('should localize the options', () => {
delete process.env.DATASTORE_PROJECT_ID;
const options = {
@@ -231,22 +230,20 @@ describe('Datastore', function() {
assert.notStrictEqual(datastore.options, options);
assert.deepStrictEqual(
- datastore.options,
- Object.assign(
- {
- libName: 'gccl',
- libVersion: require('../../package.json').version,
- scopes: v1.DatastoreClient.scopes,
- servicePath: datastore.baseUrl_,
- port: 443,
- projectId: undefined,
- },
- options
- )
- );
- });
-
- it('should set port if detected', function() {
+ datastore.options,
+ Object.assign(
+ {
+ libName: 'gccl',
+ libVersion: require('../../package.json').version,
+ scopes: v1.DatastoreClient.scopes,
+ servicePath: datastore.baseUrl_,
+ port: 443,
+ projectId: undefined,
+ },
+ options));
+ });
+
+ it('should set port if detected', () => {
const determineBaseUrl_ = Datastore.prototype.determineBaseUrl_;
const port = 99;
@@ -260,7 +257,7 @@ describe('Datastore', function() {
assert.strictEqual(datastore.options.port, port);
});
- it('should set grpc ssl credentials if custom endpoint', function() {
+ it('should set grpc ssl credentials if custom endpoint', () => {
const determineBaseUrl_ = Datastore.prototype.determineBaseUrl_;
Datastore.prototype.determineBaseUrl_ = function() {
@@ -269,7 +266,7 @@ describe('Datastore', function() {
};
const fakeInsecureCreds = {};
- createInsecureOverride = function() {
+ createInsecureOverride = () => {
return fakeInsecureCreds;
};
@@ -278,10 +275,10 @@ describe('Datastore', function() {
assert.strictEqual(datastore.options.sslCreds, fakeInsecureCreds);
});
- it('should cache a local GoogleAuth instance', function() {
+ it('should cache a local GoogleAuth instance', () => {
const fakeGoogleAuthInstance = {};
- GoogleAuthOverride = function() {
+ googleAuthOverride = () => {
return fakeGoogleAuthInstance;
};
@@ -290,54 +287,54 @@ describe('Datastore', function() {
});
});
- describe('double', function() {
- it('should expose Double builder', function() {
+ describe('double', () => {
+ it('should expose Double builder', () => {
const aDouble = 7.0;
const double = Datastore.double(aDouble);
assert.strictEqual(double.value, aDouble);
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
const aDouble = 7.0;
const double = datastore.double(aDouble);
assert.strictEqual(double.value, aDouble);
});
});
- describe('geoPoint', function() {
- it('should expose GeoPoint builder', function() {
+ describe('geoPoint', () => {
+ it('should expose GeoPoint builder', () => {
const aGeoPoint = {latitude: 24, longitude: 88};
const geoPoint = Datastore.geoPoint(aGeoPoint);
assert.strictEqual(geoPoint.value, aGeoPoint);
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
const aGeoPoint = {latitude: 24, longitude: 88};
const geoPoint = datastore.geoPoint(aGeoPoint);
assert.strictEqual(geoPoint.value, aGeoPoint);
});
});
- describe('int', function() {
- it('should expose Int builder', function() {
+ describe('int', () => {
+ it('should expose Int builder', () => {
const anInt = 7;
const int = Datastore.int(anInt);
assert.strictEqual(int.value, anInt);
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
const anInt = 7;
const int = datastore.int(anInt);
assert.strictEqual(int.value, anInt);
});
});
- describe('isDouble', function() {
- it('should pass value to entity', function() {
+ describe('isDouble', () => {
+ it('should pass value to entity', () => {
const value = 0.42;
let called = false;
const saved = fakeEntity.isDsDouble;
- fakeEntity.isDsDouble = function(arg) {
+ fakeEntity.isDsDouble = (arg) => {
assert.strictEqual(arg, value);
called = true;
return false;
@@ -347,19 +344,19 @@ describe('Datastore', function() {
fakeEntity.isDsDouble = saved;
});
- it('should expose Double identifier', function() {
+ it('should expose Double identifier', () => {
const something = {};
Datastore.isDouble(something);
assert.strictEqual(fakeEntity.calledWith_[0], something);
});
});
- describe('isGeoPoint', function() {
- it('should pass value to entity', function() {
+ describe('isGeoPoint', () => {
+ it('should pass value to entity', () => {
const value = {fakeLatitude: 1, fakeLongitude: 2};
let called = false;
const saved = fakeEntity.isDsGeoPoint;
- fakeEntity.isDsGeoPoint = function(arg) {
+ fakeEntity.isDsGeoPoint = (arg) => {
assert.strictEqual(arg, value);
called = true;
return false;
@@ -369,19 +366,19 @@ describe('Datastore', function() {
fakeEntity.isDsGeoPoint = saved;
});
- it('should expose GeoPoint identifier', function() {
+ it('should expose GeoPoint identifier', () => {
const something = {};
Datastore.isGeoPoint(something);
assert.strictEqual(fakeEntity.calledWith_[0], something);
});
});
- describe('isInt', function() {
- it('should pass value to entity', function() {
+ describe('isInt', () => {
+ it('should pass value to entity', () => {
const value = 42;
let called = false;
const saved = fakeEntity.isDsInt;
- fakeEntity.isDsInt = function(arg) {
+ fakeEntity.isDsInt = (arg) => {
assert.strictEqual(arg, value);
called = true;
return false;
@@ -391,19 +388,19 @@ describe('Datastore', function() {
fakeEntity.isDsInt = saved;
});
- it('should expose Int identifier', function() {
+ it('should expose Int identifier', () => {
const something = {};
Datastore.isInt(something);
assert.strictEqual(fakeEntity.calledWith_[0], something);
});
});
- describe('isKey', function() {
- it('should pass value to entity', function() {
+ describe('isKey', () => {
+ it('should pass value to entity', () => {
const value = {zz: true};
let called = false;
const saved = fakeEntity.isDsKey;
- fakeEntity.isDsKey = function(arg) {
+ fakeEntity.isDsKey = (arg) => {
assert.strictEqual(arg, value);
called = true;
return false;
@@ -413,67 +410,61 @@ describe('Datastore', function() {
fakeEntity.isDsKey = saved;
});
- it('should expose Key identifier', function() {
+ it('should expose Key identifier', () => {
const something = {};
datastore.isKey(something);
assert.strictEqual(fakeEntity.calledWith_[0], something);
});
});
- describe('KEY', function() {
- it('should expose the KEY symbol', function() {
+ describe('KEY', () => {
+ it('should expose the KEY symbol', () => {
assert.strictEqual(Datastore.KEY, fakeEntity.KEY_SYMBOL);
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
assert.strictEqual(datastore.KEY, Datastore.KEY);
});
});
- describe('MORE_RESULTS_AFTER_CURSOR', function() {
- it('should expose a MORE_RESULTS_AFTER_CURSOR helper', function() {
+ describe('MORE_RESULTS_AFTER_CURSOR', () => {
+ it('should expose a MORE_RESULTS_AFTER_CURSOR helper', () => {
assert.strictEqual(
- Datastore.MORE_RESULTS_AFTER_CURSOR,
- 'MORE_RESULTS_AFTER_CURSOR'
- );
+ Datastore.MORE_RESULTS_AFTER_CURSOR, 'MORE_RESULTS_AFTER_CURSOR');
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
assert.strictEqual(
- datastore.MORE_RESULTS_AFTER_CURSOR,
- Datastore.MORE_RESULTS_AFTER_CURSOR
- );
+ datastore.MORE_RESULTS_AFTER_CURSOR,
+ Datastore.MORE_RESULTS_AFTER_CURSOR);
});
});
- describe('MORE_RESULTS_AFTER_LIMIT', function() {
- it('should expose a MORE_RESULTS_AFTER_LIMIT helper', function() {
+ describe('MORE_RESULTS_AFTER_LIMIT', () => {
+ it('should expose a MORE_RESULTS_AFTER_LIMIT helper', () => {
assert.strictEqual(
- Datastore.MORE_RESULTS_AFTER_LIMIT,
- 'MORE_RESULTS_AFTER_LIMIT'
- );
+ Datastore.MORE_RESULTS_AFTER_LIMIT, 'MORE_RESULTS_AFTER_LIMIT');
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
assert.strictEqual(
- datastore.MORE_RESULTS_AFTER_LIMIT,
- Datastore.MORE_RESULTS_AFTER_LIMIT
- );
+ datastore.MORE_RESULTS_AFTER_LIMIT,
+ Datastore.MORE_RESULTS_AFTER_LIMIT);
});
});
- describe('NO_MORE_RESULTS', function() {
- it('should expose a NO_MORE_RESULTS helper', function() {
+ describe('NO_MORE_RESULTS', () => {
+ it('should expose a NO_MORE_RESULTS helper', () => {
assert.strictEqual(Datastore.NO_MORE_RESULTS, 'NO_MORE_RESULTS');
});
- it('should also be on the prototype', function() {
+ it('should also be on the prototype', () => {
assert.strictEqual(datastore.NO_MORE_RESULTS, Datastore.NO_MORE_RESULTS);
});
});
- describe('createQuery', function() {
- it('should return a Query object', function() {
+ describe('createQuery', () => {
+ it('should return a Query object', () => {
const namespace = 'namespace';
const kind = ['Kind'];
@@ -485,7 +476,7 @@ describe('Datastore', function() {
assert.deepStrictEqual(query.calledWith_[2], kind);
});
- it('should include the default namespace', function() {
+ it('should include the default namespace', () => {
const kind = ['Kind'];
const query = datastore.createQuery(kind);
@@ -494,7 +485,7 @@ describe('Datastore', function() {
assert.deepStrictEqual(query.calledWith_[2], kind);
});
- it('should include the default namespace in a kindless query', function() {
+ it('should include the default namespace in a kindless query', () => {
const query = datastore.createQuery();
assert.strictEqual(query.calledWith_[0], datastore);
@@ -503,15 +494,15 @@ describe('Datastore', function() {
});
});
- describe('key', function() {
- it('should return a Key object', function() {
+ describe('key', () => {
+ it('should return a Key object', () => {
const options = {};
const key = datastore.key(options);
assert.strictEqual(key.calledWith_[0], options);
});
- it('should use a non-object argument as the path', function() {
+ it('should use a non-object argument as the path', () => {
const options = 'path';
const key = datastore.key(options);
@@ -520,29 +511,29 @@ describe('Datastore', function() {
});
});
- describe('transaction', function() {
- it('should return a Transaction object', function() {
+ describe('transaction', () => {
+ it('should return a Transaction object', () => {
const transaction = datastore.transaction();
assert.strictEqual(transaction.calledWith_[0], datastore);
});
- it('should pass options to the Transaction constructor', function() {
+ it('should pass options to the Transaction constructor', () => {
const options = {};
const transaction = datastore.transaction(options);
assert.strictEqual(transaction.calledWith_[1], options);
});
});
- describe('determineBaseUrl_', function() {
+ describe('determineBaseUrl_', () => {
function setHost(host) {
process.env.DATASTORE_EMULATOR_HOST = host;
}
- beforeEach(function() {
+ beforeEach(() => {
delete process.env.DATASTORE_EMULATOR_HOST;
});
- it('should default to defaultBaseUrl_', function() {
+ it('should default to defaultBaseUrl_', () => {
const defaultBaseUrl_ = 'defaulturl';
datastore.defaultBaseUrl_ = defaultBaseUrl_;
@@ -550,7 +541,7 @@ describe('Datastore', function() {
assert.strictEqual(datastore.baseUrl_, defaultBaseUrl_);
});
- it('should remove slashes from the baseUrl', function() {
+ it('should remove slashes from the baseUrl', () => {
const expectedBaseUrl = 'localhost';
setHost('localhost/');
@@ -562,7 +553,7 @@ describe('Datastore', function() {
assert.strictEqual(datastore.baseUrl_, expectedBaseUrl);
});
- it('should remove the protocol if specified', function() {
+ it('should remove the protocol if specified', () => {
setHost('http://localhost');
datastore.determineBaseUrl_();
assert.strictEqual(datastore.baseUrl_, 'localhost');
@@ -572,48 +563,48 @@ describe('Datastore', function() {
assert.strictEqual(datastore.baseUrl_, 'localhost');
});
- it('should set Numberified port if one was found', function() {
+ it('should set Numberified port if one was found', () => {
setHost('http://localhost:9090');
datastore.determineBaseUrl_();
assert.strictEqual(datastore.port_, 9090);
});
- it('should not set customEndpoint_ when using default baseurl', function() {
+ it('should not set customEndpoint_ when using default baseurl', () => {
const datastore = new Datastore({projectId: PROJECT_ID});
datastore.determineBaseUrl_();
assert.strictEqual(datastore.customEndpoint_, undefined);
});
- it('should set customEndpoint_ when using custom API endpoint', function() {
+ it('should set customEndpoint_ when using custom API endpoint', () => {
datastore.determineBaseUrl_('apiEndpoint');
assert.strictEqual(datastore.customEndpoint_, true);
});
- it('should set baseUrl when using custom API endpoint', function() {
+ it('should set baseUrl when using custom API endpoint', () => {
datastore.determineBaseUrl_('apiEndpoint');
assert.strictEqual(datastore.baseUrl_, 'apiEndpoint');
});
- describe('with DATASTORE_EMULATOR_HOST environment variable', function() {
+ describe('with DATASTORE_EMULATOR_HOST environment variable', () => {
const DATASTORE_EMULATOR_HOST = 'localhost:9090';
const EXPECTED_BASE_URL = 'localhost';
const EXPECTED_PORT = 9090;
- beforeEach(function() {
+ beforeEach(() => {
setHost(DATASTORE_EMULATOR_HOST);
});
- after(function() {
+ after(() => {
delete process.env.DATASTORE_EMULATOR_HOST;
});
- it('should use the DATASTORE_EMULATOR_HOST env var', function() {
+ it('should use the DATASTORE_EMULATOR_HOST env var', () => {
datastore.determineBaseUrl_();
assert.strictEqual(datastore.baseUrl_, EXPECTED_BASE_URL);
assert.strictEqual(datastore.port_, EXPECTED_PORT);
});
- it('should set customEndpoint_', function() {
+ it('should set customEndpoint_', () => {
datastore.determineBaseUrl_();
assert.strictEqual(datastore.customEndpoint_, true);
});
diff --git a/test/query.ts b/test/query.ts
index 35ead605a..7afa84e42 100644
--- a/test/query.ts
+++ b/test/query.ts
@@ -16,7 +16,7 @@
import * as assert from 'assert';
-describe('Query', function() {
+describe('Query', () => {
const SCOPE = {};
const NAMESPACE = 'Namespace';
const KINDS = 'Kind';
@@ -24,38 +24,37 @@ describe('Query', function() {
const Query = require('../src/query.js').Query;
let query;
- beforeEach(function() {
+ beforeEach(() => {
query = new Query(SCOPE, NAMESPACE, KINDS);
});
- describe('instantiation', function() {
- it('should localize the scope', function() {
+ describe('instantiation', () => {
+ it('should localize the scope', () => {
assert.strictEqual(query.scope, SCOPE);
});
- it('should localize the namespace', function() {
+ it('should localize the namespace', () => {
assert.strictEqual(query.namespace, NAMESPACE);
});
- it('should localize the kind', function() {
+ it('should localize the kind', () => {
assert.strictEqual(query.kinds, KINDS);
});
- it('should use null for all falsy namespace values', function() {
- [
- new Query(SCOPE, '', KINDS),
- new Query(SCOPE, null, KINDS),
- new Query(SCOPE, undefined, KINDS),
- new Query(SCOPE, 0, KINDS),
- new Query(SCOPE, KINDS),
- ].forEach(function(query) {
+ it('should use null for all falsy namespace values', () => {
+ [new Query(SCOPE, '', KINDS),
+ new Query(SCOPE, null, KINDS),
+ new Query(SCOPE, undefined, KINDS),
+ new Query(SCOPE, 0, KINDS),
+ new Query(SCOPE, KINDS),
+ ].forEach((query) => {
assert.strictEqual(query.namespace, null);
});
});
});
- describe('filter', function() {
- it('should support filtering', function() {
+ describe('filter', () => {
+ it('should support filtering', () => {
const now = new Date();
const query = new Query(['kind1']).filter('date', '<=', now);
const filter = query.filters[0];
@@ -65,14 +64,14 @@ describe('Query', function() {
assert.strictEqual(filter.val, now);
});
- it('should recognize all the different operators', function() {
+ it('should recognize all the different operators', () => {
const now = new Date();
const query = new Query(['kind1'])
- .filter('date', '<=', now)
- .filter('name', '=', 'Title')
- .filter('count', '>', 20)
- .filter('size', '<', 10)
- .filter('something', '>=', 11);
+ .filter('date', '<=', now)
+ .filter('name', '=', 'Title')
+ .filter('count', '>', 20)
+ .filter('size', '<', 10)
+ .filter('something', '>=', 11);
assert.strictEqual(query.filters[0].name, 'date');
assert.strictEqual(query.filters[0].op, '<=');
@@ -95,30 +94,27 @@ describe('Query', function() {
assert.strictEqual(query.filters[4].val, 11);
});
- it('should remove any whitespace surrounding the filter name', function() {
+ it('should remove any whitespace surrounding the filter name', () => {
const query = new Query(['kind1']).filter(' count ', '>', 123);
assert.strictEqual(query.filters[0].name, 'count');
});
- it('should remove any whitespace surrounding the operator', function() {
- const query = new Query(['kind1']).filter(
- 'count',
- ' < ',
- 123
- );
+ it('should remove any whitespace surrounding the operator', () => {
+ const query =
+ new Query(['kind1']).filter('count', ' < ', 123);
assert.strictEqual(query.filters[0].op, '<');
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.filter('count', '<', 5);
assert.strictEqual(query, nextQuery);
});
- it('should default the operator to "="', function() {
+ it('should default the operator to "="', () => {
const query = new Query(['kind1']).filter('name', 'Stephen');
const filter = query.filters[0];
@@ -128,8 +124,8 @@ describe('Query', function() {
});
});
- describe('hasAncestor', function() {
- it('should support ancestor filtering', function() {
+ describe('hasAncestor', () => {
+ it('should support ancestor filtering', () => {
const query = new Query(['kind1']).hasAncestor(['kind2', 123]);
assert.strictEqual(query.filters[0].name, '__key__');
@@ -137,7 +133,7 @@ describe('Query', function() {
assert.deepStrictEqual(query.filters[0].val, ['kind2', 123]);
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.hasAncestor(['kind2', 123]);
@@ -145,32 +141,31 @@ describe('Query', function() {
});
});
- describe('order', function() {
- it('should default ordering to ascending', function() {
+ describe('order', () => {
+ it('should default ordering to ascending', () => {
const query = new Query(['kind1']).order('name');
assert.strictEqual(query.orders[0].name, 'name');
assert.strictEqual(query.orders[0].sign, '+');
});
- it('should support ascending order', function() {
+ it('should support ascending order', () => {
const query = new Query(['kind1']).order('name');
assert.strictEqual(query.orders[0].name, 'name');
assert.strictEqual(query.orders[0].sign, '+');
});
- it('should support descending order', function() {
+ it('should support descending order', () => {
const query = new Query(['kind1']).order('count', {descending: true});
assert.strictEqual(query.orders[0].name, 'count');
assert.strictEqual(query.orders[0].sign, '-');
});
- it('should support both ascending and descending', function() {
- const query = new Query(['kind1'])
- .order('name')
- .order('count', {descending: true});
+ it('should support both ascending and descending', () => {
+ const query =
+ new Query(['kind1']).order('name').order('count', {descending: true});
assert.strictEqual(query.orders[0].name, 'name');
assert.strictEqual(query.orders[0].sign, '+');
@@ -178,7 +173,7 @@ describe('Query', function() {
assert.strictEqual(query.orders[1].sign, '-');
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.order('name');
@@ -186,20 +181,20 @@ describe('Query', function() {
});
});
- describe('groupBy', function() {
- it('should store an array of properties to group by', function() {
+ describe('groupBy', () => {
+ it('should store an array of properties to group by', () => {
const query = new Query(['kind1']).groupBy(['name', 'size']);
assert.deepStrictEqual(query.groupByVal, ['name', 'size']);
});
- it('should convert a single property into an array', function() {
+ it('should convert a single property into an array', () => {
const query = new Query(['kind1']).groupBy('name');
assert.deepStrictEqual(query.groupByVal, ['name']);
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.groupBy(['name', 'size']);
@@ -207,20 +202,20 @@ describe('Query', function() {
});
});
- describe('select', function() {
- it('should store an array of properties to select', function() {
+ describe('select', () => {
+ it('should store an array of properties to select', () => {
const query = new Query(['kind1']).select(['name', 'size']);
assert.deepStrictEqual(query.selectVal, ['name', 'size']);
});
- it('should convert a single property into an array', function() {
+ it('should convert a single property into an array', () => {
const query = new Query(['kind1']).select('name');
assert.deepStrictEqual(query.selectVal, ['name']);
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.select(['name', 'size']);
@@ -228,14 +223,14 @@ describe('Query', function() {
});
});
- describe('start', function() {
- it('should capture the starting cursor value', function() {
+ describe('start', () => {
+ it('should capture the starting cursor value', () => {
const query = new Query(['kind1']).start('X');
assert.strictEqual(query.startVal, 'X');
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.start('X');
@@ -243,14 +238,14 @@ describe('Query', function() {
});
});
- describe('end', function() {
- it('should capture the ending cursor value', function() {
+ describe('end', () => {
+ it('should capture the ending cursor value', () => {
const query = new Query(['kind1']).end('Z');
assert.strictEqual(query.endVal, 'Z');
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.end('Z');
@@ -258,14 +253,14 @@ describe('Query', function() {
});
});
- describe('limit', function() {
- it('should capture the number of results to limit to', function() {
+ describe('limit', () => {
+ it('should capture the number of results to limit to', () => {
const query = new Query(['kind1']).limit(20);
assert.strictEqual(query.limitVal, 20);
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.limit(20);
@@ -273,14 +268,14 @@ describe('Query', function() {
});
});
- describe('offset', function() {
- it('should capture the number of results to offset by', function() {
+ describe('offset', () => {
+ it('should capture the number of results to offset by', () => {
const query = new Query(['kind1']).offset(100);
assert.strictEqual(query.offsetVal, 100);
});
- it('should return the query instance', function() {
+ it('should return the query instance', () => {
const query = new Query(['kind1']);
const nextQuery = query.offset(100);
@@ -288,8 +283,8 @@ describe('Query', function() {
});
});
- describe('run', function() {
- it('should call the parent instance runQuery correctly', function(done) {
+ describe('run', () => {
+ it('should call the parent instance runQuery correctly', (done) => {
const args = [0, 1, 2];
query.scope.runQuery = function() {
@@ -305,8 +300,8 @@ describe('Query', function() {
});
});
- describe('runStream', function() {
- it('should call the parent instance runQueryStream correctly', function() {
+ describe('runStream', () => {
+ it('should call the parent instance runQueryStream correctly', () => {
const args = [0, 1, 2];
const runQueryReturnValue = {};
diff --git a/test/request.ts b/test/request.ts
index b81660c66..4cb7fa8c4 100644
--- a/test/request.ts
+++ b/test/request.ts
@@ -14,21 +14,22 @@
* limitations under the License.
*/
+import * as pjy from '@google-cloud/projectify';
+import * as pfy from '@google-cloud/promisify';
import * as assert from 'assert';
import * as extend from 'extend';
import * as is from 'is';
import * as proxyquire from 'proxyquire';
import * as sinon from 'sinon';
import * as through from 'through2';
-import * as pfy from '@google-cloud/promisify';
-import * as pjy from '@google-cloud/projectify';
+
import {entity} from '../src/entity.js';
import {Query} from '../src/query.js';
let promisified = false;
const fakePfy = Object.assign({}, pfy, {
- promisifyAll(Class) {
- if (Class.name === 'DatastoreRequest') {
+ promisifyAll(klass) {
+ if (klass.name === 'DatastoreRequest') {
promisified = true;
}
},
@@ -44,7 +45,7 @@ let v1FakeClientOverride;
const fakeV1 = {
FakeClient: class {
constructor() {
- return (v1FakeClientOverride || function() {}).apply(null, arguments);
+ return (v1FakeClientOverride || (() => {})).apply(null, arguments);
}
},
};
@@ -54,6 +55,7 @@ class FakeQuery extends Query {}
let pjyOverride;
describe('Request', () => {
+ // tslint:disable-next-line variable-name
let Request;
let request;
let key;
@@ -61,12 +63,12 @@ describe('Request', () => {
before(() => {
Request = proxyquire('../src/request', {
- '@google-cloud/promisify': fakePfy,
- '@google-cloud/projectify': fakePjy,
- './entity': {entity},
- './query': {Query: FakeQuery},
- './v1': fakeV1,
- }).DatastoreRequest;
+ '@google-cloud/promisify': fakePfy,
+ '@google-cloud/projectify': fakePjy,
+ './entity': {entity},
+ './query': {Query: FakeQuery},
+ './v1': fakeV1,
+ }).DatastoreRequest;
});
after(() => {
@@ -106,9 +108,7 @@ describe('Request', () => {
assert.notStrictEqual(preparedEntityObject, obj);
assert.notStrictEqual(preparedEntityObject.data.nested, obj.data.nested);
assert.deepStrictEqual(
- preparedEntityObject,
- expectedPreparedEntityObject
- );
+ preparedEntityObject, expectedPreparedEntityObject);
});
it('should format an entity', () => {
@@ -204,7 +204,7 @@ describe('Request', () => {
it('should exec callback with error & API response', done => {
sandbox.stub(entity, 'isKeyComplete');
sandbox.stub(entity, 'keyToKeyProto');
- request.allocateIds(INCOMPLETE_KEY, OPTIONS, function(err, keys, resp) {
+ request.allocateIds(INCOMPLETE_KEY, OPTIONS, (err, keys, resp) => {
assert.strictEqual(err, ERROR);
assert.strictEqual(keys, null);
assert.strictEqual(resp, API_RESPONSE);
@@ -233,7 +233,7 @@ describe('Request', () => {
assert.strictEqual(keyProto, API_RESPONSE.keys[0]);
return key;
});
- request.allocateIds(INCOMPLETE_KEY, OPTIONS, function(err, keys, resp) {
+ request.allocateIds(INCOMPLETE_KEY, OPTIONS, (err, keys, resp) => {
assert.ifError(err);
assert.deepStrictEqual(keys, [key]);
assert.strictEqual(resp, API_RESPONSE);
@@ -268,9 +268,7 @@ describe('Request', () => {
assert.strictEqual(config.client, 'DatastoreClient');
assert.strictEqual(config.method, 'lookup');
assert.deepStrictEqual(
- config.reqOpts.keys[0],
- entity.keyToKeyProto(key)
- );
+ config.reqOpts.keys[0], entity.keyToKeyProto(key));
done();
};
const stream = request.createReadStream(key);
@@ -287,10 +285,7 @@ describe('Request', () => {
done();
};
- request
- .createReadStream(key, options)
- .on('error', done)
- .emit('reading');
+ request.createReadStream(key, options).on('error', done).emit('reading');
});
it('should allow setting strong read consistency', done => {
@@ -299,10 +294,9 @@ describe('Request', () => {
done();
};
- request
- .createReadStream(key, {consistency: 'strong'})
- .on('error', done)
- .emit('reading');
+ request.createReadStream(key, {consistency: 'strong'})
+ .on('error', done)
+ .emit('reading');
});
it('should allow setting strong eventual consistency', done => {
@@ -311,10 +305,9 @@ describe('Request', () => {
done();
};
- request
- .createReadStream(key, {consistency: 'eventual'})
- .on('error', done)
- .emit('reading');
+ request.createReadStream(key, {consistency: 'eventual'})
+ .on('error', done)
+ .emit('reading');
});
describe('error', () => {
@@ -330,13 +323,10 @@ describe('Request', () => {
});
it('should emit error', done => {
- request
- .createReadStream(key)
- .on('data', () => {})
- .on('error', err => {
- assert.strictEqual(err, error);
- done();
- });
+ request.createReadStream(key).on('data', () => {}).on('error', err => {
+ assert.strictEqual(err, error);
+ done();
+ });
});
it('should end stream', done => {
@@ -426,10 +416,7 @@ describe('Request', () => {
return arr;
});
- request
- .createReadStream(key)
- .on('error', done)
- .emit('reading');
+ request.createReadStream(key).on('error', done).emit('reading');
});
it('should continue looking for deferred results', done => {
@@ -443,29 +430,26 @@ describe('Request', () => {
return;
}
- const expectedKeys = apiResponseWithDeferred.deferred
- .map(entity.keyFromKeyProto)
- .map(entity.keyToKeyProto);
+ const expectedKeys =
+ apiResponseWithDeferred.deferred.map(entity.keyFromKeyProto)
+ .map(entity.keyToKeyProto);
assert.deepStrictEqual(config.reqOpts.keys, expectedKeys);
done();
};
- request
- .createReadStream(key)
- .on('error', done)
- .emit('reading');
+ request.createReadStream(key).on('error', done).emit('reading');
});
it('should push results to the stream', done => {
- request
- .createReadStream(key)
- .on('error', done)
- .on('data', entity => {
- assert.deepStrictEqual(entity, expectedResult);
- })
- .on('end', done)
- .emit('reading');
+ request.createReadStream(key)
+ .on('error', done)
+ .on('data',
+ entity => {
+ assert.deepStrictEqual(entity, expectedResult);
+ })
+ .on('end', done)
+ .emit('reading');
});
it('should not push more results if stream was ended', done => {
@@ -478,15 +462,18 @@ describe('Request', () => {
};
const stream = request.createReadStream([key, key]);
- stream.on('data', () => {
- entitiesEmitted++;
- stream.end();
- })
- .on('end', () => {
- assert.strictEqual(entitiesEmitted, 1);
- done();
- })
- .emit('reading');
+ stream
+ .on('data',
+ () => {
+ entitiesEmitted++;
+ stream.end();
+ })
+ .on('end',
+ () => {
+ assert.strictEqual(entitiesEmitted, 1);
+ done();
+ })
+ .emit('reading');
});
it('should not get more results if stream was ended', done => {
@@ -500,14 +487,14 @@ describe('Request', () => {
};
const stream = request.createReadStream(key);
- stream
- .on('error', done)
- .on('data', () => stream.end())
- .on('end', () => {
- assert.strictEqual(lookupCount, 1);
- done();
- })
- .emit('reading');
+ stream.on('error', done)
+ .on('data', () => stream.end())
+ .on('end',
+ () => {
+ assert.strictEqual(lookupCount, 1);
+ done();
+ })
+ .emit('reading');
});
});
});
@@ -668,7 +655,7 @@ describe('Request', () => {
});
it('should pass the correct arguments to save', done => {
- request.save = function(entities, callback) {
+ request.save = (entities, callback) => {
assert.deepStrictEqual(JSON.parse(JSON.stringify(entities)), [
{
key: {
@@ -703,10 +690,7 @@ describe('Request', () => {
done();
});
- request
- .runQueryStream(query)
- .on('error', done)
- .emit('reading');
+ request.runQueryStream(query).on('error', done).emit('reading');
});
it('should make correct request when the stream is ready', done => {
@@ -721,18 +705,13 @@ describe('Request', () => {
assert(is.empty(config.reqOpts.readOptions));
assert.strictEqual(config.reqOpts.query, queryProto);
assert.strictEqual(
- config.reqOpts.partitionId.namespaceId,
- query.namespace
- );
+ config.reqOpts.partitionId.namespaceId, query.namespace);
assert.strictEqual(config.gaxOpts, undefined);
done();
};
- request
- .runQueryStream(query)
- .on('error', done)
- .emit('reading');
+ request.runQueryStream(query).on('error', done).emit('reading');
});
it('should allow customization of GAX options', done => {
@@ -746,10 +725,7 @@ describe('Request', () => {
done();
};
- request
- .runQueryStream({}, options)
- .on('error', done)
- .emit('reading');
+ request.runQueryStream({}, options).on('error', done).emit('reading');
});
it('should allow setting strong read consistency', done => {
@@ -759,10 +735,9 @@ describe('Request', () => {
done();
};
- request
- .runQueryStream({}, {consistency: 'strong'})
- .on('error', done)
- .emit('reading');
+ request.runQueryStream({}, {consistency: 'strong'})
+ .on('error', done)
+ .emit('reading');
});
it('should allow setting strong eventual consistency', done => {
@@ -772,10 +747,9 @@ describe('Request', () => {
done();
};
- request
- .runQueryStream({}, {consistency: 'eventual'})
- .on('error', done)
- .emit('reading');
+ request.runQueryStream({}, {consistency: 'eventual'})
+ .on('error', done)
+ .emit('reading');
});
describe('error', () => {
@@ -789,13 +763,13 @@ describe('Request', () => {
it('should emit error on a stream', done => {
sandbox.stub(entity, 'queryToQueryProto');
- request
- .runQueryStream({})
- .on('error', err => {
- assert.strictEqual(err, error);
- done();
- })
- .emit('reading');
+ request.runQueryStream({})
+ .on('error',
+ err => {
+ assert.strictEqual(err, error);
+ done();
+ })
+ .emit('reading');
});
});
@@ -820,11 +794,10 @@ describe('Request', () => {
callback(null, apiResponse);
};
- formatArrayStub = sandbox
- .stub(entity, 'formatArray')
- .callsFake(array => {
- return array;
- });
+ formatArrayStub =
+ sandbox.stub(entity, 'formatArray').callsFake(array => {
+ return array;
+ });
});
it('should format results', done => {
@@ -837,14 +810,13 @@ describe('Request', () => {
const entities: Array<{}> = [];
- request
- .runQueryStream({})
- .on('error', done)
- .on('data', entity => entities.push(entity))
- .on('end', () => {
- assert.deepStrictEqual(entities, apiResponse.batch.entityResults);
- done();
- });
+ request.runQueryStream({})
+ .on('error', done)
+ .on('data', entity => entities.push(entity))
+ .on('end', () => {
+ assert.deepStrictEqual(entities, apiResponse.batch.entityResults);
+ done();
+ });
});
it('should re-run query if not finished', done => {
@@ -865,9 +837,7 @@ describe('Request', () => {
formatArrayStub.restore();
sandbox.stub(entity, 'formatArray').callsFake(array => {
assert.strictEqual(
- array,
- entityResultsPerApiCall[timesRequestCalled]
- );
+ array, entityResultsPerApiCall[timesRequestCalled]);
return entityResultsPerApiCall[timesRequestCalled];
});
@@ -876,7 +846,7 @@ describe('Request', () => {
const resp = extend(true, {}, apiResponse);
resp.batch.entityResults =
- entityResultsPerApiCall[timesRequestCalled];
+ entityResultsPerApiCall[timesRequestCalled];
if (timesRequestCalled === 1) {
assert.strictEqual(config.client, 'DatastoreClient');
@@ -894,9 +864,7 @@ describe('Request', () => {
FakeQuery.prototype.start = function(endCursor) {
assert.strictEqual(
- endCursor,
- apiResponse.batch.endCursor.toString('base64')
- );
+ endCursor, apiResponse.batch.endCursor.toString('base64'));
startCalled = true;
return this;
};
@@ -910,9 +878,7 @@ describe('Request', () => {
sandbox.stub(FakeQuery.prototype, 'limit').callsFake(limit_ => {
if (timesRequestCalled === 1) {
assert.strictEqual(
- limit_,
- entityResultsPerApiCall[1].length - query.limitVal
- );
+ limit_, entityResultsPerApiCall[1].length - query.limitVal);
} else {
// Should restore the original limit.
assert.strictEqual(limit_, query.limitVal);
@@ -929,29 +895,30 @@ describe('Request', () => {
const entities: Array<{}> = [];
let info;
- request
- .runQueryStream(query)
- .on('error', done)
- .on('info', function(_info) {
- info = _info;
- })
- .on('data', function(entity) {
- entities.push(entity);
- })
- .on('end', () => {
- const allResults = ([] as Array<{}>).slice
- .call(entityResultsPerApiCall[1])
- .concat(entityResultsPerApiCall[2]);
-
- assert.deepStrictEqual(entities, allResults);
-
- assert.deepStrictEqual(info, {
- endCursor: apiResponse.batch.endCursor.toString('base64'),
- moreResults: apiResponse.batch.moreResults,
+ request.runQueryStream(query)
+ .on('error', done)
+ .on('info',
+ (_info) => {
+ info = _info;
+ })
+ .on('data',
+ (entity) => {
+ entities.push(entity);
+ })
+ .on('end', () => {
+ const allResults = ([] as Array<{}>)
+ .slice.call(entityResultsPerApiCall[1])
+ .concat(entityResultsPerApiCall[2]);
+
+ assert.deepStrictEqual(entities, allResults);
+
+ assert.deepStrictEqual(info, {
+ endCursor: apiResponse.batch.endCursor.toString('base64'),
+ moreResults: apiResponse.batch.moreResults,
+ });
+
+ done();
});
-
- done();
- });
});
it('should handle large limitless queries', done => {
@@ -980,15 +947,14 @@ describe('Request', () => {
limitCalled = true;
});
- request
- .runQueryStream(query)
- .on('error', done)
- .on('data', () => {})
- .on('end', () => {
- assert.strictEqual(timesRequestCalled, 2);
- assert.strictEqual(limitCalled, false);
- done();
- });
+ request.runQueryStream(query)
+ .on('error', done)
+ .on('data', () => {})
+ .on('end', () => {
+ assert.strictEqual(timesRequestCalled, 2);
+ assert.strictEqual(limitCalled, false);
+ done();
+ });
});
it('should not push more results if stream was ended', done => {
@@ -1002,7 +968,7 @@ describe('Request', () => {
const resp = extend(true, {}, apiResponse);
resp.batch.entityResults =
- entityResultsPerApiCall[timesRequestCalled];
+ entityResultsPerApiCall[timesRequestCalled];
if (timesRequestCalled === 1) {
resp.batch.moreResults = 'NOT_FINISHED';
@@ -1013,16 +979,16 @@ describe('Request', () => {
}
};
- const stream = request
- .runQueryStream({})
- .on('data', () => {
- entitiesEmitted++;
- stream.end();
- })
- .on('end', () => {
- assert.strictEqual(entitiesEmitted, 1);
- done();
- });
+ const stream = request.runQueryStream({})
+ .on('data',
+ () => {
+ entitiesEmitted++;
+ stream.end();
+ })
+ .on('end', () => {
+ assert.strictEqual(entitiesEmitted, 1);
+ done();
+ });
});
it('should not get more results if stream was ended', done => {
@@ -1034,13 +1000,12 @@ describe('Request', () => {
};
const stream = request.runQueryStream({});
- stream
- .on('error', done)
- .on('data', () => stream.end())
- .on('end', () => {
- assert.strictEqual(timesRequestCalled, 1);
- done();
- });
+ stream.on('error', done)
+ .on('data', () => stream.end())
+ .on('end', () => {
+ assert.strictEqual(timesRequestCalled, 1);
+ done();
+ });
});
});
});
@@ -1059,7 +1024,7 @@ describe('Request', () => {
setImmediate(() => {
stream.emit('info', fakeInfo);
- fakeEntities.forEach(function(entity) {
+ fakeEntities.forEach((entity) => {
stream.push(entity);
});
@@ -1073,7 +1038,7 @@ describe('Request', () => {
it('should return an array of entities', done => {
const options = {};
- request.runQuery(query, options, function(err, entities, info) {
+ request.runQuery(query, options, (err, entities, info) => {
assert.ifError(err);
assert.deepStrictEqual(entities, fakeEntities);
assert.strictEqual(info, fakeInfo);
@@ -1183,10 +1148,7 @@ describe('Request', () => {
callback();
};
- request.save(
- [{key, data: {k: 'v'}}, {key, data: {k: 'v'}}],
- done
- );
+ request.save([{key, data: {k: 'v'}}, {key, data: {k: 'v'}}], done);
});
it('should allow customization of GAX options', done => {
@@ -1198,13 +1160,11 @@ describe('Request', () => {
};
request.save(
- {
- key,
- data: {},
- },
- gaxOptions,
- assert.ifError
- );
+ {
+ key,
+ data: {},
+ },
+ gaxOptions, assert.ifError);
});
it('should prepare entity objects', done => {
@@ -1249,27 +1209,25 @@ describe('Request', () => {
};
request.save(
- [
- {key, method: 'insert', data: {k: 'v'}},
- {key, method: 'update', data: {k2: 'v2'}},
- {key, method: 'upsert', data: {k3: 'v3'}},
- ],
- done
- );
+ [
+ {key, method: 'insert', data: {k: 'v'}},
+ {key, method: 'update', data: {k2: 'v2'}},
+ {key, method: 'upsert', data: {k3: 'v3'}},
+ ],
+ done);
});
it('should throw if a given method is not recognized', () => {
assert.throws(() => {
request.save(
- {
- key,
- method: 'auto_insert_id',
- data: {
- k: 'v',
+ {
+ key,
+ method: 'auto_insert_id',
+ data: {
+ k: 'v',
+ },
},
- },
- assert.ifError
- );
+ assert.ifError);
}, /Method auto_insert_id not recognized/);
});
@@ -1305,7 +1263,7 @@ describe('Request', () => {
request.request_ = (config, callback) => {
callback(null, mockCommitResponse);
};
- request.save({key, data: {}}, function(err, apiResponse) {
+ request.save({key, data: {}}, (err, apiResponse) => {
assert.ifError(err);
assert.strictEqual(mockCommitResponse, apiResponse);
done();
@@ -1321,25 +1279,24 @@ describe('Request', () => {
};
request.save(
- {
- key,
- data: [
- {
- name: 'name',
- value: 'value',
- excludeFromIndexes: true,
- },
- ],
- },
- assert.ifError
- );
+ {
+ key,
+ data: [
+ {
+ name: 'name',
+ value: 'value',
+ excludeFromIndexes: true,
+ },
+ ],
+ },
+ assert.ifError);
});
it('should allow setting the indexed value on arrays', done => {
request.request_ = config => {
const property = config.reqOpts.mutations[0].upsert.properties.name;
- property.arrayValue.values.forEach(function(value) {
+ property.arrayValue.values.forEach((value) => {
assert.strictEqual(value.excludeFromIndexes, true);
});
@@ -1347,18 +1304,17 @@ describe('Request', () => {
};
request.save(
- {
- key,
- data: [
- {
- name: 'name',
- value: ['one', 'two', 'three'],
- excludeFromIndexes: true,
- },
- ],
- },
- assert.ifError
- );
+ {
+ key,
+ data: [
+ {
+ name: 'name',
+ value: ['one', 'two', 'three'],
+ excludeFromIndexes: true,
+ },
+ ],
+ },
+ assert.ifError);
});
it('should assign ID on keys without them', done => {
@@ -1393,24 +1349,23 @@ describe('Request', () => {
});
request.save(
- [
- {key: incompleteKey, data: {}},
- {key: incompleteKey2, data: {}},
- {key: completeKey, data: {}},
- ],
- err => {
- assert.ifError(err);
+ [
+ {key: incompleteKey, data: {}},
+ {key: incompleteKey2, data: {}},
+ {key: completeKey, data: {}},
+ ],
+ err => {
+ assert.ifError(err);
- assert.strictEqual(incompleteKey.id, ids[0]);
- assert.strictEqual(incompleteKey2.id, ids[1]);
+ assert.strictEqual(incompleteKey.id, ids[0]);
+ assert.strictEqual(incompleteKey2.id, ids[1]);
- assert.strictEqual(keyProtos.length, 2);
- assert.strictEqual(keyProtos[0], response.mutationResults[0].key);
- assert.strictEqual(keyProtos[1], response.mutationResults[1].key);
+ assert.strictEqual(keyProtos.length, 2);
+ assert.strictEqual(keyProtos[0], response.mutationResults[0].key);
+ assert.strictEqual(keyProtos[1], response.mutationResults[1].key);
- done();
- }
- );
+ done();
+ });
});
describe('transactions', () => {
@@ -1455,7 +1410,7 @@ describe('Request', () => {
});
it('should pass the correct arguments to save', done => {
- request.save = function(entities, callback) {
+ request.save = (entities, callback) => {
assert.deepStrictEqual(JSON.parse(JSON.stringify(entities)), [
{
key: {
@@ -1497,7 +1452,7 @@ describe('Request', () => {
});
it('should pass the correct arguments to save', done => {
- request.save = function(entities, callback) {
+ request.save = (entities, callback) => {
assert.deepStrictEqual(JSON.parse(JSON.stringify(entities)), [
{
key: {
@@ -1520,7 +1475,7 @@ describe('Request', () => {
describe('request_', () => {
const CONFIG = {
- client: 'FakeClient', // name set at top of file
+ client: 'FakeClient', // name set at top of file
method: 'method',
reqOpts: {
a: 'b',
@@ -1559,7 +1514,7 @@ describe('Request', () => {
it('should return error if getting project ID failed', done => {
const error = new Error('Error.');
- request.datastore.auth.getProjectId = function(callback) {
+ request.datastore.auth.getProjectId = (callback) => {
callback(error);
};
request.request_(CONFIG, err => {
@@ -1573,8 +1528,8 @@ describe('Request', () => {
[CONFIG.method]() {},
};
v1FakeClientOverride = (options) => {
- assert.deepStrictEqual(options, request.datastore.options);
- return fakeClient;
+ assert.deepStrictEqual(options, request.datastore.options);
+ return fakeClient;
};
request.datastore.clients_ = new Map();
request.request_(CONFIG, assert.ifError);
@@ -1598,7 +1553,7 @@ describe('Request', () => {
const expectedReqOpts: any = Object.assign({}, CONFIG.reqOpts);
expectedReqOpts.projectId = request.projectId;
- pjyOverride = function(reqOpts, projectId) {
+ pjyOverride = (reqOpts, projectId) => {
assert.notStrictEqual(reqOpts, CONFIG.reqOpts);
assert.deepStrictEqual(reqOpts, expectedReqOpts);
assert.strictEqual(projectId, PROJECT_ID);
diff --git a/test/transaction.ts b/test/transaction.ts
index 09dd4b62f..1cce80102 100644
--- a/test/transaction.ts
+++ b/test/transaction.ts
@@ -14,17 +14,17 @@
* limitations under the License.
*/
+import * as pfy from '@google-cloud/promisify';
import * as arrify from 'arrify';
import * as assert from 'assert';
import * as proxyquire from 'proxyquire';
-import * as pfy from '@google-cloud/promisify';
const {entity} = require('../src/entity');
let promisified = false;
const fakePfy = Object.assign({}, pfy, {
- promisifyAll(Class, options) {
- if (Class.name !== 'Transaction') {
+ promisifyAll(klass, options) {
+ if (klass.name !== 'Transaction') {
return;
}
promisified = true;
@@ -32,9 +32,9 @@ const fakePfy = Object.assign({}, pfy, {
},
});
-// tslint:disable-next-line no-any
+// tslint:disable-next-line no-any variable-name
const DatastoreRequestOverride: any = {
- delete() {},
+ delete () {},
save() {},
};
@@ -42,19 +42,20 @@ class FakeDatastoreRequest {
delete() {
const args = [].slice.apply(arguments);
const results = DatastoreRequestOverride.delete.apply(null, args);
- DatastoreRequestOverride.delete = function() {};
+ DatastoreRequestOverride.delete = () => {};
return results;
}
save() {
const args = [].slice.apply(arguments);
const results = DatastoreRequestOverride.save.apply(null, args);
- DatastoreRequestOverride.save = function() {};
+ DatastoreRequestOverride.save = () => {};
return results;
}
}
-describe('Transaction', function() {
+describe('Transaction', () => {
+ // tslint:disable-next-line variable-name
let Transaction;
let transaction;
const TRANSACTION_ID = 'transaction-id';
@@ -71,35 +72,35 @@ describe('Transaction', function() {
return new entity.Key({path: arrify(path)});
}
- before(function() {
+ before(() => {
Transaction = proxyquire('../src/transaction.js', {
- '@google-cloud/promisify': fakePfy,
- './request.js': {DatastoreRequest: FakeDatastoreRequest},
- }).Transaction;
+ '@google-cloud/promisify': fakePfy,
+ './request.js': {DatastoreRequest: FakeDatastoreRequest},
+ }).Transaction;
});
- beforeEach(function() {
+ beforeEach(() => {
transaction = new Transaction(DATASTORE);
});
- describe('instantiation', function() {
- it('should promisify all the things', function() {
+ describe('instantiation', () => {
+ it('should promisify all the things', () => {
assert(promisified);
});
- it('should localize the datastore instance', function() {
+ it('should localize the datastore instance', () => {
assert.strictEqual(transaction.datastore, DATASTORE);
});
- it('should localize the project ID', function() {
+ it('should localize the project ID', () => {
assert.strictEqual(transaction.projectId, PROJECT_ID);
});
- it('should localize the namespace', function() {
+ it('should localize the namespace', () => {
assert.strictEqual(transaction.namespace, NAMESPACE);
});
- it('should localize the transaction ID', function() {
+ it('should localize the transaction ID', () => {
const options = {
id: 'transaction-id',
};
@@ -108,7 +109,7 @@ describe('Transaction', function() {
assert.strictEqual(transaction.id, options.id);
});
- it('should localize readOnly', function() {
+ it('should localize readOnly', () => {
const options = {
readOnly: true,
};
@@ -117,14 +118,14 @@ describe('Transaction', function() {
assert.strictEqual(transaction.readOnly, true);
});
- it('should localize request function', function(done) {
+ it('should localize request function', done => {
// tslint:disable-next-line no-any
const fakeDataset: any = {
request_: {
bind(context) {
assert.strictEqual(context, fakeDataset);
- setImmediate(function() {
+ setImmediate(() => {
assert.strictEqual(transaction.request, fakeDataset.request);
done();
});
@@ -137,20 +138,20 @@ describe('Transaction', function() {
const transaction = new Transaction(fakeDataset);
});
- it('should localize default properties', function() {
+ it('should localize default properties', () => {
assert.deepStrictEqual(transaction.modifiedEntities_, []);
assert.deepStrictEqual(transaction.requestCallbacks_, []);
assert.deepStrictEqual(transaction.requests_, []);
});
});
- describe('commit', function() {
- beforeEach(function() {
+ describe('commit', () => {
+ beforeEach(() => {
transaction.id = TRANSACTION_ID;
});
- it('should commit', function(done) {
- transaction.request_ = function(config) {
+ it('should commit', done => {
+ transaction.request_ = config => {
assert.strictEqual(config.client, 'DatastoreClient');
assert.strictEqual(config.method, 'commit');
assert.strictEqual(config.gaxOptions, undefined);
@@ -159,10 +160,10 @@ describe('Transaction', function() {
transaction.commit();
});
- it('should accept gaxOptions', function(done) {
+ it('should accept gaxOptions', done => {
const gaxOptions = {};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.strictEqual(config.gaxOpts, gaxOptions);
done();
};
@@ -170,7 +171,7 @@ describe('Transaction', function() {
transaction.commit(gaxOptions);
});
- it('should skip the commit', function(done) {
+ it('should skip the commit', done => {
transaction.skipCommit = true;
// If called, the test will blow up.
@@ -179,25 +180,25 @@ describe('Transaction', function() {
transaction.commit(done);
});
- describe('errors', function() {
+ describe('errors', () => {
const error = new Error('Error.');
const apiResponse = {};
const rollbackError = new Error('Error.');
const rollbackApiResponse = {};
- beforeEach(function() {
- transaction.rollback = function(callback) {
+ beforeEach(() => {
+ transaction.rollback = (callback) => {
callback(rollbackError, rollbackApiResponse);
};
- transaction.request_ = function(config, callback) {
+ transaction.request_ = (config, callback) => {
callback(error, apiResponse);
};
});
- it('should pass the commit error to the callback', function(done) {
- transaction.commit(function(err, resp) {
+ it('should pass the commit error to the callback', done => {
+ transaction.commit((err, resp) => {
assert.strictEqual(err, error);
assert.strictEqual(resp, apiResponse);
done();
@@ -205,19 +206,19 @@ describe('Transaction', function() {
});
});
- it('should pass apiResponse to callback', function(done) {
+ it('should pass apiResponse to callback', done => {
const resp = {success: true};
- transaction.request_ = function(config, callback) {
+ transaction.request_ = (config, callback) => {
callback(null, resp);
};
- transaction.commit(function(err, apiResponse) {
+ transaction.commit((err, apiResponse) => {
assert.ifError(err);
assert.deepStrictEqual(resp, apiResponse);
done();
});
});
- it('should group mutations & execute original methods', function() {
+ it('should group mutations & execute original methods', () => {
const deleteArg1 = key(['Product', 123]);
const deleteArg2 = key(['Product', 234]);
@@ -233,18 +234,18 @@ describe('Transaction', function() {
const args: Array<{}> = [];
let deleteCalled = 0;
- DatastoreRequestOverride.delete = function() {
- args.push(arguments[0]);
+ DatastoreRequestOverride.delete = (a) => {
+ args.push(a);
deleteCalled++;
};
let saveCalled = 0;
- DatastoreRequestOverride.save = function() {
- args.push(arguments[0]);
+ DatastoreRequestOverride.save = (a) => {
+ args.push(a);
saveCalled++;
};
- transaction.request_ = function() {};
+ transaction.request_ = () => {};
transaction.commit();
@@ -260,43 +261,43 @@ describe('Transaction', function() {
]);
});
- it('should honor ordering of mutations (last wins)', function() {
+ it('should honor ordering of mutations (last wins)', () => {
// The delete should be ignored.
transaction.delete(key(['Product', 123]));
transaction.save({key: key(['Product', 123]), data: ''});
let deleteCalled = 0;
- DatastoreRequestOverride.delete = function() {
+ DatastoreRequestOverride.delete = () => {
deleteCalled++;
};
let saveCalled = 0;
- DatastoreRequestOverride.save = function() {
+ DatastoreRequestOverride.save = () => {
saveCalled++;
};
- transaction.request_ = function() {};
+ transaction.request_ = () => {};
transaction.commit();
assert.strictEqual(deleteCalled, 0);
assert.strictEqual(saveCalled, 1);
});
- it('should not squash key-incomplete mutations', function(done) {
+ it('should not squash key-incomplete mutations', done => {
transaction.save({key: key(['Product']), data: ''});
transaction.save({key: key(['Product']), data: ''});
- DatastoreRequestOverride.save = function(entities) {
+ DatastoreRequestOverride.save = (entities) => {
assert.strictEqual(entities.length, 2);
done();
};
- transaction.request_ = function() {};
+ transaction.request_ = () => {};
transaction.commit();
});
- it('should send the built request object', function(done) {
+ it('should send the built request object', done => {
transaction.requests_ = [
{
mutations: [{a: 'b'}, {c: 'd'}],
@@ -306,7 +307,7 @@ describe('Transaction', function() {
},
];
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(config.reqOpts, {
mutations: [{a: 'b'}, {c: 'd'}, {e: 'f'}, {g: 'h'}],
});
@@ -316,20 +317,20 @@ describe('Transaction', function() {
transaction.commit();
});
- it('should execute the queued callbacks', function() {
+ it('should execute the queued callbacks', () => {
let cb1Called = false;
let cb2Called = false;
transaction.requestCallbacks_ = [
- function() {
+ () => {
cb1Called = true;
},
- function() {
+ () => {
cb2Called = true;
},
];
- transaction.request_ = function(config, cb) {
+ transaction.request_ = (config, cb) => {
cb();
};
@@ -340,8 +341,8 @@ describe('Transaction', function() {
});
});
- describe('createQuery', function() {
- it('should return query from datastore.createQuery', function() {
+ describe('createQuery', () => {
+ it('should return query from datastore.createQuery', () => {
const args = [0, 1];
const createQueryReturnValue = {};
@@ -357,8 +358,8 @@ describe('Transaction', function() {
});
});
- describe('delete', function() {
- it('should push entities into a queue', function() {
+ describe('delete', () => {
+ it('should push entities into a queue', () => {
const keys = [
key('Product123'),
key('Product234'),
@@ -369,7 +370,7 @@ describe('Transaction', function() {
assert.strictEqual(transaction.modifiedEntities_.length, keys.length);
- transaction.modifiedEntities_.forEach(function(queuedEntity) {
+ transaction.modifiedEntities_.forEach((queuedEntity) => {
assert.strictEqual(queuedEntity.method, 'delete');
assert(keys.indexOf(queuedEntity.entity.key) > -1);
assert.deepStrictEqual(queuedEntity.args, [queuedEntity.entity.key]);
@@ -377,13 +378,13 @@ describe('Transaction', function() {
});
});
- describe('rollback', function() {
- beforeEach(function() {
+ describe('rollback', () => {
+ beforeEach(() => {
transaction.id = TRANSACTION_ID;
});
- it('should rollback', function(done) {
- transaction.request_ = function(config) {
+ it('should rollback', done => {
+ transaction.request_ = config => {
assert.strictEqual(config.client, 'DatastoreClient');
assert.strictEqual(config.method, 'rollback');
assert.strictEqual(config.gaxOptions, undefined);
@@ -392,10 +393,10 @@ describe('Transaction', function() {
transaction.rollback();
});
- it('should allow setting gaxOptions', function(done) {
+ it('should allow setting gaxOptions', done => {
const gaxOptions = {};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.strictEqual(config.gaxOpts, gaxOptions);
done();
};
@@ -403,53 +404,53 @@ describe('Transaction', function() {
transaction.rollback(gaxOptions);
});
- it('should pass error to callback', function(done) {
+ it('should pass error to callback', done => {
const error = new Error('Error.');
- transaction.request_ = function(config, callback) {
+ transaction.request_ = (config, callback) => {
callback(error);
};
- transaction.rollback(function(err) {
+ transaction.rollback((err) => {
assert.deepStrictEqual(err, error);
done();
});
});
- it('should pass apiResponse to callback', function(done) {
+ it('should pass apiResponse to callback', done => {
const resp = {success: true};
- transaction.request_ = function(config, callback) {
+ transaction.request_ = (config, callback) => {
callback(null, resp);
};
- transaction.rollback(function(err, apiResponse) {
+ transaction.rollback((err, apiResponse) => {
assert.ifError(err);
assert.deepStrictEqual(resp, apiResponse);
done();
});
});
- it('should set skipCommit', function(done) {
- transaction.request_ = function(config, callback) {
+ it('should set skipCommit', done => {
+ transaction.request_ = (config, callback) => {
callback();
};
- transaction.rollback(function() {
+ transaction.rollback(() => {
assert.strictEqual(transaction.skipCommit, true);
done();
});
});
- it('should set skipCommit when rollback errors', function(done) {
- transaction.request_ = function(config, callback) {
+ it('should set skipCommit when rollback errors', done => {
+ transaction.request_ = (config, callback) => {
callback(new Error('Error.'));
};
- transaction.rollback(function() {
+ transaction.rollback(() => {
assert.strictEqual(transaction.skipCommit, true);
done();
});
});
});
- describe('run', function() {
- it('should make the correct API request', function(done) {
- transaction.request_ = function(config) {
+ describe('run', () => {
+ it('should make the correct API request', done => {
+ transaction.request_ = config => {
assert.strictEqual(config.client, 'DatastoreClient');
assert.strictEqual(config.method, 'beginTransaction');
assert.deepStrictEqual(config.reqOpts, {transactionOptions: {}});
@@ -460,10 +461,10 @@ describe('Transaction', function() {
transaction.run(assert.ifError);
});
- it('should allow setting gaxOptions', function(done) {
+ it('should allow setting gaxOptions', done => {
const gaxOptions = {};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.strictEqual(config.gaxOpts, gaxOptions);
done();
};
@@ -471,31 +472,27 @@ describe('Transaction', function() {
transaction.run({gaxOptions});
});
- describe('options.readOnly', function() {
- it('should respect the readOnly option', function(done) {
+ describe('options.readOnly', () => {
+ it('should respect the readOnly option', done => {
const options = {
readOnly: true,
};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(
- config.reqOpts.transactionOptions.readOnly,
- {}
- );
+ config.reqOpts.transactionOptions.readOnly, {});
done();
};
transaction.run(options, assert.ifError);
});
- it('should respect the global readOnly option', function(done) {
+ it('should respect the global readOnly option', done => {
transaction.readOnly = true;
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(
- config.reqOpts.transactionOptions.readOnly,
- {}
- );
+ config.reqOpts.transactionOptions.readOnly, {});
done();
};
@@ -503,13 +500,13 @@ describe('Transaction', function() {
});
});
- describe('options.transactionId', function() {
- it('should respect the transactionId option', function(done) {
+ describe('options.transactionId', () => {
+ it('should respect the transactionId option', done => {
const options = {
transactionId: 'transaction-id',
};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(config.reqOpts.transactionOptions.readWrite, {
previousTransaction: options.transactionId,
});
@@ -519,10 +516,10 @@ describe('Transaction', function() {
transaction.run(options, assert.ifError);
});
- it('should respect the global transactionId option', function(done) {
+ it('should respect the global transactionId option', done => {
transaction.id = 'transaction-id';
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(config.reqOpts.transactionOptions.readWrite, {
previousTransaction: transaction.id,
});
@@ -533,8 +530,8 @@ describe('Transaction', function() {
});
});
- describe('options.transactionOptions', function() {
- it('should allow full override of transactionOptions', function(done) {
+ describe('options.transactionOptions', () => {
+ it('should allow full override of transactionOptions', done => {
transaction.readOnly = true;
const options = {
@@ -545,7 +542,7 @@ describe('Transaction', function() {
},
};
- transaction.request_ = function(config) {
+ transaction.request_ = config => {
assert.deepStrictEqual(config.reqOpts, options);
done();
};
@@ -554,18 +551,18 @@ describe('Transaction', function() {
});
});
- describe('error', function() {
+ describe('error', () => {
const error = new Error('Error.');
const apiResponse = {};
- beforeEach(function() {
- transaction.request_ = function(config, callback) {
+ beforeEach(() => {
+ transaction.request_ = (config, callback) => {
callback(error, apiResponse);
};
});
- it('should pass error & API response to callback', function(done) {
- transaction.run(function(err, transaction, apiResponse_) {
+ it('should pass error & API response to callback', done => {
+ transaction.run((err, transaction, apiResponse_) => {
assert.strictEqual(err, error);
assert.strictEqual(transaction, null);
assert.strictEqual(apiResponse_, apiResponse);
@@ -574,29 +571,29 @@ describe('Transaction', function() {
});
});
- describe('success', function() {
+ describe('success', () => {
const apiResponse = {
transaction: TRANSACTION_ID,
};
- beforeEach(function() {
- transaction.request_ = function(config, callback) {
+ beforeEach(() => {
+ transaction.request_ = (config, callback) => {
callback(null, apiResponse);
};
});
- it('should set transaction id', function(done) {
+ it('should set transaction id', done => {
delete transaction.id;
- transaction.run(function(err) {
+ transaction.run((err) => {
assert.ifError(err);
assert.strictEqual(transaction.id, TRANSACTION_ID);
done();
});
});
- it('should exec callback with Transaction & apiResponse', function(done) {
- transaction.run(function(err, transaction_, apiResponse_) {
+ it('should exec callback with Transaction & apiResponse', done => {
+ transaction.run((err, transaction_, apiResponse_) => {
assert.ifError(err);
assert.strictEqual(transaction_, transaction);
assert.deepStrictEqual(apiResponse_, apiResponse);
@@ -606,8 +603,8 @@ describe('Transaction', function() {
});
});
- describe('save', function() {
- it('should push entities into a queue', function() {
+ describe('save', () => {
+ it('should push entities into a queue', () => {
const entities = [
{key: key('Product123'), data: 123},
{key: key('Product234'), data: 234},
@@ -618,10 +615,10 @@ describe('Transaction', function() {
assert.strictEqual(transaction.modifiedEntities_.length, entities.length);
- transaction.modifiedEntities_.forEach(function(queuedEntity) {
+ transaction.modifiedEntities_.forEach((queuedEntity) => {
assert.strictEqual(queuedEntity.method, 'save');
- const match = entities.filter(function(ent) {
+ const match = entities.filter((ent) => {
return ent.key === queuedEntity.entity.key;
})[0];