Skip to content

Commit

Permalink
Merge pull request #897 from stephenplusplus/spp--datastore-v1beta3
Browse files Browse the repository at this point in the history
datastore: update to v1beta3
  • Loading branch information
callmehiphop committed Apr 4, 2016
2 parents b299aca + 83cf80f commit 0ebc3b6
Show file tree
Hide file tree
Showing 30 changed files with 3,294 additions and 4,020 deletions.
2 changes: 2 additions & 0 deletions .jshintignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
system-test/data/*
test/testdata/*
14 changes: 8 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -136,33 +136,35 @@ var gcloud = require('gcloud');
// Authenticating on a per-API-basis. You don't need to do this if you auth on a
// global basis (see Authentication section above).

var dataset = gcloud.datastore.dataset({
var datastore = gcloud.datastore({
projectId: 'my-project',
keyFilename: '/path/to/keyfile.json'
});

dataset.get(dataset.key(['Product', 'Computer']), function(err, entity) {
var key = datastore.key(['Product', 'Computer']);

datastore.get(key, function(err, entity) {
console.log(err || entity);
});

// Save data to your dataset.
// Save data to Datastore.
var blogPostData = {
title: 'How to make the perfect homemade pasta',
author: 'Andrew Chilton',
isDraft: true
};

var blogPostKey = dataset.key('BlogPost');
var blogPostKey = datastore.key('BlogPost');

dataset.save({
datastore.save({
key: blogPostKey,
data: blogPostData
}, function(err) {
// `blogPostKey` has been updated with an ID so you can do more operations
// with it, such as an update.
blogPostData.isDraft = false;

dataset.save({
datastore.save({
key: blogPostKey,
data: blogPostData
}, function(err) {
Expand Down
3 changes: 0 additions & 3 deletions docs/toc.json
Original file line number Diff line number Diff line change
Expand Up @@ -100,9 +100,6 @@
"title": "Datastore",
"type": "datastore",
"nav": [{
"title": "Dataset",
"type": "datastore/dataset"
}, {
"title": "Query",
"type": "datastore/query"
}, {
Expand Down
7 changes: 0 additions & 7 deletions docs/troubleshooting.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,3 @@ async.eachLimit(subscriptions, PARALLEL_LIMIT, deleteSubscription, function(err)
This will only allow 10 at a time to go through, making it easier on the API to keep up with your requests.

Reference Issue: [#1101](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1101)


## I cannot connect to Datastore from a Compute Engine instance.

Currently, the version of Datastore our library supports (v1beta2) requires not only the `cloud-platform` auth scope, but the `userinfo.email` scope as well. When you create a VM, be sure to select both of these scopes (possibly referred to as "Cloud Datastore" and "User info") in order to access the API from gcloud-node without receiving a 401 error.

Reference Issue: [#1169](https://github.com/GoogleCloudPlatform/gcloud-node/issues/1169#issuecomment-198428431)
55 changes: 7 additions & 48 deletions lib/common/grpc-service.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,11 @@

'use strict';

var camelize = require('camelize');
var googleProtoFiles = require('google-proto-files');
var grpc = require('grpc');
var is = require('is');
var nodeutil = require('util');
var path = require('path');
var snakeize = require('snakeize');

/**
* @type {module:common/service}
Expand Down Expand Up @@ -169,11 +167,15 @@ function GrpcService(config, options) {

for (var protoService in protoServices) {
var protoFilePath = protoServices[protoService];
var grpcOpts = {
binaryAsBase64: true,
convertFieldsToCamelCase: true
};

this.protos[protoService] = grpc.load({
root: rootDir,
file: path.relative(rootDir, protoFilePath)
}).google[service][apiVersion];
}, 'proto', grpcOpts).google[service][apiVersion];
}
}

Expand Down Expand Up @@ -239,14 +241,7 @@ GrpcService.prototype.request = function(protoOpts, reqOpts, callback) {
grpcOpts.deadline = new Date(Date.now() + protoOpts.timeout);
}

// snakeize and camelize are used to transform camelCase request options to
// snake_case. This is what ProtoBuf.js (via gRPC) expects. Similarly, the
// response is in snake_case, which is why we use camelize to return it to
// camelCase.
//
// An option will be added to gRPC to allow us to skip this step:
// https://github.com/grpc/grpc/issues/5005
service[protoOpts.method](snakeize(reqOpts), function(err, resp) {
service[protoOpts.method](reqOpts, function(err, resp) {
if (err) {
if (HTTP_ERROR_CODE_MAP[err.code]) {
var httpError = HTTP_ERROR_CODE_MAP[err.code];
Expand All @@ -257,46 +252,10 @@ GrpcService.prototype.request = function(protoOpts, reqOpts, callback) {
return;
}

callback(null, GrpcService.convertBuffers_(camelize(resp)));
callback(null, resp);
}, null, grpcOpts);
};

/**
 * Recursively walk a value and replace every Buffer (or Buffer-like object)
 * it contains with that Buffer's base64 string representation. Arrays are
 * mapped element-by-element; plain objects are mutated in place.
 *
 * @todo Replace this function: https://github.com/grpc/grpc/issues/5006
 *
 * @private
 *
 * @param {*} data - An object or array to iterate over.
 * @return {*} - The converted object.
 */
GrpcService.convertBuffers_ = function(data) {
  if (is.array(data)) {
    return data.map(GrpcService.convertBuffers_);
  }

  if (!is.object(data)) {
    // Primitives (and anything non-object) pass through untouched.
    return data;
  }

  for (var key in data) {
    if (!data.hasOwnProperty(key)) {
      continue;
    }

    var item = data[key];

    if (Buffer.isBuffer(item)) {
      data[key] = item.toString('base64');
    } else if (GrpcService.isBufferLike_(item)) {
      // Buffer-like plain objects (e.g. deserialized byte maps) are first
      // flattened to an array of byte values, then re-wrapped as a Buffer.
      data[key] = new Buffer(GrpcService.objToArr_(item)).toString('base64');
    } else {
      data[key] = GrpcService.convertBuffers_(item);
    }
  }

  return data;
};

/**
* Convert a raw value to a type-denoted protobuf message-friendly object.
*
Expand Down
Loading

0 comments on commit 0ebc3b6

Please sign in to comment.