From bb1f031a4dc29066d1230d6e10714071c4d68f02 Mon Sep 17 00:00:00 2001
From: Jason Dobry
Date: Wed, 19 Oct 2016 13:10:13 -0700
Subject: [PATCH] Update BigQuery samples

---
 bigquery/README.md                       |  85 ++--
 bigquery/datasets.js                     | 215 ++++-----
 bigquery/package.json                    |  15 +-
 bigquery/queries.js                      | 249 +++++-----
 bigquery/quickstart.js                   |  14 +-
 bigquery/system-test/datasets.test.js    | 127 ++----
 bigquery/system-test/queries.test.js     |  61 ++-
 bigquery/system-test/quickstart.test.js  |  29 +-
 bigquery/system-test/tables.test.js      | 347 ++++++--------
 bigquery/tables.js                       | 511 +++++++++++----------
 bigquery/test/datasets.test.js           | 235 +---------
 bigquery/test/queries.test.js            | 263 +----------
 bigquery/test/quickstart.test.js         |   6 +-
 bigquery/test/tables.test.js             | 552 ++---------------------
 package.json                             |   4 +
 pubsub/system-test/subscriptions.test.js |  26 +-
 pubsub/system-test/topics.test.js        |  26 +-
 pubsub/test/subscriptions.test.js        |  26 +-
 pubsub/test/topics.test.js               |  26 +-
 pubsub/topics.js                         |  16 +-
 20 files changed, 940 insertions(+), 1893 deletions(-)

diff --git a/bigquery/README.md b/bigquery/README.md
index a67297bf28..36891a9f7e 100644
--- a/bigquery/README.md
+++ b/bigquery/README.md
@@ -35,22 +35,21 @@ __Usage:__ `node datasets --help`
 ```
 Commands:
-  create <datasetId>            Create a new dataset with the specified ID.
-  delete <datasetId>            Delete the dataset with the specified ID.
-  list                          List datasets in the specified project.
-  size <datasetId>              Calculate the size of the specified dataset.
+  create <datasetId>            Creates a new dataset.
+  delete <datasetId>            Deletes a dataset.
+  list [projectId]              Lists all datasets in the specified project or the current project.
+  size <datasetId> [projectId]  Calculates the size of a dataset.

 Options:
-  --projectId, -p  Optionally specify the project ID to use.  [string] [default: "nodejs-docs-samples"]
-  --help           Show help  [boolean]
+  --help  Show help  [boolean]

 Examples:
-  node datasets create my_dataset                           Create a new dataset with the ID "my_dataset".
-  node datasets delete my_dataset                           Delete a dataset identified as "my_dataset".
-  node datasets list                                        List datasets.
-  node datasets list -p bigquery-public-data                List datasets in the "bigquery-public-data" project.
-  node datasets size my_dataset                             Calculate the size of "my_dataset".
-  node datasets size hacker_news -p bigquery-public-data    Calculate the size of "bigquery-public-data:hacker_news".
+  node datasets create my_dataset                        Creates a new dataset named "my_dataset".
+  node datasets delete my_dataset                        Deletes a dataset named "my_dataset".
+  node datasets list                                     Lists all datasets in the current project.
+  node datasets list bigquery-public-data                Lists all datasets in the "bigquery-public-data" project.
+  node datasets size my_dataset                          Calculates the size of "my_dataset" in the current project.
+  node datasets size hacker_news bigquery-public-data    Calculates the size of "bigquery-public-data:hacker_news".

 For more information, see https://cloud.google.com/bigquery/docs
 ```
@@ -68,17 +67,17 @@ __Usage:__ `node queries --help`
 Commands:
   sync <sqlQuery>   Run the specified synchronous query.
   async <sqlQuery>  Start the specified asynchronous query.
-  wait <jobId>      Wait for the specified job to complete and retrieve its results.
+  shakespeare       Queries a public Shakespeare dataset.

 Options:
   --help  Show help  [boolean]

 Examples:
-  node queries sync "SELECT * FROM
-  `publicdata.samples.natality` LIMIT 5;"
-  node queries async "SELECT * FROM
-  `publicdata.samples.natality` LIMIT 5;"
-  node queries wait job_VwckYXnR8yz54GBDMykIGnrc2
+  node queries sync "SELECT * FROM publicdata.samples.natality   Synchronously queries the natality dataset.
+ LIMIT 5;" + node queries async "SELECT * FROM Queries the natality dataset as a job. + publicdata.samples.natality LIMIT 5;" + node queries shakespeare Queries a public Shakespeare dataset. For more information, see https://cloud.google.com/bigquery/docs ``` @@ -94,39 +93,41 @@ __Usage:__ `node tables --help` ``` Commands: - create Create a new table with the specified ID in the - specified dataset. - list List tables in the specified dataset. - delete Delete the specified table from the specified dataset. - copy Make a copy of an existing table. - - browse List the rows from the specified table. - import Import data from a local file or a Google Cloud Storage - file into the specified table. + create [projectId] Creates a new table. + list [projectId] Lists all tables in a dataset. + delete [projectId] Deletes a table. + copy Makes a copy of a table. + [projectId] + browse [projectId] Lists rows in a table. + import [projectId] Imports data from a local file into a table. + import-gcs Imports data from a Google Cloud Storage file into a + [projectId] table. export Export a table from BigQuery to Google Cloud Storage. - insert Insert a JSON array (as a string or newline-delimited + [projectId] + insert [projectId] Insert a JSON array (as a string or newline-delimited file) into a BigQuery table. Options: --help Show help [boolean] Examples: - node tables create my_dataset my_table Create table "my_table" in "my_dataset". - node tables list my_dataset List tables in "my_dataset". - node tables browse my_dataset my_table Display rows from "my_table" in "my_dataset". - node tables delete my_dataset my_table Delete "my_table" from "my_dataset". - node tables import my_dataset my_table ./data.csv Import a local file into a table. - node tables import my_dataset my_table data.csv --bucket Import a GCS file into a table. - my-bucket - node tables export my_dataset my_table my-bucket my-file Export my_dataset:my_table to gcs://my-bucket/my-file as - raw CSV. - node tables export my_dataset my_table my-bucket my-file -f Export my_dataset:my_table to gcs://my-bucket/my-file as - JSON --gzip gzipped JSON. - node tables insert my_dataset my_table json_string Insert the JSON array represented by json_string into + node tables create my_dataset my_table "Name:string, Createss a new table named "my_table" in "my_dataset". + Age:integer, Weight:float, IsMagic:boolean" + node tables list my_dataset Lists tables in "my_dataset". + node tables browse my_dataset my_table Displays rows from "my_table" in "my_dataset". + node tables delete my_dataset my_table Deletes "my_table" from "my_dataset". + node tables import my_dataset my_table ./data.csv Imports a local file into a table. + node tables import-gcs my_dataset my_table my-bucket Imports a GCS file into a table. + data.csv + node tables export my_dataset my_table my-bucket my-file Exports my_dataset:my_table to gcs://my-bucket/my-file + as raw CSV. + node tables export my_dataset my_table my-bucket my-file -f Exports my_dataset:my_table to gcs://my-bucket/my-file + JSON --gzip as gzipped JSON. + node tables insert my_dataset my_table json_string Inserts the JSON array represented by json_string into my_dataset:my_table. - node tables insert my_dataset my_table json_file Insert the JSON objects contained in json_file (one per + node tables insert my_dataset my_table json_file Inserts the JSON objects contained in json_file (one per line) into my_dataset:my_table. 
- node tables copy src_dataset src_table dest_dataset Copy src_dataset:src_table to dest_dataset:dest_table. + node tables copy src_dataset src_table dest_dataset Copies src_dataset:src_table to dest_dataset:dest_table. dest_table For more information, see https://cloud.google.com/bigquery/docs diff --git a/bigquery/datasets.js b/bigquery/datasets.js index 4d1b1f0eda..b0adc10815 100644 --- a/bigquery/datasets.js +++ b/bigquery/datasets.js @@ -1,125 +1,108 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -// [START setup] -// By default, the client will authenticate using the service account file -// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use -// the project specified by the GCLOUD_PROJECT environment variable. See -// https://googlecloudplatform.github.io/google-cloud-node/#/docs/google-cloud/latest/guides/authentication -var BigQuery = require('@google-cloud/bigquery'); -// [END setup] - -function createDataset (datasetId, callback) { - var bigquery = BigQuery(); - var dataset = bigquery.dataset(datasetId); - - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=create - dataset.create(function (err, dataset, apiResponse) { - if (err) { - return callback(err); - } - - console.log('Created dataset: %s', datasetId); - return callback(null, dataset, apiResponse); - }); +const BigQuery = require('@google-cloud/bigquery'); + +// [START bigquery_create_dataset] +function createDataset (datasetId) { + // Instantiates a client + const bigquery = BigQuery(); + + // Creates a new dataset, e.g. "my_new_dataset" + return bigquery.createDataset(datasetId) + .then((results) => { + const dataset = results[0]; + console.log(`Dataset ${dataset.id} created.`); + return dataset; + }); } +// [END bigquery_create_dataset] -function deleteDataset (datasetId, callback) { - var bigquery = BigQuery(); - var dataset = bigquery.dataset(datasetId); +// [START bigquery_delete_dataset] +function deleteDataset (datasetId) { + // Instantiates a client + const bigquery = BigQuery(); - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=delete - dataset.delete(function (err) { - if (err) { - return callback(err); - } + // References an existing dataset, e.g. 
"my_dataset" + const dataset = bigquery.dataset(datasetId); - console.log('Deleted dataset: %s', datasetId); - return callback(null); - }); + // Deletes the dataset + return dataset.delete() + .then(() => { + console.log(`Dataset ${dataset.id} deleted.`); + }); } +// [END bigquery_delete_dataset] -function listDatasets (projectId, callback) { - var bigquery = BigQuery({ +// [START bigquery_list_datasets] +function listDatasets (projectId) { + // Instantiates a client + const bigquery = BigQuery({ projectId: projectId }); - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=getDatasets - bigquery.getDatasets(function (err, datasets) { - if (err) { - return callback(err); - } - - console.log('Found %d dataset(s)!', datasets.length); - return callback(null, datasets); - }); + // Lists all datasets in the specified project + return bigquery.getDatasets() + .then((results) => { + const datasets = results[0]; + console.log('Datasets:'); + datasets.forEach((dataset) => console.log(dataset.id)); + return datasets; + }); } +// [END bigquery_list_datasets] -// [START get_dataset_size] -// Control-flow helper library -var async = require('async'); - -function getDatasetSize (datasetId, projectId, callback) { - // Instantiate a bigquery client - var bigquery = BigQuery({ +// [START bigquery_get_dataset_size] +function getDatasetSize (datasetId, projectId) { + // Instantiate a client + const bigquery = BigQuery({ projectId: projectId }); - var dataset = bigquery.dataset(datasetId); - - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=getTables - dataset.getTables(function (err, tables) { - if (err) { - return callback(err); - } - - return async.map(tables, function (table, cb) { - // Fetch more detailed info for each table - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=get - table.get(function (err, tableInfo) { - if (err) { - return cb(err); - } - // Return numBytes converted to Megabytes - var numBytes = tableInfo.metadata.numBytes; - return cb(null, (parseInt(numBytes, 10) / 1000) / 1000); - }); - }, function (err, sizes) { - if (err) { - return callback(err); - } - var sum = sizes.reduce(function (cur, prev) { - return cur + prev; - }, 0); - - console.log('Size of %s: %d MB', datasetId, sum); - return callback(null, sum); + + // References an existing dataset, e.g. 
"my_dataset" + const dataset = bigquery.dataset(datasetId); + + // Lists all tables in the dataset + return dataset.getTables() + .then((results) => results[0]) + // Retrieve the metadata for each table + .then((tables) => Promise.all(tables.map((table) => table.get()))) + .then((results) => results.map((result) => result[0])) + // Select the size of each table + .then((tables) => tables.map((table) => (parseInt(table.metadata.numBytes, 10) / 1000) / 1000)) + // Sum up the sizes + .then((sizes) => sizes.reduce((cur, prev) => cur + prev, 0)) + // Print and return the size + .then((sum) => { + console.log(`Size of ${dataset.id}: ${sum} MB`); + return sum; }); - }); } -// [END get_dataset_size] +// [END bigquery_get_dataset_size] // The command-line program -var cli = require('yargs'); -var makeHandler = require('../utils').makeHandler; +const cli = require(`yargs`); -var program = module.exports = { +const program = module.exports = { createDataset: createDataset, deleteDataset: deleteDataset, listDatasets: listDatasets, getDatasetSize: getDatasetSize, - main: function (args) { + main: (args) => { // Run the command-line program cli.help().strict().parse(args).argv; } @@ -127,35 +110,27 @@ var program = module.exports = { cli .demand(1) - .command('create ', 'Create a new dataset with the specified ID.', {}, function (options) { - program.createDataset(options.datasetId, makeHandler()); - }) - .command('delete ', 'Delete the dataset with the specified ID.', {}, function (options) { - program.deleteDataset(options.datasetId, makeHandler()); + .command(`create `, `Creates a new dataset.`, {}, (opts) => { + program.createDataset(opts.datasetId); }) - .command('list', 'List datasets in the specified project.', {}, function (options) { - program.listDatasets(options.projectId, makeHandler(true, 'id')); + .command(`delete `, `Deletes a dataset.`, {}, (opts) => { + program.deleteDataset(opts.datasetId); }) - .command('size ', 'Calculate the size of the specified dataset.', {}, function (options) { - program.getDatasetSize(options.datasetId, options.projectId, makeHandler()); + .command(`list [projectId]`, `Lists all datasets in the specified project or the current project.`, {}, (opts) => { + program.listDatasets(opts.projectId || process.env.GCLOUD_PROJECT); }) - .option('projectId', { - alias: 'p', - requiresArg: true, - type: 'string', - default: process.env.GCLOUD_PROJECT, - description: 'Optionally specify the project ID to use.', - global: true + .command(`size [projectId]`, `Calculates the size of a dataset.`, {}, (opts) => { + program.getDatasetSize(opts.datasetId, opts.projectId || process.env.GCLOUD_PROJECT); }) - .example('node $0 create my_dataset', 'Create a new dataset with the ID "my_dataset".') - .example('node $0 delete my_dataset', 'Delete a dataset identified as "my_dataset".') - .example('node $0 list', 'List datasets.') - .example('node $0 list -p bigquery-public-data', 'List datasets in the "bigquery-public-data" project.') - .example('node $0 size my_dataset', 'Calculate the size of "my_dataset".') - .example('node $0 size hacker_news -p bigquery-public-data', 'Calculate the size of "bigquery-public-data:hacker_news".') + .example(`node $0 create my_dataset`, `Creates a new dataset named "my_dataset".`) + .example(`node $0 delete my_dataset`, `Deletes a dataset named "my_dataset".`) + .example(`node $0 list`, `Lists all datasets in the current project.`) + .example(`node $0 list bigquery-public-data`, `Lists all datasets in the "bigquery-public-data" project.`) + 
.example(`node $0 size my_dataset`, `Calculates the size of "my_dataset" in the current project.`) + .example(`node $0 size hacker_news bigquery-public-data`, `Calculates the size of "bigquery-public-data:hacker_news".`) .wrap(120) .recommendCommands() - .epilogue('For more information, see https://cloud.google.com/bigquery/docs'); + .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`); if (module === require.main) { program.main(process.argv.slice(2)); diff --git a/bigquery/package.json b/bigquery/package.json index ba22673bf9..fcc48d77a6 100644 --- a/bigquery/package.json +++ b/bigquery/package.json @@ -5,18 +5,17 @@ "license": "Apache Version 2.0", "author": "Google Inc.", "scripts": { - "test": "mocha -R spec -t 120000 --require intelli-espower-loader ../test/_setup.js test/*.test.js", - "system-test": "mocha -R spec -t 120000 --require intelli-espower-loader ../system-test/_setup.js system-test/*.test.js" + "test": "mocha -R spec -t 10000 --require intelli-espower-loader ../test/_setup.js test/*.test.js", + "system-test": "mocha -R spec -t 10000 --require intelli-espower-loader ../system-test/_setup.js system-test/*.test.js" }, "dependencies": { - "@google-cloud/bigquery": "^0.1.1", - "@google-cloud/storage": "^0.1.1", - "async": "^2.0.1", - "request": "^2.74.0", - "yargs": "^5.0.0" + "@google-cloud/bigquery": "^0.4.0", + "@google-cloud/storage": "^0.4.0", + "async": "^2.1.2", + "yargs": "^6.3.0" }, "devDependencies": { - "mocha": "^3.0.2", + "mocha": "^3.1.2", "node-uuid": "^1.4.7" }, "engines": { diff --git a/bigquery/queries.js b/bigquery/queries.js index f5e92c12b5..ebbb1d6ccd 100644 --- a/bigquery/queries.js +++ b/bigquery/queries.js @@ -1,160 +1,133 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -// [START complete] -// [START setup] -// By default, gcloud will authenticate using the service account file specified -// by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the -// project specified by the GCLOUD_PROJECT environment variable. 
See -// https://googlecloudplatform.github.io/gcloud-node/#/docs/guides/authentication -var BigQuery = require('@google-cloud/bigquery'); -// [END setup] +// [START bigquery_simple_app_all] +const BigQuery = require('@google-cloud/bigquery'); -function printExample (rows) { +// [START bigquery_simple_app_print] +function printResult (rows) { console.log('Query Results:'); rows.forEach(function (row) { - var str = ''; - for (var key in row) { + let str = ''; + for (let key in row) { if (str) { - str += '\n'; + str = `${str}\n`; } - str += key + ': ' + row[key]; + str = `${str}${key}: ${row[key]}`; } console.log(str); }); } +// [END bigquery_simple_app_print] -function queryShakespeare (callback) { - var bigquery = BigQuery(); +// [START bigquery_simple_app_query] +const sqlQuery = `SELECT + corpus, COUNT(*) as unique_words +FROM publicdata.samples.shakespeare +GROUP BY + corpus +ORDER BY + unique_words DESC LIMIT 10;`; - var sqlQuery = 'SELECT\n' + - ' TOP(corpus, 10) as title,\n' + - ' COUNT(*) as unique_words\n' + - 'FROM `publicdata.samples.shakespeare`;'; +function queryShakespeare () { + // Instantiates a client + const bigquery = BigQuery(); - // Construct query object. // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query - var options = { + const options = { query: sqlQuery, - - // Use standard SQL syntax for queries. - // See: https://cloud.google.com/bigquery/sql-reference/ - useLegacySql: false + useLegacySql: false // Use standard SQL syntax for queries. }; - // Run the query - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=query - bigquery.query(options, function (err, rows) { - if (err) { - return callback(err); - } - - // Print the result - printExample(rows); - - return callback(null, rows); - }); + // Runs the query + return bigquery.query(options) + .then((results) => { + const rows = results[0]; + printResult(rows); + return rows; + }); } -// [END complete] +// [END bigquery_simple_app_query] +// [END bigquery_simple_app_all] -function syncQuery (sqlQuery, callback) { - var bigquery = BigQuery(); +// [START bigquery_sync_query] +function syncQuery (sqlQuery) { + // Instantiates a client + const bigquery = BigQuery(); - // Construct query object. // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query - var options = { + const options = { query: sqlQuery, - - // Time out after 10 seconds. - timeoutMs: 10000, - - // Use standard SQL syntax for queries. - // See: https://cloud.google.com/bigquery/sql-reference/ - useLegacySql: false + timeoutMs: 10000, // Time out after 10 seconds. + useLegacySql: false // Use standard SQL syntax for queries. 
  };

-  // Run the query
-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=query
-  bigquery.query(options, function (err, rows) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Received %d row(s)!', rows.length);
-    return callback(null, rows);
-  });
-}
-
-function waitForJob (jobId, callback) {
-  var bigquery = BigQuery();
-
-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/job
-  var job = bigquery.job(jobId);
-
-  job
-    .on('error', callback)
-    .on('complete', function (metadata) {
-      // The job is done, get query results
-      // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/job?method=getQueryResults
-      job.getQueryResults(function (err, rows) {
-        if (err) {
-          return callback(err);
-        }
-
-        console.log('Job complete, received %d row(s)!', rows.length);
-        return callback(null, rows);
-      });
+  // Runs the query
+  return bigquery.query(options)
+    .then((results) => {
+      const rows = results[0];
+      console.log('Rows:');
+      rows.forEach((row) => console.log(row));
+      return rows;
     });
 }
+// [END bigquery_sync_query]

-function asyncQuery (sqlQuery, callback) {
-  var bigquery = BigQuery();
+// [START bigquery_async_query]
+function asyncQuery (sqlQuery) {
+  // Instantiates a client
+  const bigquery = BigQuery();

-  // Construct query object
   // Query options list: https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
-  var options = {
+  const options = {
     query: sqlQuery,
-
-    // Use standard SQL syntax for queries.
-    // See: https://cloud.google.com/bigquery/sql-reference/
-    useLegacySql: false
+    useLegacySql: false // Use standard SQL syntax for queries.
   };

-  // Run the query asynchronously
-  // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery?method=startQuery
-  bigquery.startQuery(options, function (err, job) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Started job: %s', job.id);
-    return waitForJob(job.id, callback);
-  });
+  let job;
+
+  // Runs the query as a job
+  return bigquery.startQuery(options)
+    .then((results) => {
+      job = results[0];
+      console.log(`Job ${job.id} started.`);
+      return job.promise();
+    })
+    .then(() => {
+      console.log(`Job ${job.id} completed.`);
+      return job.getQueryResults();
+    })
+    .then((results) => {
+      const rows = results[0];
+      console.log('Rows:');
+      rows.forEach((row) => console.log(row));
+      return rows;
+    });
 }
+// [END bigquery_async_query]

 // The command-line program
-var cli = require('yargs');
-var makeHandler = require('../utils').makeHandler;
+const cli = require(`yargs`);

-var program = module.exports = {
-  printExample: printExample,
+const program = module.exports = {
   queryShakespeare: queryShakespeare,
   asyncQuery: asyncQuery,
-  waitForJob: waitForJob,
   syncQuery: syncQuery,
-  main: function (args) {
+  main: (args) => {
     // Run the command-line program
     cli.help().strict().parse(args).argv;
   }
@@ -162,21 +135,39 @@ var program = module.exports = {

 cli
   .demand(1)
-  .command('sync <sqlQuery>', 'Run the specified synchronous query.', {}, function (options) {
-    program.syncQuery(options.sqlQuery, makeHandler());
-  })
-  .command('async <sqlQuery>', 'Start the specified asynchronous query.', {}, function (options) {
-    program.asyncQuery(options.sqlQuery, makeHandler());
-  })
-  .command('wait <jobId>', 'Wait for the specified job to complete and retrieve its results.', {}, function (options) {
-    program.waitForJob(options.jobId, makeHandler());
-  })
-  .example('node $0 sync "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"')
-  .example('node $0 async "SELECT * FROM `publicdata.samples.natality` LIMIT 5;"')
-  .example('node $0 wait job_VwckYXnR8yz54GBDMykIGnrc2')
+  .command(
+    `sync <sqlQuery>`,
+    `Run the specified synchronous query.`,
+    {},
+    (opts) => program.syncQuery(opts.sqlQuery)
+  )
+  .command(
+    `async <sqlQuery>`,
+    `Start the specified asynchronous query.`,
+    {},
+    (opts) => program.asyncQuery(opts.sqlQuery)
+  )
+  .command(
+    `shakespeare`,
+    `Queries a public Shakespeare dataset.`,
+    {},
+    program.queryShakespeare
+  )
+  .example(
+    `node $0 sync "SELECT * FROM publicdata.samples.natality LIMIT 5;"`,
+    `Synchronously queries the natality dataset.`
+  )
+  .example(
+    `node $0 async "SELECT * FROM publicdata.samples.natality LIMIT 5;"`,
+    `Queries the natality dataset as a job.`
+  )
+  .example(
+    `node $0 shakespeare`,
+    `Queries a public Shakespeare dataset.`
+  )
   .wrap(120)
   .recommendCommands()
-  .epilogue('For more information, see https://cloud.google.com/bigquery/docs');
+  .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`);

 if (module === require.main) {
   program.main(process.argv.slice(2));
diff --git a/bigquery/quickstart.js b/bigquery/quickstart.js
index 90d00b5503..0a392f2f66 100644
--- a/bigquery/quickstart.js
+++ b/bigquery/quickstart.js
@@ -23,7 +23,7 @@ const BigQuery = require('@google-cloud/bigquery');
 const projectId = 'YOUR_PROJECT_ID';

 // Instantiates a client
-const bigqueryClient = BigQuery({
+const bigquery = BigQuery({
   projectId: projectId
 });

@@ -31,12 +31,10 @@ const bigqueryClient = BigQuery({
 const datasetName = 'my_new_dataset';

 // Creates the new dataset
-bigqueryClient.createDataset(datasetName, (err, dataset) => {
-  if (err) {
-    console.error(err);
-    return;
-  }
+bigquery.createDataset(datasetName)
+  .then((results) => {
+    const dataset = results[0];

-  console.log(`Dataset ${dataset.name} created.`);
-});
+    console.log(`Dataset ${dataset.id} created.`);
+  });
 // [END bigquery_quickstart]
diff --git a/bigquery/system-test/datasets.test.js b/bigquery/system-test/datasets.test.js
index aadb99badd..b034aa754c 100644
--- a/bigquery/system-test/datasets.test.js
+++ b/bigquery/system-test/datasets.test.js
@@ -1,94 +1,61 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
+/**
+ * Copyright 2016, Google, Inc.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ 'use strict'; -var BigQuery = require('@google-cloud/bigquery'); -var uuid = require('node-uuid'); -var program = require('../datasets'); +const bigquery = require(`@google-cloud/bigquery`)(); +const uuid = require(`node-uuid`); +const path = require(`path`); +const run = require(`../../utils`).run; -var bigquery = BigQuery(); -var projectId = process.env.GCLOUD_PROJECT; -var datasetId = 'nodejs-docs-samples-test-' + uuid.v4(); +const cwd = path.join(__dirname, `..`); +const cmd = `node datasets.js`; +const datasetId = (`nodejs-docs-samples-test-${uuid.v4()}`).replace(/-/gi, '_'); -// BigQuery only accepts underscores -datasetId = datasetId.replace(/-/gi, '_'); +describe(`bigquery:datasets`, function () { + after(() => bigquery.dataset(datasetId).delete({ force: true }).catch(() => undefined)); -describe('bigquery:datasets', function () { - after(function (done) { - bigquery.dataset(datasetId).delete({ - force: true - }, function () { - // Ignore any error, the dataset might already have been successfully deleted - done(); - }); - }); - - describe('createDataset', function () { - it('should create a new dataset', function (done) { - program.createDataset(datasetId, function (err, dataset, apiResponse) { - assert.equal(err, null); - assert.notEqual(dataset, undefined); - assert.equal(dataset.id, datasetId); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created dataset: %s', datasetId]); - assert.notEqual(apiResponse, undefined); - - done(); - }); - }); + it(`should create a dataset`, () => { + const output = run(`${cmd} create ${datasetId}`, cwd); + assert.equal(output, `Dataset ${datasetId} created.`); + return bigquery.dataset(datasetId).exists() + .then((results) => assert.equal(results[0], true)); }); - describe('listDatasets', function () { - it('should list datasets', function (done) { - program.listDatasets(projectId, function (err, datasets) { - assert.equal(err, null); - assert.equal(Array.isArray(datasets), true); - assert.equal(datasets.length > 0, true); - var matchingDatasets = datasets.filter(function (dataset) { - return dataset.id === datasetId; - }); - assert.equal(matchingDatasets.length, 1, 'newly created dataset is in list'); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d dataset(s)!', datasets.length]); - - done(); - }); - }); + it(`should list datasets`, (done) => { + // Listing is eventually consistent. Give the indexes time to update. 
+ setTimeout(() => { + const output = run(`${cmd} list`, cwd); + assert.notEqual(output.indexOf(`Datasets:`), -1); + assert.notEqual(output.indexOf(datasetId), -1); + done(); + }, 5000); }); - describe('getDatasetSize', function () { - it('should return the size of a dataset', function (done) { - program.getDatasetSize(datasetId, projectId, function (err, size) { - assert.equal(err, null); - assert.equal(typeof size, 'number', 'should have received a number'); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Size of %s: %d MB', datasetId, size]); - - done(); - }); - }); + it(`should return the size of a dataset`, function () { + let output = run(`${cmd} size hacker_news bigquery-public-data`, cwd); + assert.notEqual(output.indexOf(`Size of hacker_news`), -1); + assert.notEqual(output.indexOf(`MB`), -1); + output = run(`${cmd} size ${datasetId}`, cwd); + assert.notEqual(output.indexOf(`Size of ${datasetId}: 0 MB`), -1); }); - describe('deleteDataset', function () { - it('should list datasets', function (done) { - program.deleteDataset(datasetId, function (err) { - assert.equal(err, null); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Deleted dataset: %s', datasetId]); - - done(); - }); - }); + it(`should delete a dataset`, () => { + const output = run(`${cmd} delete ${datasetId}`, cwd); + assert.equal(output, `Dataset ${datasetId} deleted.`); + return bigquery.dataset(datasetId).exists() + .then((results) => assert.equal(results[0], false)); }); }); diff --git a/bigquery/system-test/queries.test.js b/bigquery/system-test/queries.test.js index cccb677357..b338cfaef3 100644 --- a/bigquery/system-test/queries.test.js +++ b/bigquery/system-test/queries.test.js @@ -13,32 +13,51 @@ 'use strict'; -var program = require('../queries'); +const path = require(`path`); +const run = require(`../../utils`).run; -var sqlQuery = 'SELECT * FROM `publicdata.samples.natality` LIMIT 5;'; +const cwd = path.join(__dirname, `..`); +const cmd = `node queries.js`; -describe('bigquery:queries', function () { - describe('syncQuery', function () { - it('should fetch data given a query', function (done) { - program.syncQuery(sqlQuery, function (err, data) { - assert.equal(err, null); - assert.equal(Array.isArray(data), true); - assert.equal(data.length, 5); +const expectedShakespeareResult = `Query Results: +corpus: hamlet +unique_words: 5318 +corpus: kinghenryv +unique_words: 5104 +corpus: cymbeline +unique_words: 4875 +corpus: troilusandcressida +unique_words: 4795 +corpus: kinglear +unique_words: 4784 +corpus: kingrichardiii +unique_words: 4713 +corpus: 2kinghenryvi +unique_words: 4683 +corpus: coriolanus +unique_words: 4653 +corpus: 2kinghenryiv +unique_words: 4605 +corpus: antonyandcleopatra +unique_words: 4582`; - done(); - }); - }); +const sqlQuery = `SELECT * FROM publicdata.samples.natality LIMIT 5;`; + +describe(`bigquery:queries`, function () { + it(`should query shakespeare`, () => { + const output = run(`${cmd} shakespeare`, cwd); + assert.equal(output, expectedShakespeareResult); }); - describe('asyncQuery', function () { - it('should submit a job and fetch its results', function (done) { - program.asyncQuery(sqlQuery, function (err, data) { - assert.equal(err, null); - assert.equal(Array.isArray(data), true); - assert.equal(data.length, 5); + it(`should run a sync query`, () => { + const output = run(`${cmd} sync "${sqlQuery}"`, cwd); + assert.notEqual(output.indexOf(`Rows:`), -1); + 
assert.notEqual(output.indexOf(`source_year`), -1); + }); - done(); - }); - }); + it(`should run an async query`, () => { + const output = run(`${cmd} async "${sqlQuery}"`, cwd); + assert.notEqual(output.indexOf(`Rows:`), -1); + assert.notEqual(output.indexOf(`source_year`), -1); }); }); diff --git a/bigquery/system-test/quickstart.test.js b/bigquery/system-test/quickstart.test.js index 3f0b6f1ded..52172fbef8 100644 --- a/bigquery/system-test/quickstart.test.js +++ b/bigquery/system-test/quickstart.test.js @@ -23,7 +23,7 @@ const expectedDatasetId = `my_new_dataset`; let datasetId = `nodejs-docs-samples-test-${uuid.v4()}`; datasetId = datasetId.replace(/-/gi, `_`); -describe.only(`bigquery:quickstart`, () => { +describe(`bigquery:quickstart`, () => { let bigqueryMock, BigqueryMock; after((done) => { @@ -33,21 +33,22 @@ describe.only(`bigquery:quickstart`, () => { }); }); - it(`should create a dataset`, (done) => { + it(`quickstart should create a dataset`, (done) => { bigqueryMock = { - createDataset: (_datasetId, _callback) => { + createDataset: (_datasetId) => { assert.equal(_datasetId, expectedDatasetId); - assert.equal(typeof _callback, 'function'); - - bigquery.createDataset(datasetId, (err, dataset, apiResponse) => { - _callback(err, dataset, apiResponse); - assert.ifError(err); - assert.notEqual(dataset, undefined); - assert.notEqual(apiResponse, undefined); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, [`Dataset ${dataset.name} created.`]); - done(); - }); + + return bigquery.createDataset(datasetId) + .then((results) => { + const dataset = results[0]; + assert.notEqual(dataset, undefined); + setTimeout(() => { + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, [`Dataset ${dataset.id} created.`]); + done(); + }, 500); + return results; + }).catch(done); } }; BigqueryMock = sinon.stub().returns(bigqueryMock); diff --git a/bigquery/system-test/tables.test.js b/bigquery/system-test/tables.test.js index a67bb37ad7..dffb4ab7b7 100644 --- a/bigquery/system-test/tables.test.js +++ b/bigquery/system-test/tables.test.js @@ -1,224 +1,177 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ 'use strict'; -var bigquery = require('@google-cloud/bigquery')(); -var storage = require('@google-cloud/storage')(); -var uuid = require('node-uuid'); -var program = require('../tables'); -var path = require('path'); - -function generateUuid () { - return 'nodejs_docs_samples_' + uuid.v4().replace(/-/gi, '_'); -} - -var rows = [ +const bigquery = require(`@google-cloud/bigquery`)(); +const storage = require(`@google-cloud/storage`)(); +const uuid = require(`node-uuid`); +const path = require(`path`); +const utils = require(`../../utils`); +const run = utils.run; +const noop = utils.noop; + +const cwd = path.join(__dirname, `..`); +const cmd = `node tables.js`; +const generateUuid = () => `nodejs_docs_samples_${uuid.v4().replace(/-/gi, '_')}`; + +const datasetId = generateUuid(); +const srcDatasetId = datasetId; +const destDatasetId = generateUuid(); +const tableId = generateUuid(); +const srcTableId = tableId; +const destTableId = generateUuid(); +const schema = `Name:string, Age:integer, Weight:float, IsMagic:boolean`; +const bucketName = generateUuid(); +const exportFileName = `data.json`; +const importFileName = `data.csv`; +const localFilePath = path.join(__dirname, `../resources/${importFileName}`); +const rows = [ { Name: 'foo', Age: 27, Weight: 80.3, IsMagic: true }, { Name: 'bar', Age: 13, Weight: 54.6, IsMagic: false } ]; -var options = { - projectId: process.env.GCLOUD_PROJECT, - localFilePath: path.join(__dirname, '../resources/data.csv'), - bucketName: generateUuid(), - fileName: 'data.json', - datasetId: generateUuid(), - tableId: generateUuid(), - schema: 'Name:string, Age:integer, Weight:float, IsMagic:boolean', - rows: rows -}; -var srcDatasetId = options.datasetId; -var srcTableId = options.tableId; -var destDatasetId = generateUuid(); -var destTableId = generateUuid(); -describe('bigquery:tables', function () { - before(function (done) { - // Create bucket - storage.createBucket(options.bucketName, function (err, bucket) { - assert.ifError(err, 'bucket creation succeeded'); - // Upload data.csv - bucket.upload(options.localFilePath, function (err) { - assert.ifError(err, 'file upload succeeded'); - // Create srcDataset - bigquery.createDataset(srcDatasetId, function (err) { - assert.ifError(err, 'srcDataset creation succeeded'); - // Create destDataset - bigquery.createDataset(destDatasetId, function (err) { - assert.ifError(err, 'destDataset creation succeeded'); - done(); - }); - }); - }); - }); +describe('bigquery:tables', () => { + before(() => { + return storage.createBucket(bucketName) + .then((results) => results[0].upload(localFilePath)) + .then(() => bigquery.createDataset(srcDatasetId)) + .then(() => bigquery.createDataset(destDatasetId)); }); - after(function (done) { - // Delete srcDataset - bigquery.dataset(srcDatasetId).delete({ force: true }, function () { - // Delete destDataset - bigquery.dataset(destDatasetId).delete({ force: true }, function () { - // Delete files - storage.bucket(options.bucketName).deleteFiles({ force: true }, function (err) { - if (err) { - return done(err); - } - // Delete bucket - setTimeout(function () { - storage.bucket(options.bucketName).delete(done); + after(() => { + return bigquery.dataset(srcDatasetId).delete({ force: true }) + .then(() => bigquery.dataset(destDatasetId).delete({ force: true }), noop) + .then(() => storage.bucket(bucketName).deleteFiles({ force: true }), noop) + .then(() => { + return new Promise((resolve, reject) => { + setTimeout(() => { + storage.bucket(bucketName).delete().then(resolve, reject); 
}, 2000); }); - }); - }); + }, noop); }); - describe('createTable', function () { - it('should create a new table', function (done) { - program.createTable(options.datasetId, options.tableId, options.schema, function (err, table) { - assert.equal(err, null); - assert.notEqual(table, undefined); - assert.equal(table.id, options.tableId); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', options.tableId, options.datasetId]); - - // Listing is eventually consistent, give the index time to update - setTimeout(done, 5000); - }); - }); + it(`should create a table`, () => { + const output = run(`${cmd} create ${datasetId} ${tableId} "${schema}"`, cwd); + assert.equal(output, `Table ${tableId} created.`); + return bigquery.dataset(datasetId).table(tableId).exists() + .then((results) => assert.equal(results[0], true)); }); - describe('listTables', function () { - it('should list tables', function (done) { - program.listTables(options.datasetId, function (err, tables) { - assert.equal(err, null); - assert.equal(Array.isArray(tables), true); - assert.equal(tables.length > 0, true); - var matchingTables = tables.filter(function (table) { - return table.id === options.tableId; - }); - assert.equal(matchingTables.length, 1, 'newly created table is in list'); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d table(s)!', tables.length]); - - done(); - }); - }); + it(`should list tables`, (done) => { + // Listing is eventually consistent. Give the indexes time to update. + setTimeout(() => { + const output = run(`${cmd} list ${datasetId}`, cwd); + assert.notEqual(output.indexOf(`Tables:`), -1); + assert.notEqual(output.indexOf(tableId), -1); + done(); + }, 5000); }); - describe('importLocalFile', function () { - it('should import local file', function (done) { - program.importLocalFile(options.datasetId, options.tableId, options.localFilePath, function (err, metadata, apiResponse) { - assert.equal(err, null); - assert.notEqual(metadata, undefined); - assert.deepEqual(metadata.status, { - state: 'DONE' - }, 'job completed'); - assert.notEqual(apiResponse, undefined); - - done(); - }); + it(`should import a local file`, () => { + const output = run(`${cmd} import ${datasetId} ${tableId} ${localFilePath}`, cwd); + assert.notEqual(output.indexOf(`started.`), -1); + assert.notEqual(output.indexOf(`completed.`), -1); + return new Promise((resolve, reject) => { + setTimeout(() => { + return bigquery.dataset(datasetId).table(tableId).getRows() + .then((results) => { + assert.equal(results[0].length, 1); + resolve(); + }) + .catch(reject); + }, 2000); }); - }); - - describe('exportTableToGCS', function () { - it('should export data to GCS', function (done) { - program.exportTableToGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, function (err, metadata, apiResponse) { - assert.equal(err, null); - assert.notEqual(metadata, undefined); - assert.deepEqual(metadata.status, { state: 'DONE' }); - assert.notEqual(apiResponse, undefined); - - storage.bucket(options.bucketName).file(options.fileName).exists(function (err, exists) { - assert.equal(err, null); - assert.equal(exists, true); + }).timeout(120000); - done(); - }); - }); - }); + it(`should browse table rows`, () => { + const output = run(`${cmd} browse ${datasetId} ${tableId}`, cwd); + assert.equal(output, `Rows:\n{ Name: 'Gandalf', Age: 2000, Weight: 140, IsMagic: true }`); }); - describe('insertRowsAsStream', 
function () { - it('should insert rows into a table', function (done) { - var table = bigquery.dataset(options.datasetId).table(options.tableId); - - table.getRows(function (err, startRows) { - assert.equal(err, null); - - program.insertRowsAsStream(options.datasetId, options.tableId, options.rows, function (err, insertErrors, apiResponse) { - assert.equal(err, null); - assert.deepEqual(insertErrors, []); - assert.notEqual(apiResponse, undefined); - - setTimeout(function () { - table.getRows(function (err, endRows) { - assert.equal(err, null); - assert.equal(startRows.length + 2, endRows.length); - - done(); - }); - }, 2000); - }); - }); + it(`should export a table to GCS`, () => { + const output = run(`${cmd} export ${datasetId} ${tableId} ${bucketName} ${exportFileName}`, cwd); + assert.notEqual(output.indexOf(`started.`), -1); + assert.notEqual(output.indexOf(`completed.`), -1); + return new Promise((resolve, reject) => { + setTimeout(() => { + storage.bucket(bucketName).file(exportFileName).exists() + .then((results) => { + assert.equal(results[0], true); + resolve(); + }) + .catch(reject); + }, 10000); }); - }); - - describe('copyTable', function () { - it('should copy a table between datasets', function (done) { - program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, function (err, metadata, apiResponse) { - assert.equal(err, null); - assert.notEqual(metadata, undefined); - assert.deepEqual(metadata.status, { state: 'DONE' }); - assert.notEqual(apiResponse, undefined); - - bigquery.dataset(srcDatasetId).table(srcTableId).exists(function (err, exists) { - assert.equal(err, null); - assert.equal(exists, true); - - bigquery.dataset(destDatasetId).table(destTableId).exists(function (err, exists) { - assert.equal(err, null); - assert.equal(exists, true); - - done(); - }); - }); - }); + }).timeout(120000); + + it(`should import a GCS file`, () => { + const output = run(`${cmd} import-gcs ${datasetId} ${tableId} ${bucketName} ${importFileName}`, cwd); + assert.notEqual(output.indexOf(`started.`), -1); + assert.notEqual(output.indexOf(`completed.`), -1); + return new Promise((resolve, reject) => { + setTimeout(() => { + return bigquery.dataset(datasetId).table(tableId).getRows() + .then((results) => { + assert.equal(results[0].length, 2); + resolve(); + }) + .catch(reject); + }, 2000); }); - }); - - describe('browseRows', function () { - it('should display rows in a table', function (done) { - program.browseRows(options.datasetId, options.tableId, function (err, rows) { - assert.equal(err, null); - assert.equal(Array.isArray(rows), true); - assert.equal(rows.length > 0, true); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d row(s)!', rows.length]); - - done(); - }); + }).timeout(120000); + + it(`should copy a table`, () => { + const output = run(`${cmd} copy ${srcDatasetId} ${srcTableId} ${destDatasetId} ${destTableId}`, cwd); + assert.notEqual(output.indexOf(`started.`), -1); + assert.notEqual(output.indexOf(`completed.`), -1); + return new Promise((resolve, reject) => { + setTimeout(() => { + bigquery.dataset(destDatasetId).table(destTableId).getRows() + .then((results) => { + assert.equal(results[0].length, 2); + resolve(); + }) + .catch(reject); + }, 2000); }); - }); - - describe('deleteTable', function () { - it('should delete table', function (done) { - program.deleteTable(options.datasetId, options.tableId, function (err) { - assert.equal(err, null); - assert.equal(console.log.calledOnce, true); - 
assert.deepEqual(console.log.firstCall.args, ['Deleted table %s from %s', options.tableId, options.datasetId]); - - done(); - }); + }).timeout(120000); + + it(`should insert rows`, () => { + assert.throws(() => { + run(`${cmd} insert ${datasetId} ${tableId} 'foo.bar'`, cwd); + }, Error, `"json_or_file" (or the file it points to) is not a valid JSON array.`); + const output = run(`${cmd} insert ${datasetId} ${tableId} '${JSON.stringify(rows)}'`, cwd); + assert.equal(output, `Inserted:\n{ Name: 'foo', Age: 27, Weight: 80.3, IsMagic: true }\n{ Name: 'bar', Age: 13, Weight: 54.6, IsMagic: false }`); + return new Promise((resolve, reject) => { + setTimeout(() => { + bigquery.dataset(datasetId).table(tableId).getRows() + .then((results) => { + assert.equal(results[0].length, 4); + resolve(); + }) + .catch(reject); + }, 2000); }); + }).timeout(120000); + + it(`should delete a table`, () => { + const output = run(`${cmd} delete ${datasetId} ${tableId}`, cwd); + assert.equal(output, `Table ${tableId} deleted.`); + return bigquery.dataset(datasetId).table(tableId).exists() + .then((results) => assert.equal(results[0], false)); }); }); diff --git a/bigquery/tables.js b/bigquery/tables.js index 2858c87375..557325b14f 100644 --- a/bigquery/tables.js +++ b/bigquery/tables.js @@ -1,217 +1,270 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -// [START setup] -// By default, gcloud will authenticate using the service account file specified -// by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the -// project specified by the GCLOUD_PROJECT environment variable. See -// https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/authentication -var BigQuery = require('@google-cloud/bigquery'); -// [END setup] +const BigQuery = require('@google-cloud/bigquery'); +const Storage = require('@google-cloud/storage'); -function createTable (datasetId, tableId, schema, callback) { - var bigquery = BigQuery(); - var dataset = bigquery.dataset(datasetId); +// [START bigquery_create_table] +function createTable (datasetId, tableId, schema, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId + }); + + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // Specify a schema, e.g. 
"Name:string, Age:integer, Weight:float, IsMagic:boolean" // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource - var options = { + const options = { schema: schema }; - // Create a new table in the given dataset - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=createTable - dataset.createTable(tableId, options, function (err, table, apiResponse) { - if (err) { - return callback(err); - } - - console.log('Created table %s in %s', tableId, datasetId); - return callback(null, table, apiResponse); - }); + // Create a new table in the dataset + return dataset.createTable(tableId, options) + .then((results) => { + const table = results[0]; + console.log(`Table ${table.id} created.`); + return table; + }); } +// [END bigquery_create_table] -function listTables (datasetId, callback) { - var bigquery = BigQuery(); - var dataset = bigquery.dataset(datasetId); - - // List the tables in the specified dataset - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/dataset?method=getTables - dataset.getTables(function (err, tables) { - if (err) { - return callback(err); - } - - console.log('Found %d table(s)!', tables.length); - return callback(null, tables); +// [START bigquery_delete_table] +function deleteTable (datasetId, tableId, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId }); -} -function browseRows (datasetId, tableId, callback) { - var bigquery = BigQuery(); - var table = bigquery.dataset(datasetId).table(tableId); - - // Retrieve rows from the specified table - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=getRows - table.getRows(function (err, rows) { - if (err) { - return callback(err); - } + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing table, e.g. 
"my_table" + const table = dataset.table(tableId); - console.log('Found %d row(s)!', rows.length); - return callback(null, rows); - }); + // Deletes the table + return table.delete() + .then(() => { + console.log(`Table ${table.id} deleted.`); + }); } +// [END bigquery_delete_table] -function deleteTable (datasetId, tableId, callback) { - var bigquery = BigQuery(); - var table = bigquery.dataset(datasetId).table(tableId); - - // Delete the specified table - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=delete - table.delete(function (err) { - if (err) { - return callback(err); - } - - console.log('Deleted table %s from %s', tableId, datasetId); - return callback(null); +// [START bigquery_list_tables] +function listTables (datasetId, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId }); -} - -function copyTable (srcDatasetId, srcTableId, destDatasetId, destTableId, callback) { - var bigquery = BigQuery(); - var srcTable = bigquery.dataset(srcDatasetId).table(srcTableId); - var destTable = bigquery.dataset(destDatasetId).table(destTableId); - - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=copy - srcTable.copy(destTable, function (err, job, apiResponse) { - if (err) { - return callback(err); - } - - console.log('Started job: %s', job.id); - job - .on('error', callback) - .on('complete', function (metadata) { - console.log('Completed job: %s', job.id); - return callback(null, metadata, apiResponse); - }); - }); + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + + // Lists all tables in the dataset + return dataset.getTables() + .then((results) => { + const tables = results[0]; + console.log('Tables:'); + tables.forEach((table) => console.log(table.id)); + return tables; + }); } +// [END bigquery_list_tables] -function importLocalFile (datasetId, tableId, fileName, callback) { - var bigquery = BigQuery(); - var table = bigquery.dataset(datasetId).table(tableId); - - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=import - table.import(fileName, function (err, job, apiResponse) { - if (err) { - console.log(err.stack); - return callback(err); - } - - console.log('Started job: %s', job.id); - job - .on('error', callback) - .on('complete', function (metadata) { - console.log('Completed job: %s', job.id); - return callback(null, metadata, apiResponse); - }); +// [START bigquery_browse_table] +function browseRows (datasetId, tableId, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId }); -} -// [START import_file_from_gcs] -var Storage = require('@google-cloud/storage'); - -function importFileFromGCS (datasetId, tableId, bucketName, fileName, callback) { - var bigquery = BigQuery(); - var storage = Storage(); + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing table, e.g. 
"my_table" + const table = dataset.table(tableId); + + // Lists rows in the table + return table.getRows() + .then((results) => { + const rows = results[0]; + console.log('Rows:'); + rows.forEach((row) => console.log(row)); + return rows; + }); +} +// [END bigquery_browse_table] - var table = bigquery.dataset(datasetId).table(tableId); - var file = storage.bucket(bucketName).file(fileName); +// [START bigquery_copy_table] +function copyTable (srcDatasetId, srcTableId, destDatasetId, destTableId, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId + }); - // Import the file from Google Cloud Storage - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=import - table.import(file, function (err, job, apiResponse) { - if (err) { - return callback(err); - } + // References the source dataset, e.g. "my_dataset" + const srcDataset = bigquery.dataset(srcDatasetId); + // References the source table, e.g. "my_table" + const srcTable = srcDataset.table(srcTableId); + // References the destination dataset, e.g. "my_other_dataset" + const destDataset = bigquery.dataset(destDatasetId); + // References the destination table, e.g. "my_other_table" + const destTable = destDataset.table(destTableId); + + let job; + + // Copies the table contents into another table + return srcTable.copy(destTable) + .then((results) => { + job = results[0]; + console.log(`Job ${job.id} started.`); + return job.promise(); + }) + .then((results) => { + console.log(`Job ${job.id} completed.`); + return results; + }); +} +// [END bigquery_copy_table] - console.log('Started job: %s', job.id); - job - .on('error', callback) - .on('complete', function (metadata) { - console.log('Completed job: %s', job.id); - return callback(null, metadata, apiResponse); - }); +// [START bigquery_import_from_file] +function importLocalFile (datasetId, tableId, fileName, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId }); -} -// [END import_file_from_gcs] -/* eslint-disable no-redeclare */ -// [START export_table_to_gcs] -var Storage = require('@google-cloud/storage'); + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing dataset, e.g. "my_dataset" + const table = dataset.table(tableId); + + let job; + + // Imports data from a local file into the table + return table.import(fileName) + .then((results) => { + job = results[0]; + console.log(`Job ${job.id} started.`); + return job.promise(); + }) + .then((results) => { + console.log(`Job ${job.id} completed.`); + return results; + }); +} +// [END bigquery_import_from_file] -function exportTableToGCS (datasetId, tableId, bucketName, fileName, callback) { - var bigquery = BigQuery(); - var storage = Storage(); +// [START bigquery_import_from_gcs] +function importFileFromGCS (datasetId, tableId, bucketName, fileName, projectId) { + // Instantiates clients + const bigquery = BigQuery({ + projectId: projectId + }); + const storage = Storage({ + projectId: projectId + }); - var table = bigquery.dataset(datasetId).table(tableId); - var file = storage.bucket(bucketName).file(fileName); + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing dataset, e.g. "my_dataset" + const table = dataset.table(tableId); + // References an existing bucket, e.g. 
"my-bucket" + const bucket = storage.bucket(bucketName); + // References an existing file, e.g. "file.txt" + const file = bucket.file(fileName); + + let job; + + // Imports data from a GCS file into a table + return table.import(file) + .then((results) => { + job = results[0]; + console.log(`Job ${job.id} started.`); + return job.promise(); + }) + .then((results) => { + console.log(`Job ${job.id} completed.`); + return results; + }); +} +// [END bigquery_import_from_gcs] - // Export a table to Google Cloud Storage - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=export - table.export(file, function (err, job, apiResponse) { - if (err) { - return callback(err); - } - console.log('Started job: %s', job.id); - - job - .on('error', callback) - .on('complete', function (metadata) { - console.log('Completed job: %s', job.id); - return callback(null, metadata, apiResponse); - }); +// [START bigquery_export_gcs] +function exportTableToGCS (datasetId, tableId, bucketName, fileName, projectId) { + // Instantiates clients + const bigquery = BigQuery({ + projectId: projectId }); + const storage = Storage({ + projectId: projectId + }); + + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing dataset, e.g. "my_dataset" + const table = dataset.table(tableId); + // References an existing bucket, e.g. "my-bucket" + const bucket = storage.bucket(bucketName); + // References an existing file, e.g. "file.txt" + const file = bucket.file(fileName); + + let job; + + // Exports data in a table into a Google Cloud Storage file + return table.export(file) + .then((results) => { + job = results[0]; + console.log(`Job ${job.id} started.`); + return job.promise(); + }) + .then((results) => { + console.log(`Job ${job.id} completed.`); + return results; + }); } -// [END export_table_to_gcs] -/* eslint-enable no-redeclare */ - -function insertRowsAsStream (datasetId, tableId, rows, callback) { - var bigquery = BigQuery(); - var table = bigquery.dataset(datasetId).table(tableId); - - // Insert rows into a table - // See https://googlecloudplatform.github.io/google-cloud-node/#/docs/bigquery/latest/bigquery/table?method=insert - table.insert(rows, function (err, insertErrors, apiResponse) { - if (err) { - return callback(err); - } - console.log('Inserted %d row(s)!', rows.length); - return callback(null, insertErrors, apiResponse); +// [END bigquery_export_gcs] + +// [START bigquery_insert_stream] +function insertRowsAsStream (datasetId, tableId, rows, projectId) { + // Instantiates a client + const bigquery = BigQuery({ + projectId: projectId }); + + // References an existing dataset, e.g. "my_dataset" + const dataset = bigquery.dataset(datasetId); + // References an existing dataset, e.g. 
"my_dataset" + const table = dataset.table(tableId); + + // Inserts data into a table + return table.insert(rows) + .then((insertErrors) => { + console.log('Inserted:'); + rows.forEach((row) => console.log(row)); + return insertErrors; + }); } +// [END bigquery_insert_stream] // The command-line program -var cli = require('yargs'); -var utils = require('../utils'); -var fs = require('fs'); +const cli = require(`yargs`); +const fs = require(`fs`); -var program = module.exports = { +const program = module.exports = { createTable: createTable, listTables: listTables, browseRows: browseRows, @@ -221,7 +274,7 @@ var program = module.exports = { exportTableToGCS: exportTableToGCS, insertRowsAsStream: insertRowsAsStream, copyTable: copyTable, - main: function (args) { + main: (args) => { // Run the command-line program cli.help().strict().parse(args).argv; } @@ -229,46 +282,38 @@ var program = module.exports = { cli .demand(1) - .command('create ', 'Create a new table with the specified ID in the specified dataset.', {}, function (options) { - program.createTable(options.datasetId, options.tableId, utils.makeHandler(false)); + .command(`create [projectId]`, `Creates a new table.`, {}, (opts) => { + program.createTable(opts.datasetId, opts.tableId, opts.schema, opts.projectId || process.env.GCLOUD_PROJECT); }) - .command('list ', 'List tables in the specified dataset.', {}, function (options) { - program.listTables(options.datasetId, utils.makeHandler(true, 'id')); + .command(`list [projectId]`, `Lists all tables in a dataset.`, {}, (opts) => { + program.listTables(opts.datasetId, opts.projectId || process.env.GCLOUD_PROJECT); }) - .command('delete ', 'Delete the specified table from the specified dataset.', {}, function (options) { - program.deleteTable(options.datasetId, options.tableId, utils.makeHandler(false)); + .command(`delete [projectId]`, `Deletes a table.`, {}, (opts) => { + program.deleteTable(opts.datasetId, opts.tableId, opts.projectId || process.env.GCLOUD_PROJECT); }) - .command('copy ', 'Make a copy of an existing table.', {}, function (options) { - program.copyTable(options.srcDatasetId, options.srcTableId, options.destDatasetId, options.destTableId, utils.makeHandler(false)); + .command(`copy [projectId]`, `Makes a copy of a table.`, {}, (opts) => { + program.copyTable(opts.srcDatasetId, opts.srcTableId, opts.destDatasetId, opts.destTableId, opts.projectId || process.env.GCLOUD_PROJECT); }) - .command('browse ', 'List the rows from the specified table.', {}, function (options) { - program.browseRows(options.datasetId, options.tableId, utils.makeHandler()); + .command(`browse [projectId]`, `Lists rows in a table.`, {}, (opts) => { + program.browseRows(opts.datasetId, opts.tableId, opts.projectId || process.env.GCLOUD_PROJECT); }) - .command('import ', 'Import data from a local file or a Google Cloud Storage file into the specified table.', { - bucketName: { - alias: 'b', - requiresArg: true, - description: 'Specify a Cloud Storage bucket.', - type: 'string' - } - }, function (options) { - if (options.bucketName) { - program.importFileFromGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false)); - } else { - program.importLocalFile(options.datasetId, options.tableId, options.fileName, utils.makeHandler(false)); - } + .command(`import [projectId]`, `Imports data from a local file into a table.`, {}, (opts) => { + program.importLocalFile(opts.datasetId, opts.tableId, opts.fileName, opts.projectId || process.env.GCLOUD_PROJECT); + }) + 
 
 // The command-line program
-var cli = require('yargs');
-var utils = require('../utils');
-var fs = require('fs');
+const cli = require(`yargs`);
+const fs = require(`fs`);
 
-var program = module.exports = {
+const program = module.exports = {
   createTable: createTable,
   listTables: listTables,
   browseRows: browseRows,
@@ -221,7 +274,7 @@ var program = module.exports = {
   exportTableToGCS: exportTableToGCS,
   insertRowsAsStream: insertRowsAsStream,
   copyTable: copyTable,
-  main: function (args) {
+  main: (args) => {
     // Run the command-line program
     cli.help().strict().parse(args).argv;
   }
@@ -229,46 +282,38 @@ var program = module.exports = {
 
 cli
   .demand(1)
-  .command('create <datasetId> <tableId>', 'Create a new table with the specified ID in the specified dataset.', {}, function (options) {
-    program.createTable(options.datasetId, options.tableId, utils.makeHandler(false));
+  .command(`create <datasetId> <tableId> <schema> [projectId]`, `Creates a new table.`, {}, (opts) => {
+    program.createTable(opts.datasetId, opts.tableId, opts.schema, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .command('list <datasetId>', 'List tables in the specified dataset.', {}, function (options) {
-    program.listTables(options.datasetId, utils.makeHandler(true, 'id'));
+  .command(`list <datasetId> [projectId]`, `Lists all tables in a dataset.`, {}, (opts) => {
+    program.listTables(opts.datasetId, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .command('delete <datasetId> <tableId>', 'Delete the specified table from the specified dataset.', {}, function (options) {
-    program.deleteTable(options.datasetId, options.tableId, utils.makeHandler(false));
+  .command(`delete <datasetId> <tableId> [projectId]`, `Deletes a table.`, {}, (opts) => {
+    program.deleteTable(opts.datasetId, opts.tableId, opts.projectId || process.env.GCLOUD_PROJECT);
  })
-  .command('copy <srcDatasetId> <srcTableId> <destDatasetId> <destTableId>', 'Make a copy of an existing table.', {}, function (options) {
-    program.copyTable(options.srcDatasetId, options.srcTableId, options.destDatasetId, options.destTableId, utils.makeHandler(false));
+  .command(`copy <srcDatasetId> <srcTableId> <destDatasetId> <destTableId> [projectId]`, `Makes a copy of a table.`, {}, (opts) => {
+    program.copyTable(opts.srcDatasetId, opts.srcTableId, opts.destDatasetId, opts.destTableId, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .command('browse <datasetId> <tableId>', 'List the rows from the specified table.', {}, function (options) {
-    program.browseRows(options.datasetId, options.tableId, utils.makeHandler());
   })
-  .command('import <datasetId> <tableId> <fileName>', 'Import data from a local file or a Google Cloud Storage file into the specified table.', {
-    bucketName: {
-      alias: 'b',
-      requiresArg: true,
-      description: 'Specify a Cloud Storage bucket.',
-      type: 'string'
-    }
-  }, function (options) {
-    if (options.bucketName) {
-      program.importFileFromGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false));
-    } else {
-      program.importLocalFile(options.datasetId, options.tableId, options.fileName, utils.makeHandler(false));
-    }
+  .command(`browse <datasetId> <tableId> [projectId]`, `Lists rows in a table.`, {}, (opts) => {
+    program.browseRows(opts.datasetId, opts.tableId, opts.projectId || process.env.GCLOUD_PROJECT);
+  })
+  .command(`import <datasetId> <tableId> <fileName> [projectId]`, `Imports data from a local file into a table.`, {}, (opts) => {
+    program.importLocalFile(opts.datasetId, opts.tableId, opts.fileName, opts.projectId || process.env.GCLOUD_PROJECT);
+  })
+  .command(`import-gcs <datasetId> <tableId> <bucketName> <fileName> [projectId]`, `Imports data from a Google Cloud Storage file into a table.`, {}, (opts) => {
+    program.importFileFromGCS(opts.datasetId, opts.tableId, opts.bucketName, opts.fileName, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .command('export <datasetId> <tableId> <bucketName> <fileName>', 'Export a table from BigQuery to Google Cloud Storage.', {}, function (options) {
-    program.exportTableToGCS(options.datasetId, options.tableId, options.bucketName, options.fileName, utils.makeHandler(false));
+  .command(`export <datasetId> <tableId> <bucketName> <fileName> [projectId]`, `Exports a table from BigQuery to Google Cloud Storage.`, {}, (opts) => {
+    program.exportTableToGCS(opts.datasetId, opts.tableId, opts.bucketName, opts.fileName, opts.projectId || process.env.GCLOUD_PROJECT);
   })
-  .command('insert <datasetId> <tableId> <json_or_file>',
-    'Insert a JSON array (as a string or newline-delimited file) into a BigQuery table.', {},
-    function (options) {
+  .command(`insert <datasetId> <tableId> <json_or_file> [projectId]`,
+    `Inserts a JSON array (as a string or newline-delimited file) into a BigQuery table.`, {},
+    (opts) => {
       var content;
       try {
-        content = fs.readFileSync(options.json_or_file);
+        content = fs.readFileSync(opts.json_or_file);
       } catch (err) {
-        content = options.json_or_file;
+        content = opts.json_or_file;
       }
 
       var rows = null;
@@ -277,59 +322,59 @@ cli
       } catch (err) {}
 
       if (!Array.isArray(rows)) {
-        throw new Error('"json_or_file" (or the file it points to) is not a valid JSON array.');
+        throw new Error(`"json_or_file" (or the file it points to) is not a valid JSON array.`);
      }
 
-      program.insertRowsAsStream(options.datasetId, options.tableId, rows, utils.makeHandler(false));
+      program.insertRowsAsStream(opts.datasetId, opts.tableId, rows, opts.projectId || process.env.GCLOUD_PROJECT);
    }
  )
  .example(
-    'node $0 create my_dataset my_table',
-    'Create table "my_table" in "my_dataset".'
+    `node $0 create my_dataset my_table "Name:string, Age:integer, Weight:float, IsMagic:boolean"`,
+    `Creates a new table named "my_table" in "my_dataset".`
  )
  .example(
-    'node $0 list my_dataset',
-    'List tables in "my_dataset".'
+    `node $0 list my_dataset`,
+    `Lists tables in "my_dataset".`
  )
  .example(
-    'node $0 browse my_dataset my_table',
-    'Display rows from "my_table" in "my_dataset".'
+    `node $0 browse my_dataset my_table`,
+    `Displays rows from "my_table" in "my_dataset".`
  )
  .example(
-    'node $0 delete my_dataset my_table',
-    'Delete "my_table" from "my_dataset".'
+    `node $0 delete my_dataset my_table`,
+    `Deletes "my_table" from "my_dataset".`
  )
  .example(
-    'node $0 import my_dataset my_table ./data.csv',
-    'Import a local file into a table.'
+    `node $0 import my_dataset my_table ./data.csv`,
+    `Imports a local file into a table.`
  )
  .example(
-    'node $0 import my_dataset my_table data.csv --bucket my-bucket',
-    'Import a GCS file into a table.'
+    `node $0 import-gcs my_dataset my_table my-bucket data.csv`,
+    `Imports a GCS file into a table.`
  )
  .example(
-    'node $0 export my_dataset my_table my-bucket my-file',
-    'Export my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.'
+    `node $0 export my_dataset my_table my-bucket my-file`,
+    `Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV.`
  )
  .example(
-    'node $0 export my_dataset my_table my-bucket my-file -f JSON --gzip',
-    'Export my_dataset:my_table to gcs://my-bucket/my-file as gzipped JSON.'
+ `node $0 export my_dataset my_table my-bucket my-file -f JSON --gzip`, + `Exports my_dataset:my_table to gcs://my-bucket/my-file as gzipped JSON.` ) .example( - 'node $0 insert my_dataset my_table json_string', - 'Insert the JSON array represented by json_string into my_dataset:my_table.' + `node $0 insert my_dataset my_table json_string`, + `Inserts the JSON array represented by json_string into my_dataset:my_table.` ) .example( - 'node $0 insert my_dataset my_table json_file', - 'Insert the JSON objects contained in json_file (one per line) into my_dataset:my_table.' + `node $0 insert my_dataset my_table json_file`, + `Inserts the JSON objects contained in json_file (one per line) into my_dataset:my_table.` ) .example( - 'node $0 copy src_dataset src_table dest_dataset dest_table', - 'Copy src_dataset:src_table to dest_dataset:dest_table.' + `node $0 copy src_dataset src_table dest_dataset dest_table`, + `Copies src_dataset:src_table to dest_dataset:dest_table.` ) .wrap(120) .recommendCommands() - .epilogue('For more information, see https://cloud.google.com/bigquery/docs'); + .epilogue(`For more information, see https://cloud.google.com/bigquery/docs`); if (module === require.main) { program.main(process.argv.slice(2)); diff --git a/bigquery/test/datasets.test.js b/bigquery/test/datasets.test.js index 5d91dda111..5af24bfd5d 100644 --- a/bigquery/test/datasets.test.js +++ b/bigquery/test/datasets.test.js @@ -1,220 +1,27 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ 'use strict'; -var proxyquire = require('proxyquire').noCallThru(); -var datasetId = 'foo'; -var projectId = process.env.GCLOUD_PROJECT; +const program = require(`../datasets`); -function getSample () { - var apiResponseMock = {}; - var tableMock = { - get: sinon.stub(), - metadata: { - numBytes: 1000000 - } - }; - tableMock.get.yields(null, tableMock); - var tablesMock = [tableMock]; - var datasetsMock = [{ id: datasetId }]; - var datasetMock = { - getTables: sinon.stub().yields(null, tablesMock), - create: sinon.stub().yields(null, datasetsMock[0], apiResponseMock), - delete: sinon.stub().yields(null) - }; - var bigqueryMock = { - getDatasets: sinon.stub().yields(null, datasetsMock), - dataset: sinon.stub().returns(datasetMock) - }; - var BigQueryMock = sinon.stub().returns(bigqueryMock); - - return { - program: proxyquire('../datasets', { - '@google-cloud/bigquery': BigQueryMock - }), - mocks: { - BigQuery: BigQueryMock, - bigquery: bigqueryMock, - datasets: datasetsMock, - dataset: datasetMock, - tables: tablesMock, - table: tableMock, - apiResponse: apiResponseMock - } - }; -} - -describe('bigquery:datasets', function () { - describe('createDataset', function () { - it('should create a dataset', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.createDataset(datasetId, callback); - - assert.equal(sample.mocks.dataset.create.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.create.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets[0], sample.mocks.apiResponse]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created dataset: %s', datasetId]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.dataset.create.yields(error); - - sample.program.createDataset(datasetId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('deleteDataset', function () { - it('should delete a dataset', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.deleteDataset(datasetId, callback); - - assert.equal(sample.mocks.dataset.delete.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.delete.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Deleted dataset: %s', datasetId]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.dataset.delete.yields(error); - - sample.program.deleteDataset(datasetId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('listDatasets', function () { - it('should list datasets', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.listDatasets(projectId, callback); - - assert.equal(sample.mocks.bigquery.getDatasets.calledOnce, true); - assert.deepEqual(sample.mocks.bigquery.getDatasets.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets]); - 
assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d dataset(s)!', sample.mocks.datasets.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.bigquery.getDatasets.yields(error); - - sample.program.listDatasets(projectId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('getDatasetSize', function () { - it('should calculate size of a dataset', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.getDatasetSize(datasetId, projectId, callback); - - assert.equal(sample.mocks.dataset.getTables.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.getTables.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, 1]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Size of %s: %d MB', datasetId, 1]); - }); - - it('should handle dataset.getTables error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.dataset.getTables.yields(error); - - sample.program.getDatasetSize(datasetId, projectId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - - it('should handle table.get error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.get.yields(error); - - sample.program.getDatasetSize(datasetId, projectId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('main', function () { - it('should call createDataset', function () { - var program = getSample().program; - - sinon.stub(program, 'createDataset'); - program.main(['create', datasetId]); - assert.equal(program.createDataset.calledOnce, true); - assert.deepEqual(program.createDataset.firstCall.args.slice(0, -1), [datasetId]); - }); - - it('should call deleteDataset', function () { - var program = getSample().program; - - sinon.stub(program, 'deleteDataset'); - program.main(['delete', datasetId]); - assert.equal(program.deleteDataset.calledOnce, true); - assert.deepEqual(program.deleteDataset.firstCall.args.slice(0, -1), [datasetId]); - }); - - it('should call listDatasets', function () { - var program = getSample().program; - - sinon.stub(program, 'listDatasets'); - program.main(['list']); - assert.equal(program.listDatasets.calledOnce, true); - assert.deepEqual(program.listDatasets.firstCall.args.slice(0, -1), [projectId]); - }); - - it('should call getDatasetSize', function () { - var program = getSample().program; - - sinon.stub(program, 'getDatasetSize'); - program.main(['size', datasetId]); - assert.equal(program.getDatasetSize.calledOnce, true); - assert.deepEqual(program.getDatasetSize.firstCall.args.slice(0, -1), [datasetId, projectId]); - }); +describe(`bigquery:datasets`, () => { + it(`should have expected exports`, () => { + assert.equal(typeof program.createDataset, `function`); + assert.equal(typeof program.deleteDataset, `function`); + assert.equal(typeof program.listDatasets, `function`); + assert.equal(typeof program.getDatasetSize, `function`); }); }); diff --git a/bigquery/test/queries.test.js b/bigquery/test/queries.test.js index 
c5d95c6e27..468456546d 100644 --- a/bigquery/test/queries.test.js +++ b/bigquery/test/queries.test.js @@ -1,249 +1,26 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -var proxyquire = require('proxyquire').noCallThru(); +const program = require(`../queries`); -var shakespeareQuery = 'SELECT\n' + - ' TOP(corpus, 10) as title,\n' + - ' COUNT(*) as unique_words\n' + - 'FROM `publicdata.samples.shakespeare`;'; - -function getSample () { - var natalityMock = [ - { year: '2001' }, - { year: '2002' }, - { year: '2003' }, - { year: '2004' }, - { year: '2005' } - ]; - - var metadataMock = { status: { state: 'DONE' } }; - - var jobId = 'abc'; - - var jobMock = { - id: jobId, - getQueryResults: sinon.stub().yields(null, natalityMock), - getMetadata: sinon.stub().yields(null, metadataMock), - on: sinon.stub().returnsThis() - }; - jobMock.on.withArgs('complete').yields(null, metadataMock); - - var bigqueryMock = { - job: sinon.stub().returns(jobMock), - startQuery: sinon.stub().yields(null, jobMock), - query: sinon.stub().yields(null, natalityMock) - }; - - var BigQueryMock = sinon.stub().returns(bigqueryMock); - - return { - program: proxyquire('../queries', { - '@google-cloud/bigquery': BigQueryMock - }), - mocks: { - BigQuery: BigQueryMock, - bigquery: bigqueryMock, - natality: natalityMock, - metadata: metadataMock, - job: jobMock - }, - jobId: jobId - }; -} - -describe('bigquery:query', function () { - describe('printExample', function () { - it('should return results', function () { - var example = getSample(); - - example.program.printExample([ - { - foo: 'bar', - beep: 'boop' - } - ]); - - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Query Results:']); - assert.deepEqual(console.log.secondCall.args, ['foo: bar\nbeep: boop']); - }); - }); - - describe('queryShakespeare', function () { - it('should query shakespeare', function () { - var example = getSample(); - var callback = sinon.stub(); - var mockResult = []; - example.mocks.bigquery.query.yields(null, mockResult); - - example.program.queryShakespeare(callback); - - assert.equal(example.mocks.bigquery.query.calledOnce, true); - assert.deepEqual(example.mocks.bigquery.query.firstCall.args.slice(0, -1), [{ - query: shakespeareQuery, - useLegacySql: false - }]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, 
[null, mockResult]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Query Results:']); - }); - - it('should handle error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.bigquery.query.yields(error); - - example.program.queryShakespeare(callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('syncQuery', function () { - var query = 'foo'; - - it('should return results', function () { - var example = getSample(); - var callback = sinon.stub(); - - example.program.syncQuery(query, callback); - - assert.equal(example.mocks.bigquery.query.calledOnce, true); - assert.deepEqual(example.mocks.bigquery.query.firstCall.args.slice(0, -1), [{ - query: query, - timeoutMs: 10000, - useLegacySql: false - }]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Received %d row(s)!', example.mocks.natality.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.bigquery.query.yields(error); - - example.program.syncQuery(query, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('asyncQuery', function () { - var query = 'foo'; - - it('should submit a job', function () { - var example = getSample(); - var callback = sinon.stub(); - - example.program.asyncQuery(query, callback); - - assert.equal(example.mocks.bigquery.startQuery.calledOnce, true); - assert.deepEqual(example.mocks.bigquery.startQuery.firstCall.args.slice(0, -1), [{ - query: query, - useLegacySql: false - }]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Started job: %s', example.jobId]); - assert.deepEqual(console.log.secondCall.args, ['Job complete, received %d row(s)!', example.mocks.natality.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.bigquery.startQuery.yields(error); - - example.program.asyncQuery(query, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('waitForJob', function () { - it('should get the results of a job given its ID', function () { - var example = getSample(); - var callback = sinon.stub(); - example.mocks.bigquery.job.returns(example.mocks.job); - - example.program.waitForJob(example.jobId, callback); - - assert.equal(example.mocks.job.on.calledTwice, true); - assert.deepEqual(example.mocks.job.on.firstCall.args.slice(0, -1), ['error']); - assert.deepEqual(example.mocks.job.on.secondCall.args.slice(0, -1), ['complete']); - assert.equal(example.mocks.job.getQueryResults.calledOnce, true); - assert.deepEqual(example.mocks.job.getQueryResults.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, example.mocks.natality]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Job 
complete, received %d row(s)!', example.mocks.natality.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.job.getQueryResults.yields(error); - - example.program.waitForJob(example.jobId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('main', function () { - var query = 'foo'; - var jobId = 'foo'; - - it('should call syncQuery', function () { - var program = getSample().program; - - sinon.stub(program, 'syncQuery'); - program.main(['sync', query]); - assert.equal(program.syncQuery.calledOnce, true); - assert.deepEqual(program.syncQuery.firstCall.args.slice(0, -1), [query]); - }); - - it('should call asyncQuery', function () { - var program = getSample().program; - - sinon.stub(program, 'asyncQuery'); - program.main(['async', query]); - assert.equal(program.asyncQuery.calledOnce, true); - assert.deepEqual(program.asyncQuery.firstCall.args.slice(0, -1), [query]); - }); - - it('should call waitForJob', function () { - var program = getSample().program; - - sinon.stub(program, 'waitForJob'); - program.main(['wait', jobId]); - assert.equal(program.waitForJob.calledOnce, true); - assert.deepEqual(program.waitForJob.firstCall.args.slice(0, -1), [jobId]); - }); +describe(`bigquery:queries`, () => { + it(`should have expected exports`, () => { + assert.equal(typeof program.queryShakespeare, `function`); + assert.equal(typeof program.syncQuery, `function`); + assert.equal(typeof program.asyncQuery, `function`); }); }); diff --git a/bigquery/test/quickstart.test.js b/bigquery/test/quickstart.test.js index dcbd68a316..d862dceed1 100644 --- a/bigquery/test/quickstart.test.js +++ b/bigquery/test/quickstart.test.js @@ -23,7 +23,7 @@ describe(`bigquery:quickstart`, () => { before(() => { bigqueryMock = { - createDataset: sinon.stub().yields(error) + createDataset: sinon.stub().returns(Promise.reject(error)) }; BigqueryMock = sinon.stub().returns(bigqueryMock); }); @@ -36,8 +36,6 @@ describe(`bigquery:quickstart`, () => { assert.equal(BigqueryMock.calledOnce, true); assert.deepEqual(BigqueryMock.firstCall.args, [{ projectId: 'YOUR_PROJECT_ID' }]); assert.equal(bigqueryMock.createDataset.calledOnce, true); - assert.deepEqual(bigqueryMock.createDataset.firstCall.args.slice(0, -1), ['my_new_dataset']); - assert.equal(console.error.calledOnce, true); - assert.deepEqual(console.error.firstCall.args, [error]); + assert.deepEqual(bigqueryMock.createDataset.firstCall.args, ['my_new_dataset']); }); }); diff --git a/bigquery/test/tables.test.js b/bigquery/test/tables.test.js index 16592ffc5c..f39404cf94 100644 --- a/bigquery/test/tables.test.js +++ b/bigquery/test/tables.test.js @@ -1,529 +1,33 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; -var proxyquire = require('proxyquire').noCallThru(); -var bucketName = 'bucket'; -var fileName = 'file'; -var jobId = 'job'; -var datasetId = 'dataset'; -var tableId = 'table'; -var srcDatasetId = datasetId; -var srcTableId = tableId; -var destDatasetId = datasetId + '_dest'; -var destTableId = tableId + '_dest'; -var schema = 'schema'; -var jsonArray = [ - { name: 'foo', age: 27 }, - { name: 'bar', age: 13 } -]; -var validJsonFile = 'validJsonFile'; -var invalidJsonFile = 'invalidJsonFile'; -var validJsonString = JSON.stringify(jsonArray); -var invalidJsonString = 'INVALID'; -var errorList = ['error 1', 'error 2']; - -function getSample () { - var apiResponseMock = {}; - var tableMocks = [ - { - id: tableId - } - ]; - var bucketMock = { - file: sinon.stub().returns(fileMock) - }; - var storageMock = { - bucket: sinon.stub().returns(bucketMock) - }; - var fileMock = {}; - var metadataMock = { status: { state: 'DONE' } }; - var jobMock = { - id: jobId, - getMetadata: sinon.stub().yields(null, metadataMock), - on: sinon.stub().returnsThis() - }; - jobMock.on.withArgs('complete').yields(metadataMock); - var tableMock = { - export: sinon.stub().yields(null, jobMock, apiResponseMock), - copy: sinon.stub().yields(null, jobMock, apiResponseMock), - import: sinon.stub().yields(null, jobMock, apiResponseMock), - insert: sinon.stub().yields(null, errorList, apiResponseMock), - getRows: sinon.stub().yields(null, jsonArray), - delete: sinon.stub().yields(null) - }; - var datasetMock = { - table: sinon.stub().returns(tableMock), - createTable: sinon.stub().yields(null, tableMocks[0], apiResponseMock), - getTables: sinon.stub().yields(null, tableMocks) - }; - var bigqueryMock = { - job: sinon.stub().returns(jobMock), - dataset: sinon.stub().returns(datasetMock) - }; - var BigQueryMock = sinon.stub().returns(bigqueryMock); - var StorageMock = sinon.stub().returns(storageMock); - var fsMock = { - readFileSync: sinon.stub().throws(new Error('Invalid file.')) - }; - fsMock.readFileSync.withArgs(validJsonFile).returns(validJsonString); - fsMock.readFileSync.withArgs(invalidJsonFile).returns(invalidJsonString); - - return { - program: proxyquire('../tables', { - '@google-cloud/bigquery': BigQueryMock, - '@google-cloud/storage': StorageMock, - 'fs': fsMock, - yargs: proxyquire('yargs', {}) - }), - mocks: { - BigQuery: BigQueryMock, - bigquery: bigqueryMock, - Storage: StorageMock, - storage: storageMock, - metadata: metadataMock, - job: jobMock, - table: tableMock, - bucket: bucketMock, - dataset: datasetMock, - fs: fsMock, - tables: tableMocks, - apiResponse: apiResponseMock - } - }; -} - -describe('bigquery:tables', function () { - describe('createTable', function () { - it('should create a table', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.createTable(datasetId, tableId, undefined, callback); - - assert.equal(sample.mocks.dataset.createTable.calledOnce, true); - 
assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [tableId, { schema: undefined }]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0], sample.mocks.apiResponse]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', tableId, datasetId]); - }); - - it('should create a table with a schema', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.createTable(datasetId, tableId, schema, callback); - - assert.equal(sample.mocks.dataset.createTable.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [tableId, { schema: schema }]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0], sample.mocks.apiResponse]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Created table %s in %s', tableId, datasetId]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.dataset.createTable.yields(error); - - sample.program.createTable(datasetId, tableId, undefined, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('listTables', function () { - it('should list tables', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.listTables(datasetId, callback); - - assert.equal(sample.mocks.dataset.getTables.calledOnce, true); - assert.deepEqual(sample.mocks.dataset.getTables.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d table(s)!', sample.mocks.tables.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.dataset.getTables.yields(error); - - sample.program.listTables({}, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('browseRows', function () { - it('should display rows', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.browseRows(datasetId, tableId, callback); - - assert.equal(sample.mocks.table.getRows.calledOnce, true); - assert.deepEqual(sample.mocks.table.getRows.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, jsonArray]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Found %d row(s)!', jsonArray.length]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.getRows.yields(error); - - sample.program.browseRows(datasetId, tableId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('deleteTable', function () { - it('should delete a table', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.deleteTable(datasetId, tableId, callback); - - 
assert.equal(sample.mocks.table.delete.calledOnce, true); - assert.deepEqual(sample.mocks.table.delete.firstCall.args.slice(0, -1), []); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Deleted table %s from %s', tableId, datasetId]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.delete.yields(error); - - sample.program.deleteTable(datasetId, tableId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('importLocalFile', function () { - it('should import a local file', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.importLocalFile(datasetId, tableId, fileName, callback); - - assert.equal(sample.mocks.table.import.calledOnce, true); - assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [fileName]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); - assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.import.yields(error); - - sample.program.importLocalFile(datasetId, tableId, fileName, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('importFileFromGCS', function () { - it('should import a GCS file', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.importFileFromGCS(datasetId, tableId, bucketName, fileName, callback); - - assert.equal(sample.mocks.table.import.calledOnce, true); - assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [sample.mocks.file]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); - assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.import.yields(error); - - sample.program.importFileFromGCS(datasetId, tableId, bucketName, fileName, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('copyTable', function () { - it('should copy a table', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, callback); - - assert.equal(sample.mocks.table.copy.calledOnce, true); - assert.deepEqual( - sample.mocks.table.copy.firstCall.args.slice(0, -1), - [sample.mocks.table] - ); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, 
sample.mocks.apiResponse]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); - assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); - }); - - it('should handle error', function () { - var error = new Error('error'); - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.copy.yields(error); - - sample.program.copyTable(srcDatasetId, srcTableId, destDatasetId, destTableId, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('exportTableToGCS', function () { - it('should export to a table', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.exportTableToGCS(datasetId, tableId, bucketName, fileName, callback); - - assert.equal(sample.mocks.table.export.calledOnce, true); - assert.deepEqual(sample.mocks.table.export.firstCall.args.slice(0, -1), [sample.mocks.file]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata, sample.mocks.apiResponse]); - assert.equal(console.log.calledTwice, true); - assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); - assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); - }); - - it('should handle export error', function () { - var error = new Error('error'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.table.export.yields(error); - - example.program.exportTableToGCS(datasetId, tableId, bucketName, fileName, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - }); - - describe('insertRowsAsStream', function () { - it('should stream-insert rows into a table', function () { - var sample = getSample(); - var callback = sinon.stub(); - - sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); - - assert.equal(sample.mocks.table.insert.calledOnce, true); - assert.deepEqual(sample.mocks.table.insert.firstCall.args.slice(0, -1), [jsonArray]); - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, errorList, sample.mocks.apiResponse]); - assert.equal(console.log.calledOnce, true); - assert.deepEqual(console.log.firstCall.args, ['Inserted %d row(s)!', jsonArray.length]); - }); - - it('should handle API errors', function () { - var sample = getSample(); - var callback = sinon.stub(); - var error = new Error('error'); - sample.mocks.table.insert.yields(error); - - sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [error]); - }); - - it('should handle (per-row) insert errors', function () { - var sample = getSample(); - var callback = sinon.stub(); - sample.mocks.table.insert.yields(null, errorList, sample.mocks.apiResponse); - - sample.program.insertRowsAsStream(datasetId, tableId, jsonArray, callback); - - assert.equal(callback.calledOnce, true); - assert.deepEqual(callback.firstCall.args, [null, errorList, sample.mocks.apiResponse]); - }); - }); - - describe('main', function () { - it('should call createTable', function () { - var program = getSample().program; - program.createTable = sinon.stub(); - - program.main(['create', datasetId, tableId]); - assert.equal(program.createTable.calledOnce, true); - 
assert.deepEqual(program.createTable.firstCall.args.slice(0, -1), [datasetId, tableId]); - }); - - it('should call listTables', function () { - var program = getSample().program; - program.listTables = sinon.stub(); - - program.main(['list', datasetId]); - assert.equal(program.listTables.calledOnce, true); - assert.deepEqual(program.listTables.firstCall.args.slice(0, -1), [datasetId]); - }); - - it('should call browseRows', function () { - var program = getSample().program; - program.browseRows = sinon.stub(); - - program.main(['browse', datasetId, tableId]); - assert.equal(program.browseRows.calledOnce, true); - assert.deepEqual(program.browseRows.firstCall.args.slice(0, -1), [datasetId, tableId]); - }); - - it('should call deleteTable', function () { - var program = getSample().program; - program.deleteTable = sinon.stub(); - - program.main(['delete', datasetId, tableId]); - assert.equal(program.deleteTable.calledOnce, true); - assert.deepEqual(program.deleteTable.firstCall.args.slice(0, -1), [datasetId, tableId]); - }); - - it('should call importLocalFile', function () { - var program = getSample().program; - program.importLocalFile = sinon.stub(); - - program.main(['import', datasetId, tableId, fileName]); - assert.equal(program.importLocalFile.calledOnce, true); - assert.deepEqual(program.importLocalFile.firstCall.args.slice(0, -1), [datasetId, tableId, fileName]); - }); - - it('should call importFileFromGCS', function () { - var program = getSample().program; - program.importFileFromGCS = sinon.stub(); - - program.main(['import', datasetId, tableId, fileName, '-b', bucketName]); - assert.equal(program.importFileFromGCS.calledOnce, true); - assert.deepEqual(program.importFileFromGCS.firstCall.args.slice(0, -1), [datasetId, tableId, bucketName, fileName]); - }); - - it('should call copyTable', function () { - var program = getSample().program; - program.copyTable = sinon.stub(); - - program.main(['copy', srcDatasetId, srcTableId, destDatasetId, destTableId]); - assert.equal(program.copyTable.calledOnce, true); - assert.deepEqual(program.copyTable.firstCall.args.slice(0, -1), [srcDatasetId, srcTableId, destDatasetId, destTableId]); - }); - - it('should call exportTableToGCS', function () { - var program = getSample().program; - program.exportTableToGCS = sinon.stub(); - - program.main(['export', datasetId, tableId, bucketName, fileName]); - assert.equal(program.exportTableToGCS.calledOnce, true); - assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [datasetId, tableId, bucketName, fileName]); - }); - - it('should call insertRowsAsStream', function () { - var program = getSample().program; - program.insertRowsAsStream = sinon.stub(); - - program.main(['insert', datasetId, tableId, validJsonFile]); - - assert.equal(program.insertRowsAsStream.calledOnce, true); - assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]); - }); - - describe('insert', function () { - it('should accept valid JSON files', function () { - var program = getSample().program; - program.insertRowsAsStream = sinon.stub(); - - program.main(['insert', datasetId, tableId, validJsonFile]); - - assert.equal(program.insertRowsAsStream.calledOnce, true); - assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]); - }); - - it('should reject files with invalid JSON', function () { - var program = getSample().program; - program.insertRowsAsStream = sinon.stub(); - - assert.throws( - function () { 
program.main(['insert', datasetId, tableId, invalidJsonFile]); },
-        /"json_or_file" \(or the file it points to\) is not a valid JSON array\./
-      );
-      assert.equal(program.insertRowsAsStream.called, false);
-    });
-
-    it('should reject invalid file names', function () {
-      var program = getSample().program;
-      program.insertRowsAsStream = sinon.stub();
-
-      assert.throws(
-        function () { program.main(['insert', datasetId, tableId, '']); },
-        /"json_or_file" \(or the file it points to\) is not a valid JSON array\./
-      );
-      assert.equal(program.insertRowsAsStream.called, false);
-    });
-
-    it('should accept valid JSON strings', function () {
-      var program = getSample().program;
-      program.insertRowsAsStream = sinon.stub();
-
-      program.main(['insert', datasetId, tableId, validJsonString]);
-      assert.equal(program.insertRowsAsStream.calledOnce, true);
-      assert.deepEqual(program.insertRowsAsStream.firstCall.args.slice(0, -1), [datasetId, tableId, jsonArray]);
-    });
-
-    it('should reject invalid JSON strings', function () {
-      var program = getSample().program;
-      program.insertRowsAsStream = sinon.stub();
-
-      assert.throws(
-        function () { program.main(['insert', datasetId, tableId, invalidJsonString]); },
-        /"json_or_file" \(or the file it points to\) is not a valid JSON array\./
-      );
-      assert.equal(program.insertRowsAsStream.called, false);
-    });
-  });
+const program = require(`../tables`);
+
+describe(`bigquery:tables`, () => {
+  it(`should have expected exports`, () => {
+    assert.equal(typeof program.createTable, `function`);
+    assert.equal(typeof program.deleteTable, `function`);
+    assert.equal(typeof program.listTables, `function`);
+    assert.equal(typeof program.browseRows, `function`);
+    assert.equal(typeof program.importLocalFile, `function`);
+    assert.equal(typeof program.importFileFromGCS, `function`);
+    assert.equal(typeof program.exportTableToGCS, `function`);
+    assert.equal(typeof program.insertRowsAsStream, `function`);
+    assert.equal(typeof program.copyTable, `function`);
+  });
 });
diff --git a/package.json b/package.json
index c8efba421e..38b70502ba 100644
--- a/package.json
+++ b/package.json
@@ -6,6 +6,10 @@
   "license": "Apache Version 2.0",
   "author": "Google Inc.",
   "contributors": [
+    {
+      "name": "Ace Nassry",
+      "email": "anassri@google.com"
+    },
     {
       "name": "Jerjou",
       "email": "jerjou@google.com"
diff --git a/pubsub/system-test/subscriptions.test.js b/pubsub/system-test/subscriptions.test.js
index 3a6d9d215a..4f3850f62b 100644
--- a/pubsub/system-test/subscriptions.test.js
+++ b/pubsub/system-test/subscriptions.test.js
@@ -1,15 +1,17 @@
-// Copyright 2015-2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
+/**
+ * Copyright 2016, Google, Inc.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; diff --git a/pubsub/system-test/topics.test.js b/pubsub/system-test/topics.test.js index a2cd561b6c..92da7067dd 100644 --- a/pubsub/system-test/topics.test.js +++ b/pubsub/system-test/topics.test.js @@ -1,15 +1,17 @@ -// Copyright 2015-2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; diff --git a/pubsub/test/subscriptions.test.js b/pubsub/test/subscriptions.test.js index b675b27293..281e7f15d8 100644 --- a/pubsub/test/subscriptions.test.js +++ b/pubsub/test/subscriptions.test.js @@ -1,15 +1,17 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. +/** + * Copyright 2016, Google, Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ 'use strict'; diff --git a/pubsub/test/topics.test.js b/pubsub/test/topics.test.js index 22f6ab70dd..72539ff0a7 100644 --- a/pubsub/test/topics.test.js +++ b/pubsub/test/topics.test.js @@ -1,15 +1,17 @@ -// Copyright 2016, Google, Inc. 
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
+/**
+ * Copyright 2016, Google, Inc.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */

 'use strict';

diff --git a/pubsub/topics.js b/pubsub/topics.js
index b7dfd0687a..b6c1970235 100644
--- a/pubsub/topics.js
+++ b/pubsub/topics.js
@@ -27,7 +27,7 @@ const PubSub = require(`@google-cloud/pubsub`);

 // [START pubsub_list_topics]
 function listTopics (callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // Lists all topics in the current project
@@ -46,7 +46,7 @@ function listTopics (callback) {

 // [START pubsub_create_topic]
 function createTopic (topicName, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // Creates a new topic, e.g. "my-new-topic"
@@ -64,7 +64,7 @@ function createTopic (topicName, callback) {

 // [START pubsub_delete_topic]
 function deleteTopic (topicName, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"
@@ -85,7 +85,7 @@ function deleteTopic (topicName, callback) {

 // [START pubsub_publish_message]
 function publishMessage (topicName, data, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"
@@ -129,7 +129,7 @@ function setPublishCounterValue (value) {

 // [START pubsub_publish_ordered_message]
 function publishOrderedMessage (topicName, data, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"
@@ -171,7 +171,7 @@ function publishOrderedMessage (topicName, data, callback) {

 // [START pubsub_get_topic_policy]
 function getTopicPolicy (topicName, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"
@@ -192,7 +192,7 @@

 // [START pubsub_set_topic_policy]
 function setTopicPolicy (topicName, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"
@@ -229,7 +229,7 @@ function setTopicPolicy (topicName, callback) {

 // [START pubsub_test_topic_permissions]
 function testTopicPermissions (topicName, callback) {
-  // Instantiates the client library
+  // Instantiates a client
   const pubsubClient = PubSub();

   // References an existing topic, e.g. "my-topic"