diff --git a/bigquery/README.md b/bigquery/README.md
index 0534b78987..2882fe6f3d 100644
--- a/bigquery/README.md
+++ b/bigquery/README.md
@@ -12,9 +12,8 @@ analytics data warehouse.
 * [Setup](#setup)
 * [Samples](#samples)
   * [Create A Simple Application With the API](#create-a-simple-application-with-the-api)
-  * [Calculate size of dataset](#calculate-size-of-dataset)
-  * [Loading Data with a POST Request](#loading-data-with-a-post-request)
-  * [Loading Data from Cloud Storage](#loading-data-from-cloud-storage)
+  * [Datasets](#datasets)
+  * [Tables](#tables)

 ## Setup

@@ -39,46 +38,72 @@ __Run the sample:__

 [basics_docs]: https://cloud.google.com/bigquery/create-simple-app-api
 [basics_code]: getting_started.js

-### Calculate size of dataset
-
-View the [source code][size_code].
-
-__Run the sample:__
-
-Usage: `node dataset_size <projectId> <datasetId>`
-
-Example:
-
-    node dataset_size bigquery-public-data hacker_news
-
-[size_code]: dataset_size.js
-
-### Loading Data with a POST Request
-
-View the [documentation][file_docs] or the [source code][file_code].
-
-__Run the sample:__
-
-Usage: `node load_data_from_csv <path> <dataset> <table>`
-
-Example:
-
-    node load_data_from_csv resources/data.csv my-dataset my-table
-
-[file_docs]: https://cloud.google.com/bigquery/loading-data-post-request
-[file_code]: load_data_from_csv.js
-
-### Loading Data from Cloud Storage
-
-View the [documentation][gcs_docs] or the [source code][gcs_code].
-
-__Run the sample:__
-
-Usage: `node load_data_from_gcs <bucket> <file> <dataset> <table>`
-
-Example:
-
-    node load_data_from_gcs my-bucket data.csv my-dataset my-table
-
-[gcs_docs]: https://cloud.google.com/bigquery/docs/loading-data-cloud-storage
-[gcs_code]: load_data_from_gcs.js
+### Datasets
+
+View the [documentation][datasets_docs] or the [source code][datasets_code].
+
+__Usage:__ `node datasets --help`
+
+```
+Commands:
+  create <name>      Create a new dataset.
+  delete <datasetId> Delete the specified dataset.
+  list               List datasets in the authenticated project.
+  size <datasetId>   Calculate the size of the specified dataset.
+
+Options:
+  --projectId, -p  Optionally specify the project ID to use.  [string]
+  --help           Show help                                  [boolean]
+
+Examples:
+  node datasets create my_dataset                          Create a new dataset named "my_dataset".
+  node datasets delete my_dataset                          Delete "my_dataset".
+  node datasets list                                       List datasets.
+  node datasets list -p bigquery-public-data               List datasets in a project other than
+                                                           the authenticated project.
+  node datasets size my_dataset                            Calculate the size of "my_dataset".
+  node datasets size hacker_news -p bigquery-public-data   Calculate the size of
+                                                           "bigquery-public-data:hacker_news".
+
+For more information, see https://cloud.google.com/bigquery/docs
+```
+
+[datasets_docs]: https://cloud.google.com/bigquery/docs
+[datasets_code]: datasets.js
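+
+The CLI is a thin wrapper over the `@google-cloud/bigquery` client. As a minimal
+sketch (the dataset name below is a placeholder), the `create` command boils down to:
+
+```js
+var BigQuery = require('@google-cloud/bigquery');
+
+// Authentication and project selection come from the
+// GOOGLE_APPLICATION_CREDENTIALS and GCLOUD_PROJECT environment variables.
+var bigquery = BigQuery();
+
+// Create a dataset, then report its ID.
+bigquery.dataset('my_new_dataset').create(function (err, dataset) {
+  if (err) {
+    return console.error(err);
+  }
+  console.log('Created dataset: %s', dataset.id);
+});
+```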
+
+### Tables
+
+View the [documentation][tables_docs] or the [source code][tables_code].
+
+__Usage:__ `node tables --help`
+
+```
+Commands:
+  create <dataset> <table>                   Create a new table in the specified dataset.
+  list <dataset>                             List tables in the specified dataset.
+  delete <dataset> <table>                   Delete a table in the specified dataset.
+  import <dataset> <table> <file>            Import data from a local file or a Google Cloud Storage
+                                             file into BigQuery.
+  export <dataset> <table> <bucket> <file>   Export a table from BigQuery to Google Cloud Storage.
+
+Options:
+  --help  Show help  [boolean]
+
+Examples:
+  node tables create my_dataset my_table                      Create table "my_table" in "my_dataset".
+  node tables list my_dataset                                 List tables in "my_dataset".
+  node tables delete my_dataset my_table                      Delete "my_table" from "my_dataset".
+  node tables import my_dataset my_table ./data.csv           Import a local file into a table.
+  node tables import my_dataset my_table data.csv             Import a GCS file into a table.
+    --bucket my-bucket
+  node tables export my_dataset my_table my-bucket my-file    Export my_dataset:my_table to
+                                                              gs://my-bucket/my-file as raw CSV.
+  node tables export my_dataset my_table my-bucket my-file    Export my_dataset:my_table to
+    -f JSON --gzip                                            gs://my-bucket/my-file as gzipped JSON.
+
+For more information, see https://cloud.google.com/bigquery/docs
+```
+
+[tables_docs]: https://cloud.google.com/bigquery/docs
+[tables_code]: tables.js
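+
+The import and export commands reduce to the client library's `table.import` and
+`table.export` calls. A hedged sketch (dataset, table, and bucket names are
+placeholders):
+
+```js
+var BigQuery = require('@google-cloud/bigquery');
+var Storage = require('@google-cloud/storage');
+
+var table = BigQuery().dataset('my_dataset').table('my_table');
+
+// Import a local CSV file, then export the table back out to Cloud Storage.
+table.import('./resources/data.csv', { format: 'CSV' }, function (err, job) {
+  if (err) {
+    return console.error(err);
+  }
+  job.on('error', console.error).on('complete', function () {
+    var dest = Storage().bucket('my-bucket').file('my-file.csv');
+    table.export(dest, { format: 'CSV' }, function (err, job) {
+      if (err) {
+        return console.error(err);
+      }
+      job.on('error', console.error).on('complete', function () {
+        console.log('Export complete');
+      });
+    });
+  });
+});
+```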
"hacker_news" - * @param {Function} callback Callback function. - */ -function getSizeExample (projectId, datasetId, callback) { - if (!datasetId) { - return callback(new Error('datasetId is require!')); - } - - // Instantiate a bigquery client - var bigquery = BigQuery({ - projectId: projectId - }); - - // Fetch all tables in the dataset - getAllTablesExample(bigquery, datasetId, function (err, tables) { - if (err) { - return callback(err); - } - return async.parallel(tables.map(function (table) { - return function (cb) { - // Fetch more detailed info for each table - table.get(function (err, tableInfo) { - if (err) { - return cb(err); - } - // Return numBytes converted to Megabytes - var numBytes = tableInfo.metadata.numBytes; - return cb(null, (parseInt(numBytes, 10) / 1000) / 1000); - }); - }; - }), function (err, sizes) { - if (err) { - return callback(err); - } - var sum = sizes.reduce(function (cur, prev) { - return cur + prev; - }, 0); - return callback(null, sum); - }); - }); -} -// [END get_size] - -// Run the examples -exports.main = function (projectId, datasetId, cb) { - getSizeExample(projectId, datasetId, function (err, sum) { - if (err) { - return cb(err); - } - var size = 'MB'; - if (sum > 1000) { - sum = sum / 1000; - size = 'GB'; - } - if (sum > 1000) { - sum = sum / 1000; - size = 'TB'; - } - cb(null, '' + sum.toPrecision(5) + ' ' + size); - }); -}; - -if (module === require.main) { - var args = process.argv.slice(2); - if (args.length !== 2) { - throw new Error('Usage: node dataset_size.js '); - } - exports.main(args[0], args[1], console.log); -} diff --git a/bigquery/datasets.js b/bigquery/datasets.js new file mode 100644 index 0000000000..575061703b --- /dev/null +++ b/bigquery/datasets.js @@ -0,0 +1,196 @@ +// Copyright 2016, Google, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// [START all] +// [START setup] +// By default, the client will authenticate using the service account file +// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use +// the project specified by the GCLOUD_PROJECT environment variable. See +// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication +var BigQuery = require('@google-cloud/bigquery'); + +// Instantiate the bigquery client +var bigquery = BigQuery(); +// [END setup] + +// Control-flow helper library +var async = require('async'); + +// [START create_dataset] +/** + * List datasets in the authenticated project. + * + * @param {string} name The name for the new dataset. + * @param {function} callback The callback function. 
+ */
+function createDataset (name, callback) {
+  var dataset = bigquery.dataset(name);
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
+  dataset.create(function (err, dataset) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Created dataset: %s', name);
+    return callback(null, dataset);
+  });
+}
+// [END create_dataset]
+
+// [START delete_dataset]
+/**
+ * Deletes the specified dataset.
+ *
+ * @param {string} name The name of the dataset to delete.
+ * @param {function} callback The callback function.
+ */
+function deleteDataset (name, callback) {
+  var dataset = bigquery.dataset(name);
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
+  dataset.delete(function (err) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Deleted dataset: %s', name);
+    return callback(null);
+  });
+}
+// [END delete_dataset]
+
+// [START list_datasets]
+/**
+ * List datasets in the authenticated project.
+ *
+ * @param {string} projectId The project ID to use.
+ * @param {function} callback The callback function.
+ */
+function listDatasets (projectId, callback) {
+  // Instantiate a bigquery client
+  var bigquery = BigQuery({
+    projectId: projectId
+  });
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery
+  bigquery.getDatasets(function (err, datasets) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Found %d dataset(s)!', datasets.length);
+    return callback(null, datasets);
+  });
+}
+// [END list_datasets]
+
+// [START get_dataset_size]
+/**
+ * Calculate the size of the specified dataset.
+ *
+ * @param {string} datasetId The ID of the dataset.
+ * @param {string} projectId The project ID.
+ * @param {function} callback The callback function.
+ */
+function getDatasetSize (datasetId, projectId, callback) {
+  // Instantiate a bigquery client
+  var bigquery = BigQuery({
+    projectId: projectId
+  });
+  var dataset = bigquery.dataset(datasetId);
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/dataset
+  dataset.getTables(function (err, tables) {
+    if (err) {
+      return callback(err);
+    }
+
+    return async.map(tables, function (table, cb) {
+      // Fetch more detailed info for each table
+      table.get(function (err, tableInfo) {
+        if (err) {
+          return cb(err);
+        }
+        // Return numBytes converted to Megabytes
+        var numBytes = tableInfo.metadata.numBytes;
+        return cb(null, (parseInt(numBytes, 10) / 1000) / 1000);
+      });
+    }, function (err, sizes) {
+      if (err) {
+        return callback(err);
+      }
+      var sum = sizes.reduce(function (total, size) {
+        return total + size;
+      }, 0);
+
+      console.log('Size of %s: %d MB', datasetId, sum);
+      return callback(null, sum);
+    });
+  });
+}
+// [END get_dataset_size]
+// [END all]
+
+// The command-line program
+var cli = require('yargs');
+var makeHandler = require('../utils').makeHandler;
+
+var program = module.exports = {
+  createDataset: createDataset,
+  deleteDataset: deleteDataset,
+  listDatasets: listDatasets,
+  getDatasetSize: getDatasetSize,
+  main: function (args) {
+    // Run the command-line program
+    cli.help().strict().parse(args).argv;
+  }
+};
+
+cli
+  .demand(1)
+  .command('create <name>', 'Create a new dataset.', {}, function (options) {
+    program.createDataset(options.name, makeHandler());
+  })
+  .command('delete <datasetId>', 'Delete the specified dataset.', {}, function (options) {
+    program.deleteDataset(options.datasetId, makeHandler());
+  })
+  .command('list', 'List datasets in the authenticated project.', {}, function (options) {
+    program.listDatasets(options.projectId, makeHandler(true, 'id'));
+  })
+  .command('size <datasetId>', 'Calculate the size of the specified dataset.', {}, function (options) {
+    program.getDatasetSize(options.datasetId, options.projectId, makeHandler());
+  })
+  .option('projectId', {
+    alias: 'p',
+    requiresArg: true,
+    type: 'string',
+    default: process.env.GCLOUD_PROJECT,
+    description: 'Optionally specify the project ID to use.',
+    global: true
+  })
+  .example('node $0 create my_dataset', 'Create a new dataset named "my_dataset".')
+  .example('node $0 delete my_dataset', 'Delete "my_dataset".')
+  .example('node $0 list', 'List datasets.')
+  .example('node $0 list -p bigquery-public-data', 'List datasets in a project other than the authenticated project.')
+  .example('node $0 size my_dataset', 'Calculate the size of "my_dataset".')
+  .example('node $0 size hacker_news -p bigquery-public-data', 'Calculate the size of "bigquery-public-data:hacker_news".')
+  .wrap(100)
+  .recommendCommands()
+  .epilogue('For more information, see https://cloud.google.com/bigquery/docs');
+
+if (module === require.main) {
+  program.main(process.argv.slice(2));
+}
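Since the command handlers are also exported on `module.exports`, `datasets.js` can be driven from another script as well as from the shell. A small sketch, assuming a sibling file in the same directory:

```js
// Sketch: calling the exported sample functions from another script.
var datasets = require('./datasets');

// Report the size of a public dataset in (decimal) megabytes.
datasets.getDatasetSize('hacker_news', 'bigquery-public-data', function (err, sizeInMB) {
  if (err) {
    return console.error(err);
  }
  console.log('hacker_news is roughly %d MB', sizeInMB);
});
```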
diff --git a/bigquery/list_datasets_and_projects.js b/bigquery/list_datasets_and_projects.js
deleted file mode 100644
index e807e5ac6d..0000000000
--- a/bigquery/list_datasets_and_projects.js
+++ /dev/null
@@ -1,120 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-/**
- * Command-line application to list all projects and datasets in BigQuery.
- *
- * This sample is used on this page:
- *
- *   https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects
- */
-
-'use strict';
-
-// [START all]
-// [START auth]
-// By default, the client will authenticate using the service account file
-// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
-// the project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication
-var BigQuery = require('@google-cloud/bigquery');
-
-var Resource = require('@google-cloud/resource');
-
-// Instantiate a resource client
-var resource = Resource();
-// [END auth]
-
-// [START list_tables]
-/**
- * Retrieve all datasets for the specified project.
- *
- * @param {string} projectId The project to get datasets from.
- * @param {Function} callback Callback function.
- */
-function listDatasets (projectId, callback) {
-  if (!projectId) {
-    return callback(new Error('projectId is required!'));
-  }
-
-  // Instantiate a bigquery client
-  var bigquery = BigQuery({
-    projectId: projectId
-  });
-
-  bigquery.getDatasets(function (err, datasets) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Found %d datasets!', datasets.length);
-    return callback(null, datasets);
-  });
-}
-// [END list_tables]
-
-// [START list_projects]
-/**
- * Retrieve all projects a user has access to.
- *
- * @param {Function} callback Callback function.
- */
-function listProjects (callback) {
-  resource.getProjects(function (err, projects) {
-    if (err) {
-      return callback(err);
-    }
-
-    console.log('Found %d projects!', projects.length);
-    return callback(null, projects);
-  });
-}
-// [END list_projects]
-
-// [START usage]
-function printUsage () {
-  console.log('Usage: node list_datasets_and_projects [COMMAND] [ARGS...]');
-  console.log('\nCommands:\n');
-  console.log('\tlist-datasets PROJECT_ID');
-  console.log('\tlist-projects');
-}
-// [END usage]
-
-// The command-line program
-var program = {
-  // Print usage instructions
-  printUsage: printUsage,
-
-  // Exports
-  listDatasets: listDatasets,
-  listProjects: listProjects,
-
-  // Run the examples
-  main: function (args, cb) {
-    var command = args.shift();
-    if (command === 'list-datasets') {
-      this.listDatasets(args[0], cb);
-    } else if (command === 'list-projects') {
-      this.listProjects(cb);
-    } else {
-      this.printUsage();
-    }
-  }
-};
-
-if (module === require.main) {
-  program.main(process.argv.slice(2), console.log);
-}
-// [END all]
-
-module.exports = program;
diff --git a/bigquery/load_data_from_csv.js b/bigquery/load_data_from_csv.js
deleted file mode 100644
index 9328aa50ff..0000000000
--- a/bigquery/load_data_from_csv.js
+++ /dev/null
@@ -1,131 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// [START complete]
-'use strict';
-
-var fs = require('fs');
-var path = require('path');
-
-// By default, the client will authenticate using the service account file
-// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
-// the project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication
-var BigQuery = require('@google-cloud/bigquery');
-
-// Instantiate a bigquery client
-var bigquery = BigQuery();
-
-/**
- * Wait for the provided job to complete.
- *
- * @param {Object} job The job to watch.
- * @param {number} timeout Maximum time to wait (milliseconds).
- * @param {Function} Callback function.
- */
-function pollJobUntilDone (job, timeout, timeWaited, callback) {
-  job.getMetadata(function (err, metadata) {
-    if (err) {
-      return callback(err);
-    }
-    if (timeWaited > timeout) {
-      return callback(new Error('Timed out waiting for job to complete'));
-    }
-    if (metadata.status && (metadata.status.state === 'RUNNING' ||
-      metadata.status.state === 'PENDING')) {
-      setTimeout(function () {
-        console.log('working...');
-        pollJobUntilDone(job, timeout, timeWaited + 5000, callback);
-      }, 5000);
-    } else {
-      callback(null, metadata);
-    }
-  });
-}
-
-/**
- * Load a csv file into a BigQuery table.
- *
- * @param {string} pathToCsvFile Path to csv file to load.
- * @param {string} datasetId The dataset.
- * @param {string} tableName The table.
- * @param {Function} callback Callback function.
- */
-function loadDataFromCsvExample (pathToCsvFile, datasetId, tableName, callback) {
-  if (!pathToCsvFile || typeof pathToCsvFile !== 'string') {
-    return callback(new Error('pathToCsvFile is required!'));
-  }
-  if (!datasetId || typeof datasetId !== 'string') {
-    return callback(new Error('datasetId is require!'));
-  }
-  if (!tableName || typeof tableName !== 'string') {
-    return callback(new Error('tableName is require!'));
-  }
-
-  var dataset = bigquery.dataset(datasetId);
-  var table = dataset.table(tableName);
-
-  var options = {
-    skipLeadingRows: 0
-  };
-
-  fs.createReadStream(pathToCsvFile)
-    .pipe(table.createWriteStream(options))
-    .on('complete', function (job) {
-      // Wait up to 60 seconds for job to complete
-      pollJobUntilDone(job, 60000, 0, function (err, metadata) {
-        if (err) {
-          return callback(err);
-        }
-        console.log('job completed', metadata);
-        callback(null, metadata);
-      });
-    });
-}
-// [END complete]
-
-exports.createTable = function (datasetId, tableName, callback) {
-  var dataset = bigquery.dataset(datasetId);
-  var pathToSchemaFile = path.join(__dirname, '/resources/schema.json');
-  fs.readFile(pathToSchemaFile, { encoding: 'utf8' }, function (err, file) {
-    if (err) {
-      return callback(err);
-    }
-    var schema = JSON.parse(file);
-    var columns = schema.map(function (column) {
-      return column.name + ':' + column.type;
-    });
-    dataset.createTable(tableName, { schema: columns.join(',') }, callback);
-  });
-};
-
-exports.deleteTable = function (datasetId, tableName, callback) {
-  var dataset = bigquery.dataset(datasetId);
-  var table = dataset.table(tableName);
-  table.delete(callback);
-};
-
-// Run the examples
-exports.main = function (pathToCsvFile, datasetId, tableName, cb) {
-  loadDataFromCsvExample(pathToCsvFile, datasetId, tableName, cb);
-};
-
-if (module === require.main) {
-  var args = process.argv.slice(2);
-  exports.main(
-    args[0],
-    args[1],
-    args[2],
-    console.log
-  );
-}
diff --git a/bigquery/load_data_from_gcs.js b/bigquery/load_data_from_gcs.js
deleted file mode 100644
index 09ba2d224d..0000000000
--- a/bigquery/load_data_from_gcs.js
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// [START complete]
-'use strict';
-
-var request = require('request');
-
-// By default, the client will authenticate using the service account file
-// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use
-// the project specified by the GCLOUD_PROJECT environment variable. See
-// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication
-var BigQuery = require('@google-cloud/bigquery');
-
-// Instantiate a bigquery client
-var bigquery = BigQuery();
-
-/**
- * Wait for the provided job to complete.
- *
- * @param {Object} job The job to watch.
- * @param {number} timeout Maximum time to wait (milliseconds).
- * @param {Function} Callback function.
- */
-function pollJobUntilDone (job, timeout, timeWaited, callback) {
-  job.getMetadata(function (err, metadata) {
-    if (err) {
-      return callback(err);
-    }
-    if (timeWaited > timeout) {
-      return callback(new Error('Timed out waiting for job to complete'));
-    }
-    if (metadata.status && (metadata.status.state === 'RUNNING' ||
-      metadata.status.state === 'PENDING')) {
-      setTimeout(function () {
-        console.log('working...');
-        pollJobUntilDone(job, timeout, timeWaited + 5000, callback);
-      }, 5000);
-    } else {
-      callback(null, metadata);
-    }
-  });
-}
-
-/**
- * Load a csv file from a Google Cloud Storage bucket into a BigQuery table.
- *
- * @param {string} bucket Cloud Storage bucket.
- * @param {string} file Csv file to load.
- * @param {string} datasetId The dataset.
- * @param {string} tableName The table.
- * @param {Function} callback Callback function.
- */
-function loadDataFromCsvExample (bucket, file, datasetId, tableName, callback) {
-  if (!bucket || typeof bucket !== 'string') {
-    throw new Error('bucket is required!');
-  }
-  if (!file || typeof file !== 'string') {
-    throw new Error('file is required!');
-  }
-  if (!datasetId || typeof datasetId !== 'string') {
-    throw new Error('datasetId is required!');
-  }
-  if (!tableName || typeof tableName !== 'string') {
-    throw new Error('tableName is required!');
-  }
-
-  var dataset = bigquery.dataset(datasetId);
-  var table = dataset.table(tableName);
-
-  var options = {
-    skipLeadingRows: 0
-  };
-
-  var url = 'https://storage.googleapis.com/' + bucket + '/' + file;
-
-  request.get(url)
-    .pipe(table.createWriteStream(options))
-    .on('complete', function (job) {
-      // Wait up to 60 seconds for job to complete
-      pollJobUntilDone(job, 60000, 0, function (err, metadata) {
-        if (err) {
-          return callback(err);
-        }
-        console.log('job completed', metadata);
-        callback(null, metadata);
-      });
-    });
-}
-// [END complete]
-
-// Run the examples
-exports.main = function (bucket, file, datasetId, tableName, cb) {
-  loadDataFromCsvExample(bucket, file, datasetId, tableName, cb);
-};
-
-if (module === require.main) {
-  var args = process.argv.slice(2);
-  exports.main(
-    args[0],
-    args[1],
-    args[2],
-    args[3],
-    console.log
-  );
-}
diff --git a/bigquery/package.json b/bigquery/package.json
index 07c3033af1..194b89d5b7 100644
--- a/bigquery/package.json
+++ b/bigquery/package.json
@@ -10,13 +10,13 @@
   },
   "dependencies": {
     "@google-cloud/bigquery": "^0.1.1",
-    "@google-cloud/resource": "^0.1.1",
     "@google-cloud/storage": "^0.1.1",
     "async": "^2.0.1",
-    "request": "^2.72.0",
+    "request": "^2.74.0",
    "yargs": "^5.0.0"
   },
   "devDependencies": {
-    "mocha": "^3.0.2"
+    "mocha": "^3.0.2",
+    "node-uuid": "^1.4.7"
   }
 }
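The new `node-uuid` devDependency backs the system tests below, which need dataset IDs that are unique per run and legal in BigQuery (letters, digits, and underscores only). The pattern, in isolation:

```js
// Generate a unique, BigQuery-safe dataset ID; hyphens in the UUID are
// replaced because BigQuery IDs only allow letters, digits, and underscores.
var uuid = require('node-uuid');

function uniqueDatasetId (prefix) {
  return (prefix + '_' + uuid.v4()).replace(/-/g, '_');
}

console.log(uniqueDatasetId('test')); // e.g. test_9f1c2d3e_...
```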
diff --git a/bigquery/system-test/datasets.test.js b/bigquery/system-test/datasets.test.js
new file mode 100644
index 0000000000..0045ae35d0
--- /dev/null
+++ b/bigquery/system-test/datasets.test.js
@@ -0,0 +1,85 @@
+// Copyright 2016, Google, Inc.
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict';
+
+var BigQuery = require('@google-cloud/bigquery');
+var uuid = require('node-uuid');
+var program = require('../datasets');
+
+var bigquery = BigQuery();
+var projectId = process.env.GCLOUD_PROJECT;
+var datasetId = 'nodejs-docs-samples-test-' + uuid.v4();
+
+// BigQuery only accepts underscores, so replace the UUID's hyphens
+datasetId = datasetId.replace(/-/gi, '_');
+
+describe('bigquery:datasets', function () {
+  after(function (done) {
+    bigquery.dataset(datasetId).delete({
+      force: true
+    }, function () {
+      // Ignore any error; the dataset might already have been successfully deleted
+      done();
+    });
+  });
+
+  describe('createDataset', function () {
+    it('should create a new dataset', function (done) {
+      program.createDataset(datasetId, function (err, dataset) {
+        assert.ifError(err);
+        assert(dataset, 'new dataset was created');
+        assert.equal(dataset.id, datasetId);
+        assert(console.log.calledWith('Created dataset: %s', datasetId));
+        done();
+      });
+    });
+  });
+
+  describe('listDatasets', function () {
+    it('should list datasets', function (done) {
+      program.listDatasets(projectId, function (err, datasets) {
+        assert.ifError(err);
+        assert(Array.isArray(datasets));
+        assert(datasets.length > 0);
+        assert(datasets[0].id);
+        var matchingDatasets = datasets.filter(function (dataset) {
+          return dataset.id === datasetId;
+        });
+        assert.equal(matchingDatasets.length, 1, 'newly created dataset is in list');
+        assert(console.log.calledWith('Found %d dataset(s)!', datasets.length));
+        done();
+      });
+    });
+  });
+
+  describe('getDatasetSize', function () {
+    it('should return the size of a dataset', function (done) {
+      program.getDatasetSize(datasetId, projectId, function (err, size) {
+        assert.ifError(err);
+        assert.equal(typeof size, 'number', 'should have received a number');
+        done();
+      });
+    });
+  });
+
+  describe('deleteDataset', function () {
+    it('should delete a dataset', function (done) {
+      program.deleteDataset(datasetId, function (err) {
+        assert.ifError(err);
+        assert(console.log.calledWith('Deleted dataset: %s', datasetId));
+        done();
+      });
+    });
+  });
+});
diff --git a/bigquery/system-test/list_datasets_and_projects.test.js b/bigquery/system-test/list_datasets_and_projects.test.js
deleted file mode 100644
index 699281a5f9..0000000000
--- a/bigquery/system-test/list_datasets_and_projects.test.js
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict';
-
-var example = require('../list_datasets_and_projects');
-var projectId = process.env.GCLOUD_PROJECT || 'nodejs-docs-samples';
-
-describe('bigquery:list_datasets_and_projects', function () {
-  describe('listDatasets', function () {
-    it('should list datasets', function (done) {
-      example.listDatasets(projectId, function (err, datasets) {
-        assert.ifError(err);
-        assert(Array.isArray(datasets));
-        assert(datasets.length > 0);
-        assert(datasets[0].id);
-        assert(console.log.calledWith('Found %d datasets!', datasets.length));
-        done();
-      });
-    });
-  });
-  describe('listProjects', function () {
-    it('should list projects', function (done) {
-      example.listProjects(function (err, projects) {
-        assert.ifError(err);
-        assert(Array.isArray(projects));
-        assert(projects.length > 0);
-        assert(projects[0].id);
-        assert(console.log.calledWith('Found %d projects!', projects.length));
-        done();
-      });
-    });
-  });
-});
diff --git a/bigquery/system-test/load_data_from_csv.test.js b/bigquery/system-test/load_data_from_csv.test.js
deleted file mode 100644
index b30906582c..0000000000
--- a/bigquery/system-test/load_data_from_csv.test.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict';
-
-var async = require('async');
-var path = require('path');
-var loadDataFromCsvExample = require('../load_data_from_csv');
-var pathToCsvFile = path.join(__dirname, '/../resources/data.csv');
-var datasetId = 'nodejs_docs_samples';
-var tableName = 'test_' + new Date().getTime() + '_' +
-  Math.floor(Math.random() * 10000);
-
-describe('bigquery:load_data_from_csv', function () {
-  it('should load data from a csv file', function (done) {
-    async.series([
-      function (cb) {
-        loadDataFromCsvExample.createTable(datasetId, tableName, cb);
-      },
-      function (cb) {
-        loadDataFromCsvExample.main(pathToCsvFile, datasetId, tableName, cb);
-      },
-      function (cb) {
-        loadDataFromCsvExample.deleteTable(datasetId, tableName, cb);
-      }
-    ], function (err, results) {
-      if (err) {
-        loadDataFromCsvExample.deleteTable(datasetId, tableName, function () {
-          done(err);
-        });
-      } else {
-        assert.ifError(err);
-        // metadata
-        assert.equal(results[1].status.state, 'DONE');
-        done();
-      }
-    });
-  });
-});
diff --git a/bigquery/system-test/load_data_from_gcs.test.js b/bigquery/system-test/load_data_from_gcs.test.js
deleted file mode 100644
index ac6b0416db..0000000000
--- a/bigquery/system-test/load_data_from_gcs.test.js
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict';
-
-var async = require('async');
-var loadDataFromCsvExample = require('../load_data_from_csv');
-var loadDataFromGcsExample = require('../load_data_from_gcs');
-var bucket = process.env.TEST_BUCKET_NAME || 'nodejs-docs-samples';
-var file = 'data.csv';
-var datasetId = 'nodejs_docs_samples';
-var tableName = 'test_' + new Date().getTime() + '_' +
-  Math.floor(Math.random() * 10000);
-
-describe('bigquery:load_data_from_gcs', function () {
-  it('should load data from a csv file in a GCS bucket', function (done) {
-    async.series([
-      function (cb) {
-        loadDataFromCsvExample.createTable(datasetId, tableName, cb);
-      },
-      function (cb) {
-        loadDataFromGcsExample.main(bucket, file, datasetId, tableName, cb);
-      },
-      function (cb) {
-        loadDataFromCsvExample.deleteTable(datasetId, tableName, cb);
-      }
-    ], function (err, results) {
-      if (err) {
-        loadDataFromCsvExample.deleteTable(datasetId, tableName, function () {
-          done(err);
-        });
-      } else {
-        assert.ifError(err);
-        // metadata
-        assert.equal(results[1].status.state, 'DONE');
-        done();
-      }
-    });
-  });
-});
diff --git a/bigquery/system-test/tables.test.js b/bigquery/system-test/tables.test.js
index 17a089e321..f26bb44f79 100644
--- a/bigquery/system-test/tables.test.js
+++ b/bigquery/system-test/tables.test.js
@@ -13,19 +13,26 @@

 'use strict';

+var bigquery = require('@google-cloud/bigquery')();
+var storage = require('@google-cloud/storage')();
 var uuid = require('node-uuid');
-var generateUuid = function () {
+var program = require('../tables');
+var path = require('path');
+
+function generateUuid () {
   return 'nodejs_docs_samples_' + uuid.v4().replace(/-/gi, '_');
-};
-var example = require('../tables');
+}
+
 var options = {
+  projectId: process.env.GCLOUD_PROJECT,
+  localFilePath: path.join(__dirname, '../resources/data.csv'),
   bucket: generateUuid(),
   file: 'data.json',
   dataset: generateUuid(),
-  table: generateUuid()
+  table: generateUuid(),
+  schema: 'Name:string, Age:integer, Weight:float, IsMagic:boolean'
 };
-var bigquery = require('@google-cloud/bigquery')();
-var storage = require('@google-cloud/storage')();
+
 var file = storage.bucket(options.bucket).file(options.file);

 describe('bigquery:tables', function () {
@@ -34,22 +41,18 @@
     storage.createBucket(options.bucket, function (err, bucket) {
       assert.ifError(err, 'bucket creation succeeded');

-      // Create dataset
-      bigquery.createDataset(options.dataset, function (err, dataset) {
-        assert.ifError(err, 'dataset creation succeeded');
+      bucket.upload(options.localFilePath, function (err) {
+        assert.ifError(err, 'file upload succeeded');

-        // Create table
-        dataset.createTable(
-          options.table,
-          { schema: 'name:string, age:integer' },
-          function (err, table) {
-            assert.ifError(err, 'table creation succeeded');
-            done();
-          }
-        );
+        // Create dataset
+        bigquery.createDataset(options.dataset, function (err, dataset) {
+          assert.ifError(err, 'dataset creation succeeded');
+          done();
+        });
       });
     });
   });
+
   after(function (done) {
     // Delete testing dataset/table
     bigquery.dataset(options.dataset).delete({ force: true }, function () {
@@ -64,9 +67,56 @@
     });
   });

-  describe('export_table_to_gcs', function () {
+  describe('createTable', function () {
+    it('should create a new table', function (done) {
+      program.createTable(options, function (err, table) {
+        assert.ifError(err);
+        assert(table, 'new table was created');
+        assert.equal(table.id, options.table);
+        assert(console.log.calledWith('Created table: %s', options.table));
+        done();
+      });
+    });
+  });
+
+  describe('listTables', function () {
+    it('should list tables', function (done) {
+      program.listTables(options, function (err, tables) {
+        assert.ifError(err);
+        assert(Array.isArray(tables));
+        assert(tables.length > 0);
+        assert(tables[0].id);
+        var matchingTables = tables.filter(function (table) {
+          return table.id === options.table;
+        });
+        assert.equal(matchingTables.length, 1, 'newly created table is in list');
+        assert(console.log.calledWith('Found %d table(s)!', tables.length));
+        done();
+      });
+    });
+  });
+
+  describe('import', function () {
+    it('should import local file', function (done) {
+      program.importFile({
+        file: options.localFilePath,
+        projectId: options.projectId,
+        dataset: options.dataset,
+        table: options.table
+      }, function (err, metadata) {
+        assert.ifError(err);
+        assert(metadata, 'got metadata');
+        assert.deepEqual(metadata.status, {
+          state: 'DONE'
+        }, 'job completed');
+        done();
+      });
+    });
+  });
+
+  describe('exportTableToGCS', function () {
     it('should export data to GCS', function (done) {
-      example.exportTableToGCS(options, function (err, metadata) {
+      program.exportTableToGCS(options, function (err, metadata) {
         assert.ifError(err, 'no error occurred');
         assert(metadata, 'job metadata was received');
         assert(metadata.status, 'job metadata has status');
@@ -80,4 +130,14 @@
       });
     });
   });
+
+  describe('deleteTable', function () {
+    it('should delete a table', function (done) {
+      program.deleteTable(options, function (err) {
+        assert.ifError(err);
+        assert(console.log.calledWith('Deleted table: %s', options.table));
+        done();
+      });
+    });
+  });
 });
diff --git a/bigquery/tables.js b/bigquery/tables.js
index 7a574c7753..1c1cddcc0d 100644
--- a/bigquery/tables.js
+++ b/bigquery/tables.js
@@ -11,19 +11,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-// [START complete]
-/**
- * Command-line application to export a table from BigQuery to Google Cloud Storage.
- *
- * This sample is used on this page:
- *
- *   https://cloud.google.com/bigquery/exporting-data-from-bigquery
- *
- * For more information, see the README.md under /bigquery.
- */
-
 'use strict';

-// [START auth]
+// [START all]
+// [START setup]
 // By default, gcloud will authenticate using the service account file specified
 // by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use the
 // project specified by the GCLOUD_PROJECT environment variable. See
@@ -34,7 +25,126 @@ var Storage = require('@google-cloud/storage');

 // Instantiate the BigQuery and Storage clients
 var bigquery = BigQuery();
 var storage = Storage();
-// [END auth]
+// [END setup]
+
+// [START create_table]
+/**
+ * Creates a new table with the given name in the specified dataset.
+ *
+ * @param {object} options Configuration options.
+ * @param {string} options.dataset The dataset of the new table.
+ * @param {string} options.table The name for the new table.
+ * @param {string|object} [options.schema] The schema for the new table.
+ * @param {function} callback The callback function.
+ */
+function createTable (options, callback) {
+  var dataset = bigquery.dataset(options.dataset);
+  var config = {};
+  if (options.schema) {
+    config.schema = options.schema;
+  }
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table
+  dataset.createTable(options.table, config, function (err, table) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Created table: %s', options.table);
+    return callback(null, table);
+  });
+}
+// [END create_table]
+
+// [START list_tables]
+/**
+ * List tables in the specified dataset.
+ *
+ * @param {object} options Configuration options.
+ * @param {string} options.dataset The dataset whose tables should be listed.
+ * @param {function} callback The callback function.
+ */
+function listTables (options, callback) {
+  var dataset = bigquery.dataset(options.dataset);
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/dataset
+  dataset.getTables(function (err, tables) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Found %d table(s)!', tables.length);
+    return callback(null, tables);
+  });
+}
+// [END list_tables]
+
+// [START delete_table]
+/**
+ * Deletes a table from the specified dataset.
+ *
+ * @param {object} options Configuration options.
+ * @param {string} options.dataset The dataset of the table.
+ * @param {string} options.table The name of the table to delete.
+ * @param {function} callback The callback function.
+ */
+function deleteTable (options, callback) {
+  var table = bigquery.dataset(options.dataset).table(options.table);
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table
+  table.delete(function (err) {
+    if (err) {
+      return callback(err);
+    }
+
+    console.log('Deleted table: %s', options.table);
+    return callback(null);
+  });
+}
+// [END delete_table]
+
+// [START import_file]
+/**
+ * Imports data from a local file or a Google Cloud Storage file into a
+ * BigQuery table.
+ *
+ * @param {object} options Configuration options.
+ * @param {string} options.file Path of the file to import.
+ * @param {string} [options.bucket] Cloud Storage bucket; omit for local files.
+ * @param {string} options.dataset The dataset of the table.
+ * @param {string} options.table The table to import into.
+ * @param {string} [options.format] The format of the file to be imported.
+ * @param {function} callback The callback function.
+ */
+function importFile (options, callback) {
+  var file;
+  if (options.bucket) {
+    // File is in Google Cloud Storage, e.g. gs://my-bucket/file.csv
+    file = storage.bucket(options.bucket).file(options.file);
+  } else {
+    // File is local, e.g. ./data/file.csv
+    file = options.file;
+  }
+  var table = bigquery.dataset(options.dataset).table(options.table);
+  var config = {
+    format: options.format
+  };
+
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table?method=import
+  table.import(file, config, function (err, job) {
+    if (err) {
+      console.log(err.stack);
+      return callback(err);
+    }
+
+    console.log('Started job: %s', job.id);
+    job
+      .on('error', callback)
+      .on('complete', function (metadata) {
+        console.log('Completed job: %s', job.id);
+        return callback(null, metadata);
+      });
+  });
+}
+// [END import_file]

 // [START export_table_to_gcs]
 /**
@@ -58,28 +168,33 @@ function exportTableToGCS (options, callback) {
   };

   // Export table
-  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/bigquery/table?method=export
+  // See https://googlecloudplatform.github.io/gcloud-node/#/docs/bigquery/latest/bigquery/table?method=export
   table.export(gcsFileObj, config, function (err, job) {
     if (err) {
       return callback(err);
     }

-    console.log('ExportTableToGCS: submitted job %s!', job.id);
+    console.log('Started job: %s', job.id);

-    job.on('error', function (err) {
-      return callback(err);
-    });
-    job.on('complete', function (job) {
-      return callback(null, job);
-    });
+    job
+      .on('error', callback)
+      .on('complete', function (metadata) {
+        console.log('Completed job: %s', job.id);
+        return callback(null, metadata);
+      });
   });
 }
 // [END export_table_to_gcs]
-// [END complete]
+// [END all]

 // The command-line program
 var cli = require('yargs');
+var utils = require('../utils');

 var program = module.exports = {
+  createTable: createTable,
+  listTables: listTables,
+  deleteTable: deleteTable,
+  importFile: importFile,
   exportTableToGCS: exportTableToGCS,
   main: function (args) {
     // Run the command-line program
@@ -88,29 +203,73 @@ var program = module.exports = {
 };

 cli
-  .command('export <bucket> <file> <dataset> <table>', 'Export a table from BigQuery to Google Cloud Storage.', {
+  .demand(1)
+  .command('create <dataset> <table>', 'Create a new table in the specified dataset.', {}, function (options) {
+    program.createTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler());
+  })
+  .command('list <dataset>', 'List tables in the specified dataset.', {}, function (options) {
+    program.listTables(utils.pick(options, ['dataset']), utils.makeHandler(true, 'id'));
+  })
+  .command('delete <dataset> <table>', 'Delete a table in the specified dataset.', {}, function (options) {
+    program.deleteTable(utils.pick(options, ['dataset', 'table']), utils.makeHandler());
+  })
+  .command('import <dataset> <table> <file>', 'Import data from a local file or a Google Cloud Storage file into BigQuery.', {
+    bucket: {
+      alias: 'b',
+      requiresArg: true,
+      description: 'Specify Cloud Storage bucket.',
+      type: 'string'
+    },
+    format: {
+      alias: 'f',
+      requiresArg: true,
+      type: 'string',
+      choices: ['JSON', 'CSV', 'AVRO']
+    }
+  }, function (options) {
+    program.importFile(utils.pick(options, ['dataset', 'table', 'file', 'format', 'bucket']), utils.makeHandler());
+  })
+  .command('export <dataset> <table> <bucket> <file>', 'Export a table from BigQuery to Google Cloud Storage.', {
     format: {
       alias: 'f',
-      global: true,
       requiresArg: true,
       type: 'string',
       choices: ['JSON', 'CSV', 'AVRO']
     },
     gzip: {
-      global: true,
       type: 'boolean',
       description: 'Whether to compress the exported table using gzip. Defaults to false.'
     }
   }, function (options) {
-    program.exportTableToGCS(options, console.log);
+    program.exportTableToGCS(utils.pick(options, ['dataset', 'table', 'bucket', 'file', 'format', 'gzip']), utils.makeHandler());
   })
   .example(
-    'node $0 export my-bucket my-file my-dataset my-table',
-    'Export my-dataset:my-table to gcs://my-bucket/my-file as raw JSON'
+    'node $0 create my_dataset my_table',
+    'Create table "my_table" in "my_dataset".'
+  )
+  .example(
+    'node $0 list my_dataset',
+    'List tables in "my_dataset".'
+  )
+  .example(
+    'node $0 delete my_dataset my_table',
+    'Delete "my_table" from "my_dataset".'
+  )
+  .example(
+    'node $0 import my_dataset my_table ./data.csv',
+    'Import a local file into a table.'
+  )
+  .example(
+    'node $0 import my_dataset my_table data.csv --bucket my-bucket',
+    'Import a GCS file into a table.'
+  )
+  .example(
+    'node $0 export my_dataset my_table my-bucket my-file',
+    'Export my_dataset:my_table to gs://my-bucket/my-file as raw CSV'
   )
   .example(
-    'node $0 export my-bucket my-file my-dataset my-table -f CSV --gzip',
-    'Export my-dataset:my-table to gcs://my-bucket/my-file as gzipped CSV'
+    'node $0 export my_dataset my_table my-bucket my-file -f JSON --gzip',
+    'Export my_dataset:my_table to gs://my-bucket/my-file as gzipped JSON'
   )
   .wrap(100)
   .recommendCommands()
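Both `tables.js` and `datasets.js` lean on a shared `../utils` module that is not part of this diff. Its shape, as inferred from the call sites here, is roughly the following; the real implementation lives elsewhere in the repository and may differ:

```js
// Hedged sketch of the ../utils helpers, reconstructed from how they are
// called in tables.js and datasets.js.
exports.pick = function (obj, keys) {
  // Copy only the named keys into a fresh options object.
  var result = {};
  keys.forEach(function (key) {
    result[key] = obj[key];
  });
  return result;
};

// makeHandler() appears to return a Node-style callback that logs results;
// makeHandler(true, 'id') logs the `id` field of each item in an array result.
exports.makeHandler = function (printAll, field) {
  return function (err, result) {
    if (err) {
      return console.error(err);
    }
    if (printAll !== false && Array.isArray(result)) {
      result.forEach(function (item) {
        console.log(field ? item[field] : item);
      });
    }
  };
};
```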
+ +'use strict'; + +var proxyquire = require('proxyquire').noCallThru(); +var datasetId = 'foo'; +var projectId = process.env.GCLOUD_PROJECT; + +function getSample () { + var tableMock = { + get: sinon.stub(), + metadata: { + numBytes: 1000000 + } + }; + tableMock.get.yields(null, tableMock); + var tablesMock = [tableMock]; + var datasetsMock = [{ id: datasetId }]; + var datasetMock = { + getTables: sinon.stub().yields(null, tablesMock), + create: sinon.stub().yields(null, datasetsMock[0]), + delete: sinon.stub().yields(null) + }; + var bigqueryMock = { + getDatasets: sinon.stub().yields(null, datasetsMock), + dataset: sinon.stub().returns(datasetMock) + }; + var BigQueryMock = sinon.stub().returns(bigqueryMock); + + return { + program: proxyquire('../datasets', { + '@google-cloud/bigquery': BigQueryMock + }), + mocks: { + BigQuery: BigQueryMock, + bigquery: bigqueryMock, + datasets: datasetsMock, + dataset: datasetMock, + tables: tablesMock, + table: tableMock + } + }; +} + +describe('bigquery:datasets', function () { + describe('createDataset', function () { + it('should create a dataset', function () { + var sample = getSample(); + var callback = sinon.stub(); + + sample.program.createDataset(datasetId, callback); + + assert.equal(sample.mocks.dataset.create.calledOnce, true); + assert.deepEqual(sample.mocks.dataset.create.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets[0]]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Created dataset: %s', datasetId]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.dataset.create.yields(error); + + sample.program.createDataset(datasetId, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('deleteDataset', function () { + it('should delete a dataset', function () { + var sample = getSample(); + var callback = sinon.stub(); + + sample.program.deleteDataset(datasetId, callback); + + assert.equal(sample.mocks.dataset.delete.calledOnce, true); + assert.deepEqual(sample.mocks.dataset.delete.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Deleted dataset: %s', datasetId]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.dataset.delete.yields(error); + + sample.program.deleteDataset(datasetId, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('listDatasets', function () { + it('should list datasets', function () { + var sample = getSample(); + var callback = sinon.stub(); + + sample.program.listDatasets(projectId, callback); + + assert.equal(sample.mocks.bigquery.getDatasets.calledOnce, true); + assert.deepEqual(sample.mocks.bigquery.getDatasets.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.datasets]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Found %d dataset(s)!', sample.mocks.datasets.length]); + }); + + 
+    it('should handle error', function () {
+      var error = new Error('error');
+      var sample = getSample();
+      var callback = sinon.stub();
+      sample.mocks.bigquery.getDatasets.yields(error);
+
+      sample.program.listDatasets(projectId, callback);
+
+      assert.equal(callback.calledOnce, true);
+      assert.deepEqual(callback.firstCall.args, [error]);
+    });
+  });
+
+  describe('getDatasetSize', function () {
+    it('should calculate size of a dataset', function () {
+      var sample = getSample();
+      var callback = sinon.stub();
+
+      sample.program.getDatasetSize(datasetId, projectId, callback);
+
+      assert.equal(sample.mocks.dataset.getTables.calledOnce, true);
+      assert.deepEqual(sample.mocks.dataset.getTables.firstCall.args.slice(0, -1), []);
+      assert.equal(callback.calledOnce, true);
+      assert.deepEqual(callback.firstCall.args, [null, 1]);
+      assert.equal(console.log.calledOnce, true);
+      assert.deepEqual(console.log.firstCall.args, ['Size of %s: %d MB', datasetId, 1]);
+    });
+
+    it('should handle dataset.getTables error', function () {
+      var error = new Error('error');
+      var sample = getSample();
+      var callback = sinon.stub();
+      sample.mocks.dataset.getTables.yields(error);
+
+      sample.program.getDatasetSize(datasetId, projectId, callback);
+
+      assert.equal(callback.calledOnce, true);
+      assert.deepEqual(callback.firstCall.args, [error]);
+    });
+
+    it('should handle table.get error', function () {
+      var error = new Error('error');
+      var sample = getSample();
+      var callback = sinon.stub();
+      sample.mocks.table.get.yields(error);
+
+      sample.program.getDatasetSize(datasetId, projectId, callback);
+
+      assert.equal(callback.calledOnce, true);
+      assert.deepEqual(callback.firstCall.args, [error]);
+    });
+  });
+
+  describe('main', function () {
+    it('should call createDataset', function () {
+      var program = getSample().program;
+
+      sinon.stub(program, 'createDataset');
+      program.main(['create', datasetId]);
+      assert.equal(program.createDataset.calledOnce, true);
+      assert.deepEqual(program.createDataset.firstCall.args.slice(0, -1), [datasetId]);
+    });
+
+    it('should call deleteDataset', function () {
+      var program = getSample().program;
+
+      sinon.stub(program, 'deleteDataset');
+      program.main(['delete', datasetId]);
+      assert.equal(program.deleteDataset.calledOnce, true);
+      assert.deepEqual(program.deleteDataset.firstCall.args.slice(0, -1), [datasetId]);
+    });
+
+    it('should call listDatasets', function () {
+      var program = getSample().program;
+
+      sinon.stub(program, 'listDatasets');
+      program.main(['list']);
+      assert.equal(program.listDatasets.calledOnce, true);
+      assert.deepEqual(program.listDatasets.firstCall.args.slice(0, -1), [projectId]);
+    });
+
+    it('should call getDatasetSize', function () {
+      var program = getSample().program;
+
+      sinon.stub(program, 'getDatasetSize');
+      program.main(['size', datasetId]);
+      assert.equal(program.getDatasetSize.calledOnce, true);
+      assert.deepEqual(program.getDatasetSize.firstCall.args.slice(0, -1), [datasetId, projectId]);
+    });
+  });
+});
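A recurring idiom in these unit tests is asserting on every argument a stub received except the trailing callback. In miniature (names are illustrative):

```js
// The stub was called as fn(arg1, arg2, callback); drop the callback
// before comparing so the assertion only covers the data arguments.
var args = someStub.firstCall.args.slice(0, -1);
assert.deepEqual(args, ['expected-arg1', 'expected-arg2']);
```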
diff --git a/bigquery/test/list_datasets_and_projects.test.js b/bigquery/test/list_datasets_and_projects.test.js
deleted file mode 100644
index b21aa42a4b..0000000000
--- a/bigquery/test/list_datasets_and_projects.test.js
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2016, Google, Inc.
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//    http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-'use strict';
-
-var proxyquire = require('proxyquire').noCallThru();
-
-function getSample () {
-  var datasetsMock = [
-    {
-      id: 'foo'
-    }
-  ];
-  var projectsMock = [
-    {
-      id: 'bar'
-    }
-  ];
-  var bigqueryMock = {
-    getDatasets: sinon.stub().callsArgWith(0, null, datasetsMock)
-  };
-  var resourceMock = {
-    getProjects: sinon.stub().callsArgWith(0, null, projectsMock)
-  };
-  var BigQueryMock = sinon.stub().returns(bigqueryMock);
-  var ResourceMock = sinon.stub().returns(resourceMock);
-  return {
-    program: proxyquire('../list_datasets_and_projects', {
-      '@google-cloud/bigquery': BigQueryMock,
-      '@google-cloud/resource': ResourceMock
-    }),
-    mocks: {
-      BigQuery: BigQueryMock,
-      Resource: ResourceMock,
-      bigquery: bigqueryMock,
-      resource: resourceMock,
-      datasets: datasetsMock,
-      projects: projectsMock
-    }
-  };
-}
-
-describe('bigquery:list_datasets_and_projects', function () {
-  describe('main', function () {
-    it('should show usage if no arguments exist', function () {
-      var program = getSample().program;
-
-      sinon.stub(program, 'printUsage');
-      program.main([]);
-      assert(program.printUsage.calledOnce);
-    });
-    it('should show usage if first argument is -h', function () {
-      var program = getSample().program;
-
-      sinon.stub(program, 'printUsage');
-      program.main(['-h']);
-      assert(program.printUsage.calledOnce);
-
-      program.main(['--help']);
-      assert(program.printUsage.calledTwice);
-    });
-    it('should call correct commands', function () {
-      var program = getSample().program;
-
-      sinon.stub(program, 'listDatasets');
-      program.main(['list-datasets']);
-      assert(program.listDatasets.calledOnce);
-
-      sinon.stub(program, 'listProjects');
-      program.main(['list-projects']);
-      assert(program.listProjects.calledOnce);
-    });
-  });
-
-  describe('printUsage', function () {
-    it('should print usage', function () {
-      var example = getSample();
-      example.program.printUsage();
-      assert(console.log.calledWith('Usage: node list_datasets_and_projects [COMMAND] [ARGS...]'));
-      assert(console.log.calledWith('\nCommands:\n'));
-      assert(console.log.calledWith('\tlist-datasets PROJECT_ID'));
-      assert(console.log.calledWith('\tlist-projects'));
-    });
-  });
-
-  describe('listProjects', function () {
-    it('should list projects', function () {
-      var example = getSample();
-      example.program.listProjects(function (err, projects) {
-        assert.ifError(err);
-        assert.strictEqual(projects, example.mocks.projects);
-        assert(console.log.calledWith('Found %d projects!', projects.length));
-      });
-    });
-    it('should handle error', function () {
-      var error = 'listProjectsError';
-      var example = getSample();
-      example.mocks.resource.getProjects = sinon.stub().callsArgWith(0, error);
-      example.program.listProjects(function (err, projects) {
-        assert.equal(err, error);
-        assert.equal(projects, undefined);
-      });
-    });
-  });
-
-  describe('listDatasets', function () {
-    it('should list datasets', function () {
-      var example = getSample();
-      example.program.listDatasets('googledata', function (err, datasets) {
-        assert.ifError(err);
-        assert.strictEqual(datasets, example.mocks.datasets);
assert(console.log.calledWith('Found %d datasets!', datasets.length)); - }); - }); - it('should require a Project ID', function () { - var example = getSample(); - example.program.listDatasets(undefined, function (err) { - assert(err); - assert.equal(err.message, 'projectId is required!'); - }); - }); - it('should handle error', function () { - var error = 'listDatasetsError'; - var example = getSample(); - example.mocks.bigquery.getDatasets = sinon.stub().callsArgWith(0, error); - example.program.listDatasets('googledata', function (err, datasets) { - assert.equal(err, error); - assert.equal(datasets, undefined); - }); - }); - }); -}); diff --git a/bigquery/test/load_data_from_csv.test.js b/bigquery/test/load_data_from_csv.test.js deleted file mode 100644 index daabd8e539..0000000000 --- a/bigquery/test/load_data_from_csv.test.js +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -'use strict'; - -var path = require('path'); -var loadDataFromCsvExample = require('../load_data_from_csv'); -var pathToCsvFile = path.join(__dirname, '/../resources/data.csv'); -var datasetId = 'nodejs_docs_samples'; - -describe('bigquery:load_data_from_csv', function () { - it('should be tested'); - - it('should require correct arguments', function () { - assert.throws(function () { - loadDataFromCsvExample.main(); - }, Error, 'pathToCsvFile is required!'); - assert.throws(function () { - loadDataFromCsvExample.main(pathToCsvFile); - }, Error, 'datasetId is required!'); - assert.throws(function () { - loadDataFromCsvExample.main(pathToCsvFile, datasetId); - }, Error, 'tableName is required!'); - }); -}); diff --git a/bigquery/test/load_data_from_gcs.test.js b/bigquery/test/load_data_from_gcs.test.js deleted file mode 100644 index d030a8e285..0000000000 --- a/bigquery/test/load_data_from_gcs.test.js +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2016, Google, Inc. -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -'use strict'; - -var loadDataFromGcsExample = require('../load_data_from_gcs'); -var bucket = process.env.TEST_BUCKET_NAME || 'nodejs-docs-samples'; -var file = 'data.csv'; -var datasetId = 'nodejs_docs_samples'; - -describe('bigquery:load_data_from_gcs', function () { - it('should be tested'); - - it('should require correct arguments', function () { - assert.throws(function () { - loadDataFromGcsExample.main(); - }, Error, 'bucket is required!'); - assert.throws(function () { - loadDataFromGcsExample.main(bucket); - }, Error, 'file is required!'); - assert.throws(function () { - loadDataFromGcsExample.main(bucket, file); - }, Error, 'datasetId is required!'); - assert.throws(function () { - loadDataFromGcsExample.main(bucket, file, datasetId); - }, Error, 'tableName is required!'); - }); -}); diff --git a/bigquery/test/tables.test.js b/bigquery/test/tables.test.js index 0eeb9a70aa..59cd0b3833 100644 --- a/bigquery/test/tables.test.js +++ b/bigquery/test/tables.test.js @@ -16,11 +16,18 @@ var proxyquire = require('proxyquire').noCallThru(); var bucket = 'bucket'; var file = 'file'; +var job = 'job'; var dataset = 'dataset'; var table = 'table'; var format = 'JSON'; +var schema = 'schema'; function getSample () { + var tableMocks = [ + { + id: table + } + ]; var bucketMock = { file: sinon.stub().returns(fileMock) }; @@ -30,14 +37,19 @@ function getSample () { var fileMock = {}; var metadataMock = { status: { state: 'DONE' } }; var jobMock = { - getMetadata: sinon.stub().callsArgWith(0, null, metadataMock), - on: sinon.stub() + id: job, + getMetadata: sinon.stub().yields(null, metadataMock), + on: sinon.stub().returnsThis() }; var tableMock = { - export: sinon.stub().callsArgWith(2, null, jobMock) + export: sinon.stub().yields(null, jobMock), + delete: sinon.stub().yields(null), + import: sinon.stub().yields(null, jobMock) }; var datasetMock = { - table: sinon.stub().returns(tableMock) + table: sinon.stub().returns(tableMock), + createTable: sinon.stub().yields(null, tableMocks[0]), + getTables: sinon.stub().yields(null, tableMocks) }; var bigqueryMock = { job: sinon.stub().returns(jobMock), @@ -61,15 +73,188 @@ function getSample () { job: jobMock, table: tableMock, bucket: bucketMock, - dataset: datasetMock + dataset: datasetMock, + tables: tableMocks } }; } describe('bigquery:tables', function () { - describe('exportTable', function () { + describe('createTable', function () { + it('should create a table', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset, + table: table + }; + + sample.program.createTable(options, callback); + + assert.equal(sample.mocks.dataset.createTable.calledOnce, true); + assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [options.table, {}]); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0]]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Created table: %s', options.table]); + }); + + it('should create a table with a schema', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset, + table: table, + schema: schema + }; + + sample.program.createTable(options, callback); + + assert.equal(sample.mocks.dataset.createTable.calledOnce, true); + assert.deepEqual(sample.mocks.dataset.createTable.firstCall.args.slice(0, -1), [options.table, { schema: schema }]); + assert.equal(callback.calledOnce, true); + 
assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables[0]]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Created table: %s', options.table]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.dataset.createTable.yields(error); + + sample.program.createTable({}, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('listTables', function () { + it('should list tables', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset + }; + + sample.program.listTables(options, callback); + + assert.equal(sample.mocks.dataset.getTables.calledOnce, true); + assert.deepEqual(sample.mocks.dataset.getTables.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.tables]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Found %d table(s)!', sample.mocks.tables.length]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.dataset.getTables.yields(error); + + sample.program.listTables({}, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('deleteTable', function () { + it('should delete a table', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset, + table: table + }; + + sample.program.deleteTable(options, callback); + + assert.equal(sample.mocks.table.delete.calledOnce, true); + assert.deepEqual(sample.mocks.table.delete.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Deleted table: %s', options.table]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.table.delete.yields(error); + + sample.program.deleteTable({}, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('importFile', function () { + it('should import a local file', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset, + table: table, + file: file + }; + sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); + + sample.program.importFile(options, callback); + + assert.equal(sample.mocks.table.import.calledOnce, true); + assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [options.file, { format: undefined }]); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.equal(console.log.calledTwice, true); + assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); + assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); + }); + + it('should import a GCS file', function () { + var sample = getSample(); + var callback = sinon.stub(); + var options = { + dataset: dataset, + table: table, 
+ file: file, + bucket: bucket, + format: format + }; + sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); + + sample.program.importFile(options, callback); + + assert.equal(sample.mocks.table.import.calledOnce, true); + assert.deepEqual(sample.mocks.table.import.firstCall.args.slice(0, -1), [sample.mocks.file, { format: format }]); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.equal(console.log.calledTwice, true); + assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); + assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.table.import.yields(error); + + sample.program.importFile({}, callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('exportTableToGCS', function () { it('should export to a table', function () { - var example = getSample(); + var sample = getSample(); var options = { bucket: bucket, file: file, @@ -79,83 +264,121 @@ describe('bigquery:tables', function () { gzip: true }; var callback = sinon.stub(); - example.mocks.job.on.withArgs('complete').callsArgWith(1, example.mocks.metadata); - - example.program.exportTableToGCS(options, callback); + sample.mocks.job.on.withArgs('complete').yields(sample.mocks.metadata); - assert(example.mocks.storage.bucket.calledWith(options.bucket), 'bucket found'); - assert(example.mocks.bucket.file.calledWith(options.file), 'file found'); - assert(example.mocks.bigquery.dataset.calledWith(options.dataset), 'dataset found'); - assert(example.mocks.dataset.table.calledWith(options.table), 'table found'); - assert(example.mocks.table.export.calledOnce, 'table.export called once'); - assert(console.log.calledWith('ExportTableToGCS: submitted job %s!', example.mocks.job.id), - 'job submittal was reported' - ); + sample.program.exportTableToGCS(options, callback); - assert(callback.calledOnce, 'callback called once'); - assert.equal(callback.firstCall.args.length, 2, 'callback received 2 arguments'); - assert.ifError(callback.firstCall.args[0], 'callback did not receive error'); - assert.equal(callback.firstCall.args[1], example.mocks.metadata, 'callback received metadata'); + assert.equal(sample.mocks.table.export.calledOnce, true); + assert.deepEqual(sample.mocks.table.export.firstCall.args.slice(0, -1), [sample.mocks.file, { format: format, gzip: true }]); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.metadata]); + assert.equal(console.log.calledTwice, true); + assert.deepEqual(console.log.firstCall.args, ['Started job: %s', sample.mocks.job.id]); + assert.deepEqual(console.log.secondCall.args, ['Completed job: %s', sample.mocks.job.id]); }); it('should handle export error', function () { var error = new Error('exportTableToGCSError'); var example = getSample(); var callback = sinon.stub(); - example.mocks.table.export = sinon.stub().callsArgWith(2, error); + example.mocks.table.export.yields(error); example.program.exportTableToGCS({ format: format }, callback); assert(callback.calledOnce, 'callback called once'); assert.equal(callback.firstCall.args.length, 1, 'callback received 1 argument'); assert.equal(callback.firstCall.args[0], error, 'callback received error'); }); + }); 
- it('should handle job-processing error', function () { - var error = new Error('exportTableToGCSError'); - var example = getSample(); - var callback = sinon.stub(); - example.mocks.job.on.withArgs('error').callsArgWith(1, error); - example.program.exportTableToGCS({ format: format }, callback); + describe('main', function () { + it('should call createTable', function () { + var program = getSample().program; + program.createTable = sinon.stub(); - assert(callback.calledOnce, 'callback called once'); - assert.equal(callback.firstCall.args.length, 1, 'callback received 1 argument'); - assert.equal(callback.firstCall.args[0], error, 'callback received error'); + program.main(['create', dataset, table]); + assert.equal(program.createTable.calledOnce, true); + assert.deepEqual(program.createTable.firstCall.args.slice(0, -1), [{ dataset: dataset, table: table }]); + }); + + it('should call listTables', function () { + var program = getSample().program; + program.listTables = sinon.stub(); + + program.main(['list', dataset]); + assert.equal(program.listTables.calledOnce, true); + assert.deepEqual(program.listTables.firstCall.args.slice(0, -1), [{ dataset: dataset }]); + }); + + it('should call deleteTable', function () { + var program = getSample().program; + program.deleteTable = sinon.stub(); + + program.main(['delete', dataset, table]); + assert.equal(program.deleteTable.calledOnce, true); + assert.deepEqual(program.deleteTable.firstCall.args.slice(0, -1), [{ dataset: dataset, table: table }]); + }); + + it('should call importFile', function () { + var program = getSample().program; + program.importFile = sinon.stub(); + + program.main(['import', dataset, table, file]); + assert.equal(program.importFile.calledOnce, true); + assert.deepEqual(program.importFile.firstCall.args.slice(0, -1), [{ + dataset: dataset, + table: table, + file: file, + bucket: undefined, + format: undefined + }]); }); - }); - describe('main', function () { it('should call exportTableToGCS', function () { var program = getSample().program; program.exportTableToGCS = sinon.stub(); - program.main(['export', bucket, file, dataset, table]); - assert(program.exportTableToGCS.calledOnce, 'exportTableToGCS called'); + program.main(['export', dataset, table, bucket, file]); + assert.equal(program.exportTableToGCS.calledOnce, true); + assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{ + dataset: dataset, + table: table, + file: file, + bucket: bucket, + format: undefined, + gzip: false + }]); }); it('should recognize --gzip flag', function () { var program = getSample().program; program.exportTableToGCS = sinon.stub(); - program.main(['export', bucket, file, dataset, table, '--gzip']); - assert(program.exportTableToGCS.calledOnce, 'exportTableToGCS called once'); - - var firstArgs = program.exportTableToGCS.firstCall.args; - assert.equal(firstArgs.length, 2, 'exportTableToGCS received 2 arguments'); - assert(firstArgs[0], 'exportTableToGCS received options'); - assert(firstArgs[0].gzip, 'exportTableToGCS received gzip as True'); + program.main(['export', dataset, table, bucket, file, '--gzip']); + assert.equal(program.exportTableToGCS.calledOnce, true); + assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{ + dataset: dataset, + table: table, + file: file, + bucket: bucket, + format: undefined, + gzip: true + }]); }); it('should recognize --format flag', function () { var program = getSample().program; program.exportTableToGCS = sinon.stub(); - program.main(['export', bucket, file, 
dataset, table, '--format', 'CSV']);
-      assert(program.exportTableToGCS.calledOnce, 'exportTableToGCS called once');
-
-      var firstArgs = program.exportTableToGCS.firstCall.args;
-      assert.equal(firstArgs.length, 2, 'exportTableToGCS received 2 arguments');
-      assert(firstArgs[0], 'exportTableToGCS received options');
-      assert.equal(firstArgs[0].format, 'CSV', 'exportTableToGCS received format as CSV');
+      program.main(['export', dataset, table, bucket, file, '--format', 'CSV']);
+      assert.equal(program.exportTableToGCS.calledOnce, true);
+      assert.deepEqual(program.exportTableToGCS.firstCall.args.slice(0, -1), [{
+        dataset: dataset,
+        table: table,
+        file: file,
+        bucket: bucket,
+        format: 'CSV',
+        gzip: false
+      }]);
     });
   });
 });
diff --git a/package.json b/package.json
index c42db7143f..78877edb34 100644
--- a/package.json
+++ b/package.json
@@ -50,12 +50,12 @@
   "scripts": {
     "lint": "semistandard",
     "pretest": "npm run lint && ./scripts/clean",
-    "mocha": "mocha -R spec -t 120000 --require intelli-espower-loader ./test/_setup.js '{*,appengine/*,functions/*}/test/*.test.js'",
+    "mocha": "mocha -R spec -t 120000 --require intelli-espower-loader ./test/_setup.js ./test/*.test.js '{*,appengine/*,functions/*}/test/*.test.js'",
     "test": "npm run mocha",
     "cover": "nyc --cache npm test && nyc report --reporter=html && nyc report --reporter=lcov",
     "system-test": "mocha -R spec -t 120000 --require intelli-espower-loader ./system-test/_setup.js '{*,appengine/*}/system-test/*.test.js'",
     "system-cover": "npm run pretest && nyc --cache npm run system-test && nyc report --reporter=html && nyc report --reporter=lcov",
-    "all-test": "mocha -R spec -t 120000 --require intelli-espower-loader ./system-test/_setup.js '{*,appengine/*}/system-test/*.test.js' '{*,appengine/*,functions/*}/test/*.test.js'",
+    "all-test": "mocha -R spec -t 120000 --require intelli-espower-loader ./system-test/_setup.js '{*,appengine/*}/system-test/*.test.js' ./test/*.test.js '{*,appengine/*,functions/*}/test/*.test.js'",
     "all-cover": "npm run pretest && nyc --cache npm run all-test && nyc report --reporter=html && nyc report --reporter=lcov"
   },
   "devDependencies": {
diff --git a/resource/README.md b/resource/README.md
new file mode 100644
index 0000000000..9a75a28b28
--- /dev/null
+++ b/resource/README.md
@@ -0,0 +1,52 @@
+Google Cloud Platform logo
+
+# Google Cloud Resource Manager Node.js Samples
+
+Google Cloud Platform provides container resources, such as Organizations and
+Projects, that allow you to group and hierarchically organize other Cloud
+Platform resources. This hierarchical organization lets you easily manage
+common aspects of your resources, such as access control and configuration
+settings. The [Google Cloud Resource Manager API][resource_docs] enables you
+to programmatically manage these container resources.
+
+[resource_docs]: https://cloud.google.com/resource-manager/docs/
+
+## Table of Contents
+
+* [Setup](#setup)
+* [Samples](#samples)
+  * [Projects](#projects)
+
+## Setup
+
+1. Read [Prerequisites][prereq] and [How to run a sample][run] first.
+1. Install dependencies:
+
+        npm install
+
+[prereq]: ../README.md#prerequisities
+[run]: ../README.md#how-to-run-a-sample
+
+## Samples
+
+### Projects
+
+View the [documentation][projects_docs] or the [source code][projects_code].
+
+__Usage:__ `node projects --help`
+
+```
+Commands:
+  list  List all projects the authenticated user has access to.
+
+Options:
+  --help  Show help                                                   [boolean]
+
+Examples:
+  node projects list  List projects.
+ +For more information, see https://cloud.google.com/resource-manager/docs/ +``` + +[projects_docs]: https://cloud.google.com/resource-manager/docs/ +[projects_code]: projects.js diff --git a/resource/package.json b/resource/package.json new file mode 100644 index 0000000000..5cc68e45a6 --- /dev/null +++ b/resource/package.json @@ -0,0 +1,19 @@ +{ + "name": "nodejs-docs-samples-resource-manager", + "version": "0.0.1", + "private": true, + "license": "Apache Version 2.0", + "author": "Google Inc.", + "scripts": { + "test": "mocha -R spec -t 120000 --require intelli-espower-loader ../test/_setup.js test/*.test.js", + "system-test": "mocha -R spec -t 120000 --require intelli-espower-loader ../system-test/_setup.js system-test/*.test.js" + }, + "dependencies": { + "@google-cloud/resource": "^0.1.1", + "request": "^2.74.0", + "yargs": "^5.0.0" + }, + "devDependencies": { + "mocha": "^3.0.2" + } +} diff --git a/resource/projects.js b/resource/projects.js new file mode 100644 index 0000000000..6d6b17d01a --- /dev/null +++ b/resource/projects.js @@ -0,0 +1,72 @@ +// Copyright 2016, Google, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +// [START all] +// [START setup] +// By default, the client will authenticate using the service account file +// specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable and use +// the project specified by the GCLOUD_PROJECT environment variable. See +// https://googlecloudplatform.github.io/gcloud-node/#/docs/google-cloud/latest/guides/authentication +var Resource = require('@google-cloud/resource'); + +// Instantiate a resource client +var resource = Resource(); +// [END setup] + +// [START list_projects] +/** + * List all projects the authenticated user has access to. + * + * @param {function} callback The callback function. 
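+ *
+ * @example
+ * // Illustrative usage only; assumes default authentication is configured
+ * // via the GOOGLE_APPLICATION_CREDENTIALS environment variable.
+ * listProjects(function (err, projects) {
+ *   if (err) {
+ *     return console.error(err);
+ *   }
+ *   projects.forEach(function (project) {
+ *     console.log(project.id);
+ *   });
+ * });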
+ */ +function listProjects (callback) { + // See https://googlecloudplatform.github.io/gcloud-node/#/docs/resource/latest/resource + resource.getProjects(function (err, projects) { + if (err) { + return callback(err); + } + + console.log('Found %d project(s)!', projects.length); + return callback(null, projects); + }); +} +// [END list_projects] +// [END all] + +// The command-line program +var cli = require('yargs'); +var makeHandler = require('../utils').makeHandler; + +var program = module.exports = { + listProjects: listProjects, + main: function (args) { + // Run the command-line program + cli.help().strict().parse(args).argv; + } +}; + +cli + .demand(1) + .command('list', 'List all projects the authenticated user has access to.', {}, function () { + program.listProjects(makeHandler(true, 'id')); + }) + .example('node $0 list', 'List projects.') + .wrap(80) + .recommendCommands() + .epilogue('For more information, see https://cloud.google.com/resource-manager/docs/'); + +if (module === require.main) { + program.main(process.argv.slice(2)); +} diff --git a/bigquery/system-test/dataset_size.test.js b/resource/system-test/projects.test.js similarity index 60% rename from bigquery/system-test/dataset_size.test.js rename to resource/system-test/projects.test.js index c0fb9df434..9d39c20915 100644 --- a/bigquery/system-test/dataset_size.test.js +++ b/resource/system-test/projects.test.js @@ -13,19 +13,18 @@ 'use strict'; -var datasetSizeExample = require('../dataset_size'); +var program = require('../projects'); -describe('bigquery:dataset_size', function () { - it('should return the size of a dataset', function (done) { - datasetSizeExample.main( - 'bigquery-public-data', - 'hacker_news', - function (err, size) { +describe('resource:projects', function () { + describe('list', function () { + it('should list projects', function (done) { + program.listProjects(function (err, projects) { assert.ifError(err); - assert.equal(typeof size, 'string'); - assert(size.indexOf(' GB') === size.length - 3); - done(); - } - ); + assert(Array.isArray(projects)); + assert(projects.length > 0); + assert(console.log.calledWith('Found %d project(s)!', projects.length)); + setTimeout(done, 2000); + }); + }); }); }); diff --git a/resource/test/projects.test.js b/resource/test/projects.test.js new file mode 100644 index 0000000000..b0da14fe88 --- /dev/null +++ b/resource/test/projects.test.js @@ -0,0 +1,76 @@ +// Copyright 2016, Google, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
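+
+// Unit tests for resource/projects.js: proxyquire swaps the real
+// @google-cloud/resource client for sinon stubs, so no API calls are made.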
+ +'use strict'; + +var proxyquire = require('proxyquire').noCallThru(); + +function getSample () { + var projectsMock = [{ id: 'foo' }]; + var resourceMock = { + getProjects: sinon.stub().yields(null, projectsMock, null, projectsMock) + }; + var ResourceMock = sinon.stub().returns(resourceMock); + + return { + program: proxyquire('../projects', { + '@google-cloud/resource': ResourceMock, + yargs: proxyquire('yargs', {}) + }), + mocks: { + Resource: ResourceMock, + resource: resourceMock, + projects: projectsMock + } + }; +} + +describe('resource:projects', function () { + describe('listProjects', function () { + it('should list projects', function () { + var sample = getSample(); + var callback = sinon.stub(); + + sample.program.listProjects(callback); + + assert.equal(sample.mocks.resource.getProjects.calledOnce, true); + assert.deepEqual(sample.mocks.resource.getProjects.firstCall.args.slice(0, -1), []); + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [null, sample.mocks.projects]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['Found %d project(s)!', sample.mocks.projects.length]); + }); + + it('should handle error', function () { + var error = new Error('error'); + var sample = getSample(); + var callback = sinon.stub(); + sample.mocks.resource.getProjects.yields(error); + + sample.program.listProjects(callback); + + assert.equal(callback.calledOnce, true); + assert.deepEqual(callback.firstCall.args, [error]); + }); + }); + + describe('main', function () { + it('should call listProjects', function () { + var program = getSample().program; + + sinon.stub(program, 'listProjects'); + program.main(['list']); + assert(program.listProjects.calledOnce); + }); + }); +}); diff --git a/scripts/install b/scripts/install index ab110f922d..2cc89120a8 100755 --- a/scripts/install +++ b/scripts/install @@ -35,6 +35,7 @@ queue.push('logging'); queue.push('monitoring'); queue.push('prediction'); queue.push('pubsub'); +queue.push('resource'); queue.push('speech'); queue.push('storage'); queue.push('trace'); diff --git a/scripts/uninstall b/scripts/uninstall index 413c814c15..50695352a6 100755 --- a/scripts/uninstall +++ b/scripts/uninstall @@ -35,6 +35,7 @@ queue.push('logging'); queue.push('monitoring'); queue.push('prediction'); queue.push('pubsub'); +queue.push('resource'); queue.push('speech'); queue.push('storage'); queue.push('trace'); diff --git a/system-test/_setup.js b/system-test/_setup.js index e1e64e7b49..4cd3b2dd4b 100644 --- a/system-test/_setup.js +++ b/system-test/_setup.js @@ -13,7 +13,11 @@ 'use strict'; -var assert = require('power-assert'); +var assert = require('power-assert').customize({ + output: { + maxDepth: 2 + } +}); var sinon = require('sinon'); global.assert = assert; diff --git a/test/_setup.js b/test/_setup.js index e1e64e7b49..4cd3b2dd4b 100644 --- a/test/_setup.js +++ b/test/_setup.js @@ -13,7 +13,11 @@ 'use strict'; -var assert = require('power-assert'); +var assert = require('power-assert').customize({ + output: { + maxDepth: 2 + } +}); var sinon = require('sinon'); global.assert = assert; diff --git a/test/utils.test.js b/test/utils.test.js new file mode 100644 index 0000000000..9b27b2071a --- /dev/null +++ b/test/utils.test.js @@ -0,0 +1,79 @@ +// Copyright 2016, Google, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict'; + +var utils = require('../utils'); + +describe('utils', function () { + describe('makeHandler', function () { + it('should throw error', function () { + var handler = utils.makeHandler(); + var error = new Error('error'); + assert.throws(function () { + handler(error); + }, Error, error.message); + }); + + it('should do nothing', function () { + var callCount = console.log.callCount; + var handler = utils.makeHandler(false); + handler(); + assert.equal(console.log.callCount, callCount, 'Console.log was not called'); + }); + + it('should pretty print an array', function () { + var handler = utils.makeHandler(true, 'foo'); + handler(null, [{ + foo: 'utils:bar' + }, { + foo: 'utils:bar2' + }]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['utils:bar\nutils:bar2']); + }); + + it('should pretty print an array with multiple fields', function () { + var handler = utils.makeHandler(true, ['foo', 'bar']); + handler(null, [{ + foo: 'utils:bar', + bar: 'utils:foo' + }, { + foo: 'utils:bar2', + bar: 'utils:foo2' + }]); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['{"foo":"utils:bar","bar":"utils:foo"}\n{"foo":"utils:bar2","bar":"utils:foo2"}']); + }); + + it('should pretty print a single field', function () { + var handler = utils.makeHandler(true, 'foo'); + handler(null, { + foo: 'utils:bar' + }); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, ['utils:bar']); + }); + + it('should just print', function () { + var handler = utils.makeHandler(); + handler(null, { + foo: 'utils:bar' + }); + assert.equal(console.log.calledOnce, true); + assert.deepEqual(console.log.firstCall.args, [{ + foo: 'utils:bar' + }]); + }); + }); +}); diff --git a/utils/index.js b/utils/index.js new file mode 100644 index 0000000000..40abf17fc2 --- /dev/null +++ b/utils/index.js @@ -0,0 +1,53 @@ +// Copyright 2016, Google, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
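+
+// Shared helpers for the sample command-line programs: `pick`/`prettyPick`
+// select fields from API results, and `makeHandler` builds the callbacks the
+// CLIs use to print those results (exercised by test/utils.test.js above).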
+ +'use strict'; + +exports.pick = function (obj, field) { + if (Array.isArray(field)) { + var _obj = {}; + field.forEach(function (_field) { + _obj[_field] = obj[_field]; + }); + return _obj; + } + return obj[field]; +}; + +exports.prettyPick = function (obj, field) { + if (Array.isArray(field)) { + return JSON.stringify(exports.pick(obj, field)); + } + return exports.pick(obj, field); +}; + +exports.makeHandler = function (print, field) { + return function (err, result) { + if (err) { + throw err; + } + if (print === false) { + return; + } + if (Array.isArray(result) && field) { + var mapped = result.map(function (_result) { + return exports.prettyPick(_result, field); + }); + console.log(mapped.join('\n')); + } else if (field) { + console.log(exports.prettyPick(result, field)); + } else { + console.log(result); + } + }; +};
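
As a companion to the new `utils/index.js` helpers above, here is a minimal sketch of how `makeHandler` behaves. It assumes the script is run from the repository root (so `./utils` resolves to the module added in this diff); the project IDs are made up for illustration:

```js
'use strict';

var makeHandler = require('./utils').makeHandler;

// makeHandler(print, field) builds the (err, result) callback the sample
// CLIs pass to their library calls:
// - it rethrows any error,
// - it prints nothing when `print` is false,
// - otherwise it logs `field` picked from the result, joining array results
//   with newlines (and JSON-stringifying each item when `field` is itself
//   an array of field names).
var handler = makeHandler(true, 'id');

// Mimic the (err, projects) shape that resource.getProjects() yields.
handler(null, [{ id: 'my-project-1' }, { id: 'my-project-2' }]);
// Prints:
// my-project-1
// my-project-2
```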