diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2fc3ce0518a..3e93883137d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -20,7 +20,6 @@ $ npm test
 To run the regression tests, first create and configure a project in the Google Developers Console following the [instructions on how to run gcloud-node][elsewhere]. After that, set the following environment variables:
 
 - **GCLOUD_TESTS_PROJECT_ID**: Developers Console project's ID (e.g. bamboo-shift-455)
-- **GCLOUD_TESTS_BUCKET_NAME**: The name of the bucket to use for the Cloud Storage API tests
 - **GCLOUD_TESTS_KEY**: The path to the JSON key file.
 
 Install the [gcloud command-line tool][gcloudcli] to your machine and use it to create the indexes used in the datastore regression tests with indexes found in `regression/data/index/yaml`:
diff --git a/regression/bigquery.js b/regression/bigquery.js
index d484c376068..3966a5c6071 100644
--- a/regression/bigquery.js
+++ b/regression/bigquery.js
@@ -24,16 +24,18 @@ var Dataset = require('../lib/bigquery/dataset');
 var env = require('./env');
 var fs = require('fs');
 var Job = require('../lib/bigquery/job');
+var uuid = require('node-uuid');
 
 var gcloud = require('../lib')(env);
 var bigquery = gcloud.bigquery();
-var bucket = gcloud.storage().bucket();
+var storage = gcloud.storage();
 
 describe('BigQuery', function() {
   var DATASET_ID = 'testDatasetId';
   var dataset;
   var TABLE_ID = 'myKittens';
   var table;
+  var bucket;
 
   var query = 'SELECT url FROM [publicdata:samples.github_nested] LIMIT 100';
 
@@ -82,23 +84,17 @@ describe('BigQuery', function() {
         });
       },
 
-      // Create a Bucket, if necessary.
+      // Create a Bucket.
       function(next) {
-        bucket.getMetadata(function(err) {
-          if (!err) {
-            next();
+        var bucketName = 'gcloud-test-bucket-temp-' + uuid.v1();
+        storage.createBucket(bucketName, function(err, b) {
+          if (err) {
+            next(err);
             return;
           }
 
-          gcloud.storage().createBucket(bucket.name, function(err, b) {
-            if (err) {
-              next(err);
-              return;
-            }
-
-            bucket = b;
-            next();
-          });
+          bucket = b;
+          next();
         });
       }
     ], done);
diff --git a/regression/env.js b/regression/env.js
index af2321e2f6c..8663d4f35f5 100644
--- a/regression/env.js
+++ b/regression/env.js
@@ -16,9 +16,7 @@
 
 'use strict';
 
-if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
-    !process.env.GCLOUD_TESTS_BUCKET_NAME &&
-    !process.env.GCLOUD_TESTS_KEY) {
+if (!process.env.GCLOUD_TESTS_PROJECT_ID && !process.env.GCLOUD_TESTS_KEY) {
   var error = [
     'To run the regression tests, you need to set some environment variables.',
     'Please check the Contributing guide for instructions.'
@@ -27,7 +25,6 @@ if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
 }
 
 module.exports = {
-  projectId: process.env.GCLOUD_TESTS_PROJECT_ID,
-  bucketName: process.env.GCLOUD_TESTS_BUCKET_NAME,
-  keyFilename: process.env.GCLOUD_TESTS_KEY
+  keyFilename: process.env.GCLOUD_TESTS_KEY,
+  projectId: process.env.GCLOUD_TESTS_PROJECT_ID
 };
diff --git a/regression/storage.js b/regression/storage.js
index 957cc7e670c..0cc0f87f3c3 100644
--- a/regression/storage.js
+++ b/regression/storage.js
@@ -24,10 +24,13 @@ var crypto = require('crypto');
 var fs = require('fs');
 var request = require('request');
 var tmp = require('tmp');
+var uuid = require('node-uuid');
 
 var env = require('./env.js');
 var storage = require('../lib/storage')(env);
 
+var BUCKET_NAME = generateBucketName();
+
 var files = {
   logo: {
     path: 'regression/data/CloudPlatform_128px_Retina.png'
@@ -37,30 +40,6 @@ var files = {
   }
 };
 
-function setHash(obj, file, done) {
-  var hash = crypto.createHash('md5');
-  fs.createReadStream(obj[file].path)
-    .on('data', hash.update.bind(hash))
-    .on('end', function() {
-      obj[file].hash = hash.digest('base64');
-      done();
-    });
-}
-
-function deleteBucketsAndFiles(callback) {
-  storage.getBuckets(function(err, buckets) {
-    if (err) {
-      callback(err);
-      return;
-    }
-    async.map(buckets, function(bucket, next) {
-      deleteFiles(bucket, function() {
-        bucket.delete(next);
-      });
-    }, callback);
-  });
-}
-
 function deleteFiles(bucket, callback) {
   bucket.getFiles(function(err, files) {
     if (err) {
@@ -73,55 +52,81 @@ function deleteFiles(bucket, callback) {
   });
 }
 
+function generateBucketName() {
+  return 'gcloud-test-bucket-temp-' + uuid.v1();
+}
+
+function setHash(obj, file, done) {
+  var hash = crypto.createHash('md5');
+  fs.createReadStream(obj[file].path)
+    .on('data', hash.update.bind(hash))
+    .on('end', function() {
+      obj[file].hash = hash.digest('base64');
+      done();
+    });
+}
+
 describe('storage', function() {
   var bucket;
 
   before(function(done) {
-    deleteBucketsAndFiles(function() {
-      storage.createBucket('new' + Date.now(), function(err, newBucket) {
-        if (err) {
-          done(err);
-          return;
-        }
-        bucket = newBucket;
-        done();
-      });
+    storage.createBucket(BUCKET_NAME, function(err, newBucket) {
+      assert.ifError(err);
+      bucket = newBucket;
+      done();
     });
   });
 
-  after(deleteBucketsAndFiles);
-
-  describe('creating a bucket', function() {
-    it('should create a bucket', function(done) {
-      storage.createBucket('a-new-bucket', function(err, bucket) {
-        assert.ifError(err);
-        bucket.delete(done);
-      });
+  after(function(done) {
+    deleteFiles(bucket, function(err) {
+      assert.ifError(err);
+      bucket.delete(done);
     });
   });
 
   describe('getting buckets', function() {
+    var bucketsToCreate = [
+      generateBucketName(), generateBucketName(), generateBucketName()
+    ];
+
+    before(function(done) {
+      async.map(bucketsToCreate, storage.createBucket.bind(storage), done);
+    });
+
+    after(function(done) {
+      async.parallel(bucketsToCreate.map(function(bucket) {
+        return function(done) {
+          storage.bucket(bucket).delete(done);
+        };
+      }), done);
+    });
+
     it('should get buckets', function(done) {
-      var bucketsToCreate = [
-        'new' + Date.now(),
-        'newer' + Date.now(),
-        'newest' + Date.now()
-      ];
-      async.map(
-        bucketsToCreate,
-        storage.createBucket.bind(storage),
-        function(err) {
-          assert.ifError(err);
-          storage.getBuckets(function(err, buckets) {
-            assert.equal(
-              buckets.filter(function(bucket) {
-                return bucketsToCreate.indexOf(bucket.name) > -1;
-              }).length,
-              bucketsToCreate.length
-            );
-            done();
-          });
+      storage.getBuckets(getBucketsHandler);
+
+      var createdBuckets = [];
+      var retries = 0;
+      var MAX_RETRIES = 2;
+
+      function getBucketsHandler(err, buckets, nextQuery) {
+        buckets.forEach(function(bucket) {
+          if (bucketsToCreate.indexOf(bucket.name) > -1) {
+            createdBuckets.push(bucket);
+          }
         });
+
+        if (createdBuckets.length < bucketsToCreate.length && nextQuery) {
+          retries++;
+
+          if (retries <= MAX_RETRIES) {
+            storage.getBuckets(nextQuery, getBucketsHandler);
+            return;
+          }
+        }
+
+        assert.equal(createdBuckets.length, bucketsToCreate.length);
+        done();
+      }
     });
   });
 
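
For reference, the bucket lifecycle the tests now rely on looks roughly like the sketch below. This is a minimal illustration only, assuming the same node-uuid dependency and the storage.createBucket / bucket.delete callbacks used in the diff; the helper names tempBucketName and withTempBucket are hypothetical and not part of the patch.

    'use strict';

    var uuid = require('node-uuid');
    var env = require('./env');
    var storage = require('../lib')(env).storage();

    // Hypothetical helper mirroring generateBucketName() above: a time-based
    // UUID keeps concurrent test runs from colliding on one shared bucket.
    function tempBucketName() {
      return 'gcloud-test-bucket-temp-' + uuid.v1();
    }

    // Create a throwaway bucket, hand it to a test body, then delete it.
    // Illustrative only; the regression suite wires this up via mocha hooks.
    function withTempBucket(testBody, done) {
      storage.createBucket(tempBucketName(), function(err, bucket) {
        if (err) {
          done(err);
          return;
        }

        testBody(bucket, function(testErr) {
          bucket.delete(function(deleteErr) {
            done(testErr || deleteErr);
          });
        });
      });
    }

Using uuid.v1() rather than the previous 'new' + Date.now() scheme means bucket names stay unique even when several test runs start within the same millisecond, which is why the shared GCLOUD_TESTS_BUCKET_NAME environment variable is no longer needed.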