Skip to content

Commit

Permalink
Merge pull request #275 from stephenplusplus/spp--fix-storage-tests
Browse files Browse the repository at this point in the history
remove bucket env var req. & stop deleting all buckets
  • Loading branch information
silvolu committed Nov 4, 2014
2 parents c10073f + 17d8f77 commit d457f72
Show file tree
Hide file tree
Showing 4 changed files with 78 additions and 81 deletions.
1 change: 0 additions & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ $ npm test
To run the regression tests, first create and configure a project in the Google Developers Console following the [instructions on how to run gcloud-node][elsewhere]. After that, set the following environment variables:

- **GCLOUD_TESTS_PROJECT_ID**: Developers Console project's ID (e.g. bamboo-shift-455)
- **GCLOUD_TESTS_BUCKET_NAME**: The name of the bucket to use for the Cloud Storage API tests
- **GCLOUD_TESTS_KEY**: The path to the JSON key file.

Install the [gcloud command-line tool][gcloudcli] on your machine and use it to create the indexes required by the datastore regression tests, as defined in `regression/data/index.yaml`:
Expand Down
24 changes: 10 additions & 14 deletions regression/bigquery.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,18 @@ var Dataset = require('../lib/bigquery/dataset');
var env = require('./env');
var fs = require('fs');
var Job = require('../lib/bigquery/job');
var uuid = require('node-uuid');

var gcloud = require('../lib')(env);
var bigquery = gcloud.bigquery();
var bucket = gcloud.storage().bucket();
var storage = gcloud.storage();

describe('BigQuery', function() {
var DATASET_ID = 'testDatasetId';
var dataset;
var TABLE_ID = 'myKittens';
var table;
var bucket;

var query = 'SELECT url FROM [publicdata:samples.github_nested] LIMIT 100';

Expand Down Expand Up @@ -82,23 +84,17 @@ describe('BigQuery', function() {
});
},

// Create a Bucket, if necessary.
// Create a Bucket.
function(next) {
bucket.getMetadata(function(err) {
if (!err) {
next();
var bucketName = 'gcloud-test-bucket-temp-' + uuid.v1();
storage.createBucket(bucketName, function(err, b) {
if (err) {
next(err);
return;
}

gcloud.storage().createBucket(bucket.name, function(err, b) {
if (err) {
next(err);
return;
}

bucket = b;
next();
});
bucket = b;
next();
});
}
], done);
Expand Down
9 changes: 3 additions & 6 deletions regression/env.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,7 @@

'use strict';

if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
!process.env.GCLOUD_TESTS_BUCKET_NAME &&
!process.env.GCLOUD_TESTS_KEY) {
if (!process.env.GCLOUD_TESTS_PROJECT_ID && !process.env.GCLOUD_TESTS_KEY) {
var error = [
'To run the regression tests, you need to set some environment variables.',
'Please check the Contributing guide for instructions.'
Expand All @@ -27,7 +25,6 @@ if (!process.env.GCLOUD_TESTS_PROJECT_ID &&
}

module.exports = {
projectId: process.env.GCLOUD_TESTS_PROJECT_ID,
bucketName: process.env.GCLOUD_TESTS_BUCKET_NAME,
keyFilename: process.env.GCLOUD_TESTS_KEY
keyFilename: process.env.GCLOUD_TESTS_KEY,
projectId: process.env.GCLOUD_TESTS_PROJECT_ID
};
125 changes: 65 additions & 60 deletions regression/storage.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,13 @@ var crypto = require('crypto');
var fs = require('fs');
var request = require('request');
var tmp = require('tmp');
var uuid = require('node-uuid');

var env = require('./env.js');
var storage = require('../lib/storage')(env);

var BUCKET_NAME = generateBucketName();

var files = {
logo: {
path: 'regression/data/CloudPlatform_128px_Retina.png'
Expand All @@ -37,30 +40,6 @@ var files = {
}
};

// Compute the base64-encoded MD5 digest of the file at obj[file].path,
// store it on obj[file].hash, then invoke done.
function setHash(obj, file, done) {
  var md5sum = crypto.createHash('md5');
  var stream = fs.createReadStream(obj[file].path);
  stream.on('data', function(chunk) {
    md5sum.update(chunk);
  });
  stream.on('end', function() {
    obj[file].hash = md5sum.digest('base64');
    done();
  });
}

// Delete every bucket in the project, emptying each bucket's files first.
// Passes any listing error straight to callback; per-bucket deletions are
// fanned out via async.map, which reports completion through callback.
function deleteBucketsAndFiles(callback) {
  storage.getBuckets(function(err, buckets) {
    if (err) {
      return callback(err);
    }
    function emptyThenDelete(bucket, next) {
      deleteFiles(bucket, function() {
        bucket.delete(next);
      });
    }
    async.map(buckets, emptyThenDelete, callback);
  });
}

function deleteFiles(bucket, callback) {
bucket.getFiles(function(err, files) {
if (err) {
Expand All @@ -73,55 +52,81 @@ function deleteFiles(bucket, callback) {
});
}

// Build a unique, throwaway bucket name for this test run (time-based UUID
// keeps parallel runs from colliding).
function generateBucketName() {
  var uniqueSuffix = uuid.v1();
  return 'gcloud-test-bucket-temp-' + uniqueSuffix;
}

// Compute the base64-encoded MD5 digest of the file at obj[file].path,
// store it on obj[file].hash, then invoke done.
function setHash(obj, file, done) {
  var md5sum = crypto.createHash('md5');
  var stream = fs.createReadStream(obj[file].path);
  stream.on('data', function(chunk) {
    md5sum.update(chunk);
  });
  stream.on('end', function() {
    obj[file].hash = md5sum.digest('base64');
    done();
  });
}

describe('storage', function() {
var bucket;

before(function(done) {
deleteBucketsAndFiles(function() {
storage.createBucket('new' + Date.now(), function(err, newBucket) {
if (err) {
done(err);
return;
}
bucket = newBucket;
done();
});
storage.createBucket(BUCKET_NAME, function(err, newBucket) {
assert.ifError(err);
bucket = newBucket;
done();
});
});

after(deleteBucketsAndFiles);

describe('creating a bucket', function() {
it('should create a bucket', function(done) {
storage.createBucket('a-new-bucket', function(err, bucket) {
assert.ifError(err);
bucket.delete(done);
});
after(function(done) {
deleteFiles(bucket, function(err) {
assert.ifError(err);
bucket.delete(done);
});
});

describe('getting buckets', function() {
var bucketsToCreate = [
generateBucketName(), generateBucketName(), generateBucketName()
];

before(function(done) {
async.map(bucketsToCreate, storage.createBucket.bind(storage), done);
});

after(function(done) {
async.parallel(bucketsToCreate.map(function(bucket) {
return function(done) {
storage.bucket(bucket).delete(done);
};
}), done);
});

it('should get buckets', function(done) {
var bucketsToCreate = [
'new' + Date.now(),
'newer' + Date.now(),
'newest' + Date.now()
];
async.map(
bucketsToCreate,
storage.createBucket.bind(storage),
function(err) {
assert.ifError(err);
storage.getBuckets(function(err, buckets) {
assert.equal(
buckets.filter(function(bucket) {
return bucketsToCreate.indexOf(bucket.name) > -1;
}).length,
bucketsToCreate.length
);
done();
});
storage.getBuckets(getBucketsHandler);

var createdBuckets = [];
var retries = 0;
var MAX_RETRIES = 2;

function getBucketsHandler(err, buckets, nextQuery) {
buckets.forEach(function(bucket) {
if (bucketsToCreate.indexOf(bucket.name) > -1) {
createdBuckets.push(bucket);
}
});

if (createdBuckets.length < bucketsToCreate.length && nextQuery) {
retries++;

if (retries <= MAX_RETRIES) {
storage.getBuckets(nextQuery, getBucketsHandler);
return;
}
}

assert.equal(createdBuckets.length, bucketsToCreate.length);
done();
}
});
});

Expand Down

0 comments on commit d457f72

Please sign in to comment.