diff --git a/lib/storage/bucket.js b/lib/storage/bucket.js
index 248b4fc6b86..9345db586c5 100644
--- a/lib/storage/bucket.js
+++ b/lib/storage/bucket.js
@@ -23,6 +23,7 @@
var async = require('async');
var extend = require('extend');
var fs = require('fs');
+var globby = require('globby');
var mime = require('mime-types');
var path = require('path');
@@ -44,6 +45,12 @@ var File = require('./file.js');
*/
var util = require('../common/util.js');
+/**
+ * @const {number}
+ * @private
+ */
+var MAX_PARALLEL_UPLOADS = 5;
+
/**
* @const {string}
* @private
@@ -652,32 +659,36 @@ Bucket.prototype.setMetadata = function(metadata, callback) {
};
/**
- * Upload a file to the bucket. This is a convenience method that wraps the
- * functionality provided by a File object, {module:storage/file}.
+ * Upload files to your bucket using glob patterns.
*
- * @param {string} localPath - The fully qualified path to the file you wish to
- * upload to your bucket.
+ * If the input matches more than a single file, your callback will receive an
+ * array of {module:storage/file} objects.
+ *
+ * @param {string|string[]} pattern - A glob pattern, or array of patterns,
+ * matching the files you would like uploaded. See
+ * [sindresorhus/globby](http://goo.gl/42g2v7) for an overview.
* @param {object=} options - Configuration options.
- * @param {string|module:storage/file} options.destination - The place to save
- * your file. If given a string, the file will be uploaded to the bucket
- * using the string as a filename. When given a File object, your local file
- * will be uploaded to the File object's bucket and under the File object's
- * name. Lastly, when this argument is omitted, the file is uploaded to your
- * bucket using the name of the local file.
- * @param {object=} options.metadata - Metadata to set for your file.
- * @param {boolean=} options.resumable - Force a resumable upload. (default:
+ * @param {string} options.basePath - A parent directory to use as the tip of
+ *     the resulting hierarchy in your bucket. File names are determined using
+ *     this value. If not given, each matched file's basename is used as its
+ *     destination name. See the example below for more.
+ * @param {boolean} options.force - Suppress errors until all files have been
+ * processed. (default: false)
+ * @param {object} options.globOptions - Glob options as defined by
+ * [`node-glob`](http://goo.gl/14UhaI).
+ * @param {boolean} options.resumable - Force a resumable upload. (default:
* true for files larger than 5MB). Read more about resumable uploads
* [here](http://goo.gl/1JWqCF). NOTE: This behavior is only possible with
* this method, and not {module:storage/file#createWriteStream}. When
* working with streams, the file format and size is unknown until it's
* completely consumed. Because of this, it's best for you to be explicit
* for what makes sense given your input.
- * @param {function} callback - The callback function.
* @param {string|boolean} options.validation - Possible values: `"md5"`,
* `"crc32c"`, or `false`. By default, data integrity is validated with an
* MD5 checksum for maximum reliability. CRC32c will provide better
* performance with less reliability. You may also choose to skip validation
* completely, however this is **not recommended**.
+ * @param {function} callback - The callback function.
*
* @example
* //-
@@ -685,7 +696,7 @@ Bucket.prototype.setMetadata = function(metadata, callback) {
* //-
* bucket.upload('/local/path/image.png', function(err, file, apiResponse) {
* // Your bucket now contains:
- * // - "image.png" (with the contents of `/local/path/image.png')
+ * //   - "image.png" (with the contents of `/local/path/image.png`)
*
* // `file` is an instance of a File object that refers to your new file.
* });
@@ -707,14 +718,14 @@ Bucket.prototype.setMetadata = function(metadata, callback) {
*
* bucket.upload('local-image.png', options, function(err, file) {
* // Your bucket now contains:
- * // - "new-image.png" (with the contents of `local-image.png')
+ * //   - "new-image.png" (with the contents of `local-image.png`)
*
* // `file` is an instance of a File object that refers to your new file.
* });
*
* //-
- * // You may also re-use a File object, {module:storage/file}, that references
- * // the file you wish to create or overwrite.
+ * // You may also re-use a File object ({module:storage/file}) that references
+ * // the file you wish to write to.
* //-
* var options = {
* destination: bucket.file('existing-file.png'),
@@ -723,68 +734,255 @@ Bucket.prototype.setMetadata = function(metadata, callback) {
*
* bucket.upload('local-img.png', options, function(err, newFile) {
* // Your bucket now contains:
- * // - "existing-file.png" (with the contents of `local-img.png')
+ * //   - "existing-file.png" (with the contents of `local-img.png`)
*
* // Note:
- * // The `newFile` parameter is equal to `file`.
+ * // The `newFile` parameter is equal to `options.destination`.
+ * });
+ *
+ * //-
+ * // For power users, glob patterns are also supported, using
+ * // sindresorhus/globby
+ * // under the hood.
+ * //
+ * // NOTE: All of the options globby accepts can be provided with
+ * // `options.globOptions`. For a full list of options, see
+ * // node-glob's options object.
+ * //-
+ * bucket.upload('/Users/stephen/Desktop/*.{jpg,png}', function(errors, files) {
+ * // `errors` will always be an array.
+ * // `files` is an array of all successfully uploaded files that the glob
+ * // pattern matched.
+ * });
+ *
+ * //-
+ * // If you're uploading many files, you may wish to suppress errors until all
+ * // of the files have been processed.
+ * //-
+ * var options = {
+ * force: true
+ * };
+ *
+ * bucket.upload('/Users/stephen/Photos/*', options, function(errors, files) {
+ * // `errors` will always be an array.
+ * // `files` is an array of all successfully uploaded files that the glob
+ * // pattern matched.
* });
*/
-Bucket.prototype.upload = function(localPath, options, callback) {
+Bucket.prototype.upload = function(pattern, options, callback) {
+ var self = this;
+
if (util.is(options, 'function')) {
callback = options;
options = {};
}
- var newFile;
- if (options.destination instanceof File) {
- newFile = options.destination;
- } else if (util.is(options.destination, 'string')) {
- // Use the string as the name of the file.
- newFile = this.file(options.destination);
- } else {
- // Resort to using the name of the incoming file.
- newFile = this.file(path.basename(localPath));
+ options = options || {};
+
+ var errors = [];
+ var files = [];
+
+ var globOptions = extend({}, options.globOptions, { nodir: true });
+
+ globby(pattern, globOptions, function(err, filePaths) {
+ if (err) {
+ callback(err);
+ return;
+ }
+
+ var uploadFileFns = filePaths.map(function(filePath) {
+ return function(next) {
+ var fileName;
+
+ if (options.basePath) {
+ fileName = path.relative(options.basePath, filePath);
+ } else {
+ fileName = path.basename(filePath);
+ }
+
+ var opts = extend({}, options, { destination: fileName });
+
+ self.uploadFile(filePath, opts, function(err, file) {
+ if (err) {
+ errors.push(err);
+ } else {
+ files.push(file);
+ }
+
+ next(options.force ? null : err);
+ });
+ };
+ });
+
+ async.parallelLimit(uploadFileFns, MAX_PARALLEL_UPLOADS, function() {
+ callback(errors, files);
+ });
+ });
+};
+
+/**
+ * Upload the contents of a directory to the root of your bucket. The structure
+ * of the given directory will be maintained.
+ *
+ * @param {string} directoryPath - Path to the directory you wish to upload.
+ * @param {object=} options - Configuration object.
+ * @param {string} options.basePath - A parent directory to use as the tip of
+ * the resulting hierarchy in your bucket. File names are determined using
+ * this value, which defaults to the given `directoryPath`. See the example
+ * below for more.
+ * @param {boolean} options.resumable - Force a resumable upload. (default:
+ * true for files larger than 5MB). Read more about resumable uploads
+ * [here](http://goo.gl/1JWqCF). NOTE: This behavior is only possible with
+ * this method, and not {module:storage/file#createWriteStream}. When
+ * working with streams, the file format and size is unknown until it's
+ * completely consumed. Because of this, it's best for you to be explicit
+ * for what makes sense given your input.
+ * @param {string|boolean} options.validation - Possible values: `"md5"`,
+ * `"crc32c"`, or `false`. By default, data integrity is validated with an
+ * MD5 checksum for maximum reliability. CRC32c will provide better
+ * performance with less reliability. You may also choose to skip validation
+ * completely, however this is **not recommended**.
+ * @param {function} callback - The callback function.
+ *
+ * @example
+ * var zooPhotosPath = '/Users/stephen/Photos/zoo';
+ *
+ * bucket.uploadDirectory(zooPhotosPath, function(err, files) {
+ * // Your bucket now contains:
+ * // - "monkeys/monkey-1.jpg"
+ * // - "zebras/zebra-1.jpg"
+ * // - "sleeping-panda.jpg"
+ * });
+ *
+ * //-
+ * // You can also specify a `basePath` if you need more control.
+ * //-
+ * var options = {
+ *   basePath: '/Users/stephen/Photos'
+ * };
+ *
+ * bucket.uploadDirectory(zooPhotosPath, options, function(err, files) {
+ * // Your bucket now contains:
+ * // - "zoo/monkeys/monkey-1.jpg"
+ * // - "zoo/zebras/zebra-1.jpg"
+ * // - "zoo/sleeping-panda.jpg"
+ * });
+ */
+Bucket.prototype.uploadDirectory = function(directoryPath, options, callback) {
+ if (util.is(options, 'function')) {
+ callback = options;
+ options = {};
}
- var metadata = options.metadata || {};
- var contentType = mime.contentType(path.basename(localPath));
+ options = options || {};
- if (contentType && !metadata.contentType) {
- metadata.contentType = contentType;
+ if (!options.basePath) {
+ options.basePath = directoryPath;
}
- var resumable;
- if (util.is(options.resumable, 'boolean')) {
- resumable = options.resumable;
- upload();
- } else {
- // Determine if the upload should be resumable if it's over the threshold.
- fs.stat(localPath, function(err, fd) {
- if (err) {
- callback(err);
- return;
- }
+ this.upload(path.join(directoryPath, '**/*'), options, callback);
+};
- resumable = fd.size > RESUMABLE_THRESHOLD;
+/**
+ * Upload a single file to your bucket.
+ *
+ * NOTE: It's often easier to use {module:storage/bucket#upload}, which can also
+ * accept a glob pattern.
+ *
+ * @param {string} filePath - Fully qualified path to a file.
+ * @param {object=} options - Configuration object.
+ * @param {string|module:storage/file} options.destination - The place to save
+ * your file. If given a string, the file will be uploaded to the bucket
+ * using the string as a filename. When given a File object, your local file
+ * will be uploaded to the File object's bucket and under the File object's
+ * name. Lastly, when this argument is omitted, the file is uploaded to your
+ * bucket using the name of the local file.
+ * @param {object} options.metadata - Metadata to set for your file.
+ * @param {boolean} options.resumable - Force a resumable upload. (default:
+ * true for files larger than 5MB). Read more about resumable uploads
+ * [here](http://goo.gl/1JWqCF). NOTE: This behavior is only possible with
+ * this method, and not {module:storage/file#createWriteStream}. When
+ * working with streams, the file format and size is unknown until it's
+ * completely consumed. Because of this, it's best for you to be explicit
+ * for what makes sense given your input.
+ * @param {number} options.size - Byte size of the file. This is used to
+ * determine if a resumable or simple upload technique should be used. If
+ * not provided, the file will be `stat`ed for its size.
+ * @param {string|boolean} options.validation - Possible values: `"md5"`,
+ * `"crc32c"`, or `false`. By default, data integrity is validated with an
+ * MD5 checksum for maximum reliability. CRC32c will provide better
+ * performance with less reliability. You may also choose to skip validation
+ * completely, however this is **not recommended**.
+ * @param {function} callback - The callback function.
+ */
+Bucket.prototype.uploadFile = function(filePath, options, callback) {
+ var self = this;
- upload();
- });
+ if (util.is(options, 'function')) {
+ callback = options;
+ options = {};
}
- function upload() {
- fs.createReadStream(localPath)
- .pipe(newFile.createWriteStream({
- validation: options.validation,
- resumable: resumable,
- metadata: metadata
- }))
- .on('error', function(err) {
- callback(err);
- })
- .on('complete', function() {
- callback(null, newFile);
+ options = options || {};
+
+ if (!util.is(options.resumable, 'boolean')) {
+ // User didn't specify a preference of resumable or simple upload. Check the
+ // file's size to determine which to use.
+ if (!util.is(options.size, 'number')) {
+ fs.stat(filePath, function(err, stats) {
+ if (err) {
+ callback(err);
+ return;
+ }
+
+ options.size = stats.size;
+ self.uploadFile(filePath, options, callback);
});
+ return;
+ }
+
+ options.resumable = options.size > RESUMABLE_THRESHOLD;
+ }
+
+ if (util.is(options.destination, 'string')) {
+ options.destination = this.file(options.destination);
+ }
+
+ if (!options.destination) {
+ options.destination = this.file(path.basename(filePath));
+ }
+
+ this.uploadFile_(filePath, options, callback);
+};
+
+/**
+ * All of the public methods {module:storage#upload},
+ * {module:storage#uploadDirectory}, and {module:storage#uploadFile} call this
+ * wrapper around {module:storage/file#createWriteStream}.
+ *
+ * Additionally, this method will try to set a contentType and charset.
+ *
+ * @private
+ */
+Bucket.prototype.uploadFile_ = function(filePath, options, callback) {
+ var file = options.destination;
+ var metadata = options.metadata || {};
+ var contentType = mime.contentType(path.basename(filePath));
+
+ if (contentType && !metadata.contentType) {
+ metadata.contentType = contentType;
}
+
+ fs.createReadStream(filePath)
+ .pipe(file.createWriteStream({
+ validation: options.validation,
+ resumable: options.resumable,
+ metadata: metadata
+ }))
+ .on('error', callback)
+ .on('complete', function() {
+ callback(null, file);
+ });
};
/**
diff --git a/package.json b/package.json
index a50b0a9a72b..0485f527def 100644
--- a/package.json
+++ b/package.json
@@ -54,6 +54,7 @@
"duplexify": "^3.2.0",
"extend": "^2.0.0",
"fast-crc32c": "^0.1.3",
+ "globby": "^2.0.0",
"google-auth-library": "^0.9.4",
"mime-types": "^2.0.8",
"node-uuid": "^1.4.2",
diff --git a/regression/storage.js b/regression/storage.js
index e2947915710..404816d63b8 100644
--- a/regression/storage.js
+++ b/regression/storage.js
@@ -22,6 +22,7 @@ var assert = require('assert');
var async = require('async');
var crypto = require('crypto');
var fs = require('fs');
+var path = require('path');
var request = require('request');
var through = require('through2');
var tmp = require('tmp');
@@ -301,7 +302,7 @@ describe('storage', function() {
var options = {
destination: uuid.v1() + '.png'
};
- bucket.upload(files.logo.path, options, function(err, f) {
+ bucket.uploadFile(files.logo.path, options, function(err, f) {
assert.ifError(err);
file = f;
done();
@@ -490,7 +491,7 @@ describe('storage', function() {
});
it('should read a byte range from a file', function(done) {
- bucket.upload(files.big.path, function(err, file) {
+ bucket.uploadFile(files.big.path, function(err, file) {
assert.ifError(err);
var fileSize = file.metadata.size;
@@ -516,7 +517,7 @@ describe('storage', function() {
it('should download a file to memory', function(done) {
var fileContents = fs.readFileSync(files.big.path);
- bucket.upload(files.big.path, function(err, file) {
+ bucket.uploadFile(files.big.path, function(err, file) {
assert.ifError(err);
file.download(function(err, remoteContents) {
@@ -545,7 +546,7 @@ describe('storage', function() {
resumable: false
};
- bucket.upload(files.logo.path, options, function(err, file) {
+ bucket.uploadFile(files.logo.path, options, function(err, file) {
assert.ifError(err);
file.getMetadata(function(err, metadata) {
@@ -631,7 +632,7 @@ describe('storage', function() {
});
it('should copy an existing file', function(done) {
- bucket.upload(files.logo.path, 'CloudLogo', function(err, file) {
+ bucket.uploadFile(files.logo.path, 'CloudLogo', function(err, file) {
assert.ifError(err);
file.copy('CloudLogoCopy', function(err, copiedFile) {
assert.ifError(err);
@@ -644,6 +645,93 @@ describe('storage', function() {
});
});
+ describe('upload files', function() {
+ it('should upload a file', function(done) {
+ bucket.uploadFile(files.logo.path, function(err, file) {
+ assert.ifError(err);
+ file.delete(done);
+ });
+ });
+
+ it('should upload files from a glob pattern', function(done) {
+ tmp.setGracefulCleanup();
+ tmp.dir(function(err, directoryPath) {
+ assert.ifError(err);
+
+ var filesToCreate = ['a.txt', 'b.jpg'];
+
+ async.each(
+ filesToCreate,
+
+ function createFile(fileName, next) {
+ fs.createWriteStream(path.join(directoryPath, fileName))
+ .on('error', next)
+ .on('finish', next)
+ .end('file content');
+ },
+
+ function filesCreated(err) {
+ assert.ifError(err);
+
+ var onlyImages = path.join(directoryPath, '*.jpg');
+ bucket.upload(onlyImages, function(errors, files) {
+ if (errors.length > 0) {
+ done(errors);
+ return;
+ }
+
+ assert.equal(files.length, 1);
+ deleteFile(files[0], done);
+ });
+ });
+ });
+ });
+
+ it('should upload a directory', function(done) {
+ tmp.setGracefulCleanup();
+ tmp.dir(function(err, directoryPath) {
+ assert.ifError(err);
+
+ var directoriesToCreate = ['txt', 'jpg'];
+ var filesToCreate = ['txt/a.txt', 'jpg/b.jpg', 'c.png'];
+
+ function createDirectories(callback) {
+ async.each(directoriesToCreate, function(directoryName, next) {
+ fs.mkdir(path.join(directoryPath, directoryName), next);
+ }, callback);
+ }
+
+ function createFiles(callback) {
+ async.each(filesToCreate, function(fileName, next) {
+ fs.createWriteStream(path.join(directoryPath, fileName))
+ .on('error', next)
+ .on('finish', next)
+ .end('file content');
+ }, callback);
+ }
+
+ async.series([createDirectories, createFiles], function(err) {
+ assert.ifError(err);
+
+ bucket.uploadDirectory(directoryPath, function(errors, files) {
+ if (errors.length > 0) {
+ done(errors);
+ return;
+ }
+
+ assert.equal(files.length, 3);
+
+ assert(files.every(function(file) {
+ return filesToCreate.indexOf(file.name) > -1;
+ }));
+
+ async.each(files, deleteFile, done);
+ });
+ });
+ });
+ });
+ });
+
describe('combine files', function() {
it('should combine multiple files into one', function(done) {
var files = [
diff --git a/test/storage/bucket.js b/test/storage/bucket.js
index 9e185308c32..e5c5b0858f0 100644
--- a/test/storage/bucket.js
+++ b/test/storage/bucket.js
@@ -14,14 +14,15 @@
* limitations under the License.
*/
-/*global describe, it, beforeEach, before, after */
-
'use strict';
var assert = require('assert');
var async = require('async');
var extend = require('extend');
+var fs = require('fs');
+var globby = require('globby');
var mime = require('mime-types');
+var path = require('path');
var mockery = require('mockery');
var request = require('request');
var stream = require('stream');
@@ -50,11 +51,28 @@ function fakeRequest() {
}
var eachLimit_Override;
+var parallelLimit_Override;
var fakeAsync = extend({}, async);
fakeAsync.eachLimit = function() {
(eachLimit_Override || async.eachLimit).apply(null, arguments);
};
+fakeAsync.parallelLimit = function() {
+ (parallelLimit_Override || async.parallelLimit).apply(null, arguments);
+};
+
+var globby_Override;
+
+function fakeGlobby() {
+ return (globby_Override || globby).apply(null, arguments);
+}
+
+var stat_Override;
+
+var fakeFs = extend({}, fs);
+fakeFs.stat = function() {
+  return (stat_Override || fs.stat).apply(null, arguments);
+};
describe('Bucket', function() {
var Bucket;
@@ -69,6 +87,8 @@ describe('Bucket', function() {
before(function() {
mockery.registerMock('./file.js', FakeFile);
mockery.registerMock('async', fakeAsync);
+ mockery.registerMock('fs', fakeFs);
+ mockery.registerMock('globby', fakeGlobby);
mockery.registerMock('request', fakeRequest);
mockery.enable({
useCleanCache: true,
@@ -85,6 +105,8 @@ describe('Bucket', function() {
beforeEach(function() {
request_Override = null;
eachLimit_Override = null;
+ parallelLimit_Override = null;
+ stat_Override = null;
bucket = new Bucket(options, BUCKET_NAME);
});
@@ -661,166 +683,505 @@ describe('Bucket', function() {
});
describe('upload', function() {
- var basename = 'proto_query.json';
- var filepath = 'test/testdata/' + basename;
- var textFilepath = 'test/testdata/textfile.txt';
- var metadata = { a: 'b', c: 'd' };
+ var globPattern = '/Users/stephen/Photos/zoo/**/*.jpg';
beforeEach(function() {
- bucket.file = function(name, metadata) {
- return new FakeFile(bucket, name, metadata);
+ bucket.uploadFile = util.noop;
+ });
+
+ it('should extend provided glob options', function(done) {
+ var globOptions = { a: 'b', c: 'd' };
+ var options = { globOptions: globOptions };
+ var expectedGlobOptions = extend({}, globOptions, { nodir: true });
+
+ globby_Override = function(pattern, opts) {
+ assert.deepEqual(opts, expectedGlobOptions);
+ done();
};
+
+ bucket.upload(globPattern, options, assert.ifError);
});
- it('should accept a path & cb', function(done) {
- bucket.upload(filepath, function(err, file) {
- assert.ifError(err);
- assert.equal(file.bucket.name, bucket.name);
- assert.equal(file.name, basename);
+ it('should always ignore directory paths in glob options', function(done) {
+ var globOptions = { a: 'b', c: 'd', nodir: false };
+ var options = { globOptions: globOptions };
+
+ globby_Override = function(pattern, opts) {
+ assert.strictEqual(opts.nodir, true);
done();
- });
+ };
+
+ bucket.upload(globPattern, options, assert.ifError);
});
- it('should accept a path, metadata, & cb', function(done) {
- var options = { metadata: metadata };
- bucket.upload(filepath, options, function(err, file) {
- assert.ifError(err);
- assert.equal(file.bucket.name, bucket.name);
- assert.deepEqual(file.metadata, metadata);
+ it('should execute callback with error from globby', function(done) {
+ var error = new Error('Error.');
+
+ globby_Override = function(pattern, opts, callback) {
+ callback(error);
+ };
+
+ bucket.upload(globPattern, {}, function(err) {
+ assert.deepEqual(err, error);
done();
});
});
- it('should accept a path, a string dest, & cb', function(done) {
- var newFileName = 'new-file-name.png';
- var options = { destination: newFileName };
- bucket.upload(filepath, options, function(err, file) {
- assert.ifError(err);
- assert.equal(file.bucket.name, bucket.name);
- assert.equal(file.name, newFileName);
+ it('should upload limited to 5 files in parallel', function(done) {
+ globby_Override = function(pattern, opts, callback) {
+ callback(null, []);
+ };
+
+ parallelLimit_Override = function(fns, limit) {
+ assert.equal(limit, 5);
done();
+ };
+
+ bucket.upload(globPattern, {}, assert.ifError);
+ });
+
+ describe('file processing', function() {
+ var filePath = '/Users/stephen/Photos/zoo/a-monkey.jpg';
+
+ beforeEach(function() {
+ globby_Override = function(pattern, opts, callback) {
+ callback(null, [filePath]);
+ };
+ });
+
+ it('should use a basePath to determine the filename', function(done) {
+ var basePath = path.resolve(filePath, '..');
+ var expectedFileName = path.relative(basePath, filePath);
+ var options = { basePath: basePath };
+
+ parallelLimit_Override = function(fns) {
+ var processFileFn = fns[0];
+ processFileFn(assert.ifError);
+ };
+
+ bucket.uploadFile = function(uploadFilePath, opts) {
+ assert.equal(opts.destination, expectedFileName);
+ done();
+ };
+
+ bucket.upload(globPattern, options, assert.ifError);
+ });
+
+ it('should use the basename to determine the filename', function(done) {
+ parallelLimit_Override = function(fns) {
+ var processFileFn = fns[0];
+ processFileFn(assert.ifError);
+ };
+
+ bucket.uploadFile = function(uploadFilePath, opts) {
+ assert.equal(opts.destination, path.basename(filePath));
+ done();
+ };
+
+ bucket.upload(globPattern, {}, assert.ifError);
});
+
+ it('should pass correct arguments to uploadFile', function(done) {
+ var options = { a: 'b', c: 'd' };
+ var expectedOptions = extend({}, options, {
+ destination: path.basename(filePath)
+ });
+
+ parallelLimit_Override = function(fns) {
+ var processFileFn = fns[0];
+ processFileFn(assert.ifError);
+ };
+
+ bucket.uploadFile = function(uploadFilePath, opts) {
+ assert.equal(uploadFilePath, filePath);
+ assert.deepEqual(opts, expectedOptions);
+ done();
+ };
+
+ bucket.upload(globPattern, options, assert.ifError);
+ });
+
+
+ it('should stop processing files after error', function(done) {
+ var error = new Error('Error.');
+
+ bucket.uploadFile = function(uploadFilePath, opts, callback) {
+ callback(error);
+ };
+
+ parallelLimit_Override = function(fns) {
+ var processFileFn = fns[0];
+
+ processFileFn(function(err) {
+ assert.deepEqual(err, error);
+ done();
+ });
+ };
+
+ bucket.upload(globPattern, {}, assert.ifError);
+ });
+
+ it('should continue processing files if in force mode', function(done) {
+ var options = { force: true };
+ var error = new Error('Error.');
+
+ bucket.uploadFile = function(uploadFilePath, opts, callback) {
+ callback(error);
+ };
+
+ parallelLimit_Override = function(fns) {
+ var processFileFn = fns[0];
+
+ processFileFn(function(err) {
+ assert.strictEqual(err, null);
+ done();
+ });
+ };
+
+ bucket.upload(globPattern, options, assert.ifError);
+ });
+
+ it('should execute callback with all errors and files', function(done) {
+ var options = { force: true };
+ var error = new Error('Error.');
+ var filePaths = [filePath, filePath];
+ var file = new FakeFile(bucket, filePath);
+
+ globby_Override = function(pattern, opts, callback) {
+ callback(null, filePaths);
+ };
+
+ var filesProcessed = 0;
+ bucket.uploadFile = function(uploadFilePath, opts, callback) {
+ filesProcessed++;
+
+ if (filesProcessed === 1) {
+ callback(error);
+ } else if (filesProcessed === 2) {
+ callback(null, file);
+ }
+ };
+
+ bucket.upload(globPattern, options, function(errors, files) {
+ assert.equal(errors.length, 1);
+ assert.equal(files.length, 1);
+
+ assert.deepEqual(errors[0], error);
+ assert.deepEqual(files[0], file);
+
+ done();
+ });
+ });
+ });
+ });
+
+ describe('uploadDirectory', function() {
+ var directoryPath = '/Users/stephen/Photos/zoo';
+
+ beforeEach(function() {
+ bucket.upload = util.noop;
});
- it('should accept a path, a string dest, metadata, & cb', function(done) {
- var newFileName = 'new-file-name.png';
- var options = { destination: newFileName, metadata: metadata };
- bucket.upload(filepath, options, function(err, file) {
- assert.ifError(err);
- assert.equal(file.bucket.name, bucket.name);
- assert.equal(file.name, newFileName);
- assert.deepEqual(file.metadata, metadata);
+ it('should assign a basepath if one is not given', function(done) {
+ bucket.upload = function(pattern, options) {
+ assert.equal(options.basePath, directoryPath);
done();
- });
+ };
+
+ bucket.uploadDirectory(directoryPath, {}, assert.ifError);
});
- it('should accept a path, a File dest, & cb', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
- fakeFile.isSameFile = function() {
- return true;
+ it('should not override a given basepath', function(done) {
+ var options = { basePath: path.resolve(directoryPath, '..') };
+
+ bucket.upload = function(pattern, opts) {
+ assert.equal(opts.basePath, options.basePath);
+ done();
};
- var options = { destination: fakeFile };
- bucket.upload(filepath, options, function(err, file) {
- assert.ifError(err);
- assert(file.isSameFile());
+
+ bucket.uploadDirectory(directoryPath, options, assert.ifError);
+ });
+
+ it('should call upload with self and children pattern', function(done) {
+ bucket.upload = function(pattern) {
+ assert.equal(pattern, path.join(directoryPath, '**/*'));
done();
- });
+ };
+
+ bucket.uploadDirectory(directoryPath, {}, assert.ifError);
});
- it('should accept a path, a File dest, metadata, & cb', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
- fakeFile.isSameFile = function() {
- return true;
+ it('should call upload with all arguments', function(done) {
+ var options = { a: 'b', c: 'd' };
+
+ bucket.upload = function(pattern, opts, callback) {
+ assert.equal(pattern, path.join(directoryPath, '**/*'));
+ assert.deepEqual(opts, options);
+ callback();
};
- var options = { destination: fakeFile, metadata: metadata };
- bucket.upload(filepath, options, function(err, file) {
- assert.ifError(err);
- assert(file.isSameFile());
- assert.deepEqual(file.metadata, metadata);
- done();
+
+ bucket.uploadDirectory(directoryPath, options, done);
+ });
+ });
+
+ describe('uploadFile', function() {
+ var filePath = 'file-path.txt';
+ var RESUMABLE_THRESHOLD = 5000000;
+
+ beforeEach(function() {
+ bucket.uploadFile_ = util.noop;
+ });
+
+ describe('resumable undefined', function() {
+ describe('size unknown', function() {
+ it('should stat file, assign size & re-attempt upload', function(done) {
+ var size = 1000;
+ var options = { a: 'b', c: 'd' };
+
+ stat_Override = function(statFilePath, callback) {
+ assert.equal(statFilePath, filePath);
+
+ // Should call `uploadFile` again.
+ bucket.uploadFile = function(uploadFilePath, opts, callback) {
+ assert.equal(uploadFilePath, filePath);
+ assert.equal(opts.size, size);
+ assert.deepEqual(opts, options);
+ callback();
+ };
+
+ callback(null, { size: size });
+ };
+
+ bucket.uploadFile(filePath, options, done);
+ });
+
+      it('should return stat error to callback', function(done) {
+ var error = new Error('Error.');
+
+ stat_Override = function(statFilePath, callback) {
+ callback(error);
+ };
+
+ bucket.uploadFile(filePath, {}, function(err) {
+ assert.deepEqual(err, error);
+ done();
+ });
+ });
+ });
+
+ describe('size known', function() {
+ beforeEach(function() {
+ stat_Override = function() {
+ throw new Error('`stat` should not be called.');
+ };
+ });
+
+ it('should set resumable false <= 5MB', function(done) {
+ var options = { size: RESUMABLE_THRESHOLD };
+
+ bucket.uploadFile_ = function(filePath, opts) {
+ assert.strictEqual(opts.resumable, false);
+ done();
+ };
+
+ bucket.uploadFile(filePath, options, assert.ifError);
+ });
+
+ it('should set resumable true > 5MB', function(done) {
+ var options = { size: RESUMABLE_THRESHOLD + 1 };
+
+ bucket.uploadFile_ = function(filePath, opts) {
+ assert.strictEqual(opts.resumable, true);
+ done();
+ };
+
+ bucket.uploadFile(filePath, options, assert.ifError);
+ });
+ });
+ });
+
+ describe('resumable defined', function() {
+ it('should not stat file if resumable is specified', function(done) {
+ var options = { resumable: true };
+
+ stat_Override = function() {
+ throw new Error('`stat` should not be called.');
+ };
+
+ bucket.uploadFile_ = function(filePath, opts) {
+ assert.strictEqual(opts.resumable, options.resumable);
+ done();
+ };
+
+ bucket.uploadFile(filePath, options, assert.ifError);
});
});
+ it('should create a File from a string destination', function(done) {
+ var options = { destination: 'a-new-file.txt', resumable: true };
+
+ bucket.file = function(name) {
+ assert.equal(name, options.destination);
+ setImmediate(done);
+ return {};
+ };
+
+ bucket.uploadFile(filePath, options, assert.ifError);
+ });
+
+ it('should name the file its basename if no destination', function(done) {
+ var baseName = 'a-new-file.txt';
+ var options = { resumable: true };
+
+ bucket.file = function(name) {
+ assert.equal(name, baseName);
+ setImmediate(done);
+ return {};
+ };
+
+ bucket.uploadFile('a/filepath/to/' + baseName, options, assert.ifError);
+ });
+
+ it('should pass all arguments to uploadFile_', function(done) {
+ var options = { a: 'b', c: 'd', resumable: true };
+
+ bucket.uploadFile_ = function(uploadFilePath, opts, callback) {
+ assert.equal(uploadFilePath, filePath);
+ assert.deepEqual(options, opts);
+ callback();
+ };
+
+ bucket.uploadFile(filePath, options, done);
+ });
+ });
+
+ describe('uploadFile_', function() {
+ var basename = 'proto_query.json';
+ var filepath = 'test/testdata/' + basename;
+ var textFilepath = 'test/testdata/textfile.txt';
+
+ var destinationFile;
+
+ beforeEach(function() {
+ destinationFile = new FakeFile(bucket, 'file-name');
+ });
+
it('should guess at the content type', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
- var options = { destination: fakeFile };
- fakeFile.createWriteStream = function(options) {
+ var options = { destination: destinationFile };
+
+ destinationFile.createWriteStream = function(options) {
var ws = new stream.Writable();
ws.write = util.noop;
+
setImmediate(function() {
var expectedContentType = 'application/json; charset=utf-8';
assert.equal(options.metadata.contentType, expectedContentType);
done();
});
+
return ws;
};
- bucket.upload(filepath, options, assert.ifError);
+
+ bucket.uploadFile_(filepath, options, assert.ifError);
});
it('should guess at the charset', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
- var options = { destination: fakeFile };
- fakeFile.createWriteStream = function(options) {
+ var options = { destination: destinationFile };
+
+ destinationFile.createWriteStream = function(options) {
var ws = new stream.Writable();
ws.write = util.noop;
+
setImmediate(function() {
var expectedContentType = 'text/plain; charset=utf-8';
assert.equal(options.metadata.contentType, expectedContentType);
done();
});
+
return ws;
};
- bucket.upload(textFilepath, options, assert.ifError);
+
+ bucket.uploadFile_(textFilepath, options, assert.ifError);
});
it('should allow overriding content type', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
var metadata = { contentType: 'made-up-content-type' };
- var options = { destination: fakeFile, metadata: metadata };
- fakeFile.createWriteStream = function(options) {
+ var options = { destination: destinationFile, metadata: metadata };
+
+ destinationFile.createWriteStream = function(options) {
var ws = new stream.Writable();
ws.write = util.noop;
+
setImmediate(function() {
assert.equal(options.metadata.contentType, metadata.contentType);
done();
});
+
return ws;
};
- bucket.upload(filepath, options, assert.ifError);
+
+ bucket.uploadFile_(filepath, options, assert.ifError);
});
it('should allow specifying options.resumable', function(done) {
- var fakeFile = new FakeFile(bucket, 'file-name');
- var options = { destination: fakeFile, resumable: false };
- fakeFile.createWriteStream = function(options) {
+ var options = { destination: destinationFile, resumable: false };
+
+ destinationFile.createWriteStream = function(options) {
var ws = new stream.Writable();
ws.write = util.noop;
+
setImmediate(function() {
assert.strictEqual(options.resumable, false);
done();
});
+
return ws;
};
- bucket.upload(filepath, options, assert.ifError);
+
+ bucket.uploadFile_(filepath, options, assert.ifError);
});
it('should execute callback on error', function(done) {
var error = new Error('Error.');
- var fakeFile = new FakeFile(bucket, 'file-name');
- var options = { destination: fakeFile };
- fakeFile.createWriteStream = function() {
+ var options = { destination: destinationFile };
+
+ destinationFile.createWriteStream = function() {
var ws = new stream.Writable();
+
setImmediate(function() {
ws.emit('error', error);
ws.end();
});
+
return ws;
};
- bucket.upload(filepath, options, function(err) {
+
+ bucket.uploadFile_(filepath, options, function(err) {
assert.equal(err, error);
done();
});
});
+
+ it('should return destination File on complete', function(done) {
+ var options = { destination: destinationFile };
+
+ destinationFile.createWriteStream = function() {
+ var ws = new stream.Writable();
+
+ setImmediate(function() {
+ ws.emit('complete');
+ ws.end();
+ });
+
+ return ws;
+ };
+
+ bucket.uploadFile_(filepath, options, function(err, file) {
+ assert.ifError(err);
+ assert.deepEqual(file, destinationFile);
+ done();
+ });
+ });
});
describe('makeAllFilesPublicPrivate_', function() {