Added gzip helper to tar module #65

Closed
wants to merge 6 commits

2 participants

@jstuckey

Added gzip helper option to the tar module as mentioned in #36.
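
For context, a minimal usage sketch of the option this PR adds (the output path and appended content are illustrative):

```js
var fs = require('fs');
var archiver = require('archiver');

// With gzip: true, the tar module compresses its output with zlib before it
// reaches the destination stream, producing a .tar.gz in one step.
var archive = archiver('tar', { gzip: true });

archive.pipe(fs.createWriteStream('output.tar.gz')); // illustrative path

archive
  .append('hello world', { name: 'hello.txt' })
  .finalize();
```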

@ctalkington
Owner

thanks for the PR, will review as soon as possible.

@ctalkington referenced this pull request from a commit:

few adjustments to #65.
* add gzipOptions to control compression
* make pipe logic more general for potential future compression formats.
d95e0c1
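
Based on that commit note, a hedged sketch of how gzipOptions would presumably be used; the assumption that they are forwarded to zlib.createGzip is not confirmed by anything shown on this page:

```js
var archiver = require('archiver');

// Assumption: gzipOptions are passed through to zlib.createGzip(), so
// standard zlib options such as `level` would apply here.
var archive = archiver('tar', {
  gzip: true,
  gzipOptions: { level: 9 } // best compression, slower
});
```
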
@ctalkington
Owner

manually merged bits of this. really don't like adding deps for the tests, and gzip definitely adds hassle to the test. it's such a simple change that we can deal with tests when a better way of testing is set up.

published as 0.7.0, let me know if you have comments / concerns.

@jstuckey

Works for me. Just wanted to contribute.

I agree that the second test is complex. The first test, `it('should gzip the tar archive', ...)`, might be good to include for code coverage though. It is the same as your other tests in that it just checks the digest. No extra dependencies required.

@ctalkington
Owner

@jstuckey noted. the test might fail with the digest though, because of zlib differences between OSes. ran into this at the very beginning of this project.
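
For illustration, one reason gzipped output (and so its digest) can vary by platform: the 10-byte gzip header carries an OS identifier (RFC 1952), and the deflate stream itself can differ between zlib builds. A minimal sketch to inspect that header byte:

```js
var zlib = require('zlib');

zlib.gzip(Buffer.from('string'), function(err, buf) {
  if (err) throw err;
  // Byte 9 of the gzip header is the OS field (RFC 1952); different
  // platforms/zlib builds may stamp different values here, which alone
  // changes the SHA-1 of the compressed archive.
  console.log('gzip OS byte:', buf[9]);
});
```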

@ctalkington
Owner

that said, in a future version the tests will actually un-gzip and try to parse the archive.
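
A rough sketch of that idea, assuming the test keeps writing to the tmp/ paths used above and that a tar parsing/validation step is added separately:

```js
var fs = require('fs');
var zlib = require('zlib');

// Un-gzip the generated archive and hand the raw tar bytes to whatever
// validation the future test performs, instead of comparing a fixed digest.
fs.createReadStream('tmp/multiple.tar.gz')
  .pipe(zlib.createGunzip())
  .pipe(fs.createWriteStream('tmp/multiple.tar'))
  .on('finish', function() {
    // tmp/multiple.tar now holds the uncompressed archive, ready to be
    // parsed and inspected (parsing step intentionally omitted here).
  });
```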

README.md
@@ -127,6 +127,10 @@ Sets the size (in bytes) of each record in a block, default is 512 (for advanced
Sets the number of records in a block, default is 20 (for advanced users only).
+#### gzip `boolean`
+
+Compresses the tar archive using gzip, default is false.
+
### File Data
#### name `string` `required`
lib/modules/tar/index.js
@@ -7,6 +7,7 @@
*/
var inherits = require('util').inherits;
var Transform = require('stream').Transform || require('readable-stream').Transform;
+var zlib = require('zlib');
var headers = require('./headers');
var util = require('../../util');
@@ -14,7 +15,8 @@ var util = require('../../util');
var Tar = module.exports = function(options) {
options = this.options = util.defaults(options, {
recordSize: 512,
- recordsPerBlock: 20
+ recordsPerBlock: 20,
+ gzip: false
});
Transform.call(this, options);
@@ -24,6 +26,11 @@ var Tar = module.exports = function(options) {
this.recordSize = options.recordSize;
this.blockSize = options.recordsPerBlock * options.recordSize;
+ this.gzip = options.gzip;
+
+ if (this.gzip) {
+ this.gzipper = zlib.createGzip();
+ }
};
inherits(Tar, Transform);
@@ -95,4 +102,14 @@ Tar.prototype.write = function(chunk, cb) {
}
return Transform.prototype.write.call(this, chunk, cb);
+};
+
+Tar.prototype.pipe = function(destination, options) {
+ if (this.gzip && this.gzipper) {
+ // Pipe to gzip stream before piping to destination
+ return Transform.prototype.pipe.call(this, this.gzipper, options).pipe(destination);
+ } else {
+ // Pipe to the destination like normal
+    return Transform.prototype.pipe.call(this, destination, options);
+ }
};
package.json
@@ -40,7 +40,8 @@
"mocha": "~1.16.0",
"rimraf": "~2.2.0",
"mkdirp": "~0.3.5",
- "stream-bench": "~0.1.2"
+ "stream-bench": "~0.1.2",
+ "async": "~0.2.10"
},
"keywords": [
"archive",
@@ -48,4 +49,4 @@
"zip",
"tar"
]
-}
+}
test/archiver.js
@@ -1,10 +1,12 @@
/*global before,describe,it */
var fs = require('fs');
+var zlib = require('zlib');
var PassThrough = require('stream').PassThrough || require('readable-stream/passthrough');
var WriteStream = fs.createWriteStream;
var assert = require('chai').assert;
var mkdir = require('mkdirp');
+var async = require('async');
var common = require('./helpers/common');
var HashStream = common.HashStream;
@@ -206,6 +208,77 @@ describe('archiver', function() {
});
});
+ describe('#gzip', function() {
+ it('should gzip the tar archive', function(done) {
+ var archive = archiver('tar', { gzip: true });
+ var testStream = new WriteHashStream('tmp/multiple.tar.gz');
+
+ testStream.on('close', function() {
+ assert.equal(testStream.digest, '3219ba9c6ffa3ec5f62179ec201bfafea43e32c3');
+ done();
+ });
+
+ archive.pipe(testStream);
+
+ archive
+ .append('string', { name: 'string.txt', date: testDate })
+ .append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate2 })
+ .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
+ .finalize();
+ });
+
+ it('should match internal gzip digest with external gzip digest', function(done) {
+
+ // Gzip inside of node-archiver using the gzip helper
+    var gzipInternal = function(asyncCallback) {
+
+ var archive = archiver('tar', { gzip: true });
+      // Use a distinct file so the two parallel tasks don't write to the same path
+      var testStream = new WriteHashStream('tmp/multiple-internal.tar.gz');
+
+ testStream.on('close', function() {
+        asyncCallback(null, testStream.digest);
+ });
+
+ archive.pipe(testStream);
+
+ archive
+ .append('string', { name: 'string.txt', date: testDate })
+ .append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate2 })
+ .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
+ .finalize();
+ };
+
+ // Gzip outside of node-archiver by piping to a zlib stream
+    var gzipExternal = function(asyncCallback) {
+ var archive = archiver('tar');
+ var gzipper = zlib.createGzip();
+      // Use a distinct file so the two parallel tasks don't write to the same path
+      var testStream = new WriteHashStream('tmp/multiple-external.tar.gz');
+
+ testStream.on('close', function() {
+        asyncCallback(null, testStream.digest);
+ });
+
+ archive.pipe(gzipper).pipe(testStream);
+
+ archive
+ .append('string', { name: 'string.txt', date: testDate })
+ .append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate2 })
+ .append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
+ .finalize();
+ };
+
+ async.parallel({
+ internalDigest: gzipInternal,
+ externalDigest: gzipExternal
+ },
+ function(err, results) {
+ // Compare the digest of the gzip helper (internal) to the zlib stream (external)
+      assert.equal(results.internalDigest, results.externalDigest);
+ done();
+ });
+ });
+ });
+
});
});