Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP

Loading…

pull request for untar changes #3

Closed
wants to merge 19 commits into from

4 participants

@dmcaulay

made the following changes

  • added an 'end' event on untar
  • fixed filterTypes option
    • was adding the name of the filter instead of the index
  • added error checking if we reach the end of a file before expected
  • moved package.json to the root (you might not want this?)
@dmcaulay

added 13fa00c

  • fixes some issues with tar, see commit comment.
@coolaj86
Collaborator

@beatgammit would you like to take a look at this and merge it in?

@dunxm

These changes allow me to untar more than one file in a tar. However, I'm unable to add files to a tar which already exists (and was previously created with tar-async including dmcaulay's 6e22ad0 changes). Should I be able to add more files to an already existing tar?

@dmcaulay
@dmcaulay dmcaulay closed this
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Commits on Jun 18, 2012
  1. @dmcaulay-3vr

    add an end event on untar

    dmcaulay-3vr authored
  2. @dmcaulay-3vr
Commits on Jun 19, 2012
  1. @dmcaulay-3vr
  2. @dmcaulay-3vr
Commits on Jun 26, 2012
  1. @dmcaulay
  2. @dmcaulay

    fixing tar: 1. no longer infinite loop when enqueuing multiple jobs 2…

    dmcaulay authored
    …. added type option (directories) 3. allows dates for mtime options 4. added a couple utility methods
Commits on Jun 28, 2012
  1. @dmcaulay

    removing the queue implementation and switching to 'async.queue'. the…

    dmcaulay authored
    … cool thing is the close method is also added to the queue so i can call it immediately after i queue up all the files and ill get the end event when everything is done. didn't have to use async.queue but i thought it'd be easier. it's a pretty lightweight lib
  2. @dmcaulay
Commits on Jun 30, 2012
  1. @dmcaulay
  2. @dmcaulay
  3. @dmcaulay
  4. @dmcaulay

    needed to change it here too

    dmcaulay authored
  5. @dmcaulay
Commits on Jul 2, 2012
  1. @dmcaulay
  2. @dmcaulay
  3. @dmcaulay
  4. @dmcaulay
Commits on Jul 3, 2012
  1. @dmcaulay

    version bump

    dmcaulay authored
Commits on Jul 30, 2012
  1. @dmcaulay

    we need to at least attempt to read the rest of the buffer before dec…

    dmcaulay authored
    …laring we have a corrupt archive
This page is out of date. Refresh to see the latest.
View
2  .gitignore
@@ -1 +1,3 @@
+*.swp
node_modules
+
View
27 lib/header.js
@@ -28,7 +28,7 @@ struct posix_header { // byte offset
headerFormat = [
{
- 'field': 'filename',
+ 'field': 'name',
'length': 100,
'type': 'string'
},
@@ -58,47 +58,52 @@ struct posix_header { // byte offset
'type': 'number'
},
{
- 'field': 'checksum',
+ 'field': 'chksum',
'length': 8,
'type': 'number'
},
{
- 'field': 'type',
+ 'field': 'typeflag',
'length': 1,
'type': 'number'
},
{
- 'field': 'linkName',
+ 'field': 'linkname',
'length': 100,
'type': 'string'
},
{
- 'field': 'ustar',
- 'length': 8,
+ 'field': 'magic',
+ 'length': 6,
+ 'type': 'string'
+ },
+ {
+ 'field': 'version',
+ 'length': 2,
'type': 'string'
},
{
- 'field': 'owner',
+ 'field': 'uname',
'length': 32,
'type': 'string'
},
{
- 'field': 'group',
+ 'field': 'gname',
'length': 32,
'type': 'string'
},
{
- 'field': 'majorNumber',
+ 'field': 'devmajor',
'length': 8,
'type': 'number'
},
{
- 'field': 'minorNumber',
+ 'field': 'devminor',
'length': 8,
'type': 'number'
},
{
- 'field': 'filenamePrefix',
+ 'field': 'prefix',
'length': 155,
'type': 'string'
},
View
121 lib/tar.js
@@ -3,14 +3,17 @@
var path = require('path'),
Stream = require('stream').Stream,
+ async = require('async'),
header = require("./header"),
utils = require("./utils"),
recordSize = 512,
blockSize,
- queue = [];
+ queue = async.queue(function(task, done) {
+ task(done);
+ }, 1);
function Tar(opt) {
- var tape;
+ var tape = this;
opt = opt || {};
@@ -18,8 +21,6 @@
Stream.apply(this, arguments);
- tape = this;
-
this.written = 0;
this.consolidate = 'consolidate' in opt ? opt.consolidate : false;
@@ -27,7 +28,7 @@
this.on('end', function () {
tape.emit('data', utils.clean(blockSize - (tape.written % blockSize)));
- this.written += blockSize - (tape.written % blockSize);
+ tape.written += blockSize - (tape.written % blockSize);
});
if (opt && opt.output) {
@@ -40,7 +41,11 @@
});
Tar.prototype.close = function () {
- this.emit('end');
+ var tape = this;
+ queue.push(function(done) {
+ tape.emit('end');
+ done();
+ });
};
Tar.prototype.createHeader = function (data) {
@@ -50,7 +55,7 @@
headerBuf;
if (this.normalize && !this.consolidate) {
- data.filename = path.normalize(data.filename);
+ data.name = path.normalize(data.name);
}
// format the header without the checksum
@@ -80,12 +85,16 @@
Tar.prototype.writeData = function (callback, header, input, size) {
var extraBytes,
- tape = this;
+ tape = this
// and write it out to the stream
this.emit('data', header);
this.written += header.length;
+ if (size == 0) {
+ return callback();
+ }
+
// if it's a string/Buffer, we can just write it out to the stream
if (typeof input === 'string' || input instanceof Buffer) {
this.emit('data', input);
@@ -98,8 +107,6 @@
return callback();
} else {
// otherwise we need to do it asynchronously
- this.processing = true;
-
input.on('data', function (chunk) {
tape.emit('data', chunk);
tape.written += chunk.length;
@@ -109,34 +116,13 @@
extraBytes = recordSize - (size % recordSize || recordSize);
tape.emit('data', utils.clean(extraBytes));
tape.written += extraBytes;
-
- tape.processing = false;
-
- if (queue.length > 0) {
- process.nextTick(function () {
- var job = queue.shift();
-
- if (typeof job.input === 'object' && typeof job.input.resume === 'function') {
- job.input.resume();
- }
-
- tape.append(job.filepath, job.input, job.opts, job.cb);
- });
- }
-
return callback();
});
}
};
Tar.prototype.append = function (filepath, input, opts, callback) {
- var data,
- mode,
- mtime,
- uid,
- gid,
- size,
- tape = this;
+ var tape = this;
if (typeof opts === 'function') {
callback = opts;
@@ -153,27 +139,44 @@
};
}
- if (this.processing || queue.length) {
- if (typeof input === 'object' && typeof input.pause === 'function') {
- input.pause();
- }
+ if (input && typeof input === 'object' && typeof input.pause === 'function') {
+ input.pause();
+ }
- queue.push({
- filepath: filepath,
- input: input,
- opts: opts,
- cb: callback
- });
- return;
- }
+ queue.push(function(done) {
+ tape.processAppend(filepath, input, opts, function() {
+ callback.apply(this, arguments);
+ done();
+ });
+ });
+ };
+
+ Tar.prototype.processAppend = function (filepath, input, opts, callback) {
+ var data,
+ mode,
+ mtime,
+ uid,
+ gid,
+ size,
+ type,
+ linkname,
+ tape = this;
opts = opts || {};
mode = typeof opts.mode === 'number' ? opts.mode : parseInt('777', 8) & 0xfff;
- mtime = typeof opts.mtime === 'number' ? opts.mtime : parseInt(+new Date() / 1000);
uid = typeof opts.uid === 'number' ? opts.uid : 0;
gid = typeof opts.gid === 'number' ? opts.gid : 0;
size = typeof opts.size === 'number' ? opts.size : input.length;
+ linkname = typeof opts.linkname == 'string' ? opts.linkname : null;
+ mtime = utils.calculateTarDate(opts.mtime, new Date());
+ utils.fileTypeToIndex(opts.type, function(err, index) {
+ type = err ? '0' : index.toString();
+ });
+
+ if (input && typeof input === 'object' && typeof input.resume === 'function') {
+ input.resume();
+ }
// if you give me a stream, you must tell me how big it is
// since the header comes first, the only other solution is to
@@ -188,25 +191,33 @@
}
}
+ var filename = this.consolidate ? path.basename(filepath) : filepath;
+ var prefix = null;
+ if (filename.length > 99) {
+ var offset = filename.indexOf('/', filename.length - 100);
+ prefix = filename.slice(0, offset);
+ filename = filename.slice(offset + 1, filename.length);
+ }
+
data = {
- filename: this.consolidate ? path.basename(filepath) : filepath,
+ name: filename,
mode: utils.pad(mode, 7),
uid: utils.pad(uid, 7),
gid: utils.pad(gid, 7),
size: utils.pad(size, 11),
mtime: utils.pad(mtime, 11),
- checksum: ' ',
- type: '0', // just a file
- ustar: 'ustar ',
- owner: '',
- group: ''
+ chksum: ' ',
+ typeflag: type,
+ linkname: linkname,
+ magic: 'ustar',
+ version: '00',
+ uname: '',
+ gname: '',
+ prefix: prefix
};
if (size === -1 && opts.allowPipe) {
- this.processing = true;
utils.readAll(function (err, buf) {
- tape.processing = false;
-
size = buf.length;
data.size = utils.pad(size, 11);
tape.writeData(callback, tape.createHeader(data), buf, size);
@@ -215,6 +226,6 @@
this.writeData(callback, this.createHeader(data), input, size);
}
};
-
+
module.exports = Tar;
}());
View
66 lib/untar.js
@@ -2,15 +2,14 @@
"use strict";
var Stream = require('stream').Stream,
+ path = require('path'),
+ utils = require('./utils'),
headerFormat = require('./header').structure,
buffer,
totalRead = 0,
recordSize = 512,
fileStream,
- leftToRead,
- fileTypes = [
- 'normal', 'hard-link', 'symbolic-link', 'character-special', 'block-special', 'directory', 'fifo', 'contiguous-file'
- ];
+ leftToRead;
function filterDecoder(input) {
var filter = [];
@@ -28,13 +27,13 @@
}
input.forEach(function (i) {
- var index = fileTypes.indexOf(i);
- if (index < 0) {
- console.error('Filetype not valid. Ignoring input:', i);
- return;
- }
-
- filter.push(i);
+ utils.fileTypeToIndex(i, function(err, index) {
+ if (err) {
+ console.error(err + " Ignoring input:" + i);
+ } else {
+ filter.push(index);
+ }
+ });
});
return filter;
@@ -70,10 +69,10 @@
offset += field.length;
- if (field.field === 'ustar' && !/ustar/.test(tString)) {
+ if (field.field === 'magic' && !/ustar/.test(tString)) {
// end the loop if not using the extended header
return true;
- } else if (field.field === 'checksum') {
+ } else if (field.field === 'chksum') {
updateChecksum(' ');
} else {
updateChecksum(tString);
@@ -87,8 +86,8 @@
});
if (typeof cb === 'function') {
- if (checksum !== data.checksum) {
- return cb.call(this, 'Checksum not equal', checksum, data.checksum);
+ if (checksum !== data.chksum) {
+ return cb.call(this, 'Checksum not equal', checksum, data.chksum);
}
cb.call(this, null, data, recordSize);
}
@@ -124,6 +123,19 @@
if (data) {
this.write(data, encoding);
}
+
+ var oldLength = 0;
+ while (buffer && buffer.length != oldLength) {
+ oldLength = buffer.length;
+ this.write();
+ }
+
+ if (buffer || leftToRead > 0) {
+ this.cb('unexpected end of archive, archive is corrupt');
+ return;
+ }
+
+ this.emit('end');
};
Untar.prototype.write = function write(data, encoding) {
@@ -184,7 +196,7 @@
// if we don't have enough bytes to account for the nulls
if (tBuf.length < bytesBuffer) {
- totalRead += bytesBuffer;
+ totalRead += tBuf.length;
return;
}
@@ -213,16 +225,22 @@
fileStream = new Stream();
- if (this.fileTypes.indexOf(data.type) >= 0) {
+ if (this.fileTypes.indexOf(data.typeflag) >= 0) {
// we'll let the user know if they want this type of file
- this.cb(err, data, fileStream);
+ if (data.prefix) {
+ data.name = path.join(data.prefix, data.name);
+ }
+ data.typeflag = utils.getFileType(data.typeflag);
+ this.cb(err, data, fileStream);
}
if (buffer.length >= data.size) {
- fileStream.emit('data', buffer.slice(0, data.size));
+ if (data.size > 0) {
+ fileStream.emit('data', buffer.slice(0, data.size));
+ totalRead += data.size;
+ buffer = buffer.slice(data.size);
+ }
fileStream.emit('end');
- totalRead += data.size;
- buffer = buffer.slice(data.size);
fileStream = undefined;
@@ -231,8 +249,10 @@
}
leftToRead = data.size - buffer.length;
- fileStream.emit('data', buffer);
- totalRead += buffer.length;
+ if (buffer.length > 0) {
+ fileStream.emit('data', buffer);
+ totalRead += buffer.length;
+ }
buffer = undefined;
});
View
37 lib/utils.js
@@ -1,6 +1,40 @@
(function () {
"use strict";
+ var util = require('util'),
+ fileTypes = [
+ 'normal', 'hard-link', 'symbolic-link', 'character-special', 'block-special', 'directory', 'fifo', 'contiguous-file'
+ ];
+
+ function fileTypeToIndex(fileType, callback) {
+ var index = fileTypes.indexOf(fileType);
+ if (index < 0) {
+ callback('Invalid fileType.', index);
+ } else {
+ callback(null, index);
+ }
+ }
+
+ function getFileType(index) {
+ return fileTypes[index];
+ }
+
+ function convertToTarDate(date) {
+ return parseInt(date/1000);
+ }
+
+ function calculateTarDate(input, defaultDate) {
+ if (typeof input === 'number') {
+ return input;
+ } else if (util.isDate(input)) {
+ return convertToTarDate(input);
+ } else if (typeof defaultDate == 'number') {
+ return defaultDate;
+ } else {
+ return convertToTarDate(defaultDate);
+ }
+ }
+
function clean(length) {
var i, buffer = new Buffer(length);
for (i = 0; i < length; i += 1) {
@@ -36,6 +70,9 @@
});
}
+ module.exports.fileTypeToIndex = fileTypeToIndex;
+ module.exports.getFileType = getFileType;
+ module.exports.calculateTarDate = calculateTarDate;
module.exports.clean = clean;
module.exports.pad = pad;
module.exports.readAll = readAll;
View
8 lib/package.json → package.json
@@ -3,18 +3,20 @@
"name": "tar-async",
"description": "Asynchronous tar and untar",
"keywords": ["tar", "untar", "asynchronous", "stream", "async", "chunk", "chunked"],
- "version": "1.2.0",
+ "version": "1.2.1",
"repository": {
"type": "git",
"url": "git://github.com/beatgammit/tar-async.git"
},
- "main": "index.js",
+ "main": "lib/index.js",
"directories": {
"lib": "."
},
"engines": {
"node": ">=0.1.90"
},
- "dependencies": {},
+ "dependencies": {
+ "async": "~0.1.22"
+ },
"devDependencies": {}
}
Something went wrong with that request. Please try again.