feat: drop support for Node < 8 (#170)
BREAKING CHANGE: drop support for versions of Node which are EOL'd as of 2019-04-30.

* chore: upgrade mocha to fix some devDeps issues with npm audit
* chore: upgrade commander and electron deps
* chore: upgrade electron-mocha, which requires Node >= 8
* docs: update code examples for Node 8
* refactor: replace pify with util.promisify
* refactor: use async/await everywhere
* chore: remove Node 6 from the Node versions tested in CI
* refactor: use async readFile method
* refactor: simplify pattern construction
* docs: reformat createPackageFromFiles doc comment
* refactor: DRY up determineFileType
* chore: remove mz devDependency
malept committed Apr 27, 2019
1 parent 22756a7 commit 8a03eae
Showing 13 changed files with 603 additions and 555 deletions.
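
For reference, the pify → util.promisify and async/await migration described in the commit-message bullets above boils down to the pattern below. This is an illustrative sketch, not code from the commit; `readOrdering` is a made-up name.

```javascript
// Before (Node 6 era): callback APIs wrapped with the pify package,
// results consumed via .then() chains.
//   const fs = require('pify')(require('fs'))
//   fs.readFile(orderingPath).then(contents => { /* ... */ })

// After (Node 8+): util.promisify is built in, and async/await replaces the chains.
const { promisify } = require('util')
const readFile = promisify(require('fs').readFile)

async function readOrdering (orderingPath) {
  const contents = await readFile(orderingPath)
  return contents.toString().split('\n').map(line => line.trim())
}
```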
28 changes: 13 additions & 15 deletions README.md
@@ -85,15 +85,14 @@ $ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"

### Example

```js
var asar = require('asar');
```javascript
const asar = require('asar');

var src = 'some/path/';
var dest = 'name.asar';
const src = 'some/path/';
const dest = 'name.asar';

asar.createPackage(src, dest).then(function() {
console.log('done.');
})
await asar.createPackage(src, dest);
console.log('done.');
```

Please note that there is currently **no** error handling provided!
@@ -103,19 +102,18 @@ You can pass in a `transform` option, that is a function, which either returns
nothing, or a `stream.Transform`. The latter will be used on files that will be
in the `.asar` file to transform them (e.g. compress).

```js
var asar = require('asar');
```javascript
const asar = require('asar');

var src = 'some/path/';
var dest = 'name.asar';
const src = 'some/path/';
const dest = 'name.asar';

function transform(filename) {
function transform (filename) {
return new CustomTransformStream()
}

asar.createPackageWithOptions(src, dest, { transform: transform }).then(function() {
console.log('done.');
})
await asar.createPackageWithOptions(src, dest, { transform: transform });
console.log('done.');
```

## Using with grunt
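
The README still warns that no error handling is provided, and the updated examples use a bare `await`; callers can add handling themselves. A minimal sketch (not part of this commit) wrapping the call in an async function with try/catch:

```javascript
const asar = require('asar');

async function packSafely (src, dest) {
  try {
    await asar.createPackage(src, dest);
    console.log('done.');
  } catch (error) {
    // createPackage returns a promise, so failures surface here as rejections
    console.error('packing failed:', error);
    process.exitCode = 1;
  }
}

packSafely('some/path/', 'name.asar');
```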
1 change: 0 additions & 1 deletion appveyor.yml
@@ -8,7 +8,6 @@ skip_tags: true

environment:
matrix:
- nodejs_version: "6"
- nodejs_version: "8"
- nodejs_version: "10"

67 changes: 28 additions & 39 deletions lib/asar.js
@@ -1,11 +1,8 @@
'use strict'

const pify = require('pify')

const fs = process.versions.electron ? require('original-fs') : require('fs')
const fs = require('./wrapped-fs')
const path = require('path')
const minimatch = require('minimatch')
const mkdirp = pify(require('mkdirp'))

const Filesystem = require('./filesystem')
const disk = require('./disk')
@@ -29,32 +26,30 @@ function isUnpackedDir (dirPath, pattern, unpackDirs) {
}
}

module.exports.createPackage = function (src, dest) {
module.exports.createPackage = async function (src, dest) {
return module.exports.createPackageWithOptions(src, dest, {})
}

module.exports.createPackageWithOptions = function (src, dest, options) {
module.exports.createPackageWithOptions = async function (src, dest, options) {
const globOptions = options.globOptions ? options.globOptions : {}
globOptions.dot = options.dot === undefined ? true : options.dot

let pattern = src + '/**/*'
if (options.pattern) {
pattern = src + options.pattern
}
const pattern = src + (options.pattern ? options.pattern : '/**/*')

return crawlFilesystem(pattern, globOptions)
.then(([filenames, metadata]) => module.exports.createPackageFromFiles(src, dest, filenames, metadata, options))
const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
}

/*
createPackageFromFiles - Create an asar-archive from a list of filenames
src: Base path. All files are relative to this.
dest: Archive filename (& path).
filenames: Array of filenames relative to src.
metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
options: The options.
/**
* Create an ASAR archive from a list of filenames.
*
* @param {string} src: Base path. All files are relative to this.
* @param {string} dest: Archive filename (& path).
* @param {array} filenames: List of filenames relative to src.
* @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
* @param {object} options: Options passed to `createPackageWithOptions`.
*/
module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options) {
module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
if (typeof options === 'undefined' || options === null) { options = {} }

@@ -68,7 +63,7 @@ module.exports.createPackageFromFiles = function (src, dest, filenames, metadata

let filenamesSorted = []
if (options.ordering) {
const orderingFiles = fs.readFileSync(options.ordering).toString().split('\n').map(function (line) {
const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
if (line.includes(':')) { line = line.split(':').pop() }
line = line.trim()
if (line.startsWith('/')) { line = line.slice(1) }
@@ -106,17 +101,11 @@ module.exports.createPackageFromFiles = function (src, dest, filenames, metadata
filenamesSorted = filenames
}

const handleFile = function (filename) {
let file = metadata[filename]
let type
if (!file) {
const stat = fs.lstatSync(filename)
if (stat.isDirectory()) { type = 'directory' }
if (stat.isFile()) { type = 'file' }
if (stat.isSymbolicLink()) { type = 'link' }
file = { stat, type }
metadata[filename] = file
const handleFile = async function (filename) {
if (!metadata[filename]) {
metadata[filename] = await crawlFilesystem.determineFileType(filename)
}
const file = metadata[filename]

let shouldUnpack
switch (file.type) {
@@ -146,18 +135,18 @@ module.exports.createPackageFromFiles = function (src, dest, filenames, metadata
return Promise.resolve()
}

const insertsDone = function () {
return mkdirp(path.dirname(dest))
.then(() => disk.writeFilesystem(dest, filesystem, files, metadata))
const insertsDone = async function () {
await fs.mkdirp(path.dirname(dest))
return disk.writeFilesystem(dest, filesystem, files, metadata)
}

const names = filenamesSorted.slice()

const next = function (name) {
const next = async function (name) {
if (!name) { return insertsDone() }

return handleFile(name)
.then(() => next(names.shift()))
await handleFile(name)
return next(names.shift())
}

return next(names.shift())
@@ -185,15 +174,15 @@ module.exports.extractAll = function (archive, dest) {
const followLinks = process.platform === 'win32'

// create destination directory
mkdirp.sync(dest)
fs.mkdirpSync(dest)

return filenames.map((filename) => {
filename = filename.substr(1) // get rid of leading slash
const destFilename = path.join(dest, filename)
const file = filesystem.getFile(filename, followLinks)
if (file.files) {
// it's a directory, create it and continue with the next entry
mkdirp.sync(destFilename)
fs.mkdirpSync(destFilename)
} else if (file.link) {
// it's a symlink, create a symlink
const linkSrcPath = path.dirname(path.join(dest, file.link))
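
The new `./wrapped-fs` module that replaces the `pify`-wrapped `fs` and the separate `mkdirp` dependency is not among the hunks loaded above. Based purely on how it is used in this diff (`readFile`, `stat`, `lstat`, `writeFile`, `mkdirp`, `mkdirpSync`, and the stream constructors), a plausible minimal version might look like the following. This is a hypothetical reconstruction, not the actual file from the commit.

```javascript
'use strict'

const { promisify } = require('util')
const mkdirp = require('mkdirp')

// Inside Electron, original-fs bypasses the asar-aware fs patching.
const fs = process.versions.electron ? require('original-fs') : require('fs')

// Callback-style methods that the rest of the code awaits.
const promisifiedMethods = ['lstat', 'readFile', 'stat', 'writeFile']

const wrapped = {
  createReadStream: fs.createReadStream,
  createWriteStream: fs.createWriteStream,
  mkdirp: promisify(mkdirp),
  mkdirpSync: mkdirp.sync
}

for (const method of promisifiedMethods) {
  wrapped[method] = promisify(fs[method])
}

module.exports = wrapped
```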
52 changes: 23 additions & 29 deletions lib/crawlfs.js
@@ -1,37 +1,31 @@
'use strict'

const pify = require('pify')
const { promisify } = require('util')

const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const glob = pify(require('glob'))
const fs = require('./wrapped-fs')
const glob = promisify(require('glob'))

function determineFileType (filename) {
return fs.lstat(filename)
.then(stat => {
if (stat.isFile()) {
return [filename, { type: 'file', stat: stat }]
} else if (stat.isDirectory()) {
return [filename, { type: 'directory', stat: stat }]
} else if (stat.isSymbolicLink()) {
return [filename, { type: 'link', stat: stat }]
}

return [filename, undefined]
})
async function determineFileType (filename) {
const stat = await fs.lstat(filename)
if (stat.isFile()) {
return { type: 'file', stat }
} else if (stat.isDirectory()) {
return { type: 'directory', stat }
} else if (stat.isSymbolicLink()) {
return { type: 'link', stat }
}
}

module.exports = function (dir, options) {
module.exports = async function (dir, options) {
const metadata = {}
return glob(dir, options)
.then(filenames => Promise.all(filenames.map(filename => determineFileType(filename))))
.then(results => {
const filenames = []
for (const [filename, type] of results) {
filenames.push(filename)
if (type) {
metadata[filename] = type
}
}
return [filenames, metadata]
})
const crawled = await glob(dir, options)
const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
const filenames = results.map(([filename, type]) => {
if (type) {
metadata[filename] = type
}
return filename
})
return [filenames, metadata]
}
module.exports.determineFileType = determineFileType
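
A small usage sketch of the rewritten crawl module (illustration only; assumes it is required directly from `lib/crawlfs`), showing the `[filenames, metadata]` shape it now resolves to:

```javascript
const crawlFilesystem = require('./lib/crawlfs')

async function listContents () {
  // filenames: every path matched by the glob pattern
  // metadata: map of path -> { type: 'file' | 'directory' | 'link', stat }
  const [filenames, metadata] = await crawlFilesystem('app/**/*', { dot: true })
  for (const filename of filenames) {
    console.log(filename, metadata[filename] && metadata[filename].type)
  }
}

listContents().catch(console.error)
```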
29 changes: 13 additions & 16 deletions lib/disk.js
@@ -1,24 +1,21 @@
'use strict'

const pify = require('pify')

const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const fs = require('./wrapped-fs')
const path = require('path')
const mkdirp = pify(require('mkdirp'))
const pickle = require('chromium-pickle-js')

const Filesystem = require('./filesystem')
let filesystemCache = {}

function copyFile (dest, src, filename) {
async function copyFile (dest, src, filename) {
const srcFile = path.join(src, filename)
const targetFile = path.join(dest, filename)

return Promise.all([fs.readFile(srcFile), fs.stat(srcFile), mkdirp(path.dirname(targetFile))])
.then(([content, stats, _]) => fs.writeFile(targetFile, content, { mode: stats.mode }))
const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
return fs.writeFile(targetFile, content, { mode: stats.mode })
}

function streamTransformedFile (originalFilename, outStream, transformed) {
async function streamTransformedFile (originalFilename, outStream, transformed) {
return new Promise((resolve, reject) => {
const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
stream.pipe(outStream, { end: false })
@@ -27,20 +24,19 @@ function streamTransformedFile (originalFilename, outStream, transformed) {
})
}

const writeFileListToStream = function (dest, filesystem, out, list, metadata) {
let promise = Promise.resolve()
const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
for (const file of list) {
if (file.unpack) { // the file should not be packed into archive
const filename = path.relative(filesystem.src, file.filename)
promise = promise.then(() => copyFile(`${dest}.unpacked`, filesystem.src, filename))
await copyFile(`${dest}.unpacked`, filesystem.src, filename)
} else {
promise = promise.then(() => streamTransformedFile(file.filename, out, metadata[file.filename].transformed))
await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
}
}
return promise.then(() => out.end())
return out.end()
}

module.exports.writeFilesystem = function (dest, filesystem, files, metadata) {
module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
const headerBuf = headerPickle.toBuffer()
@@ -50,11 +46,12 @@ module.exports.writeFilesystem = function (dest, filesystem, files, metadata) {
const sizeBuf = sizePickle.toBuffer()

const out = fs.createWriteStream(dest)
return new Promise((resolve, reject) => {
await new Promise((resolve, reject) => {
out.on('error', reject)
out.write(sizeBuf)
return out.write(headerBuf, () => resolve())
}).then(() => writeFileListToStream(dest, filesystem, out, files, metadata))
})
return writeFileListToStream(dest, filesystem, out, files, metadata)
}

module.exports.readArchiveHeaderSync = function (archive) {
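
The rewritten `writeFileListToStream` awaits each file in sequence rather than firing them off with `Promise.all`: the payloads must be appended to the single archive stream in exactly the order recorded in the header. A standalone sketch of that pattern (illustrative names, not code from the commit):

```javascript
const { promisify } = require('util')
const readFile = promisify(require('fs').readFile)

// Append every file's content to `out` strictly in list order.
async function appendInOrder (out, filenames) {
  for (const filename of filenames) {
    const content = await readFile(filename) // finish reading before appending
    out.write(content)
  }
  out.end()
}
```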
37 changes: 15 additions & 22 deletions lib/filesystem.js
@@ -1,8 +1,6 @@
'use strict'

const pify = require('pify')

const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const fs = require('./wrapped-fs')
const path = require('path')
const tmp = require('tmp-promise')
const UINT64 = require('cuint').UINT64
@@ -50,7 +48,7 @@ class Filesystem {
return node.files
}

insertFile (p, shouldUnpack, file, options) {
async insertFile (p, shouldUnpack, file, options) {
const dirNode = this.searchNodeFromPath(path.dirname(p))
const node = this.searchNodeFromPath(p)
if (shouldUnpack || dirNode.unpacked) {
@@ -84,25 +82,20 @@

const transformed = options.transform && options.transform(p)
if (transformed) {
return tmp.file()
.then(tmpfile => {
return new Promise((resolve, reject) => {
const out = fs.createWriteStream(tmpfile.path)
const stream = fs.createReadStream(p)

stream.pipe(transformed).pipe(out)
return out.on('close', () => {
return fs.lstat(tmpfile.path)
.then(stat => {
file.transformed = {
path: tmpfile.path,
stat
}
return handler(resolve, reject)
})
})
})
const tmpfile = await tmp.file()
return new Promise((resolve, reject) => {
const out = fs.createWriteStream(tmpfile.path)
const stream = fs.createReadStream(p)

stream.pipe(transformed).pipe(out)
return out.on('close', async () => {
file.transformed = {
path: tmpfile.path,
stat: await fs.lstat(tmpfile.path)
}
return handler(resolve, reject)
})
})
} else {
return handler()
}