feat: add file hashes to asar header (#221)
* feat: add file hashes to asar header

* feat: add getRawHeader method to public API

* chore: fix lint

* chore: update docs

* refactor: use integrity instead of hash pairs

* feat: add block hashes

* fix: ensure executables are extracted with executable permission

* fix: ensure symlinks are not deeply resolved when packaging

* chore: update test files

* chore: remove DS_Store

* perf: generate block hashes as we parse the stream

* docs: update README with new options

* revert

* chore: update per feedback
MarshallOfSound committed Sep 9, 2021
1 parent 6fb376b commit 94cb8bd
Showing 17 changed files with 137 additions and 4 deletions.
30 changes: 27 additions & 3 deletions README.md
@@ -153,12 +153,24 @@ Structure of `header` is something like this:
"ls": {
"offset": "0",
"size": 100,
"executable": true
"executable": true,
"integrity": {
"algorithm": "SHA256",
"hash": "...",
"blockSize": 1024,
"blocks": ["...", "..."]
}
},
"cd": {
"offset": "100",
"size": 100,
"executable": true
"executable": true,
"integrity": {
"algorithm": "SHA256",
"hash": "...",
"blockSize": 1024,
"blocks": ["...", "..."]
}
}
}
}
@@ -168,7 +180,13 @@ Structure of `header` is something like this:
"files": {
"hosts": {
"offset": "200",
"size": 32
"size": 32,
"integrity": {
"algorithm": "SHA256",
"hash": "...",
"blockSize": 1024,
"blocks": ["...", "..."]
}
}
}
}
@@ -187,6 +205,12 @@ precisely represent UINT64 in JavaScript `Number`. `size` is a JavaScript
because file size in Node.js is represented as `Number` and it is not safe to
convert `Number` to UINT64.

`integrity` is an object consisting of a few keys:
* A hashing `algorithm`; currently only `SHA256` is supported.
* A hex encoded `hash` of the entire file.
* An array of hex encoded hashes for the `blocks` of the file, i.e. if the file is split into N blocks of `blockSize` bytes, this array contains the hash of each of those N blocks.
* An integer `blockSize` representing the size in bytes of each block in the `blocks` array above.
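
To make these fields concrete, here is a minimal verification sketch (illustrative only, not part of this diff). It assumes Node's built-in `crypto` and `fs` modules, a hypothetical `filePath` pointing at the extracted file, and a `fileIntegrity` object shaped like the header entries above:

```js
const crypto = require('crypto')
const fs = require('fs')

// Recompute the whole-file hash and compare it with the value stored in the header.
function verifyFileHash (filePath, fileIntegrity) {
  const actual = crypto
    .createHash(fileIntegrity.algorithm) // 'SHA256'
    .update(fs.readFileSync(filePath))
    .digest('hex')
  return actual === fileIntegrity.hash
}

// Optionally verify each fixed-size block as well.
function verifyBlocks (filePath, fileIntegrity) {
  const data = fs.readFileSync(filePath)
  return fileIntegrity.blocks.every((expected, i) => {
    const block = data.slice(i * fileIntegrity.blockSize, (i + 1) * fileIntegrity.blockSize)
    return crypto.createHash(fileIntegrity.algorithm).update(block).digest('hex') === expected
  })
}
```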

[pickle]: https://chromium.googlesource.com/chromium/src/+/master/base/pickle.h
[node-pickle]: https://www.npmjs.org/package/chromium-pickle
[grunt-asar]: https://github.com/bwin/grunt-asar
7 changes: 7 additions & 0 deletions lib/asar.js
@@ -157,6 +157,10 @@ module.exports.statFile = function (archive, filename, followLinks) {
return filesystem.getFile(filename, followLinks)
}

module.exports.getRawHeader = function (archive) {
return disk.readArchiveHeaderSync(archive)
}

module.exports.listPackage = function (archive, options) {
return disk.readFilesystemSync(archive).listFiles(options)
}
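
For context on the new public API, a usage sketch (illustrative; it assumes the package is installed as `asar` and that `app.asar` exists on disk):

```js
const asar = require('asar') // published as @electron/asar in newer releases

// Hypothetical archive path. getRawHeader returns the exact header string,
// the JSON-parsed header object, and the header size in bytes.
const { header, headerString, headerSize } = asar.getRawHeader('app.asar')

console.log(headerSize)
console.log(Object.keys(header.files))
console.log(headerString.slice(0, 80))
```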
@@ -199,6 +203,9 @@ module.exports.extractAll = function (archive, dest) {
// it's a file, extract it
const content = disk.readFileSync(filesystem, filename, file)
fs.writeFileSync(destFilename, content)
if (file.executable) {
fs.chmodSync(destFilename, '755')
}
}
}
}
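
A side note on the mode argument (not part of the diff): Node's `fs.chmodSync` accepts either a numeric mode or a string that is parsed as octal, so the `'755'` above is equivalent to passing `0o755`. A minimal sketch using a throwaway file:

```js
const fs = require('fs')
const os = require('os')
const path = require('path')

// Create a throwaway file so the chmod calls below have something to act on.
const target = path.join(os.tmpdir(), 'asar-chmod-example')
fs.writeFileSync(target, '#!/bin/sh\necho hi\n')

// Both calls set rwxr-xr-x: the string form is parsed as octal.
fs.chmodSync(target, '755')
fs.chmodSync(target, 0o755)
```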
10 changes: 10 additions & 0 deletions lib/crawlfs.js
@@ -20,11 +20,21 @@ module.exports = async function (dir, options) {
const metadata = {}
const crawled = await glob(dir, options)
const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
const links = []
const filenames = results.map(([filename, type]) => {
if (type) {
metadata[filename] = type
if (type.type === 'link') links.push(filename)
}
return filename
}).filter((filename) => {
// Newer glob can return files inside symlinked directories; to avoid
// those appearing in archives we need to manually exclude them here
const exactLinkIndex = links.findIndex(link => filename === link)
return links.every((link, index) => {
if (index === exactLinkIndex) return true
return !filename.startsWith(link)
})
})
return [filenames, metadata]
}
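
To illustrate the exclusion rule above (a standalone sketch, not part of the commit): a path is kept if it is a listed symlink itself, but dropped if it merely lives underneath one.

```js
// Hypothetical crawl results: 'app/real-dir/file.js' survives, the symlink
// 'app/link-dir' survives as a link entry, but files discovered through the
// symlink ('app/link-dir/file.js') are filtered out.
const links = ['app/link-dir']

function keep (filename) {
  const exactLinkIndex = links.findIndex(link => filename === link)
  return links.every((link, index) => {
    if (index === exactLinkIndex) return true
    return !filename.startsWith(link)
  })
}

console.log(keep('app/real-dir/file.js')) // true
console.log(keep('app/link-dir'))         // true  (the symlink itself is kept)
console.log(keep('app/link-dir/file.js')) // false (resolved through the symlink)
```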
2 changes: 1 addition & 1 deletion lib/disk.js
@@ -76,7 +76,7 @@ module.exports.readArchiveHeaderSync = function (archive) {

const headerPickle = pickle.createFromBuffer(headerBuf)
const header = headerPickle.createIterator().readString()
return { header: JSON.parse(header), headerSize: size }
return { headerString: header, header: JSON.parse(header), headerSize: size }
}

module.exports.readFilesystemSync = function (archive) {
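
Why expose `headerString` alongside the parsed `header`? One plausible use (an assumption here, not stated in the commit) is that an external integrity check needs to hash the header bytes exactly as written, since re-serialising the parsed object is not guaranteed to reproduce them byte for byte:

```js
const crypto = require('crypto')
const asar = require('asar') // hypothetical require; published as @electron/asar in newer releases

// Hash the header exactly as stored in the archive, rather than
// JSON.stringify(header), whose whitespace and key order may differ.
const { headerString } = asar.getRawHeader('app.asar')
const headerHash = crypto.createHash('SHA256').update(headerString).digest('hex')
console.log(headerHash)
```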
3 changes: 3 additions & 0 deletions lib/filesystem.js
@@ -5,6 +5,7 @@ const os = require('os')
const path = require('path')
const { promisify } = require('util')
const stream = require('stream')
const getFileIntegrity = require('./integrity')

const UINT32_MAX = 2 ** 32 - 1

@@ -57,6 +58,7 @@ class Filesystem {
if (shouldUnpack || dirNode.unpacked) {
node.size = file.stat.size
node.unpacked = true
node.integrity = await getFileIntegrity(p)
return Promise.resolve()
}

@@ -86,6 +88,7 @@

node.size = size
node.offset = this.offset.toString()
node.integrity = await getFileIntegrity(p)
if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
node.executable = true
}
24 changes: 24 additions & 0 deletions lib/index.d.ts
@@ -44,6 +44,29 @@ export type InputMetadata = {
}
};

export type DirectoryRecord = {
files: Record<string, DirectoryRecord | FileRecord>;
};

export type FileRecord = {
offset: string;
size: number;
executable?: boolean;
integrity: {
hash: string;
algorithm: 'SHA256';
blocks: string[];
blockSize: number;
};
}

export type ArchiveHeader = {
// The parsed header (the result of JSON.parse on headerString)
header: DirectoryRecord;
headerString: string;
headerSize: number;
}

export function createPackage(src: string, dest: string): Promise<void>;
export function createPackageWithOptions(
src: string,
@@ -59,6 +82,7 @@ export function createPackageFromFiles(
): Promise<void>;

export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
export function getRawHeader(archive: string): ArchiveHeader;
export function listPackage(archive: string, options?: ListOptions): string[];
export function extractFile(archive: string, filename: string): Buffer;
export function extractAll(archive: string, dest: string): void;
62 changes: 62 additions & 0 deletions lib/integrity.js
@@ -0,0 +1,62 @@
const crypto = require('crypto')
const fs = require('fs')
const stream = require('stream')
const { promisify } = require('util')

const ALGORITHM = 'SHA256'
// 4MB default block size
const BLOCK_SIZE = 4 * 1024 * 1024

const pipeline = promisify(stream.pipeline)

function hashBlock (block) {
return crypto.createHash(ALGORITHM).update(block).digest('hex')
}

async function getFileIntegrity (path) {
const fileHash = crypto.createHash(ALGORITHM)

const blocks = []
let currentBlockSize = 0
let currentBlock = []

await pipeline(
fs.createReadStream(path),
new stream.PassThrough({
decodeStrings: false,
transform (_chunk, encoding, callback) {
fileHash.update(_chunk)

function handleChunk (chunk) {
const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
currentBlockSize += diffToSlice
currentBlock.push(chunk.slice(0, diffToSlice))
if (currentBlockSize === BLOCK_SIZE) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
currentBlockSize = 0
}
if (diffToSlice < chunk.byteLength) {
handleChunk(chunk.slice(diffToSlice))
}
}
handleChunk(_chunk)
callback()
},
flush (callback) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
callback()
}
})
)

return {
algorithm: ALGORITHM,
hash: fileHash.digest('hex'),
blockSize: BLOCK_SIZE,
blocks: blocks
}
}

module.exports = getFileIntegrity
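
The transform above slices each incoming chunk so that hashes always cover fixed 4 MB blocks regardless of how the stream delivers data, and the flush handler hashes whatever remains in the final, possibly shorter, block. A usage sketch (illustrative; the require path is an assumption):

```js
const getFileIntegrity = require('./lib/integrity') // assumes the script is run from the repository root

async function main () {
  // Any readable file works; here the script hashes itself.
  const integrity = await getFileIntegrity(__filename)

  console.log(integrity.algorithm)     // 'SHA256'
  console.log(integrity.blockSize)     // 4194304 (4 MB)
  console.log(integrity.hash)          // hex digest of the whole file
  console.log(integrity.blocks.length) // one hash per 4 MB block
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})
```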
Binary file modified test/expected/packthis-all-unpacked.asar
Binary file modified test/expected/packthis-transformed.asar
Binary file modified test/expected/packthis-unicode-path.asar
Binary file modified test/expected/packthis-unpack-dir-glob.asar
Binary file modified test/expected/packthis-unpack-dir-globstar.asar
Binary file modified test/expected/packthis-unpack-dir.asar
Binary file modified test/expected/packthis-unpack.asar
Binary file modified test/expected/packthis-without-hidden.asar
Binary file modified test/expected/packthis.asar
3 changes: 3 additions & 0 deletions test/util/compareFiles.js
@@ -4,6 +4,9 @@ const assert = require('assert')
const fs = require('../../lib/wrapped-fs')

module.exports = async function (actualFilePath, expectedFilePath) {
if (process.env.ELECTRON_ASAR_SPEC_UPDATE) {
await fs.writeFile(expectedFilePath, await fs.readFile(actualFilePath))
}
const [actual, expected] = await Promise.all([fs.readFile(actualFilePath, 'utf8'), fs.readFile(expectedFilePath, 'utf8')])
assert.strictEqual(actual, expected)
}
