feat: add support for validating asar archives on macOS #30900

Merged
9 changes: 8 additions & 1 deletion BUILD.gn
@@ -1006,13 +1006,20 @@ if (is_mac) {
    outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
  }

  asar_hashed_info_plist("electron_app_plist") {
    keys = [ "DEFAULT_APP_ASAR_HEADER_SHA" ]
    hash_targets = [ ":default_app_asar_header_hash" ]
    plist_file = "shell/browser/resources/mac/Info.plist"
  }

  mac_app_bundle("electron_app") {
    output_name = electron_product_name
    sources = filenames.app_sources
    sources += [ "shell/common/electron_constants.cc" ]
    include_dirs = [ "." ]
    deps = [
      ":electron_app_framework_bundle_data",
      ":electron_app_plist",
      ":electron_app_resources",
      ":electron_fuses",
      "//base",
@@ -1021,7 +1028,7 @@
    if (is_mas_build) {
      deps += [ ":electron_login_helper_app" ]
    }
    info_plist = "shell/browser/resources/mac/Info.plist"
    info_plist_target = ":electron_app_plist"
    extra_substitutions = [
      "ELECTRON_BUNDLE_ID=$electron_mac_bundle_id",
      "ELECTRON_VERSION=$electron_version",
38 changes: 38 additions & 0 deletions build/asar.gni
@@ -57,4 +57,42 @@ template("asar") {
      rebase_path(outputs[0]),
    ]
  }

  node_action(target_name + "_header_hash") {
    invoker_out = invoker.outputs

    deps = [ ":" + invoker.target_name ]
    sources = invoker.outputs

    script = "//electron/script/gn-asar-hash.js"
    outputs = [ "$target_gen_dir/asar_hashes/$target_name.hash" ]

    args = [
      rebase_path(invoker_out[0]),
      rebase_path(outputs[0]),
    ]
  }
}

template("asar_hashed_info_plist") {
  node_action(target_name) {
    assert(defined(invoker.plist_file),
           "Need plist_file to add hashed assets to")
    assert(defined(invoker.keys), "Need keys to replace with asset hash")
    assert(defined(invoker.hash_targets), "Need hash_targets to read hash from")

    deps = invoker.hash_targets

    script = "//electron/script/gn-plist-but-with-hashes.js"
    inputs = [ invoker.plist_file ]
    outputs = [ "$target_gen_dir/hashed_plists/$target_name.plist" ]
    hash_files = []
    foreach(hash_target, invoker.hash_targets) {
      hash_files += get_target_outputs(hash_target)
    }
    args = [
      rebase_path(invoker.plist_file),
      rebase_path(outputs[0]),
    ] + invoker.keys + rebase_path(hash_files)
  }
}
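Taken together, these templates drive a small Node pipeline: hash the header of the generated asar, then substitute that hash for a `${KEY}` placeholder in the app's Info.plist before the bundle is assembled. A condensed, illustrative TypeScript sketch of that flow (the function name here is made up; the real work is split across the two scripts added later in this diff):

```ts
import * as asar from 'asar';
import { createHash } from 'crypto';
import * as fs from 'fs';

// Condensed sketch of gn-asar-hash.js + gn-plist-but-with-hashes.js combined.
function embedAsarHeaderHash (asarPath: string, plistIn: string, plistOut: string, key: string) {
  // Hash only the asar's JSON header, not the whole archive.
  const { headerString } = asar.getRawHeader(asarPath);
  const headerHash = createHash('SHA256').update(headerString).digest('hex');
  // Replace the ${KEY} placeholder in the template plist with the hash.
  const plist = fs.readFileSync(plistIn, 'utf8');
  fs.writeFileSync(plistOut, plist.replace('${' + key + '}', headerHash));
}
```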
4 changes: 3 additions & 1 deletion build/fuses/fuses.json5
@@ -5,5 +5,7 @@
  "run_as_node": "1",
  "cookie_encryption": "0",
  "node_options": "1",
  "node_cli_inspect": "1"
  "node_cli_inspect": "1",
  "embedded_asar_integrity_validation": "0",
  "only_load_app_from_asar": "0"
}
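Both new fuses ship disabled ("0"). As a rough sketch of how a packaging step could opt in using the @electron/fuses helper — the option names below are assumed to map onto the two wire names added here and should be checked against the published package:

```ts
// Hedged sketch: flip the two new fuses on a packaged Electron binary.
// FuseV1Options names are assumed to correspond to the fuses.json5 entries above.
import { flipFuses, FuseVersion, FuseV1Options } from '@electron/fuses';

async function enableAsarIntegrity (pathToElectronBinary: string) {
  await flipFuses(pathToElectronBinary, {
    version: FuseVersion.V1,
    // "embedded_asar_integrity_validation": have Electron check asar header
    // hashes recorded in the bundle's Info.plist at runtime.
    [FuseV1Options.EnableEmbeddedAsarIntegrityValidation]: true,
    // "only_load_app_from_asar": drop the plain "app" directory fallback.
    [FuseV1Options.OnlyLoadAppFromAsar]: true
  });
}
```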
3 changes: 3 additions & 0 deletions filenames.gni
@@ -195,6 +195,7 @@ filenames = {
    "shell/browser/ui/tray_icon_cocoa.mm",
    "shell/common/api/electron_api_clipboard_mac.mm",
    "shell/common/api/electron_api_native_image_mac.mm",
    "shell/common/asar/archive_mac.mm",
    "shell/common/application_info_mac.mm",
    "shell/common/language_util_mac.mm",
    "shell/common/mac/main_application_bundle.h",
@@ -408,6 +409,8 @@ filenames = {
    "shell/browser/native_window.cc",
    "shell/browser/native_window.h",
    "shell/browser/native_window_observer.h",
    "shell/browser/net/asar/asar_file_validator.cc",
    "shell/browser/net/asar/asar_file_validator.h",
    "shell/browser/net/asar/asar_url_loader.cc",
    "shell/browser/net/asar/asar_url_loader.h",
    "shell/browser/net/asar/asar_url_loader_factory.cc",
24 changes: 21 additions & 3 deletions lib/asar/fs-wrapper.ts
@@ -1,6 +1,7 @@
import { Buffer } from 'buffer';
import * as path from 'path';
import * as util from 'util';
import type * as Crypto from 'crypto';

const asar = process._linkedBinding('electron_common_asar');

@@ -194,6 +195,20 @@ const overrideAPI = function (module: Record<string, any>, name: string, pathArg
  }
};

let crypto: typeof Crypto;
function validateBufferIntegrity (buffer: Buffer, integrity: NodeJS.AsarFileInfo['integrity']) {
  if (!integrity) return;

  // Delay load crypto to improve app boot performance
  // when integrity protection is not enabled
  crypto = crypto || require('crypto');
  const actual = crypto.createHash(integrity.algorithm).update(buffer).digest('hex');
  if (actual !== integrity.hash) {
    console.error(`ASAR Integrity Violation: got a hash mismatch (${actual} vs ${integrity.hash})`);
    process.exit(1);
  }
}

const makePromiseFunction = function (orig: Function, pathArgumentIndex: number) {
  return function (this: any, ...args: any[]) {
    const pathArgument = args[pathArgumentIndex];
@@ -531,7 +546,7 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
    }

    const buffer = Buffer.alloc(info.size);
    const fd = archive.getFd();
    const fd = archive.getFdAndValidateIntegrityLater();
    if (!(fd >= 0)) {
      const error = createError(AsarError.NOT_FOUND, { asarPath, filePath });
      nextTick(callback, [error]);
@@ -540,6 +555,7 @@

    logASARAccess(asarPath, filePath, info.offset);
    fs.read(fd, buffer, 0, info.size, info.offset, (error: Error) => {
      validateBufferIntegrity(buffer, info.integrity);
      callback(error, encoding ? buffer.toString(encoding) : buffer);
    });
  }
@@ -595,11 +611,12 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {

    const { encoding } = options;
    const buffer = Buffer.alloc(info.size);
    const fd = archive.getFd();
    const fd = archive.getFdAndValidateIntegrityLater();
    if (!(fd >= 0)) throw createError(AsarError.NOT_FOUND, { asarPath, filePath });

    logASARAccess(asarPath, filePath, info.offset);
    fs.readSync(fd, buffer, 0, info.size, info.offset);
    validateBufferIntegrity(buffer, info.integrity);
    return (encoding) ? buffer.toString(encoding) : buffer;
  };
@@ -713,11 +730,12 @@ export const wrapFsWithAsar = (fs: Record<string, any>) => {
    }

    const buffer = Buffer.alloc(info.size);
    const fd = archive.getFd();
    const fd = archive.getFdAndValidateIntegrityLater();
    if (!(fd >= 0)) return [];

    logASARAccess(asarPath, filePath, info.offset);
    fs.readSync(fd, buffer, 0, info.size, info.offset);
    validateBufferIntegrity(buffer, info.integrity);
    const str = buffer.toString('utf8');
    return [str, str.length > 0];
  };
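For reference, only two fields of `info.integrity` are exercised by the hunks above; a self-contained sketch of the assumed shape and the equivalent check (typing and field semantics are inferred from the usage here, not taken from the native binding's headers):

```ts
import { createHash } from 'crypto';

// Assumed shape of NodeJS.AsarFileInfo['integrity'] as consumed by
// validateBufferIntegrity() above; only these two fields are read there.
interface AsarFileIntegrity {
  algorithm: 'SHA256'; // passed directly to crypto.createHash()
  hash: string;        // lowercase hex digest of the file's full contents
}

// Same check as above, but throwing instead of exiting the process,
// which makes it easier to exercise in isolation.
function assertBufferIntegrity (buffer: Buffer, integrity?: AsarFileIntegrity): void {
  if (!integrity) return;
  const actual = createHash(integrity.algorithm).update(buffer).digest('hex');
  if (actual !== integrity.hash) {
    throw new Error(`ASAR integrity violation: got ${actual}, expected ${integrity.hash}`);
  }
}
```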
3 changes: 2 additions & 1 deletion lib/browser/init.ts
@@ -81,9 +81,10 @@ require('@electron/internal/browser/guest-view-manager');
require('@electron/internal/browser/guest-window-proxy');

// Now we try to load app's package.json.
const v8Util = process._linkedBinding('electron_common_v8_util');
let packagePath = null;
let packageJson = null;
const searchPaths = ['app', 'app.asar', 'default_app.asar'];
const searchPaths: string[] = v8Util.getHiddenValue(global, 'appSearchPaths');

if (process.resourcesPath) {
  for (packagePath of searchPaths) {
2 changes: 1 addition & 1 deletion package.json
@@ -30,7 +30,7 @@
    "@types/webpack-env": "^1.15.2",
    "@typescript-eslint/eslint-plugin": "^4.4.1",
    "@typescript-eslint/parser": "^4.4.1",
    "asar": "^3.0.3",
    "asar": "^3.1.0",
    "aws-sdk": "^2.727.1",
    "check-for-leaks": "^1.2.1",
    "colors": "^1.4.0",
9 changes: 9 additions & 0 deletions script/gn-asar-hash.js
@@ -0,0 +1,9 @@
const asar = require('asar');
const crypto = require('crypto');
const fs = require('fs');

const archive = process.argv[2];
const hashFile = process.argv[3];

const { headerString } = asar.getRawHeader(archive);
fs.writeFileSync(hashFile, crypto.createHash('SHA256').update(headerString).digest('hex'));
16 changes: 16 additions & 0 deletions script/gn-plist-but-with-hashes.js
@@ -0,0 +1,16 @@
const fs = require('fs');

const [,, plistPath, outputPath, ...keySet] = process.argv;

const keyPairs = {};
for (let i = 0; i * 2 < keySet.length; i++) {
  keyPairs[keySet[i]] = fs.readFileSync(keySet[(keySet.length / 2) + i], 'utf8');
}

let plistContents = fs.readFileSync(plistPath, 'utf8');

for (const key of Object.keys(keyPairs)) {
  plistContents = plistContents.replace(`$\{${key}}`, keyPairs[key]);
}

fs.writeFileSync(outputPath, plistContents);
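The positional contract here is easy to miss: after the plist input and output paths, the first half of the remaining arguments are placeholder keys and the second half are the hash files they are read from, which is what the `keySet.length / 2` indexing relies on. An illustrative argv (the concrete paths are hypothetical, derived from the GN templates earlier in this diff):

```ts
// Illustrative invocation of gn-plist-but-with-hashes.js (paths are hypothetical).
const argv: string[] = [
  '/usr/bin/node',
  'script/gn-plist-but-with-hashes.js',
  'shell/browser/resources/mac/Info.plist',            // plistPath
  'gen/hashed_plists/electron_app_plist.plist',        // outputPath
  'DEFAULT_APP_ASAR_HEADER_SHA',                       // keySet[0]: placeholder key
  'gen/asar_hashes/default_app_asar_header_hash.hash'  // keySet[1]: hash file whose contents replace it
];
```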
152 changes: 152 additions & 0 deletions shell/browser/net/asar/asar_file_validator.cc
@@ -0,0 +1,152 @@
// Copyright (c) 2021 Slack Technologies, Inc.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.

#include "shell/browser/net/asar/asar_file_validator.h"

#include <algorithm>
#include <string>
#include <utility>
#include <vector>

#include "base/logging.h"
#include "base/notreached.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "crypto/sha2.h"

namespace asar {

AsarFileValidator::AsarFileValidator(IntegrityPayload integrity,
                                     base::File file)
    : file_(std::move(file)), integrity_(std::move(integrity)) {
  current_block_ = 0;
  max_block_ = integrity_.blocks.size() - 1;
}

AsarFileValidator::~AsarFileValidator() = default;

void AsarFileValidator::OnRead(base::span<char> buffer,
                               mojo::FileDataSource::ReadResult* result) {
  DCHECK(!done_reading_);

  uint64_t buffer_size = result->bytes_read;

  // Compute how many bytes we should hash, and add them to the current hash.
  uint32_t block_size = integrity_.block_size;
  uint64_t bytes_added = 0;
  while (bytes_added < buffer_size) {
    if (current_block_ > max_block_) {
      LOG(FATAL)
          << "Unexpected number of blocks while validating ASAR file stream";
      return;
    }

    // Create a hash if we don't have one yet
    if (!current_hash_) {
      current_hash_byte_count_ = 0;
      switch (integrity_.algorithm) {
        case HashAlgorithm::SHA256:
          current_hash_ =
              crypto::SecureHash::Create(crypto::SecureHash::SHA256);
          break;
        case HashAlgorithm::NONE:
          CHECK(false);
          break;
      }
    }

    // Compute how many bytes we should hash, and add them to the current hash.
    // We need to either add just enough bytes to fill up a block (block_size -
    // current_bytes) or use every remaining byte (buffer_size - bytes_added)
    int bytes_to_hash = std::min(block_size - current_hash_byte_count_,
                                 buffer_size - bytes_added);
    DCHECK_GT(bytes_to_hash, 0);
    current_hash_->Update(buffer.data() + bytes_added, bytes_to_hash);
    bytes_added += bytes_to_hash;
    current_hash_byte_count_ += bytes_to_hash;
    total_hash_byte_count_ += bytes_to_hash;

    if (current_hash_byte_count_ == block_size && !FinishBlock()) {
      LOG(FATAL) << "Failed to validate block while streaming ASAR file: "
                 << current_block_;
      return;
    }
  }
}

bool AsarFileValidator::FinishBlock() {
  if (current_hash_byte_count_ == 0) {
    if (!done_reading_ || current_block_ > max_block_) {
      return true;
    }
  }

  if (!current_hash_) {
    // This happens when we fail to read the resource. Compute empty content's
    // hash in this case.
    current_hash_ = crypto::SecureHash::Create(crypto::SecureHash::SHA256);
  }

  uint8_t actual[crypto::kSHA256Length];

  // If the file reader is done we need to make sure we've either read up to the
  // end of the file (the check below) or up to the end of a block_size byte
  // boundary. If the below check fails we compute the next block boundary, how
  // many bytes are needed to get there and then we manually read those bytes
  // from our own file handle ensuring the data producer is unaware but we can
  // validate the hash still.
  if (done_reading_ &&
      total_hash_byte_count_ - extra_read_ != read_max_ - read_start_) {
    uint64_t bytes_needed = std::min(
        integrity_.block_size - current_hash_byte_count_,
        read_max_ - read_start_ - total_hash_byte_count_ + extra_read_);
    uint64_t offset = read_start_ + total_hash_byte_count_ - extra_read_;
    std::vector<uint8_t> abandoned_buffer(bytes_needed);
    if (!file_.ReadAndCheck(offset, abandoned_buffer)) {
      LOG(FATAL) << "Failed to read required portion of streamed ASAR archive";
      return false;
    }

    current_hash_->Update(&abandoned_buffer.front(), bytes_needed);
  }

  current_hash_->Finish(actual, sizeof(actual));
  current_hash_.reset();
  current_hash_byte_count_ = 0;

  const std::string expected_hash = integrity_.blocks[current_block_];
  const std::string actual_hex_hash =
      base::ToLowerASCII(base::HexEncode(actual, sizeof(actual)));

  if (expected_hash != actual_hex_hash) {
    return false;
  }

  current_block_++;

  return true;
}

void AsarFileValidator::OnDone() {
  DCHECK(!done_reading_);
  done_reading_ = true;
  if (!FinishBlock()) {
    LOG(FATAL) << "Failed to validate block while ending ASAR file stream: "
               << current_block_;
  }
}

void AsarFileValidator::SetRange(uint64_t read_start,
                                 uint64_t extra_read,
                                 uint64_t read_max) {
  read_start_ = read_start;
  extra_read_ = extra_read;
  read_max_ = read_max;
}

void AsarFileValidator::SetCurrentBlock(int current_block) {
  current_block_ = current_block;
}

}  // namespace asar
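For context, the block layout this validator checks can be reproduced on the producer side in a few lines; the sketch below is illustrative (the function name is not from this PR) but follows the same scheme `FinishBlock()` verifies: SHA256 over fixed `block_size` chunks, with each digest stored as lowercase hex in `integrity_.blocks`.

```ts
import { createHash } from 'crypto';

// Illustrative producer-side sketch of the per-block hashes the validator
// compares against integrity_.blocks: one lowercase-hex SHA256 per block_size
// chunk, plus the chunk size the consumer must use to re-derive them.
function computeBlockHashes (contents: Buffer, blockSize: number) {
  const blocks: string[] = [];
  for (let offset = 0; offset < contents.length; offset += blockSize) {
    const block = contents.subarray(offset, offset + blockSize);
    blocks.push(createHash('sha256').update(block).digest('hex'));
  }
  return { algorithm: 'SHA256' as const, blockSize, blocks };
}
```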