Merge pull request #7457 from meteor/prefer-native-tar-or-7z

Extract .tar.gz files using native tar or 7z.exe when possible.
2 parents b8066e2 + 536eece, commit a9cb9c4f53324b523cefbabf33e59e50f15faa34, committed by @benjamn on GitHub, Jul 28, 2016
Showing with 212 additions and 64 deletions.
  1. +1 −1 meteor
  2. +11 −0 scripts/generate-dev-bundle.ps1
  3. +9 −3 tools/cli/commands-packages.js
  4. +143 −19 tools/fs/files.js
  5. +42 −40 tools/packaging/tropohouse.js
  6. +6 −1 tools/packaging/warehouse.js
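
The core of the change is in tools/fs/files.js: extraction now prefers the platform's native tar, falls back to the bundled 7z.exe on Windows, and only then uses the npm tar package. A simplified sketch of that fallback chain, taken from the diff below (option handling, timing, and post-extraction validation omitted):

  // Sketch of the fallback order implemented in files.extractTarGz below.
  // The real code awaits this promise on a Fiber and then validates the result.
  let promise = tryExtractWithNativeTar(buffer, tempDir, options);

  if (process.platform === "win32") {
    // The Windows dev bundle now ships 7z.exe, so try it before the npm fallback.
    promise = promise.catch(() => tryExtractWithNative7z(buffer, tempDir, options));
  }

  // The pure-JavaScript npm "tar" package remains the fallback of last resort.
  promise = promise.catch(() => tryExtractWithNpmTar(buffer, tempDir, options));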
2 meteor
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-BUNDLE_VERSION=4.1.4
+BUNDLE_VERSION=4.2.0
# OS Check. Put here because here is where we download the precompiled
# bundles that are arch specific.
11 scripts/generate-dev-bundle.ps1
@@ -35,6 +35,16 @@ cd bin
$webclient = New-Object System.Net.WebClient
$shell = New-Object -com shell.application
+mkdir "$DIR\7z"
+cd "$DIR\7z"
+$webclient.DownloadFile("http://www.7-zip.org/a/7z1602.msi", "$DIR\7z\7z.msi")
+$webclient.DownloadFile("http://www.7-zip.org/a/7z1602-extra.7z", "$DIR\7z\extra.7z")
+msiexec /i 7z.msi /quiet /qn /norestart
+ping -n 4 127.0.0.1 | out-null
+& "C:\Program Files*\7-Zip\7z.exe" x extra.7z
+mv 7za.exe "$DIR\bin\7z.exe"
+cd "$DIR\bin"
+
# download node
# same node on 32bit vs 64bit?
$node_link = "http://nodejs.org/dist/v${NODE_VERSION}/win-x86/node.exe"
@@ -63,6 +73,7 @@ foreach($item in $zip.items()) {
}
rm -Recurse -Force $npm_zip
+rm -Recurse -Force "$DIR\7z"
# add bin to the front of the path so we can use our own node for building
$env:PATH = "${DIR}\bin;${env:PATH}"
12 tools/cli/commands-packages.js
@@ -603,15 +603,21 @@ main.registerCommand({
// Download the source to the package.
var sourceTarball = buildmessage.enterJob("downloading package source", function () {
- return httpHelpers.getUrl({
+ return httpHelpers.getUrlWithResuming({
url: pkgVersion.source.url,
encoding: null
});
});
+ if (buildmessage.hasMessages()) {
+ return 1;
+ }
+
var sourcePath = files.mkdtemp('package-source');
- // XXX check tarballHash!
- files.extractTarGz(sourceTarball, sourcePath);
+ buildmessage.enterJob("extracting package source", () => {
+ // XXX check tarballHash!
+ files.extractTarGz(sourceTarball, sourcePath);
+ });
// XXX Factor out with packageClient.bundleSource so that we don't
// have knowledge of the tarball structure in two places.
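
The same download-then-extract split (a resumable download in one buildmessage job, an early return if that job recorded errors, extraction in a second job) is reused in tropohouse.js further down. A minimal sketch of the pattern, with `url` standing in for the real source URL:

  // Minimal sketch of the job split used above; `url` is a placeholder.
  const sourceTarball = buildmessage.enterJob("downloading package source", () => {
    // getUrlWithResuming presumably resumes an interrupted download (hence the
    // name), unlike plain getUrl.
    return httpHelpers.getUrlWithResuming({ url, encoding: null });
  });

  if (buildmessage.hasMessages()) {
    return 1; // the download job reported an error, so skip extraction
  }

  buildmessage.enterJob("extracting package source", () => {
    files.extractTarGz(sourceTarball, files.mkdtemp("package-source"));
  });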
162 tools/fs/files.js
@@ -12,6 +12,7 @@ var util = require('util');
var _ = require('underscore');
var Fiber = require('fibers');
var crypto = require('crypto');
+var spawn = require("child_process").spawn;
var rimraf = require('rimraf');
var sourcemap = require('source-map');
@@ -695,19 +696,154 @@ files.extractTarGz = function (buffer, destPath, options) {
var tempDir = files.pathJoin(parentDir, '.tmp' + utils.randomToken());
files.mkdir_p(tempDir);
+ if (! _.has(options, "verbose")) {
+ options.verbose = require("../console/console.js").Console.verbose;
+ }
+
+ const startTime = +new Date;
+ let promise = tryExtractWithNativeTar(buffer, tempDir, options);
+
+ if (process.platform === "win32") {
+ promise = promise.catch(
+ error => tryExtractWithNative7z(buffer, tempDir, options)
+ );
+ }
+
+ promise = promise.catch(
+ error => tryExtractWithNpmTar(buffer, tempDir, options)
+ );
+
+ promise.await();
+
+ // succeed!
+ var topLevelOfArchive = files.readdir(tempDir)
+ // On Windows, the 7z.exe tool sometimes creates an auxiliary
+ // PaxHeader directory.
+ .filter(file => ! file.startsWith("PaxHeader"));
+
+ if (topLevelOfArchive.length !== 1) {
+ throw new Error(
+ "Extracted archive '" + tempDir + "' should only contain one entry");
+ }
+
+ var extractDir = files.pathJoin(tempDir, topLevelOfArchive[0]);
+ makeTreeReadOnly(extractDir);
+ files.rename(extractDir, destPath);
+ files.rm_recursive(tempDir);
+
+ if (options.verbose) {
+ console.log("Finished extracting in", new Date - startTime, "ms");
+ }
+};
+
+function ensureDirectoryEmpty(dir) {
+ files.readdir(dir).forEach(file => {
+ files.rm_recursive(files.pathJoin(dir, file));
+ });
+}
+
+function tryExtractWithNativeTar(buffer, tempDir, options) {
+ ensureDirectoryEmpty(tempDir);
+
+ if (options.forceConvert) {
+ return Promise.reject(new Error(
+ "Native tar cannot convert colons in package names"));
+ }
+
+ return new Promise((resolve, reject) => {
+ const flags = options.verbose ? "-xzvf" : "-xzf";
+ const tarProc = spawn("tar", [flags, "-"], {
+ cwd: files.convertToOSPath(tempDir),
+ stdio: options.verbose ? [
+ "pipe", // Always need to write to tarProc.stdin.
+ process.stdout,
+ process.stderr
+ ] : "pipe",
+ });
+
+ tarProc.on("error", reject);
+ tarProc.on("exit", resolve);
+
+ tarProc.stdin.write(buffer);
+ tarProc.stdin.end();
+ });
+}
+
+function tryExtractWithNative7z(buffer, tempDir, options) {
+ ensureDirectoryEmpty(tempDir);
+
+ if (options.forceConvert) {
+ return Promise.reject(new Error(
+ "Native 7z.exe cannot convert colons in package names"));
+ }
+
+ const exeOSPath = files.convertToOSPath(
+ files.pathJoin(files.getCurrentNodeBinDir(), "7z.exe"));
+ const tarGzBasename = "out.tar.gz";
+ const spawnOptions = {
+ cwd: files.convertToOSPath(tempDir),
+ stdio: options.verbose ? "inherit" : "pipe",
+ };
+
+ files.writeFile(files.pathJoin(tempDir, tarGzBasename), buffer);
+
+ return new Promise((resolve, reject) => {
+ spawn(exeOSPath, [
+ "x", "-y", tarGzBasename
+ ], spawnOptions)
+ .on("error", reject)
+ .on("exit", resolve);
+
+ }).then(code => {
+ assert.strictEqual(code, 0);
+
+ let tarBasename;
+ const foundTar = files.readdir(tempDir).some(file => {
+ if (file !== tarGzBasename) {
+ tarBasename = file;
+ return true;
+ }
+ });
+
+ assert.ok(foundTar, "failed to find .tar file");
+
+ function cleanUp() {
+ files.unlink(files.pathJoin(tempDir, tarGzBasename));
+ files.unlink(files.pathJoin(tempDir, tarBasename));
+ }
+
+ return new Promise((resolve, reject) => {
+ spawn(exeOSPath, [
+ "x", "-y", tarBasename
+ ], spawnOptions)
+ .on("error", reject)
+ .on("exit", resolve);
+
+ }).then(code => {
+ cleanUp();
+ return code;
+ }, error => {
+ cleanUp();
+ throw error;
+ });
+ });
+}
+
+function tryExtractWithNpmTar(buffer, tempDir, options) {
+ ensureDirectoryEmpty(tempDir);
+
var tar = require("tar");
var zlib = require("zlib");
- new Promise(function (resolve, reject) {
+ return new Promise((resolve, reject) => {
var gunzip = zlib.createGunzip().on('error', reject);
-
var extractor = new tar.Extract({
path: files.convertToOSPath(tempDir)
}).on('entry', function (e) {
if (process.platform === "win32" || options.forceConvert) {
- // On Windows, try to convert old packages that have colons in paths
- // by blindly replacing all of the paths. Otherwise, we can't even
- // extract the tarball
+ // On Windows, try to convert old packages that have colons in
+ // paths by blindly replacing all of the paths. Otherwise, we
+ // can't even extract the tarball
e.path = colonConverter.convert(e.path);
}
}).on('error', reject)
@@ -718,20 +854,8 @@ files.extractTarGz = function (buffer, destPath, options) {
gunzip.pipe(extractor);
gunzip.write(buffer);
gunzip.end();
- }).await();
-
- // succeed!
- var topLevelOfArchive = files.readdir(tempDir);
- if (topLevelOfArchive.length !== 1) {
- throw new Error(
- "Extracted archive '" + tempDir + "' should only contain one entry");
- }
-
- var extractDir = files.pathJoin(tempDir, topLevelOfArchive[0]);
- makeTreeReadOnly(extractDir);
- files.rename(extractDir, destPath);
- files.rmdir(tempDir);
-};
+ });
+}
// Tar-gzips a directory, returning a stream that can then be piped as
// needed. The tar archive will contain a top-level directory named
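
For callers, the files.extractTarGz signature is unchanged; behavior is tuned through two options visible in the diff: verbose (defaulting to the Console's verbose flag) and forceConvert (which skips both native tools, since only the npm tar path rewrites colons in entry paths). A hypothetical call site:

  // Hypothetical caller; tarballUrl and destPath are stand-ins for illustration.
  const buffer = httpHelpers.getUrlWithResuming({ url: tarballUrl, encoding: null });

  files.extractTarGz(buffer, destPath, {
    verbose: true,      // stream tar/7z output and log the total extraction time
    forceConvert: true  // force the npm tar path so colons in entry paths get converted
  });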
82 tools/packaging/tropohouse.js
@@ -293,33 +293,6 @@ _.extend(exports.Tropohouse.prototype, {
});
},
- // Contacts the package server, downloads and extracts a tarball for a given
- // buildRecord into a temporary directory, whose path is returned.
- //
- // XXX: Error handling.
- _downloadBuildToTempDir: function (versionInfo, buildRecord) {
- var url = buildRecord.build.url;
-
- // Override the download domain name and protocol if METEOR_WAREHOUSE_URLBASE
- // provided.
- if (process.env.METEOR_WAREHOUSE_URLBASE) {
- url = url.replace(/^[a-zA-Z]+:\/\/[^\/]+/, process.env.METEOR_WAREHOUSE_URLBASE);
- }
-
- // XXX: We use one progress for download & untar; this isn't ideal:
- // it relies on extractTarGz being fast and not reporting any progress.
- // Really, we should create two subtasks
- // (and, we should stream the download to the tar extractor)
- var packageTarball = httpHelpers.getUrlWithResuming({
- url: url,
- encoding: null,
- progress: buildmessage.getCurrentProgressTracker(),
- wait: false
- });
-
- return exports._extractAndConvert(packageTarball);
- },
-
// Given a package name and version, returns the architectures for
// which we have downloaded this package
//
@@ -493,36 +466,65 @@ _.extend(exports.Tropohouse.prototype, {
// XXX how does concurrency work here? we could just get errors if we
// try to rename over the other thing? but that's the same as in
// warehouse?
- _.each(buildsToDownload, function (build) {
- buildmessage.enterJob({
+ _.each(buildsToDownload, ({ build: { url }}) => {
+ const packageTarball = buildmessage.enterJob({
title: "downloading " + packageName + "@" + version + "..."
- }, function() {
+ }, () => {
try {
- var buildTempDir = self._downloadBuildToTempDir(
- { packageName: packageName, version: version }, build);
+ // Override the download domain name and protocol if METEOR_WAREHOUSE_URLBASE
+ // provided.
+ if (process.env.METEOR_WAREHOUSE_URLBASE) {
+ url = url.replace(
+ /^[a-zA-Z]+:\/\/[^\/]+/,
+ process.env.METEOR_WAREHOUSE_URLBASE
+ );
+ }
+
+ return httpHelpers.getUrlWithResuming({
+ url: url,
+ encoding: null,
+ progress: buildmessage.getCurrentProgressTracker(),
+ wait: false
+ });
+
} catch (e) {
- if (!(e instanceof files.OfflineError)) {
+ if (! (e instanceof files.OfflineError)) {
throw e;
}
buildmessage.error(e.error.message);
}
+ });
+
+ if (buildmessage.jobHasMessages()) {
+ return;
+ }
+
+ buildmessage.enterJob({
+ title: "extracting " + packageName + "@" + version + "..."
+ }, () => {
+ const buildTempDir = exports._extractAndConvert(packageTarball);
buildInputDirs.push(buildTempDir);
buildTempDirs.push(buildTempDir);
});
});
+
if (buildmessage.jobHasMessages()) {
return;
}
- // We need to turn our builds into a single isopack.
- var isopack = new Isopack();
- _.each(buildInputDirs, function (buildTempDir, i) {
- isopack._loadUnibuildsFromPath(packageName, buildTempDir, {
- firstIsopack: i === 0,
+ buildmessage.enterJob({
+ title: "loading " + packageName + "@" + version + "..."
+ }, () => {
+ // We need to turn our builds into a single isopack.
+ var isopack = new Isopack();
+ _.each(buildInputDirs, (buildTempDir, i) => {
+ isopack._loadUnibuildsFromPath(packageName, buildTempDir, {
+ firstIsopack: i === 0,
+ });
});
- });
- self._saveIsopack(isopack, packageName, version);
+ self._saveIsopack(isopack, packageName, version);
+ });
// Delete temp directories now (asynchronously).
_.each(buildTempDirs, function (buildTempDir) {
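
Taken together, the tropohouse changes delete _downloadBuildToTempDir and split the per-package work into three separately titled buildmessage jobs (downloading, extracting, loading), so progress and failures are attributed to the right phase. A condensed outline, with error handling, the progress tracker, and the METEOR_WAREHOUSE_URLBASE override elided:

  // Condensed outline of the restructured flow (simplified from the diff above).
  _.each(buildsToDownload, ({ build: { url } }) => {
    const tarball = buildmessage.enterJob({
      title: "downloading " + packageName + "@" + version + "..."
    }, () => httpHelpers.getUrlWithResuming({ url, encoding: null, wait: false }));

    if (buildmessage.jobHasMessages()) {
      return;
    }

    buildmessage.enterJob({
      title: "extracting " + packageName + "@" + version + "..."
    }, () => {
      const buildTempDir = exports._extractAndConvert(tarball);
      buildInputDirs.push(buildTempDir);
      buildTempDirs.push(buildTempDir);
    });
  });

  buildmessage.enterJob({
    title: "loading " + packageName + "@" + version + "..."
  }, () => {
    const isopack = new Isopack();
    _.each(buildInputDirs, (dir, i) => {
      isopack._loadUnibuildsFromPath(packageName, dir, { firstIsopack: i === 0 });
    });
    self._saveIsopack(isopack, packageName, version);
  });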
7 tools/packaging/warehouse.js
@@ -394,8 +394,13 @@ _.extend(warehouse, {
"/" + version +
"/" + name + '-' + version + "-" + platform + ".tar.gz";
- var tarball = httpHelpers.getUrl({url: packageUrl, encoding: null});
+ var tarball = httpHelpers.getUrlWithResuming({
+ url: packageUrl,
+ encoding: null
+ });
+
files.extractTarGz(tarball, packageDir);
+
if (!dontWriteFreshFile) {
files.writeFile(warehouse.getPackageFreshFile(name, version), '');
}
