cli: use decompress to decompress plugins
`unzip-stream` does not restore file permissions when decompressing
archives.

This commit replaces it with `decompress`, which restores file
permissions as stored in the archives.

Signed-off-by: Paul Maréchal <paul.marechal@ericsson.com>
paul-marechal committed Aug 11, 2020
1 parent d18b57e commit 1dfc6af
Showing 3 changed files with 99 additions and 85 deletions.
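For context, here is a minimal sketch (not part of this commit) of the behaviour the new dependency provides: `decompress` infers the archive format and restores the mode stored for each entry, so a bundled executable stays executable after extraction. The archive name and the checked path below are made up for illustration.

// Sketch only: extract a plugin archive and check that file permissions
// recorded in the archive survived extraction.
// 'my-plugin.vsix' and 'bin/server' are hypothetical names.
import * as decompress from 'decompress';
import { promises as fs } from 'fs';

async function extractAndCheck(): Promise<void> {
    // decompress handles .zip/.vsix, .tar.gz, etc., and restores each entry's mode.
    const files = await decompress('my-plugin.vsix', 'plugins/my-plugin');
    console.log(files.map(f => f.path));

    // With a streaming unzip the executable bit was lost; here it should be kept.
    const stat = await fs.stat('plugins/my-plugin/bin/server');
    console.log(`executable: ${(stat.mode & 0o111) !== 0}`);
}

extractAndCheck().catch(console.error);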
dev-packages/cli/package.json: 3 changes (1 addition, 2 deletions)
@@ -40,16 +40,15 @@
    "@types/tar": "^4.0.3",
    "chai": "^4.2.0",
    "colors": "^1.4.0",
    "decompress": "^4.2.1",
    "https-proxy-agent": "^5.0.0",
    "mkdirp": "^0.5.0",
    "mocha": "^7.0.0",
    "node-fetch": "^2.6.0",
    "proxy-from-env": "^1.1.0",
    "puppeteer": "^2.0.0",
    "puppeteer-to-istanbul": "^1.2.2",
    "tar": "^4.0.0",
    "temp": "^0.9.1",
    "unzip-stream": "^0.3.0",
    "yargs": "^11.1.0"
  },
  "devDependencies": {
dev-packages/cli/src/download-plugins.ts: 167 changes (84 additions, 83 deletions)
@@ -19,21 +19,21 @@
import fetch, { Response, RequestInit } from 'node-fetch';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { getProxyForUrl } from 'proxy-from-env';
-import * as fs from 'fs';
+import { promises as fs, createWriteStream } from 'fs';
import * as mkdirp from 'mkdirp';
import * as path from 'path';
import * as process from 'process';
import * as stream from 'stream';
-import * as tar from 'tar';
-import * as zlib from 'zlib';
+import * as decompress from 'decompress';
+import * as temp from 'temp';

import { green, red } from 'colors/safe';

import { promisify } from 'util';
const mkdirpAsPromised = promisify<string, mkdirp.Made>(mkdirp);
const pipelineAsPromised = promisify(stream.pipeline);

-const unzip = require('unzip-stream');
+temp.track();

/**
 * Available options when downloading.
@@ -69,92 +69,93 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
        console.log(red('error: missing mandatory \'theiaPlugins\' property.'));
        return;
    }
-
-    await Promise.all(Object.keys(pck.theiaPlugins).map(async plugin => {
-        if (!plugin) {
-            return;
-        }
-        const pluginUrl = pck.theiaPlugins[plugin];
-
-        let fileExt: string;
-        if (pluginUrl.endsWith('tar.gz')) {
-            fileExt = '.tar.gz';
-        } else if (pluginUrl.endsWith('vsix')) {
-            fileExt = '.vsix';
-        } else {
-            console.error(red(`error: '${plugin}' has an unsupported file type: '${pluginUrl}'`));
-            return;
-        }
-
-        const targetPath = path.join(process.cwd(), pluginsDir, `${plugin}${packed === true ? fileExt : ''}`);
-
-        // Skip plugins which have previously been downloaded.
-        if (isDownloaded(targetPath)) {
-            console.warn('- ' + plugin + ': already downloaded - skipping');
-            return;
-        }
-
-        const maxAttempts = 5;
-        const retryDelay = 2000;
-
-        let attempts: number;
-        let lastError: Error | undefined;
-        let response: Response | undefined;
-
-        for (attempts = 0; attempts < maxAttempts; attempts++) {
-            if (attempts > 0) {
-                await new Promise(resolve => setTimeout(resolve, retryDelay));
-            }
-            lastError = undefined;
-            try {
-                response = await xfetch(pluginUrl);
-            } catch (error) {
-                lastError = error;
-                continue;
-            }
-            const retry = response.status === 439 || response.status >= 500;
-            if (!retry) {
-                break;
-            }
-        }
-        if (lastError) {
-            failures.push(red(`x ${plugin}: failed to download, last error:\n ${lastError}`));
-            return;
-        }
-        if (typeof response === 'undefined') {
-            failures.push(red(`x ${plugin}: failed to download (unknown reason)`));
-            return;
-        }
-        if (response.status !== 200) {
-            failures.push(red(`x ${plugin}: failed to download with: ${response.status} ${response.statusText}`));
-            return;
-        }
-
-        if (fileExt === '.tar.gz') {
-            // Decompress .tar.gz files.
-            await mkdirpAsPromised(targetPath);
-            const gunzip = zlib.createGunzip({
-                finishFlush: zlib.Z_SYNC_FLUSH,
-                flush: zlib.Z_SYNC_FLUSH
-            });
-            const untar = tar.x({ cwd: targetPath });
-            await pipelineAsPromised(response.body, gunzip, untar);
-        } else {
-            if (packed === true) {
-                // Download .vsix without decompressing.
-                const file = fs.createWriteStream(targetPath);
-                await pipelineAsPromised(response.body, file);
-            } else {
-                // Decompress .vsix.
-                await pipelineAsPromised(response.body, unzip.Extract({ path: targetPath }));
-            }
-        }
-
-        console.warn(green(`+ ${plugin}: downloaded successfully ${attempts > 1 ? `(after ${attempts} attempts)` : ''}`));
-    }));
-    failures.forEach(failure => {
-        console.log(failure);
-    });
-}
+    try {
+        await Promise.all(Object.keys(pck.theiaPlugins).map(
+            plugin => downloadPluginAsync(failures, plugin, pck.theiaPlugins[plugin], pluginsDir, packed)
+        ));
+    } finally {
+        temp.cleanupSync();
+    }
+    failures.forEach(console.error);
+}
+
+/**
+ * Downloads a plugin, will make multiple attempts before actually failing.
+ *
+ * @param failures reference to an array storing all failures
+ * @param plugin plugin short name
+ * @param pluginUrl url to download the plugin at
+ * @param pluginsDir where to download the plugin in
+ * @param packed whether to decompress or not
+ */
+async function downloadPluginAsync(failures: string[], plugin: string, pluginUrl: string, pluginsDir: string, packed: boolean): Promise<void> {
+    if (!plugin) {
+        return;
+    }
+    let fileExt: string;
+    if (pluginUrl.endsWith('tar.gz')) {
+        fileExt = '.tar.gz';
+    } else if (pluginUrl.endsWith('vsix')) {
+        fileExt = '.vsix';
+    } else {
+        console.error(red(`error: '${plugin}' has an unsupported file type: '${pluginUrl}'`));
+        return;
+    }
+    const targetPath = path.join(process.cwd(), pluginsDir, `${plugin}${packed === true ? fileExt : ''}`);
+    // Skip plugins which have previously been downloaded.
+    if (await isDownloaded(targetPath)) {
+        console.warn('- ' + plugin + ': already downloaded - skipping');
+        return;
+    }
+
+    const maxAttempts = 5;
+    const retryDelay = 2000;
+
+    let attempts: number;
+    let lastError: Error | undefined;
+    let response: Response | undefined;
+
+    for (attempts = 0; attempts < maxAttempts; attempts++) {
+        if (attempts > 0) {
+            await new Promise(resolve => setTimeout(resolve, retryDelay));
+        }
+        lastError = undefined;
+        try {
+            response = await xfetch(pluginUrl);
+        } catch (error) {
+            lastError = error;
+            continue;
+        }
+        const retry = response.status === 439 || response.status >= 500;
+        if (!retry) {
+            break;
+        }
+    }
+    if (lastError) {
+        failures.push(red(`x ${plugin}: failed to download, last error:\n ${lastError}`));
+        return;
+    }
+    if (typeof response === 'undefined') {
+        failures.push(red(`x ${plugin}: failed to download (unknown reason)`));
+        return;
+    }
+    if (response.status !== 200) {
+        failures.push(red(`x ${plugin}: failed to download with: ${response.status} ${response.statusText}`));
+        return;
+    }
+
+    if (fileExt === '.vsix' && packed === true) {
+        // Download .vsix without decompressing.
+        const file = createWriteStream(targetPath);
+        await pipelineAsPromised(response.body, file);
+    } else {
+        await mkdirpAsPromised(targetPath);
+        const tempFile = temp.createWriteStream('theia-plugin-download');
+        await pipelineAsPromised(response.body, tempFile);
+        await decompress(tempFile.path, targetPath);
+    }
+
+    console.warn(green(`+ ${plugin}: downloaded successfully ${attempts > 1 ? `(after ${attempts} attempts)` : ''}`));
+}

@@ -163,8 +164,8 @@ export default async function downloadPlugins(options: DownloadPluginsOptions =
 *
 * @returns `true` if the resource is already downloaded, else `false`.
 */
-function isDownloaded(filePath: string): boolean {
-    return fs.existsSync(filePath);
+async function isDownloaded(filePath: string): Promise<boolean> {
+    return fs.stat(filePath).then(() => true, () => false);
}
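To make the new flow easier to follow, here is a standalone sketch (not part of the commit) of the download-then-unpack shape used above: stream the HTTP response into a temp file tracked by `temp`, then let `decompress` unpack it into the target directory. The URL and target directory below are hypothetical.

// Sketch only: download an archive to a tracked temp file, then unpack it.
import fetch from 'node-fetch';
import * as decompress from 'decompress';
import * as temp from 'temp';
import * as stream from 'stream';
import { promisify } from 'util';

const pipelineAsPromised = promisify(stream.pipeline);
temp.track(); // remember temp files so they can be cleaned up later

async function fetchAndUnpack(url: string, targetDir: string): Promise<void> {
    const response = await fetch(url);
    // Instead of piping the response through a streaming unzip, write it to a
    // temp file first, then let decompress unpack it with permissions intact.
    const tempFile = temp.createWriteStream('plugin-download');
    await pipelineAsPromised(response.body, tempFile);
    await decompress(tempFile.path, targetDir);
}

fetchAndUnpack('https://example.com/plugin.tar.gz', 'plugins/example')
    .catch(console.error)
    .finally(() => temp.cleanupSync()); // remove the temp file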
yarn.lock: 14 changes (14 additions, 0 deletions)
@@ -4860,6 +4860,20 @@ decompress@4.2.0:
    pify "^2.3.0"
    strip-dirs "^2.0.0"

decompress@^4.2.1:
  version "4.2.1"
  resolved "https://registry.yarnpkg.com/decompress/-/decompress-4.2.1.tgz#007f55cc6a62c055afa37c07eb6a4ee1b773f118"
  integrity sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==
  dependencies:
    decompress-tar "^4.0.0"
    decompress-tarbz2 "^4.0.0"
    decompress-targz "^4.0.0"
    decompress-unzip "^4.0.1"
    graceful-fs "^4.1.10"
    make-dir "^1.0.0"
    pify "^2.3.0"
    strip-dirs "^2.0.0"

dedent@^0.7.0:
  version "0.7.0"
  resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c"