Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bump various dependencies and devDependencies that are non-breaking for us #249

Merged
merged 11 commits on Feb 27, 2024
Merged
1 change: 1 addition & 0 deletions .eslintrc.json
Expand Up @@ -11,6 +11,7 @@
"no-underscore-dangle": "off",
"no-use-before-define": "off",
"es-x/no-hashbang": "off",
"n/no-process-exit": "off",
"jsdoc/newline-after-description": "off"
}
}
2 changes: 1 addition & 1 deletion .github/workflows/nodejs.yml
Expand Up @@ -9,7 +9,7 @@ jobs:

strategy:
matrix:
node-version: [14.x, 16.x]
jdforrester marked this conversation as resolved.
Show resolved Hide resolved
node-version: [16.x]

steps:
- uses: actions/checkout@v2
Expand Down
12 changes: 6 additions & 6 deletions lib/base_service.js
Expand Up @@ -77,7 +77,7 @@ class BaseService {
this._basePath = config.app_base_path;
} else if ( /\/node_modules\/service-runner\/lib$/.test( __dirname ) ) {
// Default to guessing the base path
this._basePath = path.resolve( `${__dirname}/../../../` );
this._basePath = path.resolve( `${ __dirname }/../../../` );
} else {
this._basePath = path.resolve( './' );
}
Expand Down Expand Up @@ -109,7 +109,7 @@ class BaseService {

// display the version
if ( this.options.displayVersion ) {
console.log( `${config.serviceName} ${config.package.version}` );
console.log( `${ config.serviceName } ${ config.package.version }` );
process.exit( 0 );
}

Expand Down Expand Up @@ -337,7 +337,7 @@ class BaseService {
let configFile = this.options.configFile;
if ( !/^\//.test( configFile ) ) {
// resolve relative paths
configFile = path.resolve( `${process.cwd()}/${configFile}` );
configFile = path.resolve( `${ process.cwd() }/${ configFile }` );
}
action = fs.readFileAsync( configFile )
.then( ( yamlSource ) => yaml.load( this._replaceEnvVars( yamlSource ) ) );
Expand All @@ -349,7 +349,7 @@ class BaseService {
let packageJson = {};
try {
// eslint-disable-next-line security/detect-non-literal-require
packageJson = require( `${this._basePath}/package.json` );
packageJson = require( `${ this._basePath }/package.json` );
} catch ( e ) {
// Ignore error.
}
Expand All @@ -366,7 +366,7 @@ class BaseService {
this.config = config;
} )
.catch( ( e ) => {
console.error( `Error while reading config file: ${e}` );
console.error( `Error while reading config file: ${ e }` );
process.exit( 1 );
} );
}
Expand Down Expand Up @@ -412,7 +412,7 @@ class BaseService {
} catch ( e ) {
if ( path.isAbsolute( opts.mod ) ||
( opts.baseTried && opts.modsTried ) ||
!e.message.startsWith( `Cannot find module '${modName}'` ) ) {
!e.message.startsWith( `Cannot find module '${ modName }'` ) ) {
// we have a full path here which can't be required, we have tried
// all of the possible combinations, or the error is not about not
// finding modName, so bail out
Expand Down
84 changes: 42 additions & 42 deletions lib/docker.js
Expand Up @@ -55,7 +55,7 @@ function promisedSpawn( args, options ) {
let ret = '';
let err = '';
if ( opts.verbose ) {
console.log( `# RUNNING: ${args.join( ' ' )}\n (in ${process.cwd()})` );
console.log( `# RUNNING: ${ args.join( ' ' ) }\n (in ${ process.cwd() })` );
}
child = spawn( '/usr/bin/env', args, argOpts );
if ( options.capture ) {
Expand Down Expand Up @@ -87,11 +87,11 @@ function promisedSpawn( args, options ) {
if ( options.useErrHandler || options.errMessage ) {
promise = promise.catch( ( err ) => {
if ( options.errMessage ) {
console.error( `ERROR: ${options.errMessage.split( '\n' ).join( '\nERROR: ' )}` );
console.error( `ERROR: ${ options.errMessage.split( '\n' ).join( '\nERROR: ' ) }` );
}
let msg = `ERROR: ${args.slice( 0, 2 ).join( ' ' )} exited with code ${err.code}`;
let msg = `ERROR: ${ args.slice( 0, 2 ).join( ' ' ) } exited with code ${ err.code }`;
if ( err.message ) {
msg += ` and message ${err.message}`;
msg += ` and message ${ err.message }`;
}
console.error( msg );
process.exit( err.code );
Expand Down Expand Up @@ -153,7 +153,7 @@ function createDockerFile() {
// set the deploy target
// allow the user to specify the exact target to use, like "debian:sid"
const baseImg = /^.+:.+$/.test( pkg.deploy.target ) ? pkg.deploy.target : targets[ pkg.deploy.target ];
let contents = `FROM ${baseImg}\n`;
let contents = `FROM ${ baseImg }\n`;

if ( !baseImg || baseImg === '' ) {
console.error( 'ERROR: You must specify a valid target!' );
Expand All @@ -172,13 +172,13 @@ function createDockerFile() {
if ( pkg.uri ) {
debPkgs.push( pkg.uri );
} else if ( !pkg.repo_url || !pkg.pool || !pkg.packages || !pkg.release ) {
console.error( `ERROR: Incorrect dependency spec: ${JSON.stringify( pkg )}` );
console.error( `ERROR: Incorrect dependency spec: ${ JSON.stringify( pkg ) }` );
process.exit( 1 );
} else {
customSourcePkgs.push( pkg );
}
} else {
console.error( `ERROR: Incorrect dependency spec: ${pkg}` );
console.error( `ERROR: Incorrect dependency spec: ${ pkg }` );
process.exit( 1 );
}
} );
Expand All @@ -189,67 +189,67 @@ function createDockerFile() {
extraPkgs.push( 'apt-transport-https' );
}

contents += `RUN apt-get update && apt-get install -y ${extraPkgs.join( ' ' )} && rm -rf /var/lib/apt/lists/*\n`; /**/
contents += `RUN apt-get update && apt-get install -y ${ extraPkgs.join( ' ' ) } && rm -rf /var/lib/apt/lists/*\n`; /**/

if ( customSourcePkgs.length ) {
contents += `RUN echo > /etc/apt/sources.list && ${customSourcePkgs.map( ( customSourcePkgSpec ) =>
`echo deb "${customSourcePkgSpec.repo_url} ${customSourcePkgSpec.release} ${customSourcePkgSpec.pool}" >> /etc/apt/sources.list` ).join( ' && ' )}\n`;
contents += `RUN apt-get update && ${customSourcePkgs.map( ( customSourcePkgSpec ) => `apt-get install -y --force-yes -t ${customSourcePkgSpec.release} ${customSourcePkgSpec.packages.join( ' ' )}` ).join( ' && ' )} && rm -rf /var/lib/apt/lists/*\n`; /**/
contents += `RUN echo > /etc/apt/sources.list && ${ customSourcePkgs.map( ( customSourcePkgSpec ) =>
`echo deb "${ customSourcePkgSpec.repo_url } ${ customSourcePkgSpec.release } ${ customSourcePkgSpec.pool }" >> /etc/apt/sources.list` ).join( ' && ' ) }\n`;
contents += `RUN apt-get update && ${ customSourcePkgs.map( ( customSourcePkgSpec ) => `apt-get install -y --force-yes -t ${ customSourcePkgSpec.release } ${ customSourcePkgSpec.packages.join( ' ' ) }` ).join( ' && ' ) } && rm -rf /var/lib/apt/lists/*\n`; /**/
}

if ( debPkgs.length ) {
contents += `RUN ${debPkgs.map( ( uri ) => `wget ${uri} -O package.deb && dpkg -i package.deb && rm package.deb` ).join( ' && ' )}\n`;
contents += `RUN ${ debPkgs.map( ( uri ) => `wget ${ uri } -O package.deb && dpkg -i package.deb && rm package.deb` ).join( ' && ' ) }\n`;
}

let npmCommand = 'npm';
if ( nodeVersion !== 'system' ) {
const nvmDownloadURI = 'https://raw.githubusercontent.com/nvm-sh/nvm/v0.34.0/install.sh';
contents += 'RUN mkdir -p /usr/local/nvm\n';
contents += 'ENV NVM_DIR /usr/local/nvm\n';
contents += `RUN wget -qO- ${nvmDownloadURI} | bash && . $NVM_DIR/nvm.sh && nvm install ${nodeVersion}\n`;
npmCommand = `. $NVM_DIR/nvm.sh && nvm use ${nodeVersion} && npm`;
contents += `RUN wget -qO- ${ nvmDownloadURI } | bash && . $NVM_DIR/nvm.sh && nvm install ${ nodeVersion }\n`;
npmCommand = `. $NVM_DIR/nvm.sh && nvm use ${ nodeVersion } && npm`;
}

if ( !opts.deploy ) {
contents += `RUN mkdir /opt/service\nADD . /opt/service\nWORKDIR /opt/service\nRUN ${npmCommand} install && npm dedupe\n`;
contents += `RUN mkdir /opt/service\nADD . /opt/service\nWORKDIR /opt/service\nRUN ${ npmCommand } install && npm dedupe\n`;
}

if ( opts.uid !== 0 &&
// In 'Docker for Mac' the mapping between users/groups
// is done internally by docker, so we can run as root
os.type() !== 'Darwin' ) {
contents += `RUN groupadd -o -g ${opts.gid} -r rungroup && useradd -o -m -r -g rungroup -u ${opts.uid} runuser\nUSER runuser\nENV HOME=/home/runuser LINK=g++\n`;
contents += `RUN groupadd -o -g ${ opts.gid } -r rungroup && useradd -o -m -r -g rungroup -u ${ opts.uid } runuser\nUSER runuser\nENV HOME=/home/runuser LINK=g++\n`;
} else {
contents += 'ENV HOME=/root/ LINK=g++\n';
}

let envCommand = 'ENV IN_DOCKER=1';
if ( pkg.deploy.env && Object.keys( pkg.deploy.env ) ) {
Object.keys( pkg.deploy.env ).forEach( ( envVar ) => {
envCommand += ` ${envVar}="${pkg.deploy.env[ envVar ]}"`;
envCommand += ` ${ envVar }="${ pkg.deploy.env[ envVar ] }"`;
} );
}
contents += `${envCommand}\n`;
contents += `${ envCommand }\n`;

if ( opts.deploy ) {
let beforeInstall = '';
let afterInstall = '';
if ( npmVersion ) {
beforeInstall += `${npmCommand} install npm@${npmVersion} &&`;
beforeInstall += `${ npmCommand } install npm@${ npmVersion } &&`;
npmCommand = './node_modules/.bin/npm';
afterInstall = '&& rm -rf ./node_modules/npm ./node_modules/.bin/npm';
}
let installOpts = ' --production ';
if ( pkg.deploy.install_opts ) {
installOpts += `${pkg.deploy.install_opts.join( ' ' )} `;
installOpts += `${ pkg.deploy.install_opts.join( ' ' ) } `;
}
contents += `CMD ${beforeInstall} ${npmCommand} install${installOpts} ${afterInstall}`;
contents += `CMD ${ beforeInstall } ${ npmCommand } install${ installOpts } ${ afterInstall }`;
} else if ( opts.tests ) {
contents += `CMD ${npmCommand} test`;
contents += `CMD ${ npmCommand } test`;
} else if ( opts.coverage ) {
contents += `CMD ${npmCommand} run-script coverage`;
contents += `CMD ${ npmCommand } run-script coverage`;
} else {
contents += `CMD ${npmCommand} start`;
contents += `CMD ${ npmCommand } start`;
}

return fs.writeFileAsync( 'Dockerfile', contents );
Expand Down Expand Up @@ -289,7 +289,7 @@ function startContainer( args, hidePorts ) {
config.services.forEach( ( srv ) => {
srv.conf = srv.conf || {};
srv.conf.port = srv.conf.port || 8888;
cmd.push( '-p', `${srv.conf.port}:${srv.conf.port}` );
cmd.push( '-p', `${ srv.conf.port }:${ srv.conf.port }` );
} );
}

Expand Down Expand Up @@ -321,7 +321,7 @@ function ensureDockerVersion() {
const minimumDockerVersion = os.type() === 'Darwin' ? '1.12.0' : '1.8.0';
dockerVersion = dockerVersion.replace( /\.0+(0|[1-9]+)/g, '.$1' );
if ( semver.lt( dockerVersion, minimumDockerVersion ) ) {
console.error( `Building the deploy repo on ${os.type()} supported only with docker ${minimumDockerVersion}+` );
console.error( `Building the deploy repo on ${ os.type() } supported only with docker ${ minimumDockerVersion }+` );
process.exit( 1 );
}
} );
Expand Down Expand Up @@ -368,10 +368,10 @@ function updateDeploy() {
opts.name = props.name || pkg.name;
opts.remote_name = props.remote || 'origin';
opts.submodule_ref = props.submodule_ref ||
`https://gerrit.wikimedia.org/r/mediawiki/services/${opts.name}`;
`https://gerrit.wikimedia.org/r/mediawiki/services/${ opts.name }`;
opts.src_branch = props.src_branch || 'master';
opts.deploy_branch = props.deploy_branch || 'master';
opts.remote_branch = `${opts.remote_name}/${opts.deploy_branch}`;
opts.remote_branch = `${ opts.remote_name }/${ opts.deploy_branch }`;
// we need to CHDIR into the deploy dir for subsequent operations
process.chdir( opts.dir );
return chainedPgit( [
Expand All @@ -392,25 +392,25 @@ function updateDeploy() {
// update it fully
return promisedGit( [ 'submodule', 'update', '--init' ] )
.then( () => {
process.chdir( `${opts.dir}/${opts.submodule}` );
process.chdir( `${ opts.dir }/${ opts.submodule }` );
return chainedPgit( [
// fetch new commits
[ 'fetch', 'origin' ],
// inspect what has changed
[ 'diff', '--name-only', `origin/${opts.src_branch}` ]
[ 'diff', '--name-only', `origin/${ opts.src_branch }` ]
] ).then( ( changes ) => {
if ( /package\.json/.test( changes ) ) {
// package.json has changed, so we need
// to rebuild the node_modules directory
opts.need_build = true;
}
// get the SHA1 of the latest commit on the src branch
return promisedGit( [ 'rev-parse', '--short', `origin/${opts.src_branch}` ] );
return promisedGit( [ 'rev-parse', '--short', `origin/${ opts.src_branch }` ] );
} ).then( ( shortSha1 ) => {
opts.commit_msg = `Update ${opts.name} to ${shortSha1}\n\n`;
opts.commit_msg = `Update ${ opts.name } to ${ shortSha1 }\n\n`;
// get a nice list of commits included in the change
return promisedGit( [ 'log',
`..origin/${opts.src_branch}`,
`..origin/${ opts.src_branch }`,
'--oneline',
'--no-merges',
'--reverse',
Expand All @@ -431,8 +431,8 @@ function updateDeploy() {
} else if ( !logs ) {
logs = '';
}
opts.commit_msg += `List of changes:\n${logs}`;
return promisedGit( [ 'checkout', `origin/${opts.src_branch}` ] );
opts.commit_msg += `List of changes:\n${ logs }`;
return promisedGit( [ 'checkout', `origin/${ opts.src_branch }` ] );
} ).then( () => {
// go back to the root dir
process.chdir( opts.dir );
Expand All @@ -444,14 +444,14 @@ function updateDeploy() {
// no submodule, need to add it
opts.submodule = 'src';
opts.need_build = true;
opts.commit_msg = `Initial import of ${opts.name}`;
opts.commit_msg = `Initial import of ${ opts.name }`;
return promisedGit( [ 'submodule',
'add',
opts.submodule_ref,
opts.submodule ] );
}
} ).then( () => // make sure the package.json symlink is in place
fs.symlinkAsync( `${opts.submodule}/package.json`, 'package.json' )
fs.symlinkAsync( `${ opts.submodule }/package.json`, 'package.json' )
.catch( () => {} ).then( () => promisedGit( [ 'add', 'package.json' ] ) ) ).then( () => {
if ( !opts.need_build ) {
return;
Expand All @@ -465,7 +465,7 @@ function updateDeploy() {
.then( () => promisedSpawn( [ 'rm', '-rf', 'node_modules' ],
{ capture: true, ignoreErr: true } ) )
// start the container which builds the modules
.then( () => startContainer( [ '-v', `${opts.dir}:/opt/service`, '-w', '/opt/service' ], true ) )
.then( () => startContainer( [ '-v', `${ opts.dir }:/opt/service`, '-w', '/opt/service' ], true ) )
.then( () => {
// remove unnecessary files
let findAttr;
Expand Down Expand Up @@ -497,7 +497,7 @@ function updateDeploy() {
.then( () => {
if ( !opts.review ) {
console.log( '\n\nChanges are sitting in the sync-repo branch in' );
console.log( `${opts.dir} with the commit:` );
console.log( `${ opts.dir } with the commit:` );
console.log( opts.commit_msg );
return;
}
Expand Down Expand Up @@ -532,12 +532,12 @@ function getUid() {
).then( ( dir ) => {
opts.dir = dir;
// make sure that the dir exists and it is a git repo
return fs.statAsync( `${dir}/.git` );
return fs.statAsync( `${ dir }/.git` );
} ).then( ( stat ) => {
opts.uid = stat.uid;
opts.gid = stat.gid;
} ).catch( () => {
console.error( `ERROR: The deploy repo dir ${opts.dir} does not exist or is not a git repo!` );
console.error( `ERROR: The deploy repo dir ${ opts.dir } does not exist or is not a git repo!` );
process.exit( 3 );
} );
}
Expand Down Expand Up @@ -604,7 +604,7 @@ function main( options, configuration ) {
imgName += '-deploy';
}
// the container's name
name = `${pkg.name}-${Date.now()}-${Math.floor( Math.random() * 1000 )}`;
name = `${ pkg.name }-${ Date.now() }-${ Math.floor( Math.random() * 1000 ) }`;

// trap exit signals
process.on( 'SIGINT', sigHandle );
Expand Down
4 changes: 3 additions & 1 deletion lib/logger.js
Expand Up @@ -18,6 +18,7 @@ class NamedLevelStdout extends Writable {
super( Object.assign( options, { objectMode: true } ) );
this.downstream = downstream;
}

_write( logEntry, encoding, callback ) {
logEntry.level = bunyan.nameFromLevel[ logEntry.level ].toUpperCase();
this.downstream.write(
Expand All @@ -26,6 +27,7 @@ class NamedLevelStdout extends Writable {
callback
);
}

destroy() {
super.destroy();
this.downstream.destroy();
Expand Down Expand Up @@ -230,7 +232,7 @@ class Logger {
const pos = LEVELS.indexOf( level );
if ( pos !== -1 ) {
// eslint-disable-next-line security/detect-non-literal-regexp
return new RegExp( `^(${LEVELS.slice( pos ).join( '|' )})(?=/|$)` );
return new RegExp( `^(${ LEVELS.slice( pos ).join( '|' ) })(?=/|$)` );
} else {
// Match nothing
return /^$/;
Expand Down
5 changes: 3 additions & 2 deletions lib/master.js
Expand Up @@ -110,7 +110,7 @@ class Master extends BaseService {

// Fork workers.
this._logger.log( 'info/service-runner',
`master(${process.pid}) initializing ${this.config.num_workers} workers` );
`master(${ process.pid }) initializing ${ this.config.num_workers } workers` );

process.on( 'SIGINT', this._shutdownMasterHandler );
process.on( 'SIGTERM', this._shutdownMasterHandler );
Expand Down Expand Up @@ -289,13 +289,14 @@ class Master extends BaseService {
return; // Ignore prom-client internal communication.
default:
this._logger.log( 'error/service-runner/master',
`unknown message type received from worker ${msg.type}` );
`unknown message type received from worker ${ msg.type }` );
}
};

worker.on( 'message', workerMessageHandler );
} );
}

// Fork a single worker, wait for it to start executing and set everything up,
// and then fork all the rest of the workers.
_startWorkers( workersToStart ) {
Expand Down