diff --git a/.eslintignore b/.eslintignore
deleted file mode 100644
index 2543904..0000000
--- a/.eslintignore
+++ /dev/null
@@ -1,2 +0,0 @@
-**/node_modules
-build/
diff --git a/.eslintrc.json b/.eslintrc.json
deleted file mode 100644
index 7821534..0000000
--- a/.eslintrc.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "extends": "./node_modules/gts"
-}
diff --git a/.prettierignore b/.prettierignore
deleted file mode 100644
index 1bf0471..0000000
--- a/.prettierignore
+++ /dev/null
@@ -1,3 +0,0 @@
-coverage/
-build/
-**/node_modules
\ No newline at end of file
diff --git a/.prettierrc.json b/.prettierrc.json
deleted file mode 100644
index 993c327..0000000
--- a/.prettierrc.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "bracketSpacing": false,
-  "singleQuote": true,
-  "trailingComma": "es5",
-  "arrowParens": "avoid"
-}
diff --git a/package.json b/package.json
index 331b18f..a3a3fd0 100644
--- a/package.json
+++ b/package.json
@@ -19,9 +19,9 @@
   },
   "scripts": {
     "test": "c8 mocha build/test",
-    "lint": "gts check",
+    "lint": "xo --prettier",
     "compile": "tsc -p .",
-    "fix": "gts fix",
+    "fix": "xo --prettier --fix",
    "prepare": "npm run compile",
     "pretest": "npm run compile",
     "watch": "tsc -p . --watch"
   },
@@ -53,12 +53,12 @@
     "@types/update-notifier": "^6.0.1",
     "c8": "^7.10.0",
     "gaxios": "^5.0.0",
-    "gts": "^3.1.0",
     "mocha": "^10.0.0",
     "nock": "^13.2.1",
     "semantic-release": "^21.0.0",
     "sinon": "^15.0.0",
-    "typescript": "~5.0.0"
+    "typescript": "~5.0.0",
+    "xo": "^0.55.0"
   },
   "c8": {
     "exclude": [
diff --git a/src/cli.ts b/src/cli.ts
index fe635be..373733c 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -1,22 +1,21 @@
 #!/usr/bin/env node
+import util from 'node:util';
+import fs from 'node:fs';
+import path from 'node:path';
+import process from 'node:process';
 import meow from 'meow';
-import updateNotifier from 'update-notifier';
+import updateNotifier, {type Package} from 'update-notifier';
 import ora from 'ora';
 import chalk from 'chalk';
-import util from 'util';
-import fs from 'fs';
-import path from 'path';
-import {URL} from 'url';
-
-import {Builder, BuildOptions, ProgressEvent} from './index.js';
+import {Builder, type BuildOptions, ProgressEvent} from './index.js';
 
 const pkg = JSON.parse(
-  fs.readFileSync(new URL('../../package.json', import.meta.url), 'utf-8')
-);
+	fs.readFileSync(new URL('../../package.json', import.meta.url), 'utf8'),
+) as Package;
 updateNotifier({pkg}).notify();
 
 const cli = meow(
-  `
+	`
 
 Usage
   $ gcb [SOURCE] [--flags]
@@ -40,85 +39,88 @@ const cli = meow(
  $ gcb --config ../perfect.yaml --tag ohai123
  $ gcp containers/web
 `,
-  {
-    importMeta: import.meta,
-    flags: {
-      config: {type: 'string'},
-      tag: {type: 'string'},
-    },
-  }
+	{
+		importMeta: import.meta,
+		flags: {
+			config: {type: 'string'},
+			tag: {type: 'string'},
+		},
+	},
 );
 
 async function main() {
-  if (cli.input.length > 1) {
-    cli.showHelp();
-    return;
-  }
-
-  const start = Date.now();
-  const opts = cli.flags as BuildOptions;
-  opts.sourcePath = cli.input.length > 0 ?
cli.input[0] : process.cwd(); - if (!path.isAbsolute(opts.sourcePath)) { - opts.sourcePath = path.join(process.cwd(), opts.sourcePath); - } - const hasIgnore = await hasIgnoreFile(opts.sourcePath); - if (!hasIgnore) { - await generateIgnoreFile(opts.sourcePath); - } - const spinny = ora('Initializing build...').start(); - const builder = new Builder(opts); - builder - .on(ProgressEvent.CREATING_BUCKET, bucket => { - spinny.stopAndPersist({ - symbol: '🌧', - text: `Bucket '${bucket}' created.`, - }); - spinny.start('Packing and uploading sources...'); - }) - .on(ProgressEvent.UPLOADING, () => { - spinny.stopAndPersist({symbol: '📦', text: 'Source code packaged.'}); - spinny.start('Uploading source...'); - }) - .on(ProgressEvent.BUILDING, () => { - spinny.stopAndPersist({ - symbol: '🛸', - text: 'Source uploaded to cloud.', - }); - spinny.start('Building container...'); - }) - .on(ProgressEvent.LOG, data => { - console.error('\n\n' + chalk.gray(data)); - }) - .on(ProgressEvent.COMPLETE, () => { - const seconds = (Date.now() - start) / 1000; - spinny.stopAndPersist({ - symbol: '🚀', - text: `Container built in ${seconds} seconds.`, - }); - }); - try { - await builder.build(); - } catch (e) { - const err = e as Error; - console.error(err); - spinny.fail(err.message); - // eslint-disable-next-line no-process-exit - process.exit(1); - } + if (cli.input.length > 1) { + cli.showHelp(); + return; + } + + const start = Date.now(); + const options = cli.flags as BuildOptions; + options.sourcePath = cli.input.length > 0 ? cli.input[0] : process.cwd(); + if (!path.isAbsolute(options.sourcePath)) { + options.sourcePath = path.join(process.cwd(), options.sourcePath); + } + + const hasIgnore = await hasIgnoreFile(options.sourcePath); + if (!hasIgnore) { + await generateIgnoreFile(options.sourcePath); + } + + const spinny = ora('Initializing build...').start(); + const builder = new Builder(options); + builder + .on(ProgressEvent.CREATING_BUCKET, (bucket) => { + spinny.stopAndPersist({ + symbol: '🌧', + text: `Bucket '${bucket}' created.`, + }); + spinny.start('Packing and uploading sources...'); + }) + .on(ProgressEvent.UPLOADING, () => { + spinny.stopAndPersist({symbol: '📦', text: 'Source code packaged.'}); + spinny.start('Uploading source...'); + }) + .on(ProgressEvent.BUILDING, () => { + spinny.stopAndPersist({ + symbol: '🛸', + text: 'Source uploaded to cloud.', + }); + spinny.start('Building container...'); + }) + .on(ProgressEvent.LOG, (data) => { + console.error('\n\n' + chalk.gray(data)); + }) + .on(ProgressEvent.COMPLETE, () => { + const seconds = (Date.now() - start) / 1000; + spinny.stopAndPersist({ + symbol: '🚀', + text: `Container built in ${seconds} seconds.`, + }); + }); + try { + await builder.build(); + } catch (error) { + const error_ = error as Error; + console.error(error_); + spinny.fail(error_.message); + + process.exit(1); + } } async function generateIgnoreFile(targetDir: string) { - console.log(` + console.log(` 🤖 I generated a '.gcloudignore' file in the target directory. This file contains a list of glob patterns that should be ingored in your build. 
It works just like a .gitignore file 💜 `); - await new Promise((resolve, reject) => { - fs.createReadStream(path.join(__dirname, '../../src/.gcloudignore')) - .pipe(fs.createWriteStream(path.join(targetDir, '.gcloudignore'))) - .on('error', reject) - .on('close', resolve); - }); + await new Promise((resolve, reject) => { + // eslint-disable-next-line unicorn/prefer-module + fs.createReadStream(path.join(__dirname, '../../src/.gcloudignore')) + .pipe(fs.createWriteStream(path.join(targetDir, '.gcloudignore'))) + .on('error', reject) + .on('close', resolve); + }); } /** @@ -126,13 +128,13 @@ async function generateIgnoreFile(targetDir: string) { * @param targetDir The directory with the sources to deploy. */ async function hasIgnoreFile(targetDir: string) { - const ignoreFile = path.join(targetDir, '.gcloudignore'); - try { - await util.promisify(fs.stat)(ignoreFile); - return true; - } catch (e) { - return false; - } + const ignoreFile = path.join(targetDir, '.gcloudignore'); + try { + await util.promisify(fs.stat)(ignoreFile); + return true; + } catch { + return false; + } } -main().catch(console.error); +await main(); diff --git a/src/config.ts b/src/config.ts index f4208d2..38de348 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,75 +1,88 @@ -import fs from 'fs'; -import {cloudbuild_v1} from 'googleapis'; -import path from 'path'; +import fs from 'node:fs'; +import path from 'node:path'; +import {type cloudbuild_v1} from 'googleapis'; import yaml from 'js-yaml'; -export interface GetConfigOptions { - configPath?: string; - sourcePath: string; - projectId: string; - tag?: string; -} +export type GetConfigOptions = { + configPath?: string; + sourcePath: string; + projectId: string; + tag?: string; +}; -export async function getConfig(opts: GetConfigOptions) { - // If no config path was provided, work through the following set of default - // paths looking for some kind of config: - // 1. cloudbuild.yaml - // 2. cloudbuild.json - // 3. Dockerfile - if (!opts.configPath) { - const files = ['cloudbuild.yaml', 'cloudbuild.json', 'Dockerfile']; - for (const file of files) { - const fullpath = path.join(opts.sourcePath, file); - const fileExists = await exists(fullpath); - if (fileExists) { - opts.configPath = fullpath; - break; - } - } - } +export async function getConfig(options: GetConfigOptions) { + // If no config path was provided, work through the following set of default + // paths looking for some kind of config: + // 1. cloudbuild.yaml + // 2. cloudbuild.json + // 3. Dockerfile + if (!options.configPath) { + const files = ['cloudbuild.yaml', 'cloudbuild.json', 'Dockerfile']; + for (const file of files) { + const fullpath = path.join(options.sourcePath, file); + // eslint-disable-next-line no-await-in-loop + const fileExists = await exists(fullpath); + if (fileExists) { + options.configPath = fullpath; + break; + } + } + } - if (!opts.configPath) { - throw new Error(` + if (!options.configPath) { + throw new Error(` Unable to find configuration file. 
Please provide a cloudbuild.yaml, cloudbuild.json, or Dockerfile in the source directory.`); - } + } + + let config: cloudbuild_v1.Schema$Build; + if (path.basename(options.configPath) === 'Dockerfile') { + if (!options.tag) { + options.tag = path.basename(options.sourcePath); + } + + config = { + steps: [ + { + name: 'gcr.io/cloud-builders/docker', + args: [ + 'build', + '-t', + `gcr.io/${options.projectId}/${options.tag}`, + '.', + ], + }, + ], + images: [`gcr.io/${options.projectId}/${options.tag}`], + }; + } else { + const configFileContents = await fs.promises.readFile( + options.configPath, + 'utf8', + ); + const ext = path.extname(options.configPath); + switch (ext) { + case '.json': { + config = JSON.parse(configFileContents) as cloudbuild_v1.Schema$Build; + break; + } + + case '.yaml': { + config = (await yaml.load( + configFileContents, + )) as cloudbuild_v1.Schema$Build; + break; + } + + default: { + throw new Error( + `The ${ext} extension is not supported. Please pass yaml or json.`, + ); + } + } + } - let config: cloudbuild_v1.Schema$Build; - if (path.basename(opts.configPath) === 'Dockerfile') { - if (!opts.tag) { - opts.tag = path.basename(opts.sourcePath); - } - config = { - steps: [ - { - name: 'gcr.io/cloud-builders/docker', - args: ['build', '-t', `gcr.io/${opts.projectId}/${opts.tag}`, '.'], - }, - ], - images: [`gcr.io/${opts.projectId}/${opts.tag}`], - }; - } else { - const configFileContents = await fs.promises.readFile( - opts.configPath, - 'utf8' - ); - const ext = path.extname(opts.configPath); - switch (ext) { - case '.json': - config = JSON.parse(configFileContents); - break; - case '.yaml': - config = (await yaml.load( - configFileContents - )) as cloudbuild_v1.Schema$Build; - break; - default: - throw new Error( - `The ${ext} extension is not supported. Please pass yaml or json.` - ); - } - } - return config; + return config; } /** @@ -77,10 +90,10 @@ export async function getConfig(opts: GetConfigOptions) { * @param file path to the file to check */ async function exists(file: string) { - try { - await fs.promises.access(file, fs.constants.F_OK); - return true; - } catch (e) { - return false; - } + try { + await fs.promises.access(file, fs.constants.F_OK); + return true; + } catch { + return false; + } } diff --git a/src/index.ts b/src/index.ts index 98a6bb8..84b98f6 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,238 +1,243 @@ -import {EventEmitter} from 'events'; -import fs from 'fs'; +import {EventEmitter} from 'node:events'; +import fs from 'node:fs'; +import path from 'node:path'; +import {PassThrough} from 'node:stream'; +import process from 'node:process'; import {globby} from 'globby'; -// eslint-disable-next-line node/no-extraneous-import -import {GoogleAuth, GoogleAuthOptions} from 'google-auth-library'; -import {cloudbuild_v1, google, storage_v1} from 'googleapis'; -import path from 'path'; -import {PassThrough} from 'stream'; +import {type cloudbuild_v1, google, type storage_v1, Auth} from 'googleapis'; import tar from 'tar'; - import {getConfig} from './config.js'; export enum ProgressEvent { - CREATING_BUCKET = 'CREATING_BUCKET', - UPLOADING = 'UPLOADING', - BUILDING = 'BUILDING', - COMPLETE = 'COMPLETE', - LOG = 'LOG', + CREATING_BUCKET = 'CREATING_BUCKET', + UPLOADING = 'UPLOADING', + BUILDING = 'BUILDING', + COMPLETE = 'COMPLETE', + LOG = 'LOG', } -export interface BuildOptions extends GoogleAuthOptions { - /** - * The path to the container sources. - * Defaults to CWD if not defined. 
- */ - sourcePath?: string; - /** - * The path to the yaml/json config. - * Defaults to `${sourcePath}/cloudbuild.yaml` - */ - configPath?: string; - /** - * The docker tag to apply to the container that gets created. - */ - tag?: string; -} +export type BuildOptions = { + /** + * The path to the container sources. + * Defaults to CWD if not defined. + */ + sourcePath?: string; + /** + * The path to the yaml/json config. + * Defaults to `${sourcePath}/cloudbuild.yaml` + */ + configPath?: string; + /** + * The docker tag to apply to the container that gets created. + */ + tag?: string; +} & Auth.GoogleAuthOptions; /** * Class that provides the `deploy` method. */ +// eslint-disable-next-line unicorn/prefer-event-target export class Builder extends EventEmitter { - public readonly auth: GoogleAuth; - - private sourcePath: string; - private configPath?: string; - private tag?: string; - private gcb = google.cloudbuild('v1'); - private gcs = google.storage('v1'); - - constructor(options: BuildOptions = {}) { - super(); - this.tag = options.tag; - this.sourcePath = options.sourcePath || process.cwd(); - this.configPath = options.configPath; // || path.join(this.sourcePath, 'cloudbuild.yaml'); - options.scopes = ['https://www.googleapis.com/auth/cloud-platform']; - this.auth = new GoogleAuth(options); - } - - /** - * Deploy the current application using the given opts. - */ - async build(): Promise { - const auth = await this.auth.getClient(); - google.options({auth}); - - this.emit(ProgressEvent.UPLOADING); - const {bucket, file} = await this.upload(); - - this.emit(ProgressEvent.BUILDING); - const projectId = await this.auth.getProjectId(); - - // load configuration - const requestBody = await getConfig({ - configPath: this.configPath, - sourcePath: this.sourcePath, - projectId, - tag: this.tag, - }); - - requestBody!.source = {storageSource: {bucket, object: file}}; - - // create the request to perform a build - const res = await this.gcb.projects.builds.create({ - projectId, - requestBody, - }); - const result = res.data as BuildResult; - - // poll the operation until complete - const operationId = res.data.name!; - try { - await this.poll(operationId); - } catch (e) { - let log: string; - try { - log = await this.fetchLog(result); - } catch (e) { - // 🤷‍♂️ - } - (e as BuildError).log = log!; - throw e; - } - - // Log streaming is super hard to understand. For now, just fetch the - // log from a well known location *after* it's complete. - // TODO: make it stream - const log = await this.fetchLog(result); - result.log = log; - this.emit(ProgressEvent.COMPLETE); - return result; - } - - /** - * Obtain the full text of the log after the build is complete. - * At some point this should be replaced with streaming logs. - * @param result The BuildResult returned from the create operation - */ - private async fetchLog(result: BuildResult): Promise { - const build = result.metadata.build; - const logsBucket = build.logsBucket!.split('gs://').filter(x => !!x)[0]; - const logFilename = `log-${build.id}.txt`; - const logRes = await this.gcs.objects.get({ - bucket: logsBucket, - object: logFilename, - alt: 'media', - }); - this.emit(ProgressEvent.LOG, logRes.data); - return logRes.data as string; - } - - /** - * Given an operation, poll it until complete. - * @private - * @param name Fully qualified name of the operation. 
- */ - private async poll(name: string) { - const res = await this.gcb.operations.get({name}); - const operation = res.data; - if (operation.error) { - const message = JSON.stringify(operation.error); - throw new Error(message); - } - if (operation.done) { - return; - } - await new Promise(r => setTimeout(r, 3000)); - await this.poll(name); - } - - /** - * Upload a local file to GCS given a signed url - * @private - * @param localPath Fully qualified path to the zip on disk. - * @param remotePath Signed url used to put the file to - */ - private async upload() { - // check to see if the bucket exists - const projectId = await this.auth.getProjectId(); - const bucketName = `${projectId}-gcb-staging-bbq`; - const exists = await this.gcs.buckets.get({bucket: bucketName}).then( - () => true, - () => false - ); - - // if it does not exist, create it! - if (!exists) { - this.emit(ProgressEvent.CREATING_BUCKET, bucketName); - await this.gcs.buckets.insert({ - project: projectId, - requestBody: { - name: bucketName, - lifecycle: { - rule: [{action: {type: 'Delete'}, condition: {age: 1}}], - }, - }, - }); - } - - // Get the full list of files that don't match .gcloudignore - const ignorePatterns = await this.getIgnoreRules(); - const files = await globby('**/**', { - ignore: ignorePatterns, - cwd: this.sourcePath, - }); - - // create a tar stream with all the files - const tarStream = tar.c({gzip: true, cwd: this.sourcePath}, files); - - // There is a bizarre bug with node-tar where the stream it hands back - // looks like a stream and talks like a stream, but it ain't a real - // stream. Pass it through a Duplex to make node-fetch happy. - const bodyStream = new PassThrough(); - tarStream.pipe(bodyStream); - - // upload the object via stream to GCS - const file = Date.now().toString() + '.tar.gz'; - await this.gcs.objects.insert({ - bucket: bucketName, - name: file, - media: {mediaType: 'application/gzip', body: bodyStream}, - } as storage_v1.Params$Resource$Objects$Insert); - - return {bucket: bucketName, file}; - } - - /** - * Look in the CWD for a `.gcloudignore` file. If one is present, parse it, - * and return the ignore rules as an array of strings. - */ - public async getIgnoreRules() { - const ignoreFile = path.join(this.sourcePath, '.gcloudignore'); - let ignoreRules = new Array(); - try { - const contents = await fs.promises.readFile(ignoreFile, 'utf8'); - ignoreRules = contents.split('\n').filter(line => { - return !line.startsWith('#') && line.trim() !== ''; - }); - } catch (e) { - // 🤷‍♂️ - } - return ignoreRules; - } + public readonly auth: Auth.GoogleAuth; + + private readonly sourcePath: string; + private readonly configPath?: string; + private readonly tag?: string; + private readonly gcb = google.cloudbuild('v1'); + private readonly gcs = google.storage('v1'); + + constructor(options: BuildOptions = {}) { + super(); + this.tag = options.tag; + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + this.sourcePath = options.sourcePath || process.cwd(); + this.configPath = options.configPath; // || path.join(this.sourcePath, 'cloudbuild.yaml'); + options.scopes = ['https://www.googleapis.com/auth/cloud-platform']; + this.auth = new Auth.GoogleAuth(options); + } + + /** + * Deploy the current application using the given opts. 
+ */ + async build(): Promise { + const auth = (await this.auth.getClient()) as Auth.Compute; + google.options({auth}); + + this.emit(ProgressEvent.UPLOADING); + const {bucket, file} = await this.upload(); + + this.emit(ProgressEvent.BUILDING); + const projectId = await this.auth.getProjectId(); + + // Load configuration + const requestBody = await getConfig({ + configPath: this.configPath, + sourcePath: this.sourcePath, + projectId, + tag: this.tag, + }); + + requestBody.source = {storageSource: {bucket, object: file}}; + + // Create the request to perform a build + const response = await this.gcb.projects.builds.create({ + projectId, + requestBody, + }); + const result = response.data as BuildResult; + + // Poll the operation until complete + const operationId = result.name; + try { + await this.poll(operationId); + } catch (error) { + let log: string; + try { + log = await this.fetchLog(result); + } catch { + // 🤷‍♂️ + } + + (error as BuildError).log = log!; + throw error; + } + + // Log streaming is super hard to understand. For now, just fetch the + // log from a well known location *after* it's complete. + const log = await this.fetchLog(result); + result.log = log; + this.emit(ProgressEvent.COMPLETE); + return result; + } + + /** + * Look in the CWD for a `.gcloudignore` file. If one is present, parse it, + * and return the ignore rules as an array of strings. + */ + public async getIgnoreRules() { + const ignoreFile = path.join(this.sourcePath, '.gcloudignore'); + let ignoreRules = new Array(); + try { + const contents = await fs.promises.readFile(ignoreFile, 'utf8'); + ignoreRules = contents.split('\n').filter((line) => { + return !line.startsWith('#') && line.trim() !== ''; + }); + } catch { + // 🤷‍♂️ + } + + return ignoreRules; + } + + /** + * Obtain the full text of the log after the build is complete. + * At some point this should be replaced with streaming logs. + * @param result The BuildResult returned from the create operation + */ + private async fetchLog(result: BuildResult): Promise { + const {build} = result.metadata; + const logsBucket = build.logsBucket!.split('gs://').find(Boolean); + const logFilename = `log-${build.id}.txt`; + const logResponse = await this.gcs.objects.get({ + bucket: logsBucket, + object: logFilename, + alt: 'media', + }); + this.emit(ProgressEvent.LOG, logResponse.data); + return logResponse.data as string; + } + + /** + * Given an operation, poll it until complete. + * @private + * @param name Fully qualified name of the operation. + */ + private async poll(name: string) { + const response = await this.gcb.operations.get({name}); + const operation = response.data; + if (operation.error) { + const message = JSON.stringify(operation.error); + throw new Error(message); + } + + if (operation.done) { + return; + } + + await new Promise((r) => { + setTimeout(r, 3000); + }); + await this.poll(name); + } + + /** + * Upload a local file to GCS given a signed url + * @private + * @param localPath Fully qualified path to the zip on disk. + * @param remotePath Signed url used to put the file to + */ + private async upload() { + // Check to see if the bucket exists + const projectId = await this.auth.getProjectId(); + const bucketName = `${projectId}-gcb-staging-bbq`; + const exists = await this.gcs.buckets.get({bucket: bucketName}).then( + () => true, + () => false, + ); + + // If it does not exist, create it! 
+ if (!exists) { + this.emit(ProgressEvent.CREATING_BUCKET, bucketName); + await this.gcs.buckets.insert({ + project: projectId, + requestBody: { + name: bucketName, + lifecycle: { + rule: [{action: {type: 'Delete'}, condition: {age: 1}}], + }, + }, + }); + } + + // Get the full list of files that don't match .gcloudignore + const ignorePatterns = await this.getIgnoreRules(); + const files = await globby('**/**', { + ignore: ignorePatterns, + cwd: this.sourcePath, + }); + + // Create a tar stream with all the files + const tarStream = tar.c({gzip: true, cwd: this.sourcePath}, files); + + // There is a bizarre bug with node-tar where the stream it hands back + // looks like a stream and talks like a stream, but it ain't a real + // stream. Pass it through a Duplex to make node-fetch happy. + const bodyStream = new PassThrough(); + tarStream.pipe(bodyStream); + + // Upload the object via stream to GCS + const file = Date.now().toString() + '.tar.gz'; + await this.gcs.objects.insert({ + bucket: bucketName, + name: file, + media: {mediaType: 'application/gzip', body: bodyStream}, + } as storage_v1.Params$Resource$Objects$Insert); + + return {bucket: bucketName, file}; + } } export async function build(options: BuildOptions) { - const builder = new Builder(options); - return builder.build(); + const builder = new Builder(options); + return builder.build(); } -export interface BuildResult { - name: string; - log: string; - metadata: {build: cloudbuild_v1.Schema$Build}; -} +export type BuildResult = { + name: string; + log: string; + metadata: {build: cloudbuild_v1.Schema$Build}; +}; -export interface BuildError extends Error { - log?: string; -} +export type BuildError = { + log?: string; +} & Error; diff --git a/test/fixtures/builder/cloudbuild.yaml b/test/fixtures/builder/cloudbuild.yaml index 2924ec3..2472ed6 100644 --- a/test/fixtures/builder/cloudbuild.yaml +++ b/test/fixtures/builder/cloudbuild.yaml @@ -1,3 +1,3 @@ steps: -- name: 'node:11-alpine' +- name: 'node:18-alpine' args: ['npm', 'start'] diff --git a/test/fixtures/cloudbuild.yaml b/test/fixtures/cloudbuild.yaml index 2924ec3..2472ed6 100644 --- a/test/fixtures/cloudbuild.yaml +++ b/test/fixtures/cloudbuild.yaml @@ -1,3 +1,3 @@ steps: -- name: 'node:11-alpine' +- name: 'node:18-alpine' args: ['npm', 'start'] diff --git a/test/test.ts b/test/test.ts index 8fbc9f3..6eb3969 100644 --- a/test/test.ts +++ b/test/test.ts @@ -1,219 +1,227 @@ -import assert from 'assert'; +import assert from 'node:assert'; +import fs from 'node:fs'; +import path from 'node:path'; import {describe, it, afterEach} from 'mocha'; import chalk from 'chalk'; -import fs from 'fs'; import nock from 'nock'; -import path from 'path'; -import sinon from 'sinon'; -import {GaxiosOptions, request} from 'gaxios'; - -import {Builder, BuildError} from '../src/index.js'; +import * as sinon from 'sinon'; +import {type GaxiosOptions, request} from 'gaxios'; +import {Builder, type BuildError} from '../src/index.js'; import {getConfig} from '../src/config.js'; describe('gcbuild', () => { - nock.disableNetConnect(); - - afterEach(() => { - nock.cleanAll(); - sinon.restore(); - }); - - describe('🙈 ignore rules', () => { - it('should return 0 rules if no .gcloudignore is available', async () => { - const builder = new Builder(); - const rules = await builder.getIgnoreRules(); - assert.deepStrictEqual(rules, []); - }); - - it('should return expected rules if .gcloudignore is available', async () => { - const expected = [ - '.gcloudignore', - '.git', - '.gitignore', - 'node_modules', 
- 'test/', - ]; - const gcloudignore = path.resolve('test/fixtures/.gcloudignore'); - await new Promise((resolve, reject) => { - fs.createReadStream(gcloudignore) - .pipe(fs.createWriteStream('.gcloudignore')) - .on('close', resolve) - .on('error', reject); - }); - const builder = new Builder(); - const rules = await builder.getIgnoreRules(); - fs.unlinkSync('.gcloudignore'); - assert.deepStrictEqual(rules, expected); - }); - }); - - describe('📦 pack & upload', () => { - it('should create a GCS bucket if the expected one does not exist', async () => { - const scopes = [ - mockBucketNotExists(), - mockBucketCreate(), - mockUpload(), - mockBuild(), - mockPoll(), - mockLogFetch(), - ]; - const sourcePath = path.resolve('test/fixtures'); - const builder = new Builder({sourcePath}); - sinon.stub(builder.auth, 'getProjectId').resolves('el-gato'); - sinon.stub(builder.auth, 'getClient').resolves({ - request: async (options: GaxiosOptions) => { - return request(options); - }, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - const result = await builder.build(); - scopes.forEach(s => s.done()); - assert.ok(result.metadata); - }); - - it('should PUT the file to Google Cloud Storage', async () => { - new Builder(); - }); - }); - - describe('🚨 error handing', () => { - it('should include a log with an error', async () => { - const scopes = [ - mockBucketExists(), - mockUpload(), - mockBuild(), - mockPollError(), - mockLogFetch(), - ]; - const sourcePath = path.resolve('test/fixtures'); - const builder = new Builder({sourcePath}); - sinon.stub(builder.auth, 'getProjectId').resolves('el-gato'); - sinon.stub(builder.auth, 'getClient').resolves({ - request: async (options: GaxiosOptions) => { - return request(options); - }, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - try { - await builder.build(); - assert.fail('Expected to throw.'); - } catch (e) { - const err = e as BuildError; - assert.ok(err.log); - assert.ok( - err.log!.includes('🌳'), - ` + nock.disableNetConnect(); + + afterEach(() => { + nock.cleanAll(); + sinon.restore(); + }); + + describe('🙈 ignore rules', () => { + it('should return 0 rules if no .gcloudignore is available', async () => { + const builder = new Builder(); + const rules = await builder.getIgnoreRules(); + assert.deepStrictEqual(rules, []); + }); + + it('should return expected rules if .gcloudignore is available', async () => { + const expected = [ + '.gcloudignore', + '.git', + '.gitignore', + 'node_modules', + 'test/', + ]; + const gcloudignore = path.resolve('test/fixtures/.gcloudignore'); + await new Promise((resolve, reject) => { + fs.createReadStream(gcloudignore) + .pipe(fs.createWriteStream('.gcloudignore')) + .on('close', resolve) + .on('error', reject); + }); + const builder = new Builder(); + const rules = await builder.getIgnoreRules(); + fs.unlinkSync('.gcloudignore'); + assert.deepStrictEqual(rules, expected); + }); + }); + + describe('📦 pack & upload', () => { + it('should create a GCS bucket if the expected one does not exist', async () => { + const scopes = [ + mockBucketNotExists(), + mockBucketCreate(), + mockUpload(), + mockBuild(), + mockPoll(), + mockLogFetch(), + ]; + const sourcePath = path.resolve('test/fixtures'); + const builder = new Builder({sourcePath}); + sinon.stub(builder.auth, 'getProjectId').resolves('el-gato'); + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + sinon.stub(builder.auth, 'getClient').resolves({ + async request(options: GaxiosOptions) { + return 
request(options); + }, + } as any); + const result = await builder.build(); + for (const s of scopes) { + s.done(); + } + + assert.ok(result.metadata); + }); + + it('should PUT the file to Google Cloud Storage', async () => { + const builder = new Builder(); + }); + }); + + describe('🚨 error handing', () => { + it('should include a log with an error', async () => { + const scopes = [ + mockBucketExists(), + mockUpload(), + mockBuild(), + mockPollError(), + mockLogFetch(), + ]; + const sourcePath = path.resolve('test/fixtures'); + const builder = new Builder({sourcePath}); + sinon.stub(builder.auth, 'getProjectId').resolves('el-gato'); + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + sinon.stub(builder.auth, 'getClient').resolves({ + async request(options: GaxiosOptions) { + return request(options); + }, + } as any); + try { + await builder.build(); + assert.fail('Expected to throw.'); + } catch (error) { + const error_ = error as BuildError; + assert.ok(error_.log); + assert.ok( + error_.log.includes('🌳'), + ` Expected to match: ${chalk.green('🌳')} - ${chalk.red(err.log!)} - ` - ); - } - scopes.forEach(s => s.done()); - }); - }); - - describe('🌳 config', () => { - it('should find a Dockerfile if provided', async () => { - const config = await getConfig({ - sourcePath: path.resolve('test/fixtures/docker'), - tag: 'taggy', - projectId: 'el-gato', - }); - assert.strictEqual(config.steps![0].name, 'gcr.io/cloud-builders/docker'); - }); - - it('should throw an error if an unexpected config path is provided', async () => { - await assert.rejects( - getConfig({ - sourcePath: path.resolve('test/fixtures/docker'), - configPath: path.resolve('test/fixtures/docker/index.js'), - projectId: 'el-gato', - }), - /extension is not supported/ - ); - }); - }); - - describe('🏁 end to end', () => { - it('should work together end to end', async () => { - const scopes = [ - mockBucketExists(), - mockUpload(), - mockBuild(), - mockPoll(), - mockLogFetch(), - ]; - const sourcePath = path.resolve('test/fixtures'); - const builder = new Builder({sourcePath}); - sinon.stub(builder.auth, 'getProjectId').resolves('el-gato'); - sinon.stub(builder.auth, 'getClient').resolves({ - request: async (options: GaxiosOptions) => { - return request(options); - }, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any); - const result = await builder.build(); - scopes.forEach(s => s.done()); - assert.ok(result.metadata); - }); - }); + ${chalk.red(error_.log)} + `, + ); + } + + for (const s of scopes) { + s.done(); + } + }); + }); + + describe('🌳 config', () => { + it('should find a Dockerfile if provided', async () => { + const config = await getConfig({ + sourcePath: path.resolve('test/fixtures/docker'), + tag: 'taggy', + projectId: 'el-gato', + }); + assert.strictEqual(config.steps![0].name, 'gcr.io/cloud-builders/docker'); + }); + + it('should throw an error if an unexpected config path is provided', async () => { + await assert.rejects( + getConfig({ + sourcePath: path.resolve('test/fixtures/docker'), + configPath: path.resolve('test/fixtures/docker/index.js'), + projectId: 'el-gato', + }), + /extension is not supported/, + ); + }); + }); + + describe('🏁 end to end', () => { + it('should work together end to end', async () => { + const scopes = [ + mockBucketExists(), + mockUpload(), + mockBuild(), + mockPoll(), + mockLogFetch(), + ]; + const sourcePath = path.resolve('test/fixtures'); + const builder = new Builder({sourcePath}); + sinon.stub(builder.auth, 
'getProjectId').resolves('el-gato');
+			// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+			sinon.stub(builder.auth, 'getClient').resolves({
+				async request(options: GaxiosOptions) {
+					return request(options);
+				},
+			} as any);
+			const result = await builder.build();
+			for (const s of scopes) {
+				s.done();
+			}
+
+			assert.ok(result.metadata);
+		});
+	});
 });
 
 function mockBucketExists() {
-  return nock('https://storage.googleapis.com')
-    .get('/storage/v1/b/el-gato-gcb-staging-bbq')
-    .reply(200);
+	return nock('https://storage.googleapis.com')
+		.get('/storage/v1/b/el-gato-gcb-staging-bbq')
+		.reply(200);
 }
 
 function mockBucketNotExists() {
-  return nock('https://storage.googleapis.com')
-    .get('/storage/v1/b/el-gato-gcb-staging-bbq')
-    .reply(404);
+	return nock('https://storage.googleapis.com')
+		.get('/storage/v1/b/el-gato-gcb-staging-bbq')
+		.reply(404);
 }
 
 function mockBucketCreate() {
-  return nock('https://storage.googleapis.com')
-    .post('/storage/v1/b?project=el-gato', {
-      name: 'el-gato-gcb-staging-bbq',
-      lifecycle: {
-        rule: [{action: {type: 'Delete'}, condition: {age: 1}}],
-      },
-    })
-    .reply(200);
+	return nock('https://storage.googleapis.com')
+		.post('/storage/v1/b?project=el-gato', {
+			name: 'el-gato-gcb-staging-bbq',
+			lifecycle: {
+				rule: [{action: {type: 'Delete'}, condition: {age: 1}}],
+			},
+		})
+		.reply(200);
 }
 
 function mockUpload() {
-  return nock('https://storage.googleapis.com')
-    .post(url => {
-      return url.includes('/storage/v1/b/el-gato-gcb-staging-bbq/o?name=');
-    })
-    .reply(200);
+	return nock('https://storage.googleapis.com')
+		.post((url) => {
+			return url.includes('/storage/v1/b/el-gato-gcb-staging-bbq/o?name=');
+		})
+		.reply(200);
 }
 
 function mockBuild() {
-  return nock('https://cloudbuild.googleapis.com')
-    .post('/v1/projects/el-gato/builds')
-    .reply(200, {
-      name: 'not-a-real-operation',
-      metadata: {build: {logsBucket: 'gs://not-a-bucket', id: 'not-an-id'}},
-    });
+	return nock('https://cloudbuild.googleapis.com')
+		.post('/v1/projects/el-gato/builds')
+		.reply(200, {
+			name: 'not-a-real-operation',
+			metadata: {build: {logsBucket: 'gs://not-a-bucket', id: 'not-an-id'}},
+		});
 }
 
 function mockPoll() {
-  return nock('https://cloudbuild.googleapis.com')
-    .get('/v1/not-a-real-operation')
-    .reply(200, {done: true});
+	return nock('https://cloudbuild.googleapis.com')
+		.get('/v1/not-a-real-operation')
+		.reply(200, {done: true});
 }
 
 function mockLogFetch() {
-  return nock('https://storage.googleapis.com')
-    .get('/storage/v1/b/not-a-bucket/o/log-not-an-id.txt?alt=media')
-    .reply(200, '🌳');
+	return nock('https://storage.googleapis.com')
+		.get('/storage/v1/b/not-a-bucket/o/log-not-an-id.txt?alt=media')
+		.reply(200, '🌳');
 }
 
 function mockPollError() {
-  return nock('https://cloudbuild.googleapis.com')
-    .get('/v1/not-a-real-operation')
-    .reply(200, {error: '💩'});
+	return nock('https://cloudbuild.googleapis.com')
+		.get('/v1/not-a-real-operation')
+		.reply(200, {error: '💩'});
 }
diff --git a/tsconfig.json b/tsconfig.json
index 695f7a1..0525977 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,9 +1,9 @@
 {
-  "extends": "./node_modules/gts/tsconfig-google.json",
   "compilerOptions": {
-    "rootDir": ".",
+    "strict": true,
+    "target": "ES2022",
     "outDir": "build",
-    "module": "ES2020",
+    "module": "ES2022",
     "moduleResolution": "node",
     "allowSyntheticDefaultImports": true
   },
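
Note for reviewers: a quick way to sanity-check the public API after this change is to drive the exported Builder directly, the same way test/test.ts does. The sketch below is illustrative only and is not part of the patch; the compiled import path (build/src/index.js) and the sample sourcePath and tag values are assumptions, and a published consumer would import the package by its npm name instead.

// Illustrative sketch only, not part of the patch.
// Assumptions: the compiled entry point lives at build/src/index.js, and
// 'containers/web' / 'web' are placeholder sourcePath and tag values.
import {Builder, ProgressEvent} from './build/src/index.js';

const builder = new Builder({
	sourcePath: 'containers/web', // folder with cloudbuild.yaml, cloudbuild.json, or a Dockerfile
	tag: 'web', // only consulted when the config falls back to a Dockerfile
});

builder
	.on(ProgressEvent.CREATING_BUCKET, (bucket) => console.log(`created staging bucket ${bucket}`))
	.on(ProgressEvent.BUILDING, () => console.log('building container...'))
	.on(ProgressEvent.LOG, (data) => console.log(data));

// Top-level await, which the new ES2022 module target allows.
const result = await builder.build();
console.log(result.metadata.build.id, result.log);

Top-level await works in the sketch for the same reason src/cli.ts can now end with "await main()": the package compiles with module ES2022.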