From d32d4e938f2cd30ddba6f5cf549b0c8bbacf63f4 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 25 Oct 2023 10:24:51 -0600 Subject: [PATCH 1/5] feat: integrate stacks api tools --- .vscode/launch.json | 26 +++ .vscode/tasks.json | 37 ++++ docker/docker-compose.dev.postgres.yml | 11 + jest.config.js | 199 ++++++++++++++++++ src/postgres/__tests__/base-pg-store.test.ts | 121 +++++++++++ src/postgres/__tests__/connection.test.ts | 79 +++++++ src/postgres/base-pg-store.ts | 10 +- src/postgres/connection.ts | 9 +- src/postgres/migrations.ts | 109 +++++++++- .../__tests__/server-version.test.ts | 53 +++++ src/shutdown-handler/index.ts | 2 +- 11 files changed, 643 insertions(+), 13 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 .vscode/tasks.json create mode 100644 docker/docker-compose.dev.postgres.yml create mode 100644 jest.config.js create mode 100644 src/postgres/__tests__/base-pg-store.test.ts create mode 100644 src/postgres/__tests__/connection.test.ts create mode 100644 src/server-version/__tests__/server-version.test.ts diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..f9e29e0 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,26 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jest", + "program": "${workspaceFolder}/node_modules/jest/bin/jest", + "args": [ + "--testTimeout=3600000", + "--runInBand", + "--no-cache", + ], + "outputCapture": "std", + "console": "integratedTerminal", + "preLaunchTask": "npm: testenv:run", + "postDebugTask": "npm: testenv:stop", + "env": { + "PGHOST": "localhost", + "PGDATABASE": "postgres", + "PGUSER": "postgres", + "PGPASSWORD": "postgres", + }, + }, + ] +} diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..1065dca --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,37 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "npm: testenv:run", + "type": "shell", + "command": "npm run testenv:run -- -d", + "isBackground": true, + "problemMatcher": { + "pattern": { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + }, + "background": { + "activeOnStart": true, + "beginsPattern": ".", + "endsPattern": "." 
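+          // Assumed behavior note: a '.' pattern matches any output line, so VS Code
+          // treats this background task as ready as soon as it prints anything.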
+ } + } + }, + { + "label": "npm: testenv:stop", + "type": "shell", + "command": "npm run testenv:stop", + "presentation": { + "echo": true, + "reveal": "silent", + "focus": false, + "panel": "shared", + "showReuseMessage": true, + "clear": false + } + } + ] +} diff --git a/docker/docker-compose.dev.postgres.yml b/docker/docker-compose.dev.postgres.yml new file mode 100644 index 0000000..9f4be69 --- /dev/null +++ b/docker/docker-compose.dev.postgres.yml @@ -0,0 +1,11 @@ +version: '3.7' +services: + postgres: + image: "postgres:15" + ports: + - "5432:5432" + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + POSTGRES_PORT: 5432 diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..85190fb --- /dev/null +++ b/jest.config.js @@ -0,0 +1,199 @@ +/* + * For a detailed explanation regarding each configuration property, visit: + * https://jestjs.io/docs/configuration + */ + +module.exports = { + // All imported modules in your tests should be mocked automatically + // automock: false, + + // Stop running tests after `n` failures + // bail: 0, + + // The directory where Jest should store its cached dependency information + // cacheDirectory: "/private/var/folders/v3/swygw5ld38x59y9wtc2qv3fc0000gn/T/jest_dx", + + // Automatically clear mock calls, instances, contexts and results before every test + // clearMocks: false, + + // Indicates whether the coverage information should be collected while executing the test + // collectCoverage: false, + + // An array of glob patterns indicating a set of files for which coverage information should be collected + collectCoverageFrom: [ + "src/**/*.ts", + ], + + // The directory where Jest should output its coverage files + // coverageDirectory: undefined, + + // An array of regexp pattern strings used to skip coverage collection + coveragePathIgnorePatterns: [ + "/node_modules/", + "/src/@types/" + ], + + // Indicates which provider should be used to instrument code for coverage + coverageProvider: "v8", + + // A list of reporter names that Jest uses when writing coverage reports + // coverageReporters: [ + // "json", + // "text", + // "lcov", + // "clover" + // ], + + // An object that configures minimum threshold enforcement for coverage results + // coverageThreshold: undefined, + + // A path to a custom dependency extractor + // dependencyExtractor: undefined, + + // Make calling deprecated APIs throw helpful error messages + // errorOnDeprecated: false, + + // The default configuration for fake timers + // fakeTimers: { + // "enableGlobally": false + // }, + + // Force coverage collection from ignored files using an array of glob patterns + // forceCoverageMatch: [], + + // A path to a module which exports an async function that is triggered once before all test suites + // globalSetup: './tests/setup.ts', + + // A path to a module which exports an async function that is triggered once after all test suites + // globalTeardown: undefined, + + // A set of global variables that need to be available in all test environments + // globals: {}, + + // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. 
+ // maxWorkers: "50%", + + // An array of directory names to be searched recursively up from the requiring module's location + // moduleDirectories: [ + // "node_modules" + // ], + + // An array of file extensions your modules use + // moduleFileExtensions: [ + // "js", + // "mjs", + // "cjs", + // "jsx", + // "ts", + // "tsx", + // "json", + // "node" + // ], + + // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module + // moduleNameMapper: {}, + + // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader + // modulePathIgnorePatterns: [], + + // Activates notifications for test results + // notify: false, + + // An enum that specifies notification mode. Requires { notify: true } + // notifyMode: "failure-change", + + // A preset that is used as a base for Jest's configuration + preset: 'ts-jest', + + // Run tests from one or more projects + // projects: undefined, + + // Use this configuration option to add custom reporters to Jest + // reporters: undefined, + + // Automatically reset mock state before every test + // resetMocks: false, + + // Reset the module registry before running each individual test + // resetModules: false, + + // A path to a custom resolver + // resolver: undefined, + + // Automatically restore mock state and implementation before every test + // restoreMocks: false, + + // The root directory that Jest should scan for tests and modules within + rootDir: '', + + // A list of paths to directories that Jest should use to search for files in + // roots: [ + // "" + // ], + + // Allows you to use a custom runner instead of Jest's default test runner + // runner: "jest-runner", + + // The paths to modules that run some code to configure or set up the testing environment before each test + // setupFiles: [], + + // A list of paths to modules that run some code to configure or set up the testing framework before each test + // setupFilesAfterEnv: [], + + // The number of seconds after which a test is considered as slow and reported as such in the results. 
+ // slowTestThreshold: 5, + + // A list of paths to snapshot serializer modules Jest should use for snapshot testing + // snapshotSerializers: [], + + // The test environment that will be used for testing + // testEnvironment: "jest-environment-node", + + // Options that will be passed to the testEnvironment + // testEnvironmentOptions: {}, + + // Adds a location field to test results + // testLocationInResults: false, + + // The glob patterns Jest uses to detect test files + testMatch: [ + "**/__tests__/**/?(*.)+(spec|test).[tj]s?(x)", + "**/?(*.)+(spec|test).[tj]s?(x)" + ], + + // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped + testPathIgnorePatterns: [ + "/node_modules/", + "/dist/" + ], + + // The regexp pattern or array of patterns that Jest uses to detect test files + // testRegex: [], + + // This option allows the use of a custom results processor + // testResultsProcessor: undefined, + + // This option allows use of a custom test runner + // testRunner: "jest-circus/runner", + + // A map from regular expressions to paths to transformers + transform: {}, + + // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation + // transformIgnorePatterns: [ + // "/node_modules/", + // "\\.pnp\\.[^\\/]+$" + // ], + + // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them + // unmockedModulePathPatterns: undefined, + + // Indicates whether each individual test should be reported during the run + // verbose: undefined, + + // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode + // watchPathIgnorePatterns: [], + + // Whether to use watchman for file crawling + // watchman: true, + }; \ No newline at end of file diff --git a/src/postgres/__tests__/base-pg-store.test.ts b/src/postgres/__tests__/base-pg-store.test.ts new file mode 100644 index 0000000..aa92041 --- /dev/null +++ b/src/postgres/__tests__/base-pg-store.test.ts @@ -0,0 +1,121 @@ +import { BasePgStore, sqlTransactionContext } from '../base-pg-store'; +import { connectPostgres } from '../connection'; + +class TestPgStore extends BasePgStore { + static async connect(): Promise { + const sql = await connectPostgres({ usageName: 'test' }); + return new TestPgStore(sql); + } +} + +describe('BasePgStore', () => { + let db: TestPgStore; + + beforeEach(async () => { + db = await TestPgStore.connect(); + }); + + afterEach(async () => { + await db.close(); + }); + + test('bytea column serialization', async () => { + const vectors = [ + { + from: '0x0001', + to: '0x0001', + }, + { + from: '0X0002', + to: '0x0002', + }, + { + from: '0xFfF3', + to: '0xfff3', + }, + { + from: Buffer.from('0004', 'hex'), + to: '0x0004', + }, + { + from: new Uint16Array(new Uint8Array([0x00, 0x05]).buffer), + to: '0x0005', + }, + { + from: '\\x0006', + to: '0x0006', + }, + { + from: '\\xfFf7', + to: '0xfff7', + }, + { + from: '\\x', + to: '0x', + }, + { + from: '', + to: '0x', + }, + { + from: Buffer.alloc(0), + to: '0x', + }, + ]; + await db.sqlWriteTransaction(async sql => { + await sql` + CREATE TEMPORARY TABLE bytea_testing( + value bytea NOT NULL + ) ON COMMIT DROP + `; + for (const v of vectors) { + const query = await sql<{ value: string }[]>` + insert into bytea_testing (value) values (${v.from}) + returning value + `; + expect(query[0].value).toBe(v.to); + } + }); + const badInputs = ['0x123', 
'1234', '0xnoop', new Date(), 1234]; + for (const input of badInputs) { + const query = async () => + db.sql.begin(async sql => { + await sql` + CREATE TEMPORARY TABLE bytea_testing( + value bytea NOT NULL + ) ON COMMIT DROP + `; + return await sql`insert into bytea_testing (value) values (${input})`; + }); + await expect(query()).rejects.toThrow(); + } + }); + + test('postgres transaction connection integrity', async () => { + const usageName = 'postgres:test;datastore-crud'; + const obj = db.sql; + + expect(sqlTransactionContext.getStore()).toBeUndefined(); + await db.sqlTransaction(async sql => { + // Transaction flag is open. + expect(sqlTransactionContext.getStore()?.usageName).toBe(usageName); + // New connection object. + const newObj = sql; + expect(obj).not.toEqual(newObj); + expect(sqlTransactionContext.getStore()?.sql).toEqual(newObj); + + // Nested tx uses the same connection object. + await db.sqlTransaction(sql => { + expect(sqlTransactionContext.getStore()?.usageName).toBe(usageName); + expect(newObj).toEqual(sql); + }); + + // Getter returns the same connection object too. + expect(db.sql).toEqual(newObj); + }); + + // Back to normal. + expect(sqlTransactionContext.getStore()).toBeUndefined(); + expect(db.sql).toEqual(obj); + }); +}); diff --git a/src/postgres/__tests__/connection.test.ts b/src/postgres/__tests__/connection.test.ts new file mode 100644 index 0000000..60e52c7 --- /dev/null +++ b/src/postgres/__tests__/connection.test.ts @@ -0,0 +1,79 @@ +import { getPostgres } from '../connection'; + +function setTestEnvVars( + envVars: Record, + use: () => Promise +): Promise; +function setTestEnvVars(envVars: Record, use: () => void): void; +function setTestEnvVars( + envVars: Record, + use: () => void | Promise +): void | Promise { + const existing = Object.fromEntries( + Object.keys(envVars) + .filter(k => k in process.env) + .map(k => [k, process.env[k]]) + ); + const added = Object.keys(envVars).filter(k => !(k in process.env)); + Object.entries(envVars).forEach(([k, v]) => { + process.env[k] = v; + if (v === undefined) { + delete process.env[k]; + } + }); + const restoreEnvVars = () => { + added.forEach(k => delete process.env[k]); + Object.entries(existing).forEach(([k, v]) => (process.env[k] = v)); + }; + let runFn: void | Promise | undefined; + try { + runFn = use(); + if (runFn instanceof Promise) { + return runFn.finally(() => restoreEnvVars()); + } + } finally { + if (!(runFn instanceof Promise)) { + restoreEnvVars(); + } + } +} + +describe('postgres connection', () => { + test('postgres env var config', () => { + setTestEnvVars( + { + PGDATABASE: 'pg_db_db1', + PGUSER: 'pg_user_user1', + PGPASSWORD: 'pg_password_password1', + PGHOST: 'pg_host_host1', + PGPORT: '9876', + PGSSLMODE: 'allow', + PGAPPNAME: 'test-env-vars', + }, + () => { + const sql = getPostgres({ usageName: 'tests' }); + expect(sql.options.database).toBe('pg_db_db1'); + expect(sql.options.user).toBe('pg_user_user1'); + expect(sql.options.pass).toBe('pg_password_password1'); + expect(sql.options.host).toStrictEqual(['pg_host_host1']); + expect(sql.options.port).toStrictEqual([9876]); + expect(sql.options.ssl).toBe('allow'); + expect(sql.options.connection.application_name).toBe('test-env-vars:tests'); + } + ); + }); + + test('postgres uri config', () => { + const uri = + 'postgresql://test_user:secret_password@database.server.com:3211/test_db?ssl=true&search_path=test_schema&application_name=test-conn-str'; + const sql = getPostgres({ usageName: 'tests', connectionArgs: uri }); + 
expect(sql.options.database).toBe('test_db'); + expect(sql.options.user).toBe('test_user'); + expect(sql.options.pass).toBe('secret_password'); + expect(sql.options.host).toStrictEqual(['database.server.com']); + expect(sql.options.port).toStrictEqual([3211]); + expect(sql.options.ssl).toBe('true'); + expect(sql.options.connection.search_path).toBe('test_schema'); + expect(sql.options.connection.application_name).toBe('test-conn-str:tests'); + }); +}); diff --git a/src/postgres/base-pg-store.ts b/src/postgres/base-pg-store.ts index 5327ee0..90edd4f 100644 --- a/src/postgres/base-pg-store.ts +++ b/src/postgres/base-pg-store.ts @@ -6,7 +6,7 @@ import { isProdEnv } from '../helpers'; * AsyncLocalStorage used to determine if the current async context is running inside a SQL * transaction. */ -const sqlTransactionContext = new AsyncLocalStorage(); +export const sqlTransactionContext = new AsyncLocalStorage(); type SqlTransactionContext = { usageName: string; sql: PgSqlClient; @@ -20,7 +20,7 @@ type UnwrapPromiseArray = T extends any[] /** * Base class that provides access to a SQL client and SQL transaction management. */ -export class BasePgStore { +export abstract class BasePgStore { /** * Getter for a SQL client. If used inside `sqlTransaction`, the scoped client within the current * async context will be returned to guarantee transaction consistency. @@ -35,8 +35,8 @@ export class BasePgStore { this._sql = sql; } - async close() { - await this._sql.end(); + async close(args?: { timeout?: number }) { + await this._sql.end({ timeout: args?.timeout }); } /** @@ -89,7 +89,7 @@ export class BasePgStore { /** * Base module that extends PgStore functionality and allows organizing queries in separate files. */ -export class BasePgStoreModule { +export abstract class BasePgStoreModule { private readonly parent: BasePgStore; constructor(db: BasePgStore) { diff --git a/src/postgres/connection.ts b/src/postgres/connection.ts index 8226f5c..9824237 100644 --- a/src/postgres/connection.ts +++ b/src/postgres/connection.ts @@ -8,6 +8,7 @@ import { PG_TYPE_MAPPINGS } from './types'; export type PgSqlClient = postgres.Sql | postgres.TransactionSql; /** Postgres pending query or query fragment */ export type PgSqlQuery = postgres.PendingQuery; +export type PgSslMode = 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object; /** Postgres connection URI string */ export type PgConnectionUri = string; @@ -19,7 +20,7 @@ export type PgConnectionVars = { host?: string; port?: number; schema?: string; - ssl?: boolean; + ssl?: PgSslMode; application_name?: string; }; /** Postgres connection arguments */ @@ -57,7 +58,7 @@ export function standardizedConnectionArgs( password: process.env.PGPASSWORD, host: process.env.PGHOST, port: parseInt(process.env.PGPORT ?? 
'5432'),
-      ssl: true,
+      ssl: process.env.PGSSLMODE as PgSslMode | undefined,
       application_name: `${appName}:${appUsage}`,
     };
   }
@@ -84,7 +85,7 @@ export async function connectPostgres({
   connectionConfig,
 }: {
   usageName: string;
-  connectionArgs: PgConnectionArgs;
+  connectionArgs?: PgConnectionArgs;
   connectionConfig?: PgConnectionOptions;
 }): Promise<PgSqlClient> {
   const initTimer = stopwatch();
@@ -141,7 +142,7 @@ export function getPostgres({
   connectionConfig,
 }: {
   usageName: string;
-  connectionArgs: PgConnectionArgs;
+  connectionArgs?: PgConnectionArgs;
   connectionConfig?: PgConnectionOptions;
 }): PgSqlClient {
   const args = standardizedConnectionArgs(connectionArgs, usageName);
diff --git a/src/postgres/migrations.ts b/src/postgres/migrations.ts
index 2f40d35..6fc715d 100644
--- a/src/postgres/migrations.ts
+++ b/src/postgres/migrations.ts
@@ -1,7 +1,8 @@
 import PgMigrate from 'node-pg-migrate';
 import { MigrationDirection } from 'node-pg-migrate/dist/types';
 import { logger } from '../logger';
-import { PgConnectionArgs, standardizedConnectionArgs } from './connection';
+import { PgConnectionArgs, connectPostgres, standardizedConnectionArgs } from './connection';
+import { isDevEnv, isTestEnv } from '../helpers';
 
 /**
  * Run migrations in one direction.
@@ -12,8 +13,18 @@ import { PgConnectionArgs, standardizedConnectionArgs } from './connection';
 export async function runMigrations(
   dir: string,
   direction: MigrationDirection,
-  connectionArgs?: PgConnectionArgs
+  connectionArgs?: PgConnectionArgs,
+  opts?: {
+    // Bypass the NODE_ENV check when performing a "down" migration which irreversibly drops data.
+    dangerousAllowDataLoss?: boolean;
+  }
 ) {
+  if (!opts?.dangerousAllowDataLoss && direction !== 'up' && !isTestEnv && !isDevEnv) {
+    throw new Error(
+      'Whoa there! This is a testing function that will drop all data from PG. ' +
+        'Set NODE_ENV to "test" or "development" to enable migration testing.'
+    );
+  }
   const args = standardizedConnectionArgs(connectionArgs, 'migrations');
   await PgMigrate({
     dir,
@@ -44,7 +55,99 @@
  * @param dir - Migrations directory
  * @param connectionArgs - Postgres connection args
  */
-export async function cycleMigrations(dir: string, connectionArgs?: PgConnectionArgs) {
+export async function cycleMigrations(
+  dir: string,
+  connectionArgs?: PgConnectionArgs,
+  opts?: {
+    // Bypass the NODE_ENV check when performing a "down" migration which irreversibly drops data.
+    dangerousAllowDataLoss?: boolean;
+    checkForEmptyData?: boolean;
+  }
+) {
-  await runMigrations(dir, 'down', connectionArgs);
+  await runMigrations(dir, 'down', connectionArgs, opts);
+  if (
+    opts?.checkForEmptyData &&
+    (await databaseHasData(connectionArgs, { ignoreMigrationTables: true }))
+  ) {
+    throw new Error('Migration down process did not completely remove DB tables');
+  }
   await runMigrations(dir, 'up', connectionArgs);
 }
+
+/**
+ * Check the `pg_class` table for any data structures contained in the database. We will consider
+ * any and all results here as "data" contained in the DB, since anything that is not a completely
+ * empty DB could lead to strange errors when running the API. See:
+ * https://www.postgresql.org/docs/current/catalog-pg-class.html
+ * @returns `true` if the DB contains data, `false` otherwise
+ */
+export async function databaseHasData(
+  connectionArgs?: PgConnectionArgs,
+  opts?: {
+    ignoreMigrationTables?: boolean;
+  }
+): Promise<boolean> {
+  const sql = await connectPostgres({
+    usageName: 'contains-data-check',
+    connectionArgs: standardizedConnectionArgs(connectionArgs, 'contains-data-check'),
+  });
+  try {
+    const ignoreMigrationTables = opts?.ignoreMigrationTables ?? false;
+    const result = await sql<{ count: number }[]>`
+      SELECT COUNT(*)
+      FROM pg_class c
+      JOIN pg_namespace s ON s.oid = c.relnamespace
+      WHERE s.nspname = ${sql.options.connection.search_path}
+      ${ignoreMigrationTables ? sql`AND c.relname NOT LIKE 'pgmigrations%'` : sql``}
+    `;
+    return result.count > 0 && result[0].count > 0;
+  } catch (error: any) {
+    if (error.message?.includes('does not exist')) {
+      return false;
+    }
+    throw error;
+  } finally {
+    await sql.end();
+  }
+}
+
+/**
+ * Drops all tables from the Postgres DB. DANGEROUS!!!
+ */
+export async function dangerousDropAllTables(
+  connectionArgs?: PgConnectionArgs,
+  opts?: {
+    acknowledgePotentialCatastrophicConsequences?: 'yes';
+  }
+) {
+  if (opts?.acknowledgePotentialCatastrophicConsequences !== 'yes') {
+    throw new Error('Dangerous usage error.');
+  }
+  const sql = await connectPostgres({
+    usageName: 'dangerous-drop-all-tables',
+    connectionArgs: standardizedConnectionArgs(connectionArgs, 'dangerous-drop-all-tables'),
+  });
+  const schema = sql.options.connection.search_path;
+  try {
+    await sql.begin(async sql => {
+      const relNamesQuery = async (kind: string) => sql<{ relname: string }[]>`
+        SELECT relname
+        FROM pg_class c
+        JOIN pg_namespace s ON s.oid = c.relnamespace
+        WHERE s.nspname = ${schema} AND c.relkind = ${kind}
+      `;
+      // Remove materialized views first and tables second.
+      // Using CASCADE in these DROP statements also removes associated indexes and constraints.
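+      // pg_class.relkind values: 'm' = materialized view, 'r' = ordinary table.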
+      const views = await relNamesQuery('m');
+      for (const view of views) {
+        await sql`DROP MATERIALIZED VIEW IF EXISTS ${sql(view.relname)} CASCADE`;
+      }
+      const tables = await relNamesQuery('r');
+      for (const table of tables) {
+        await sql`DROP TABLE IF EXISTS ${sql(table.relname)} CASCADE`;
+      }
+    });
+  } finally {
+    await sql.end();
+  }
+}
diff --git a/src/server-version/__tests__/server-version.test.ts b/src/server-version/__tests__/server-version.test.ts
new file mode 100644
index 0000000..eb5387d
--- /dev/null
+++ b/src/server-version/__tests__/server-version.test.ts
+import * as fs from 'fs';
+import * as path from 'path';
+import * as os from 'os';
+import { spawnSync, execSync } from 'child_process';
+
+const scriptFilePath = path.resolve('bin/api-toolkit-git-info.js');
+const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), '.tmp'));
+
+describe('git info script', () => {
+  test('error when git repo data not available', () => {
+    const result = spawnSync(`node "${scriptFilePath}"`, {
+      cwd: tempDir,
+      shell: true,
+      encoding: 'utf8',
+    });
+    expect(result.status).toStrictEqual(1);
+    expect(result.stderr).toEqual(expect.stringContaining('not a git repository'));
+  });
+
+  test('error when no git tags found', () => {
+    execSync(
+      'git init && git config user.name test && git config user.email test && git commit --allow-empty -n -m test',
+      { cwd: tempDir }
+    );
+    const result = spawnSync(`node "${scriptFilePath}"`, {
+      cwd: tempDir,
+      shell: true,
+      encoding: 'utf8',
+    });
+    expect(result.status).toStrictEqual(1);
+    expect(result.stderr).toEqual(expect.stringContaining('no tag found'));
+  });
+
+  test('generates git info file correctly', () => {
+    execSync(
+      'git init && git config user.name test && git config user.email test && git commit --allow-empty -n -m test && git tag v1.2.3 && git branch -m my_branch',
+      { cwd: tempDir }
+    );
+    const result = spawnSync(`node "${scriptFilePath}"`, {
+      cwd: tempDir,
+      shell: true,
+      encoding: 'utf8',
+    });
+    expect(result.status).toStrictEqual(0);
+    const gitInfoFilePath = path.join(tempDir, '.git-info');
+    expect(fs.existsSync(gitInfoFilePath)).toBe(true);
+    const gitInfoContent = fs.readFileSync(gitInfoFilePath, { encoding: 'utf8' });
+    const gitInfoParts = gitInfoContent.split('\n');
+    expect(gitInfoParts[0]).toStrictEqual('my_branch');
+    expect(gitInfoParts[1]).toBeTruthy();
+    expect(gitInfoParts[2]).toStrictEqual('v1.2.3');
+  });
+});
diff --git a/src/shutdown-handler/index.ts b/src/shutdown-handler/index.ts
index 3ccba72..dddecc7 100644
--- a/src/shutdown-handler/index.ts
+++ b/src/shutdown-handler/index.ts
@@ -1,4 +1,4 @@
-import { logger } from "../logger";
+import { logger } from '../logger';
 
 const SHUTDOWN_SIGNALS = ['SIGINT', 'SIGTERM'] as const;
 

From 356b54d6193ea44f5ac3001c87b3369655d8d49d Mon Sep 17 00:00:00 2001
From: Rafael Cardenas
Date: Wed, 25 Oct 2023 10:28:23 -0600
Subject: [PATCH 2/5] build: add ci

---
 .eslintignore            |   1 +
 .eslintrc.js             |   2 +-
 .github/workflows/ci.yml | 103 +++++++++++++++++++++++++++++++++++++++
 package.json             |   7 ++-
 4 files changed, 111 insertions(+), 2 deletions(-)
 create mode 100644 .github/workflows/ci.yml

diff --git a/.eslintignore b/.eslintignore
index 1723d82..0ea0ae6 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,2 +1,3 @@
 node_modules/
+dist/
 .eslintrc.js
diff --git a/.eslintrc.js b/.eslintrc.js
index e8f7f48..7f95c3b 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -9,7 +9,7 @@ module.exports = {
     ecmaVersion: 2020,
     sourceType: 'module',
   },
-  ignorePatterns: ['*.config.js', 'config/*', 
'*.mjs', 'tests/*.js', 'client/*'], + ignorePatterns: ['*.config.js', 'bin/*.js'], plugins: ['@typescript-eslint', 'eslint-plugin-tsdoc', 'prettier'], rules: { 'prettier/prettier': 'error', diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..0199129 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,103 @@ +name: CI + +on: + push: + branches: + - master + - develop + tags-ignore: + - '**' + paths-ignore: + - '**/CHANGELOG.md' + - '**/package.json' + pull_request: + workflow_dispatch: + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version-file: '.nvmrc' + + - name: Cache node modules + uses: actions/cache@v2 + env: + cache-name: cache-node-modules + with: + path: | + ~/.npm + **/node_modules + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- + + - name: Install deps + run: npm ci --audit=false + + - name: Lint ESLint + run: npm run lint:eslint + + - name: Lint Prettier + run: npm run lint:prettier + + test: + runs-on: ubuntu-latest + env: + PGHOST: 127.0.0.1 + PGPORT: 5432 + PGUSER: postgres + PGPASSWORD: postgres + PGDATABASE: postgres + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - name: Use Node.js + uses: actions/setup-node@v2 + with: + node-version-file: '.nvmrc' + + - name: Cache node modules + uses: actions/cache@v2 + env: + cache-name: cache-node-modules + with: + path: | + ~/.npm + **/node_modules + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- + + - name: Install deps + run: npm ci --audit=false + + - name: Setup integration environment + run: | + sudo ufw disable + npm run testenv:run -- -d + npm run testenv:logs -- --no-color &> docker-compose-logs.txt & + + - name: Run tests + run: npm run test -- --coverage + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + + - name: Print integration environment logs + run: cat docker-compose-logs.txt + if: failure() + + - name: Teardown integration environment + run: npm run testenv:stop + if: always() diff --git a/package.json b/package.json index 3caf104..d374f92 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,12 @@ "typings": "./dist/index.d.ts", "scripts": { "build": "rimraf ./dist && tsc --project tsconfig.build.json && copyfiles -u 1 ./src/server-version/*.mjs ./dist", - "test": "jest" + "test": "jest", + "lint:eslint": "eslint . 
--ext .js,.jsx,.ts,.tsx -f unix", + "lint:prettier": "prettier --check src/**/*.ts", + "testenv:run": "docker-compose -f docker/docker-compose.dev.postgres.yml up", + "testenv:stop": "docker-compose -f docker/docker-compose.dev.postgres.yml down -v -t 0", + "testenv:logs": "docker-compose -f docker/docker-compose.dev.postgres.yml logs -t -f" }, "bin": { "api-toolkit-git-info": "./bin/api-toolkit-git-info.js" From ece088a3b32dd4a41c4650b58ca78e941aae3e58 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 25 Oct 2023 12:50:22 -0600 Subject: [PATCH 3/5] feat: create helpers folder --- src/fastify/fastify.ts | 2 +- src/helpers.ts | 7 -- src/helpers/index.ts | 2 + src/helpers/time.ts | 139 ++++++++++++++++++++++++++++++++++ src/helpers/values.ts | 135 +++++++++++++++++++++++++++++++++ src/index.ts | 3 +- src/postgres/base-pg-store.ts | 2 +- src/postgres/connection.ts | 2 +- src/postgres/helpers.ts | 48 ------------ src/postgres/index.ts | 1 - src/postgres/migrations.ts | 2 +- src/shutdown-handler/index.ts | 39 +--------- 12 files changed, 283 insertions(+), 99 deletions(-) delete mode 100644 src/helpers.ts create mode 100644 src/helpers/index.ts create mode 100644 src/helpers/time.ts create mode 100644 src/helpers/values.ts delete mode 100644 src/postgres/helpers.ts diff --git a/src/fastify/fastify.ts b/src/fastify/fastify.ts index 3e97b75..74d3bc4 100644 --- a/src/fastify/fastify.ts +++ b/src/fastify/fastify.ts @@ -3,7 +3,7 @@ import Fastify, { FastifyInstance } from 'fastify'; import FastifyMetrics, { IFastifyMetrics } from 'fastify-metrics'; import { PINO_LOGGER_CONFIG } from '../logger'; import { TypeBoxTypeProvider } from '@fastify/type-provider-typebox'; -import { isProdEnv } from '../helpers'; +import { isProdEnv } from '../helpers/values'; /** * Creates a Fastify server that handles Prometheus metrics and CORS headers automatically. diff --git a/src/helpers.ts b/src/helpers.ts deleted file mode 100644 index f40375d..0000000 --- a/src/helpers.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const isDevEnv = process.env.NODE_ENV === 'development'; -export const isTestEnv = process.env.NODE_ENV === 'test'; -export const isProdEnv = - process.env.NODE_ENV === 'production' || - process.env.NODE_ENV === 'prod' || - !process.env.NODE_ENV || - (!isTestEnv && !isDevEnv); diff --git a/src/helpers/index.ts b/src/helpers/index.ts new file mode 100644 index 0000000..430f360 --- /dev/null +++ b/src/helpers/index.ts @@ -0,0 +1,2 @@ +export * from './time'; +export * from './values'; diff --git a/src/helpers/time.ts b/src/helpers/time.ts new file mode 100644 index 0000000..3f4042d --- /dev/null +++ b/src/helpers/time.ts @@ -0,0 +1,139 @@ +/** + * Wait a set amount of milliseconds or until the timer is aborted. + * @param ms - Number of milliseconds to wait + * @param abortController - Abort controller + * @returns Promise + */ +export function timeout(ms: number, abortController?: AbortController): Promise { + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + resolve(); + }, ms); + abortController?.signal.addEventListener( + 'abort', + () => { + clearTimeout(timeout); + reject(new Error(`Timeout aborted`)); + }, + { once: true } + ); + }); +} + +/** + * Time the execution of an async function. 
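+ * Example (illustrative; `doWork` stands for any async function):
+ * `await time(() => doWork(), ms => logger.info(`took ${ms}ms`));`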
+ * @param fn - Async function + * @param onFinish - Callback with elapsed milliseconds + * @returns Promise + */ +export async function time( + fn: () => Promise, + onFinish: (elapsedMs: number) => void +): Promise { + const watch = stopwatch(); + try { + return await fn(); + } finally { + onFinish(watch.getElapsed()); + } +} + +/** + * Set an execution time limit for a promise. + * @param promise - The promise being capped to `timeoutMs` max execution time + * @param timeoutMs - Timeout limit in milliseconds + * @param wait - If we should wait another `timeoutMs` period for `promise` to resolve + * @param waitHandler - If `wait` is `true`, this closure will be executed before waiting another + * `timeoutMs` cycle + * @returns `true` if `promise` ended gracefully, `false` if timeout was reached + */ +export async function resolveOrTimeout( + promise: Promise, + timeoutMs: number, + wait: boolean = false, + waitHandler?: () => void +) { + let timer: NodeJS.Timeout; + const result = await Promise.race([ + new Promise((resolve, reject) => { + promise + .then(() => resolve(true)) + .catch(error => reject(error)) + .finally(() => clearTimeout(timer)); + }), + new Promise((resolve, _) => { + timer = setInterval(() => { + if (!wait) { + clearTimeout(timer); + resolve(false); + return; + } + if (waitHandler) { + waitHandler(); + } + }, timeoutMs); + }), + ]); + return result; +} + +export interface Stopwatch { + /** Milliseconds since stopwatch was created. */ + getElapsed: () => number; + /** Seconds since stopwatch was created. */ + getElapsedSeconds: () => number; + getElapsedAndRestart: () => number; + restart(): void; +} + +/** + * Start a `Stopwatch` that measures elapsed time based on `process.hrtime`. + * @returns Stopwatch + */ +export function stopwatch(): Stopwatch { + let start = process.hrtime.bigint(); + const result: Stopwatch = { + getElapsedSeconds: () => { + const elapsedMs = result.getElapsed(); + return elapsedMs / 1000; + }, + getElapsed: () => { + const end = process.hrtime.bigint(); + return Number((end - start) / 1_000_000n); + }, + getElapsedAndRestart: () => { + const end = process.hrtime.bigint(); + const result = Number((end - start) / 1_000_000n); + start = process.hrtime.bigint(); + return result; + }, + restart: () => { + start = process.hrtime.bigint(); + }, + }; + return result; +} + +export type Waiter = Promise & { + finish: (result: T) => void; + isFinished: boolean; +}; + +/** + * Creates a `Waiter` promise that can be resolved at a later time with a return value. + * @returns Waiter + */ +export function waiter(): Waiter { + let resolveFn: (result: T) => void; + const promise = new Promise(resolve => { + resolveFn = resolve; + }); + const completer = { + finish: (result: T) => { + completer.isFinished = true; + resolveFn(result); + }, + isFinished: false, + }; + return Object.assign(promise, completer); +} diff --git a/src/helpers/values.ts b/src/helpers/values.ts new file mode 100644 index 0000000..548ca67 --- /dev/null +++ b/src/helpers/values.ts @@ -0,0 +1,135 @@ +import { createHash } from 'node:crypto'; +import { isArrayBufferView } from 'node:util/types'; + +export const isDevEnv = process.env.NODE_ENV === 'development'; +export const isTestEnv = process.env.NODE_ENV === 'test'; +export const isProdEnv = + process.env.NODE_ENV === 'production' || + process.env.NODE_ENV === 'prod' || + !process.env.NODE_ENV || + (!isTestEnv && !isDevEnv); + +/** + * Digests a string value into a SHA256 hash. 
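+ * Example: `sha256('hello')` returns the 64-character lowercase hex digest (no `0x` prefix).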
+ * @param content - String input
+ * @returns Hashed value
+ */
+export function sha256(content: string) {
+  return createHash('sha256').update(content).digest('hex');
+}
+
+/**
+ * Parses a boolean string using conventions from CLI arguments, URL query params, and environment
+ * variables. If the input is a defined but empty string, true is returned. If the input is
+ * undefined or null, then false is returned. For example, if the input comes from a CLI arg like
+ * `--enable_thing` or URL query param like `?enable_thing`, then this function expects to receive a
+ * defined but empty string, and returns true. Otherwise, it checks for values like `true`, `1`,
+ * `on`, `yes` (and the inverses). Throws if an unexpected input value is provided.
+ */
+export function parseBoolean(val: string | undefined | null): boolean {
+  if (typeof val === 'undefined' || val === null) {
+    return false;
+  }
+  switch (val.trim().toLowerCase()) {
+    case '':
+    case 'true':
+    case '1':
+    case 'on':
+    case 'yes':
+      return true;
+    case 'false':
+    case '0':
+    case 'off':
+    case 'no':
+      return false;
+    default:
+      throw new Error(`Cannot parse boolean from "${val}"`);
+  }
+}
+
+/**
+ * Encodes a buffer as a `0x` prefixed lower-case hex string. Returns an empty string if the buffer
+ * is zero length.
+ */
+export function bufferToHex(buff: Buffer, prefix: boolean = true): string {
+  if (buff.length === 0) {
+    return '';
+  }
+  return prefix ? '0x' : '' + buff.toString('hex');
+}
+
+/**
+ * Decodes a `0x` prefixed hex string to a buffer.
+ * @param hex - A hex string with a `0x` prefix.
+ */
+export function hexToBuffer(hex: string): Buffer {
+  if (hex.length === 0) {
+    return Buffer.alloc(0);
+  }
+  if (!hex.startsWith('0x')) {
+    throw new Error(`Hex string is missing the "0x" prefix: "${hex}"`);
+  }
+  if (hex.length % 2 !== 0) {
+    throw new Error(`Hex string is an odd number of digits: ${hex}`);
+  }
+  return Buffer.from(hex.substring(2), 'hex');
+}
+
+/**
+ * Decodes a hex string to a Buffer, trimming the `0x` prefix if it exists. If the input is already
+ * a buffer, it is returned unchanged.
+ */
+export function coerceToBuffer(hex: string | Buffer | ArrayBufferView): Buffer {
+  if (typeof hex === 'string') {
+    if (hex.startsWith('0x')) {
+      hex = hex.substring(2);
+    }
+    if (hex.length % 2 !== 0) {
+      throw new Error(`Hex string is an odd number of characters: ${hex}`);
+    }
+    if (!/^[0-9a-fA-F]*$/.test(hex)) {
+      throw new Error(`Hex string contains non-hexadecimal characters: ${hex}`);
+    }
+    return Buffer.from(hex, 'hex');
+  } else if (Buffer.isBuffer(hex)) {
+    return hex;
+  } else if (isArrayBufferView(hex)) {
+    return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength);
+  } else {
+    throw new Error(`Cannot convert to Buffer, unexpected type: ${hex.constructor.name}`);
+  }
+}
+
+/**
+ * Converts a hex string into a UTF-8 string.
+ * @param hex - Hex string
+ * @returns UTF-8 string
+ */
+export function hexToUtf8String(hex: string): string {
+  const buffer = hexToBuffer(hex);
+  return buffer.toString('utf8');
+}
+
+/**
+ * Converts a number to a hex string. 
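+ * Example: `numberToHex(255)` returns `'0x000000ff'` (4 padding bytes by default).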
+ * @param number - Number + * @param paddingBytes - Padding bytes + * @returns Hex string + */ +export function numberToHex(number: number, paddingBytes: number = 4): string { + let result = number.toString(16); + if (result.length % 2 > 0) { + result = '0' + result; + } + if (paddingBytes && result.length / 2 < paddingBytes) { + result = '00'.repeat(paddingBytes - result.length / 2) + result; + } + return '0x' + result; +} + +/** + * Checks if a string has `0x` prefix. + * @param val - Hex string + * @returns Boolean + */ +export const has0xPrefix = (val: string) => val.substring(0, 2).toLowerCase() === '0x'; diff --git a/src/index.ts b/src/index.ts index 224b904..5f47d86 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,7 @@ -export * from './helpers'; +export * from './helpers/values'; export * from './logger'; export * from './fastify'; +export * from './helpers'; export * from './postgres'; export * from './server-version'; export * from './shutdown-handler'; diff --git a/src/postgres/base-pg-store.ts b/src/postgres/base-pg-store.ts index 90edd4f..05d4585 100644 --- a/src/postgres/base-pg-store.ts +++ b/src/postgres/base-pg-store.ts @@ -1,6 +1,6 @@ import { AsyncLocalStorage } from 'async_hooks'; import { PgSqlClient } from '.'; -import { isProdEnv } from '../helpers'; +import { isProdEnv } from '../helpers/values'; /** * AsyncLocalStorage used to determine if the current async context is running inside a SQL diff --git a/src/postgres/connection.ts b/src/postgres/connection.ts index 9824237..9dd5620 100644 --- a/src/postgres/connection.ts +++ b/src/postgres/connection.ts @@ -1,7 +1,7 @@ import * as postgres from 'postgres'; import { logger } from '../logger'; import { isPgConnectionError } from './errors'; -import { stopwatch, timeout } from './helpers'; +import { stopwatch, timeout } from '../helpers/time'; import { PG_TYPE_MAPPINGS } from './types'; /** Postgres client instance */ diff --git a/src/postgres/helpers.ts b/src/postgres/helpers.ts deleted file mode 100644 index 106ce60..0000000 --- a/src/postgres/helpers.ts +++ /dev/null @@ -1,48 +0,0 @@ -export function timeout(ms: number, abortController?: AbortController): Promise { - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => { - resolve(); - }, ms); - abortController?.signal.addEventListener( - 'abort', - () => { - clearTimeout(timeout); - reject(new Error(`Timeout aborted`)); - }, - { once: true } - ); - }); -} - -export interface Stopwatch { - /** Milliseconds since stopwatch was created. */ - getElapsed: () => number; - /** Seconds since stopwatch was created. 
*/ - getElapsedSeconds: () => number; - getElapsedAndRestart: () => number; - restart(): void; -} - -export function stopwatch(): Stopwatch { - let start = process.hrtime.bigint(); - const result: Stopwatch = { - getElapsedSeconds: () => { - const elapsedMs = result.getElapsed(); - return elapsedMs / 1000; - }, - getElapsed: () => { - const end = process.hrtime.bigint(); - return Number((end - start) / 1_000_000n); - }, - getElapsedAndRestart: () => { - const end = process.hrtime.bigint(); - const result = Number((end - start) / 1_000_000n); - start = process.hrtime.bigint(); - return result; - }, - restart: () => { - start = process.hrtime.bigint(); - }, - }; - return result; -} diff --git a/src/postgres/index.ts b/src/postgres/index.ts index 2947862..af84411 100644 --- a/src/postgres/index.ts +++ b/src/postgres/index.ts @@ -1,6 +1,5 @@ export * from './base-pg-store'; export * from './connection'; export * from './errors'; -export * from './helpers'; export * from './migrations'; export * from './types'; diff --git a/src/postgres/migrations.ts b/src/postgres/migrations.ts index 6fc715d..9e7a213 100644 --- a/src/postgres/migrations.ts +++ b/src/postgres/migrations.ts @@ -2,7 +2,7 @@ import PgMigrate from 'node-pg-migrate'; import { MigrationDirection } from 'node-pg-migrate/dist/types'; import { logger } from '../logger'; import { PgConnectionArgs, connectPostgres, standardizedConnectionArgs } from './connection'; -import { isDevEnv, isTestEnv } from '../helpers'; +import { isDevEnv, isTestEnv } from '../helpers/values'; /** * Run migrations in one direction. diff --git a/src/shutdown-handler/index.ts b/src/shutdown-handler/index.ts index dddecc7..d7757a9 100644 --- a/src/shutdown-handler/index.ts +++ b/src/shutdown-handler/index.ts @@ -1,3 +1,4 @@ +import { resolveOrTimeout } from '../helpers/time'; import { logger } from '../logger'; const SHUTDOWN_SIGNALS = ['SIGINT', 'SIGTERM'] as const; @@ -14,44 +15,6 @@ const shutdownConfigs: ShutdownConfig[] = []; let isShuttingDown = false; -/** - * Set an execution time limit for a promise. 
- * @param promise - The promise being capped to `timeoutMs` max execution time - * @param timeoutMs - Timeout limit in milliseconds - * @param wait - If we should wait another `timeoutMs` period for `promise` to resolve - * @param waitHandler - If `wait` is `true`, this closure will be executed before waiting another `timeoutMs` cycle - * @returns `true` if `promise` ended gracefully, `false` if timeout was reached - */ -export async function resolveOrTimeout( - promise: Promise, - timeoutMs: number, - wait: boolean = false, - waitHandler?: () => void -) { - let timer: NodeJS.Timeout; - const result = await Promise.race([ - new Promise((resolve, reject) => { - promise - .then(() => resolve(true)) - .catch(error => reject(error)) - .finally(() => clearTimeout(timer)); - }), - new Promise((resolve, _) => { - timer = setInterval(() => { - if (!wait) { - clearTimeout(timer); - resolve(false); - return; - } - if (waitHandler) { - waitHandler(); - } - }, timeoutMs); - }), - ]); - return result; -} - async function startShutdown() { if (isShuttingDown) { return; From 21fc1b2a454791168c8d1974ea6750d5d3bb440c Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Wed, 25 Oct 2023 16:57:06 -0600 Subject: [PATCH 4/5] docs: readme --- README.md | 13 ++++++++++++- src/index.ts | 3 +-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 55c5f35..d38afb2 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,8 @@ Please see each tool's source directory for additional documentation ### Postgres -* Superclass for connection support and SQL transaction management using [postgres.js](https://github.com/porsager/postgres) +* Superclass for connection support and SQL transaction management using + [postgres.js](https://github.com/porsager/postgres) * Connection helpers with automatic retry logic, using the standard postgres ENV variables * Migration tools for migration apply and rollback using [node-pg-migrate](https://github.com/salsita/node-pg-migrate) @@ -47,6 +48,16 @@ gracefully on unhandled exceptions or interrupt signals. tag, and latest commit * Helpers to extract version info to display at runtime or on documentation +### Fastify + +* API server creation with CORS, Typebox and Pino logging +* OpenAPI generator plugin with YAML and JSON exports + +### Helpers + +* Value conversion functions (hex strings, hashes, etc.) +* Timer tools (stopwatch, waiters, etc.) + ## License The API Toolkit Library is released under the Apache 2.0 License. See the LICENSE file for more diff --git a/src/index.ts b/src/index.ts index 5f47d86..fa3c7d9 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,7 +1,6 @@ -export * from './helpers/values'; -export * from './logger'; export * from './fastify'; export * from './helpers'; +export * from './logger'; export * from './postgres'; export * from './server-version'; export * from './shutdown-handler'; From 66a278499bb12c1e0f3c5efc7895c790ff309992 Mon Sep 17 00:00:00 2001 From: Rafael Cardenas Date: Thu, 26 Oct 2023 10:19:23 -0600 Subject: [PATCH 5/5] fix: buffer to hex --- src/helpers/values.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/helpers/values.ts b/src/helpers/values.ts index 548ca67..6f7dd98 100644 --- a/src/helpers/values.ts +++ b/src/helpers/values.ts @@ -52,10 +52,7 @@ export function parseBoolean(val: string | undefined | null): boolean { * is zero length. */ export function bufferToHex(buff: Buffer, prefix: boolean = true): string { - if (buff.length === 0) { - return ''; - } - return prefix ? 
'0x' : '' + buff.toString('hex'); + return buff.length === 0 ? '' : (prefix ? '0x' : '') + buff.toString('hex'); } /**