From c6d6e7f864460df23a6ae053b94f44a33826aa61 Mon Sep 17 00:00:00 2001 From: Andrew Newton Date: Thu, 20 Feb 2025 07:47:49 +0000 Subject: [PATCH 1/3] feat: Add pass percentage threshold option to surefire report summary --- .../surefire-report-summary/action.yaml | 4 +++ .../surefire-report-summary/src/main.ts | 36 ++++++++++++++++--- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/.github/actions/surefire-report-summary/action.yaml b/.github/actions/surefire-report-summary/action.yaml index 8b99f6c..73fcb65 100644 --- a/.github/actions/surefire-report-summary/action.yaml +++ b/.github/actions/surefire-report-summary/action.yaml @@ -13,6 +13,10 @@ inputs: fail-on-test-errors: required: false description: 'if true, the action will fail if there are test errors' + pass-percentage: + required: false + description: 'if set, the action will fail if the percentage of passing tests is below this threshold (e.g. 95)' + default: '0' runs: using: 'node20' main: 'dist/index.js' diff --git a/.github/actions/surefire-report-summary/src/main.ts b/.github/actions/surefire-report-summary/src/main.ts index 7be0650..7b2a6b9 100644 --- a/.github/actions/surefire-report-summary/src/main.ts +++ b/.github/actions/surefire-report-summary/src/main.ts @@ -11,22 +11,48 @@ export async function run(): Promise<void> { core.debug(`file-path: ${filePath}`); core.debug(`report-path: ${filePath}`); const results = await parse( read(filePath) ); + + const passPercentage = calculatePassPercentage(results); + await core.summary.addHeading('Test Results') .addTable([ - [{data: 'Tests', header: true}, {data: 'Failures', header: true}, {data: 'Errors', header: true},{data: 'Skipped', header: true}], - [results.tests.toString(), results.failures.toString(), results.errors.toString(), results.skipped.toString()] + [{data: 'Tests', header: true}, {data: 'Failures', header: true}, {data: 'Errors', header: true}, {data: 'Skipped', header: true}, {data: 'Pass Rate', header: true}], + 
[results.tests.toString(), results.failures.toString(), results.errors.toString(), results.skipped.toString(), `${passPercentage.toFixed(2)}%`] ]) .addLink('View Test Results report', reportPath) .write(); core.debug(`Results: ${JSON.stringify(results)}`); - await failOnTestFailures(results) ? core.setFailed('Test failures found') : core.info('fail-on-test-failures is false, ignoring test failures'); - await failOnTestErrors(results) ? core.setFailed('Test errors found') : core.info('fail-on-test-errors is false, ignoring test errors'); + + const passThreshold = parseFloat(core.getInput('pass-percentage')); + + // If pass-percentage is set, use only that criteria + if (passThreshold > 0) { + if (passPercentage < passThreshold) { + core.setFailed(`Pass percentage ${passPercentage.toFixed(2)}% is below threshold of ${passThreshold}%`); + } else { + core.info(`Pass percentage ${passPercentage.toFixed(2)}% meets or exceeds threshold of ${passThreshold}%`); + } + } else { + // Only check individual failure conditions if pass-percentage is not set + if (await failOnTestFailures(results)) { + core.setFailed('Test failures found'); + } else if (await failOnTestErrors(results)) { + core.setFailed('Test errors found'); + } else { + core.info('All checks passed successfully'); + } + } } catch (error) { core.setFailed(`${(error as Error)?.message ?? 
error}`) } } - +export const calculatePassPercentage = (results: Results): number => { const totalRun = results.tests - results.skipped; if (totalRun === 0) return 0; const passed = totalRun - (results.failures + results.errors); return (passed / totalRun) * 100; } export const failOnTestFailures = async (results: Results): Promise<boolean> => { const fail_on_test_failures:boolean = core.getInput('fail-on-test-failures') === 'true'; From 532fbb3d3f74c29403ce074fca9c4286ace073a5 Mon Sep 17 00:00:00 2001 From: Andrew Newton Date: Thu, 20 Feb 2025 07:58:46 +0000 Subject: [PATCH 2/3] feat: test coverage for pass percentage changes --- .../__tests__/main.test.ts | 92 ++++++++++++++++--- 1 file changed, 81 insertions(+), 11 deletions(-) diff --git a/.github/actions/surefire-report-summary/__tests__/main.test.ts b/.github/actions/surefire-report-summary/__tests__/main.test.ts index af6d59b..9600522 100644 --- a/.github/actions/surefire-report-summary/__tests__/main.test.ts +++ b/.github/actions/surefire-report-summary/__tests__/main.test.ts @@ -1,15 +1,20 @@ import {expect, test, jest, beforeEach, describe} from '@jest/globals' import * as core from '@actions/core' -import { run } from '../src/main'; +import { run, calculatePassPercentage } from '../src/main'; import { parse } from '../src/reader'; jest.mock('../src/reader'); jest.mock('@actions/core'); - beforeEach(() => { - jest.clearAllMocks(); - }); +beforeEach(() => { + jest.clearAllMocks(); + (core.summary.addHeading as jest.Mock).mockReturnValue(core.summary); + (core.summary.addTable as jest.Mock).mockReturnValue(core.summary); + (core.summary.addLink as jest.Mock).mockReturnValue(core.summary); + (core.summary.write as jest.Mock).mockReturnValue(Promise.resolve()); +}); + describe('main', () => { test('should call parse and set output', async () => { const filePath = 'path/to/file.xml'; @@ -35,12 +40,10 @@ describe('main', () => { }; (parse as jest.Mock).mockReturnValue(new Promise((resolve) => 
resolve(results))); (core.getInput as jest.Mock).mockReturnValue(reportPath); - (core.summary.addHeading as jest.Mock).mockReturnValueOnce(core.summary); - (core.summary.addTable as jest.Mock).mockReturnValueOnce(core.summary); await run(); expect(core.summary.addLink).toHaveBeenCalledWith('View Test Results report', reportPath); }) - test('should call parse and add table to summary', async () => { + test('should call parse and add table to summary with pass rate', async () => { const filePath = 'path/to/file.xml'; const results = { tests: 2, @@ -49,8 +52,7 @@ describe('main', () => { skipped: 0 }; (parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results))); - (core.getInput as jest.Mock).mockReturnValueOnce(filePath); - (core.summary.addHeading as jest.Mock).mockReturnValueOnce(core.summary); + (core.getInput as jest.Mock).mockReturnValueOnce(filePath).mockReturnValueOnce('report'); await run(); expect(core.summary.addHeading).toHaveBeenLastCalledWith('Test Results'); expect(core.summary.addTable).toHaveBeenLastCalledWith([ @@ -58,13 +60,15 @@ describe('main', () => { { data: 'Tests', header: true }, { data: 'Failures', header: true }, { data: 'Errors', header: true }, - { data: 'Skipped', header: true } + { data: 'Skipped', header: true }, + { data: 'Pass Rate', header: true } ], [ "2", "1", "1", - "0" + "0", + "0.00%" ] ]); }) @@ -77,7 +81,73 @@ describe('main', () => { expect(core.setFailed).toHaveBeenCalledWith(error.message); }) + test('should fail when pass percentage is below threshold', async () => { + const filePath = 'path/to/file.xml'; + const results = { + tests: 10, + failures: 2, + errors: 0, + skipped: 0 + }; + (parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results))); + (core.getInput as jest.Mock) + .mockReturnValueOnce(filePath) + .mockReturnValueOnce('report') + .mockReturnValueOnce('95'); + await run(); + expect(core.setFailed).toHaveBeenCalledWith('Pass percentage 80.00% is below threshold of 95%'); 
+ }) + + test('should pass when pass percentage meets threshold', async () => { + const filePath = 'path/to/file.xml'; + const results = { + tests: 10, + failures: 0, + errors: 0, + skipped: 0 + }; + (parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results))); + (core.getInput as jest.Mock) + .mockReturnValueOnce(filePath) + .mockReturnValueOnce('report') + .mockReturnValueOnce('95'); + await run(); + expect(core.setFailed).not.toHaveBeenCalled(); + expect(core.info).toHaveBeenCalledWith('Pass percentage 100.00% meets or exceeds threshold of 95%'); + }) }) +describe('calculatePassPercentage', () => { + test('should calculate pass percentage correctly', () => { + const results = { + tests: 10, + failures: 1, + errors: 1, + skipped: 2 + }; + expect(calculatePassPercentage(results)).toBe(75); + }); + + test('should handle all skipped tests', () => { + const results = { + tests: 10, + failures: 0, + errors: 0, + skipped: 10 + }; + expect(calculatePassPercentage(results)).toBe(0); + }); + + test('should handle all passing tests', () => { + const results = { + tests: 10, + failures: 0, + errors: 0, + skipped: 0 + }; + expect(calculatePassPercentage(results)).toBe(100); + }); +}); + From aa3b1ed8865cf1958f0053bb8af5a926d3c6a89a Mon Sep 17 00:00:00 2001 From: Andrew Newton Date: Thu, 20 Feb 2025 08:09:49 +0000 Subject: [PATCH 3/3] feat: add simple ci step for surefire-report-summary changes --- .github/workflows/test-surefire-action.yml | 27 ++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/test-surefire-action.yml diff --git a/.github/workflows/test-surefire-action.yml b/.github/workflows/test-surefire-action.yml new file mode 100644 index 0000000..89bdb10 --- /dev/null +++ b/.github/workflows/test-surefire-action.yml @@ -0,0 +1,27 @@ +name: Test Surefire Report Summary + +on: + push: + paths: + - '.github/actions/surefire-report-summary/**' + pull_request: + paths: + - 
'.github/actions/surefire-report-summary/**' + +jobs: + test: + runs-on: ubuntu-latest + defaults: + run: + working-directory: .github/actions/surefire-report-summary + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: .github/actions/surefire-report-summary/package-lock.json + - name: Install dependencies + run: npm ci + - name: Run tests + run: npm test \ No newline at end of file