Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 81 additions & 11 deletions .github/actions/surefire-report-summary/__tests__/main.test.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,20 @@
import {expect, test, jest, beforeEach, describe} from '@jest/globals'
import * as core from '@actions/core'
import { run } from '../src/main';
import { run, calculatePassPercentage } from '../src/main';
import { parse } from '../src/reader';


jest.mock('../src/reader');
jest.mock('@actions/core');

beforeEach(() => {
jest.clearAllMocks();
});
beforeEach(() => {
jest.clearAllMocks();
(core.summary.addHeading as jest.Mock).mockReturnValue(core.summary);
(core.summary.addTable as jest.Mock).mockReturnValue(core.summary);
(core.summary.addLink as jest.Mock).mockReturnValue(core.summary);
(core.summary.write as jest.Mock).mockReturnValue(Promise.resolve());
});

describe('main', () => {
test('should call parse and set output', async () => {
const filePath = 'path/to/file.xml';
Expand All @@ -35,12 +40,10 @@ describe('main', () => {
};
(parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results)));
(core.getInput as jest.Mock).mockReturnValue(reportPath);
(core.summary.addHeading as jest.Mock).mockReturnValueOnce(core.summary);
(core.summary.addTable as jest.Mock).mockReturnValueOnce(core.summary);
await run();
expect(core.summary.addLink).toHaveBeenCalledWith('View Test Results report', reportPath);
})
test('should call parse and add table to summary', async () => {
test('should call parse and add table to summary with pass rate', async () => {
const filePath = 'path/to/file.xml';
const results = {
tests: 2,
Expand All @@ -49,22 +52,23 @@ describe('main', () => {
skipped: 0
};
(parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results)));
(core.getInput as jest.Mock).mockReturnValueOnce(filePath);
(core.summary.addHeading as jest.Mock).mockReturnValueOnce(core.summary);
(core.getInput as jest.Mock).mockReturnValueOnce(filePath).mockReturnValueOnce('report');
await run();
expect(core.summary.addHeading).toHaveBeenLastCalledWith('Test Results');
expect(core.summary.addTable).toHaveBeenLastCalledWith([
[
{ data: 'Tests', header: true },
{ data: 'Failures', header: true },
{ data: 'Errors', header: true },
{ data: 'Skipped', header: true }
{ data: 'Skipped', header: true },
{ data: 'Pass Rate', header: true }
],
[
"2",
"1",
"1",
"0"
"0",
"0.00%"
]
]);
})
Expand All @@ -77,7 +81,73 @@ describe('main', () => {
expect(core.setFailed).toHaveBeenCalledWith(error.message);
})

test('should fail when pass percentage is below threshold', async () => {
const filePath = 'path/to/file.xml';
const results = {
tests: 10,
failures: 2,
errors: 0,
skipped: 0
};
(parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results)));
(core.getInput as jest.Mock)
.mockReturnValueOnce(filePath)
.mockReturnValueOnce('report')
.mockReturnValueOnce('95');
await run();
expect(core.setFailed).toHaveBeenCalledWith('Pass percentage 80.00% is below threshold of 95%');
})

test('should pass when pass percentage meets threshold', async () => {
const filePath = 'path/to/file.xml';
const results = {
tests: 10,
failures: 0,
errors: 0,
skipped: 0
};
(parse as jest.Mock).mockReturnValue(new Promise((resolve) => resolve(results)));
(core.getInput as jest.Mock)
.mockReturnValueOnce(filePath)
.mockReturnValueOnce('report')
.mockReturnValueOnce('95');
await run();
expect(core.setFailed).not.toHaveBeenCalled();
expect(core.info).toHaveBeenCalledWith('Pass percentage 100.00% meets or exceeds threshold of 95%');
})
})

describe('calculatePassPercentage', () => {
test('should calculate pass percentage correctly', () => {
const results = {
tests: 10,
failures: 1,
errors: 1,
skipped: 2
};
expect(calculatePassPercentage(results)).toBe(75);
});

test('should handle all skipped tests', () => {
const results = {
tests: 10,
failures: 0,
errors: 0,
skipped: 10
};
expect(calculatePassPercentage(results)).toBe(0);
});

test('should handle all passing tests', () => {
const results = {
tests: 10,
failures: 0,
errors: 0,
skipped: 0
};
expect(calculatePassPercentage(results)).toBe(100);
});
});



4 changes: 4 additions & 0 deletions .github/actions/surefire-report-summary/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@ inputs:
fail-on-test-errors:
required: false
description: 'if true, the action will fail if there are test errors'
pass-percentage:
required: false
description: 'if set, the action will fail if the percentage of passing tests is below this threshold (e.g. 95)'
default: '0'
runs:
using: 'node20'
main: 'dist/index.js'
36 changes: 31 additions & 5 deletions .github/actions/surefire-report-summary/src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,22 +11,48 @@ export async function run(): Promise<void> {
core.debug(`file-path: ${filePath}`);
core.debug(`report-path: ${filePath}`);
const results = await parse( read(filePath) );

const passPercentage = calculatePassPercentage(results);

await core.summary.addHeading('Test Results')
.addTable([
[{data: 'Tests', header: true}, {data: 'Failures', header: true}, {data: 'Errors', header: true},{data: 'Skipped', header: true}],
[results.tests.toString(), results.failures.toString(), results.errors.toString(), results.skipped.toString()]
[{data: 'Tests', header: true}, {data: 'Failures', header: true}, {data: 'Errors', header: true}, {data: 'Skipped', header: true}, {data: 'Pass Rate', header: true}],
[results.tests.toString(), results.failures.toString(), results.errors.toString(), results.skipped.toString(), `${passPercentage.toFixed(2)}%`]
])
.addLink('View Test Results report', reportPath)
.write();
core.debug(`Results: ${JSON.stringify(results)}`);
await failOnTestFailures(results) ? core.setFailed('Test failures found') : core.info('fail-on-test-failures is false, ignoring test failures');
await failOnTestErrors(results) ? core.setFailed('Test errors found') : core.info('fail-on-test-errors is false, ignoring test errors');

const passThreshold = parseFloat(core.getInput('pass-percentage'));

// If pass-percentage is set, use only that criteria
if (passThreshold > 0) {
if (passPercentage < passThreshold) {
core.setFailed(`Pass percentage ${passPercentage.toFixed(2)}% is below threshold of ${passThreshold}%`);
} else {
core.info(`Pass percentage ${passPercentage.toFixed(2)}% meets or exceeds threshold of ${passThreshold}%`);
}
} else {
// Only check individual failure conditions if pass-percentage is not set
if (await failOnTestFailures(results)) {
core.setFailed('Test failures found');
} else if (await failOnTestErrors(results)) {
core.setFailed('Test errors found');
} else {
core.info('All checks passed successfully');
}
}
} catch (error) {
core.setFailed(`${(error as Error)?.message ?? error}`)
}
}


/**
 * Percentage of executed tests that passed.
 *
 * Skipped tests are excluded from the denominator: only tests that
 * actually ran count toward the rate. Failures and errors both count
 * as not-passed. When every test was skipped (nothing executed),
 * the rate is defined as 0.
 *
 * @param results aggregated surefire counts (tests, failures, errors, skipped)
 * @returns pass rate in the range 0–100 (not rounded)
 */
export const calculatePassPercentage = (results: Results): number => {
  const executed = results.tests - results.skipped;
  if (executed === 0) {
    return 0;
  }
  const notPassed = results.failures + results.errors;
  return ((executed - notPassed) / executed) * 100;
}

export const failOnTestFailures = async (results: Results): Promise<boolean> => {
const fail_on_test_failures:boolean = core.getInput('fail-on-test-failures') === 'true';
Expand Down
27 changes: 27 additions & 0 deletions .github/workflows/test-surefire-action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# CI for the surefire-report-summary composite action: runs its Jest
# suite whenever the action's own sources change (push or PR).
name: Test Surefire Report Summary

on:
  push:
    paths:
      - '.github/actions/surefire-report-summary/**'
  pull_request:
    paths:
      - '.github/actions/surefire-report-summary/**'

jobs:
  test:
    runs-on: ubuntu-latest
    defaults:
      run:
        # Every step runs from the action's directory so npm finds its
        # package.json without per-step cd's.
        working-directory: .github/actions/surefire-report-summary
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'
          # cache-dependency-path is resolved from the repo root, not the
          # job's working-directory, so the full path is required here.
          cache-dependency-path: .github/actions/surefire-report-summary/package-lock.json
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        run: npm test