From 1bd9d2ee6e91acdf6caf4ee1e746de28872b55fa Mon Sep 17 00:00:00 2001 From: Walker Date: Wed, 15 Apr 2026 16:22:42 -0500 Subject: [PATCH] Test cFS/workflows#122 --- .gitattributes | 5 + .github/scripts/mcdc-analyze.sh | 142 +++++++ .github/scripts/mcdc-compare.sh | 203 ++++++++++ .github/workflows/add-to-project-reusable.yml | 381 ++++++++++++++++++ .github/workflows/add-to-project.yml | 15 + .../app-static-analysis-reusable.yml | 34 ++ .github/workflows/build-cfs-deprecated.yml | 229 ----------- .github/workflows/build-cfs-multitarget.yml | 120 ++++++ .github/workflows/build-cfs-rtems5.yml | 140 ------- .github/workflows/build-cfs.yml | 227 ----------- ...-deploy-doc.yml => build-doc-reusable.yml} | 86 ++-- .github/workflows/build-documentation.yml | 87 ---- .github/workflows/build-run-app-reusable.yml | 186 +++++++++ .github/workflows/build-run-app.yml | 119 ------ .github/workflows/format-check.yml | 43 +- .github/workflows/mcdc-reusable.yml | 208 ++++++++++ .github/workflows/mcdc.yml | 18 + .github/workflows/static-analysis-misra.yml | 131 ------ .github/workflows/static-analysis-reuse.yml | 9 - .github/workflows/static-analysis.yml | 122 ------ .github/workflows/test-cfs-qemu.yml | 206 ++++++++++ ...ge.yml => unit-test-coverage-reusable.yml} | 66 ++- actions/cppcheck/action.yml | 82 ++++ actions/healthcheck-logs/action.yml | 59 +++ actions/setup-app/action.yml | 87 ++++ actions/start-cfs-container/action.yml | 41 ++ actions/stop-cfs-container/action.yml | 24 ++ 27 files changed, 1907 insertions(+), 1163 deletions(-) create mode 100644 .gitattributes create mode 100644 .github/scripts/mcdc-analyze.sh create mode 100644 .github/scripts/mcdc-compare.sh create mode 100644 .github/workflows/add-to-project-reusable.yml create mode 100644 .github/workflows/add-to-project.yml create mode 100644 .github/workflows/app-static-analysis-reusable.yml delete mode 100644 .github/workflows/build-cfs-deprecated.yml create mode 100644 .github/workflows/build-cfs-multitarget.yml 
delete mode 100644 .github/workflows/build-cfs-rtems5.yml delete mode 100644 .github/workflows/build-cfs.yml rename .github/workflows/{build-deploy-doc.yml => build-doc-reusable.yml} (63%) delete mode 100644 .github/workflows/build-documentation.yml create mode 100644 .github/workflows/build-run-app-reusable.yml delete mode 100644 .github/workflows/build-run-app.yml create mode 100644 .github/workflows/mcdc-reusable.yml create mode 100644 .github/workflows/mcdc.yml delete mode 100644 .github/workflows/static-analysis-misra.yml delete mode 100644 .github/workflows/static-analysis-reuse.yml delete mode 100644 .github/workflows/static-analysis.yml create mode 100644 .github/workflows/test-cfs-qemu.yml rename .github/workflows/{unit-test-coverage.yml => unit-test-coverage-reusable.yml} (68%) create mode 100644 actions/cppcheck/action.yml create mode 100644 actions/healthcheck-logs/action.yml create mode 100644 actions/setup-app/action.yml create mode 100644 actions/start-cfs-container/action.yml create mode 100644 actions/stop-cfs-container/action.yml diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..f5b5e4f58 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +simple_defs export-ignore +simple.mk export-ignore +distbuild.mk export-ignore +apps/sbn_f_remap export-ignore +apps/sbn_udp export-ignore \ No newline at end of file diff --git a/.github/scripts/mcdc-analyze.sh b/.github/scripts/mcdc-analyze.sh new file mode 100644 index 000000000..721677d1d --- /dev/null +++ b/.github/scripts/mcdc-analyze.sh @@ -0,0 +1,142 @@ +#!/bin/bash + +# Redirect all echo outputs to mcdc_results.txt and capture gcov output +exec > >(tee -a mcdc_results.txt) 2>&1 + +# Pass the test modules after running unit tests +# Ex. 
echo "MODULES=$(grep -oP 'Test #\d+: \K[\w\-\_]+' test_results.txt | tr '\n' ' ' | sed 's/ $//')" >> $GITHUB_ENV +if [ -n "$MODULES" ]; then + modules="$MODULES" + echo "Test modules provided: " + for module in $modules; do + echo "$module" + done +else + echo "No test modules provided." + exit 1 +fi + +# Initialize overall counters +overall_total_functions=0 +overall_total_covered_functions=0 +overall_file_count=0 +overall_no_conditions_count=0 +module_count=0 + + +# Show coverage for each file in a module and summary coverage for each module +for module in $modules; do + module_name=$(basename "$module") + + # Skip specific files and directories + if [[ "$module_name" == "core-cpu1" || \ + "$module_name" == "Makefile" || \ + "$module_name" == "CTestTestfile" || \ + "$module_name" == "cmake_install" || \ + "$module_name" == "gmon" || \ + "$module_name" == *"stubs"* ]]; then + continue + fi + + module_name_no_testrunner=$(echo "$module_name" | sed 's/-testrunner$//') + + echo " " + echo "Processing $module_name_no_testrunner module..." + + # Initialize module-level counters + total_functions=0 + total_covered_functions=0 + file_count=0 + no_conditions_count=0 + + module_dirs="" + + if [ -n "$BASE_DIR" ]; then + # If BASE_DIR is provided, search within the BASE_DIR for the module directories. + # FIX, module dirs doesn't always show + module_dirs=$(find "$BASE_DIR" -type d -name "*${module_name}*") + echo "Base directory specified: $BASE_DIR" + echo "Searching for .gcda directory..." + else + # Otherwise, look for the default module directories. + module_dirs=$(find "build/native/default_cpu1" -type d -name "*${module_name}*.dir") + echo "No base directory provided: Searching for .gcda directory..." + fi + + if [ -n "$module_dirs" ]; then + for module_dir in $module_dirs; do + echo "Found module directory: $module_dir" + + parent_dir=$(dirname "$module_dir") + echo "Searching for .gcda files under parent directory: $parent_dir..." 
+ gcda_files=$(find "$parent_dir" -type d -name "*${module_name_no_testrunner}*.dir" -exec find {} -type f -name "*.gcda" \;) + + if [ -n "$gcda_files" ]; then + for gcda_file in $gcda_files; do + c_file=$(echo "$gcda_file" | sed 's/\.gcda$/.c/') + + echo "Processing corresponding .c file: $c_file" + echo "Running gcov on $c_file..." + + # Capture gcov output and remove header files + gcov_output=$(gcov -abcgi "$c_file" | sed "/\.h/,/^$/d") + + # Output the gcov result of each file and save to mcdc_results.txt + echo "$gcov_output" | tee -a mcdc_results.txt + + # Process gcov results for coverage summary + while IFS= read -r line; do + if [[ $line == *"Condition outcomes covered:"* ]]; then + condition_covered=$(echo "$line" | grep -oP 'Condition outcomes covered:\K[0-9.]+') + total_conditions_in_file=$(echo "$line" | grep -oP 'of \K[0-9]+') + + covered_functions_in_file=$(awk -v pct="$condition_covered" -v total="$total_conditions_in_file" 'BEGIN {printf "%.2f", (pct / 100) * total}') + + total_functions=$((total_functions + total_conditions_in_file)) + total_covered_functions=$(awk -v covered="$total_covered_functions" -v new_covered="$covered_functions_in_file" 'BEGIN {printf "%.2f", covered + new_covered}') + + file_count=$((file_count + 1)) + elif [[ $line == *"No conditions"* ]]; then + no_conditions_count=$((no_conditions_count + 1)) + fi + done <<< "$gcov_output" + done + else + echo "No .gcda files found for $module_name under parent directory $parent_dir." + fi + done + else + echo "Directory for module $module_name \(e.g., ${module_name}.dir\) not found." 
+ fi + + if [ "$total_functions" -ne 0 ]; then + average_condition_coverage=$(awk -v covered="$total_covered_functions" -v total="$total_functions" 'BEGIN {printf "%.2f", (covered / total) * 100}') + else + average_condition_coverage=0 + fi + + overall_total_functions=$((overall_total_functions + total_functions)) + overall_total_covered_functions=$(awk -v covered="$overall_total_covered_functions" -v new_covered="$total_covered_functions" 'BEGIN {printf "%.2f", covered + new_covered}') + overall_file_count=$((overall_file_count + file_count)) + overall_no_conditions_count=$((overall_no_conditions_count + no_conditions_count)) + + module_count=$((module_count + 1)) + + echo "Summary for $module_name_no_testrunner module:" + echo " Total files processed: $file_count" + echo " Number of files with no condition data: $no_conditions_count" + echo " Condition outcomes covered: ${average_condition_coverage}% of $total_functions" + echo " " +done + +if [ "$overall_total_functions" -ne 0 ]; then + overall_condition_coverage=$(awk -v covered="$overall_total_covered_functions" -v total="$overall_total_functions" 'BEGIN {printf "%.2f", (covered / total) * 100}') +else + overall_condition_coverage=0 +fi + +echo " " +echo "Overall summary:" +echo " Total files processed: $overall_file_count" +echo " Number of files with no condition data: $overall_no_conditions_count" +echo " Overall condition outcomes covered: ${overall_condition_coverage}% of $overall_total_functions" \ No newline at end of file diff --git a/.github/scripts/mcdc-compare.sh b/.github/scripts/mcdc-compare.sh new file mode 100644 index 000000000..548012b61 --- /dev/null +++ b/.github/scripts/mcdc-compare.sh @@ -0,0 +1,203 @@ +#!/bin/bash + +exec > >(tee -a mcdc_compare.txt) 2>&1 + +# Function to check if a file exists and return an error message for missing files +check_file_exists() { + file=$1 + if [ ! -f "$file" ]; then + echo "Error: File '$file' does not exist." 
+ missing_files=true + fi +} + +# Function to extract the relevant numbers from a module's "Summary for module" section +extract_module_numbers() { + file=$1 + module=$2 + + total_files_processed=$(sed -n "/^Summary for ${module} module:/,/^$/p" "$file" | head -n 4 | grep -Po 'Total files processed:\s*\K\d*') + no_condition_data=$(sed -n "/^Summary for ${module} module:/,/^$/p" "$file" | head -n 4 | grep -Po 'Number of files with no condition data:\s*\K\d+') + condition_outcomes_covered_percent=$(sed -n "/^Summary for ${module} module:/,/^$/p" "$file" | head -n 4 | grep -Po 'Condition outcomes covered:\s*\K[0-9]+(\.[0-9]+)?') + condition_outcomes_out_of=$(sed -n "/^Summary for ${module} module:/,/^$/p" "$file" | head -n 4 | grep -Po 'Condition outcomes covered:.*of\s*\K\d*') + + echo "$total_files_processed $no_condition_data $condition_outcomes_covered_percent $condition_outcomes_out_of" +} + +# Compare results for each module between two files +compare_mcdc_results() { + main_results_file=$1 + pr_results_file=$2 + modules_file=$3 + + # Initialize a flag to track if any files are missing + missing_files=false + + # Check if the files exist before proceeding + check_file_exists "$main_results_file" + check_file_exists "$pr_results_file" + check_file_exists "$modules_file" + + # If any files are missing, exit early + if [ "$missing_files" = true ]; then + echo "Error: One or more input files are missing. Exiting." 
+ exit 1 + fi + + # Read modules from modules.txt (passed as argument) + modules=$(cat "$modules_file") + + # Check if modules are empty or not + if [ -z "$modules" ]; then + echo "Error: No modules found in $modules_file" + exit 1 + fi + + # Initialize variables to store the output for modules with and without changes + modules_with_changes="" + modules_without_changes="" + + # Loop through all modules to compare each one + for module in $modules; do + + # Extract numbers for the main results file and PR results file for the current module + read main_total_files main_no_condition main_condition_covered_percent main_condition_out_of <<< $(extract_module_numbers "$main_results_file" "$module") + read pr_total_files pr_no_condition pr_condition_covered_percent pr_condition_out_of <<< $(extract_module_numbers "$pr_results_file" "$module") + + # Echo numbers extracted from each file for each module + echo -e "\nResults for module: $module" + echo "PR Branch - Total files processed: $pr_total_files, No condition data: $pr_no_condition, Covered condition %: $pr_condition_covered_percent%, Out of value: $pr_condition_out_of" + echo "Main Branch - Total files processed: $main_total_files, No condition data: $main_no_condition, Covered condition %: $main_condition_covered_percent%, Out of value: $main_condition_out_of" + + # Initialize variables to store differences + total_files_diff="" + no_condition_data_diff="" + condition_outcomes_covered_diff_percent="" + condition_outcomes_out_of_diff="" + + # Calculate difference between files + total_files_diff=$((pr_total_files - main_total_files)) + no_condition_data_diff=$((pr_no_condition - main_no_condition)) + condition_outcomes_covered_diff_percent=$(echo "$pr_condition_covered_percent - $main_condition_covered_percent" | bc) + condition_outcomes_out_of_diff=$((pr_condition_out_of - main_condition_out_of)) + + echo "Differences:" + echo " Total files processed difference: $total_files_diff" + echo " No condition data 
difference: $no_condition_data_diff" + echo " Covered condition % difference: $condition_outcomes_covered_diff_percent" + echo " Out of value difference: $condition_outcomes_out_of_diff" + echo " " + + changes="" + + if [ "$total_files_diff" -gt 0 ]; then + changes="${changes} Number of files processed: +$total_files_diff\n" + elif [ "$total_files_diff" -lt 0 ]; then + changes="${changes} Number of files processed: $total_files_diff\n" + fi + + if [ "$no_condition_data_diff" -gt 0 ]; then + changes="${changes} Number of files with no condition data: +$no_condition_data_diff\n" + elif [ "$no_condition_data_diff" -lt 0 ]; then + changes="${changes} Number of files with no condition data: $no_condition_data_diff\n" + fi + + if [ $(echo "$condition_outcomes_covered_diff_percent > 0" | bc) -eq 1 ]; then + changes="${changes} Percentage of covered conditions: +$condition_outcomes_covered_diff_percent%\n" + elif [ $(echo "$condition_outcomes_covered_diff_percent < 0" | bc) -eq 1 ]; then + changes="${changes} Percentage of covered conditions: $condition_outcomes_covered_diff_percent%\n" + fi + + if [ "$condition_outcomes_out_of_diff" -gt 0 ]; then + changes="${changes} Number of conditions: +$condition_outcomes_out_of_diff\n" + elif [ "$condition_outcomes_out_of_diff" -lt 0 ]; then + changes="${changes} Number of conditions: $condition_outcomes_out_of_diff\n" + fi + + if [ -n "$changes" ]; then + modules_with_changes="${modules_with_changes} $module\n$changes\n" + else + modules_without_changes="${modules_without_changes} $module\n" + fi + done + + echo " " + echo "MC/DC results compared to latest dev branch:" + echo " " + echo "Modules with changes:" + echo -e "$modules_with_changes" + echo "Modules without changes:" + echo -e "$modules_without_changes" + + # Write results to mcdc_comment.txt / pull request + if [ -n "$modules_with_changes" ]; then + echo "MC/DC results compared to latest dev branch:" > mcdc_comment.txt + echo "" >> mcdc_comment.txt + echo "Modules with 
changes:" >> mcdc_comment.txt + echo -e "$modules_with_changes" >> mcdc_comment.txt + echo "" >> mcdc_comment.txt + echo "See file uncovered.json for more details" + else + echo "No MC/DC changes were made." > mcdc_comment.txt + fi + +} + +# creates single json file that contains info on all uncovered branches +generate_json_report() { + jq_script=$(find $GITHUB_WORKSPACE -name "uncovered_filter.jq" | tail -n 1) + if [ -z "$jq_script" ]; then + echo "Error: Could not find uncovered_filter.jq" + return 1 + fi + + for zipped_file in *.gcov.json.gz; do + if [ -f "$zipped_file" ]; then + base_name="${zipped_file%.gcov.json.gz}" + gunzip -c "$zipped_file" > "${base_name}.json" + if [ -f "${base_name}.json" ]; then + jq -f "$jq_script" "${base_name}.json" > "${base_name}_filtered.json" + else + echo "Error: Failed to decompress $zipped_file" + return 1 + fi + else + echo "Warning: No .gcov.json.gz files found" + return 0 + fi + done + + if ls *_filtered.json 1> /dev/null 2>&1; then + jq -s '.' *_filtered.json > uncovered.json + echo "Successfully created uncovered.json" + else + echo "No filtered JSON files found to merge" + return 1 + fi + + if jq 'flatten' uncovered.json > temp.json; then + mv temp.json uncovered.json + else + rm -f temp.json + echo "Error processing JSON file" + exit 1 + fi + + if jq '.' 
uncovered.json > temp.json; then + mv temp.json uncovered.json + else + rm -f temp.json + echo "Error processing JSON file" + exit 1 + fi +} + +# Check the script arguments +if [ $# -ne 3 ]; then + echo "Usage: $0 " + exit 1 +fi + +# Run the comparison function with the provided arguments +generate_json_report +compare_mcdc_results "$1" "$2" "$3" \ No newline at end of file diff --git a/.github/workflows/add-to-project-reusable.yml b/.github/workflows/add-to-project-reusable.yml new file mode 100644 index 000000000..d58ace530 --- /dev/null +++ b/.github/workflows/add-to-project-reusable.yml @@ -0,0 +1,381 @@ +name: Add Issues or PRs to Project Reusable Workflow + +on: + workflow_call: + inputs: + project-url: + description: 'URL of the GitHub project to add items to' + required: false + type: string + # Default project is NASA cFS Development + default: 'https://github.com/orgs/nasa/projects/72' + +jobs: + add-to-project: + name: Add issue or pull request to project + runs-on: ubuntu-latest + steps: + - uses: actions/add-to-project@v1.0.2 + with: + project-url: ${{ inputs.project-url }} + github-token: ${{ secrets.ADD_TO_PROJECT_PAT }} + + - name: Add pull request to current sprint as to do + uses: actions/github-script@v7 + if: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.draft == false }} + env: + PROJECT_URL: ${{ inputs.project-url }} + with: + github-token: ${{ secrets.ADD_TO_PROJECT_PAT }} + script: | + const projectUrl = process.env.PROJECT_URL; + const pr = context.payload.pull_request; + const prId = pr.node_id; + + // Extract organization and project number from URL + // Format: https://developer.nasa.gov/orgs/{org}/projects/{number} + const urlMatch = projectUrl.match(/orgs\/([^\/]+)\/projects\/(\d+)/); + if (!urlMatch) { + core.setFailed(`Invalid project URL format: ${projectUrl}`); + return; + } + + const orgName = urlMatch[1]; + const projectNumber = parseInt(urlMatch[2]); + + // Get project ID and field configurations 
dynamically + const projectData = await github.graphql(` + query($org: String!, $number: Int!) { + organization(login: $org) { + projectV2(number: $number) { + id + title + sprintField: field(name: "Sprint") { + ... on ProjectV2IterationField { + id + name + configuration { + iterations { + id + title + startDate + duration + } + completedIterations { + id + title + startDate + duration + } + } + } + } + statusField: field(name: "Status") { + ... on ProjectV2SingleSelectField { + id + name + options { + id + name + } + } + } + } + } + } + `, { + org: orgName, + number: projectNumber + }); + + if (!projectData.organization || !projectData.organization.projectV2) { + core.setFailed(`Project #${projectNumber} not found in organization ${orgName}`); + return; + } + + const project = projectData.organization.projectV2; + const projectId = project.id; + const sprintField = project.sprintField; + const statusField = project.statusField; + + if (!sprintField) { + core.setFailed(`Sprint field not found in project`); + return; + } + + if (!statusField) { + core.setFailed(`Status field not found in project`); + return; + } + + // Find the Todo option in Status field + const todoOption = statusField.options.find(opt => opt.name === 'Todo'); + if (!todoOption) { + core.setFailed(`Todo option not found in Status field`); + return; + } + + // Combine active and completed iterations + const allIterations = [ + ...(sprintField.configuration.iterations || []), + ...(sprintField.configuration.completedIterations || []) + ]; + + if (allIterations.length === 0) { + core.setFailed('No iterations found in Sprint field'); + return; + } + + // Find current iteration + const today = new Date(); + const currentIteration = allIterations.find(iter => { + const start = new Date(iter.startDate); + const end = new Date(start); + end.setDate(start.getDate() + iter.duration); + return today >= start && today <= end; + }); + + if (!currentIteration) { + core.setFailed("No current iteration found."); 
+ return; + } + + // Add PR to the project + const addItem = await github.graphql(` + mutation($projectId: ID!, $contentId: ID!) { + addProjectV2ItemById(input: { + projectId: $projectId, + contentId: $contentId + }) { + item { + id + } + } + } + `, { + projectId, + contentId: prId + }); + + const itemId = addItem.addProjectV2ItemById.item.id; + + console.log(`PR added to project with item ID: ${itemId}`); + + // Set iteration field to current iteration + await github.graphql(` + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $iterationId: String!) { + updateProjectV2ItemFieldValue(input: { + projectId: $projectId, + itemId: $itemId, + fieldId: $fieldId, + value: { + iterationId: $iterationId + } + }) { + projectV2Item { + id + } + } + } + `, { + projectId, + itemId, + fieldId: sprintField.id, + iterationId: currentIteration.id + }); + + console.log(`Set PR to iteration: ${currentIteration.title}`); + + // Set status to "Todo" + await github.graphql(` + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!, $optionId: String!) 
{ + updateProjectV2ItemFieldValue(input: { + projectId: $projectId, + itemId: $itemId, + fieldId: $fieldId, + value: { + singleSelectOptionId: $optionId + } + }) { + projectV2Item { + id + } + } + } + `, { + projectId, + itemId, + fieldId: statusField.id, + optionId: todoOption.id + }); + + console.log("Set PR status to Todo"); + + remove-draft-pr-from-sprint: + if: ${{ github.event_name == 'pull_request_target' && github.event.action == 'converted_to_draft' }} + name: Remove draft PR from current sprint + runs-on: ubuntu-latest + steps: + - name: Remove draft PR from sprint iteration + uses: actions/github-script@v7 + env: + PROJECT_URL: ${{ inputs.project-url }} + with: + github-token: ${{ secrets.ADD_TO_PROJECT_PAT }} + script: | + const projectUrl = process.env.PROJECT_URL; + const pr = context.payload.pull_request; + const prNodeId = pr.node_id; + + console.log(`PR #${pr.number} is now draft, checking if it needs to be removed from sprint`); + + // Extract organization and project number from URL + const urlMatch = projectUrl.match(/orgs\/([^\/]+)\/projects\/(\d+)/); + if (!urlMatch) { + core.setFailed(`Invalid project URL format: ${projectUrl}`); + return; + } + + const orgName = urlMatch[1]; + const projectNumber = parseInt(urlMatch[2]); + + // Get project ID and Sprint field ID + const projectData = await github.graphql(` + query($org: String!, $number: Int!) { + organization(login: $org) { + projectV2(number: $number) { + id + sprintField: field(name: "Sprint") { + ... 
on ProjectV2IterationField { + id + } + } + } + } + } + `, { + org: orgName, + number: projectNumber + }); + + if (!projectData.organization || !projectData.organization.projectV2) { + core.setFailed(`Project #${projectNumber} not found`); + return; + } + + const projectId = projectData.organization.projectV2.id; + const iterationFieldId = projectData.organization.projectV2.sprintField.id; + + try { + // Find the PR in the project + let hasNextPage = true; + let cursor = null; + let foundPrItem = null; + + while (hasNextPage && !foundPrItem) { + const result = await github.graphql(` + query($projectId: ID!, $cursor: String) { + node(id: $projectId) { + ... on ProjectV2 { + items(first: 100, after: $cursor) { + pageInfo { + hasNextPage + endCursor + } + nodes { + id + content { + ... on PullRequest { + id + number + repository { + name + owner { + login + } + } + } + } + fieldValues(first: 20) { + nodes { + ... on ProjectV2ItemFieldIterationValue { + field { + ... on ProjectV2IterationField { + id + } + } + iterationId + title + } + } + } + } + } + } + } + } + `, { + projectId, + cursor + }); + + const items = result.node.items.nodes; + console.log(`Checking batch of ${items.length} items for PR #${pr.number}`); + + // Look for the PR + foundPrItem = items.find(item => + item.content && + item.content.id === prNodeId + ); + + if (foundPrItem) { + console.log(`Found PR #${pr.number} in project items`); + break; + } + + hasNextPage = result.node.items.pageInfo.hasNextPage; + cursor = result.node.items.pageInfo.endCursor; + } + + if (!foundPrItem) { + console.log(`PR #${pr.number} not found in project items - nothing to update`); + return; + } + + // Check if PR is assigned to an iteration + const iterationField = foundPrItem.fieldValues.nodes.find( + value => value.field && value.field.id === iterationFieldId + ); + + if (!iterationField || !iterationField.iterationId) { + console.log(`PR #${pr.number} is not assigned to any sprint iteration`); + return; + } + + 
console.log(`PR #${pr.number} is currently assigned to iteration: ${iterationField.title}`); + + // Clear the iteration field (set to null) + await github.graphql(` + mutation($projectId: ID!, $itemId: ID!, $fieldId: ID!) { + clearProjectV2ItemFieldValue(input: { + projectId: $projectId, + itemId: $itemId, + fieldId: $fieldId + }) { + projectV2Item { + id + } + } + } + `, { + projectId, + itemId: foundPrItem.id, + fieldId: iterationFieldId + }); + + console.log(`Successfully removed PR #${pr.number} from sprint iteration: ${iterationField.title}`); + + } catch (error) { + console.log(`Error removing PR from sprint:`, error.message); + console.log(`Details:`, JSON.stringify(error, null, 2)); + } \ No newline at end of file diff --git a/.github/workflows/add-to-project.yml b/.github/workflows/add-to-project.yml new file mode 100644 index 000000000..172d0b14b --- /dev/null +++ b/.github/workflows/add-to-project.yml @@ -0,0 +1,15 @@ +name: Add Issue or PR to Project + +on: + issues: + types: [opened] + pull_request_target: + types: [opened, ready_for_review, converted_to_draft] + +jobs: + add-to-project: + name: Add issue or pull request to project + # do not run on integration candidate branches + if: ${{ !startsWith(github.head_ref != '' && github.head_ref || github.ref_name, 'ic-') }} + uses: arielswalker/cFS/.github/workflows/add-to-project-reusable.yml@test-cfs/workflows122 + secrets: inherit \ No newline at end of file diff --git a/.github/workflows/app-static-analysis-reusable.yml b/.github/workflows/app-static-analysis-reusable.yml new file mode 100644 index 000000000..1ff37602e --- /dev/null +++ b/.github/workflows/app-static-analysis-reusable.yml @@ -0,0 +1,34 @@ +name: Static Analysis Reusable Workflow + +on: + workflow_call: + inputs: + # Optional inputs + source-dir: + description: Directory containing source files + type: string + default: 'fsw' + +# Force bash to apply pipefail option so pipeline failures aren't masked +defaults: + run: + shell: bash + 
+jobs: + static-analysis: + name: Run Static Analysis + runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-linux:latest + timeout-minutes: 15 + + steps: + - name: Checkout Source + uses: actions/checkout@v4 + with: + path: source + + - name: Run cppcheck + uses: nasa/cFS/actions/cppcheck@dev + id: run-cppcheck + with: + source-dir: source/${{ inputs.source-dir }} \ No newline at end of file diff --git a/.github/workflows/build-cfs-deprecated.yml b/.github/workflows/build-cfs-deprecated.yml deleted file mode 100644 index fbd048f1c..000000000 --- a/.github/workflows/build-cfs-deprecated.yml +++ /dev/null @@ -1,229 +0,0 @@ -name: Build, Test, and Run [OMIT_DEPRECATED = false] - -# Run every time a new commit pushed or for pull requests -on: - push: - branches: - - dev - - main - pull_request: - types: - - opened - - reopened - - synchronize - workflow_dispatch: - -env: - SIMULATION: native - OMIT_DEPRECATED: false - ENABLE_UNIT_TESTS: true - CTEST_OUTPUT_ON_FAILURE: true - REPO_NAME: ${{ github.event.repository.name }} - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - #Checks for duplicate actions. Skips push actions if there is a matching or duplicate pull-request action. 
- check-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - build-cfs-omit-deprecated-false: - name: "[Deprecated] Build" - needs: check-for-duplicates - if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - - steps: - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: deprecated-build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - name: Checkout cFS - if: steps.cache-src-bld.outputs.cache-hit != 'true' - uses: actions/checkout@v4 - with: - submodules: true - - - name: Check versions - run: git submodule - - - name: Copy Makefile - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - - - name: Prep Build - run: make prep - - - name: Make - run: make install - - tests-and-coverage-omit-deprecated-false: - name: "[Deprecated] Run Unit Tests and Check Coverage" - needs: build-cfs-omit-deprecated-false - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - ENABLE_UNIT_TESTS: true - - steps: - - name: Install Dependencies - run: sudo apt-get install lcov -y - - - name: Cache Source and Deprecated Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: deprecated-build-${{ 
github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - name: Run Tests - run: make test - - - name: Check Coverage - run: make lcov - - run-cfs-omit-deprecated-false: - name: "[Deprecated] Run cFS" - needs: build-cfs-omit-deprecated-false - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - - steps: - - name: Cache Source and Deprecated Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: deprecated-build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - - name: List cpu1 - run: ls build/exe/cpu1/ - - - name: Run cFS - run: | - ./core-cpu1 > cFS_startup_cpu1.txt & - sleep 30 - ../host/cmdUtil --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 - working-directory: ./build/exe/cpu1/ - - - name: Archive cFS Startup Artifacts - uses: actions/upload-artifact@v4 - with: - name: cFS-startup-log-omit-deprecate-false${{ matrix.buildtype }} - path: ./build/exe/cpu1/cFS_startup_cpu1.txt - - - name: Check for cFS Warnings - run: | - if [[ -n $(grep -i "warn\|err\|fail" cFS_startup_cpu1.txt) ]]; then - echo "Must resolve warn|err|fail in cFS startup before submitting a pull request" - echo "" - grep -i 'warn\|err\|fail' cFS_startup_cpu1.txt - exit -1 - fi - working-directory: ./build/exe/cpu1/ - - run-functional-test-app-omit-deprecated-false: - #Continue if check-for-duplicates found no duplicates. Always runs for pull-requests. 
- needs: run-cfs-omit-deprecated-false - name: "[DEPRECATED] cFS Functional Tests" - runs-on: ubuntu-22.04 - timeout-minutes: 15 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - steps: - - name: Cache Source and Deprecated Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: deprecated-build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - name: List cpu1 - run: ls build/exe/cpu1/ - - # Run cFS, send commands to set perf trigger and start perf data, and run functional tests - - name: Run cFS Functional Tests - run: | - ./core-cpu1 & - sleep 10 - ../host/cmdUtil --pktid=0x1806 --cmdcode=17 --endian=LE --uint32=3 --uint32=0x40000000 - ../host/cmdUtil --pktid=0x1806 --cmdcode=14 --endian=LE --uint32=2 - ../host/cmdUtil --pktid=0x1806 --cmdcode=4 --endian=LE --string="20:CFE_TEST" --string="20:CFE_TestMain" --string="64:cfe_testcase" --uint64=16384 --uint8=0 --uint8=0 --uint16=100 --uint32=0 - sleep 30 - counter=0 - while [[ ! -f cf/cfe_test.log ]]; do - temp=$(grep -c "BEGIN" cf/cfe_test.tmp) - - if [ $temp -eq $counter ]; then - echo "Test is frozen. 
Quiting" - break - fi - counter=$(grep -c "BEGIN" cf/cfe_test.tmp) - echo "Waiting for CFE Tests" - sleep 120 - done - - ../host/cmdUtil --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 - working-directory: ./build/exe/cpu1/ - - - name: Archive Functional Test Artifacts - uses: actions/upload-artifact@v4 - with: - name: cFS-functional-test-log-omit-deprecate-false-${{ matrix.buildtype }} - path: ./build/exe/cpu1/cf/cfe_test.log - - - name: Check for cFS Warnings - run: | - if [[ -z $(grep -i "SUMMARY.*FAIL::0.*TSF::0.*TTF::0" cf/cfe_test.log) ]]; then - echo "Must resolve Test Failures in cFS Test App before submitting a pull request" - echo "" - grep -i '\[ FAIL]\|\[ TSF]\|\[ TTF]' cf/cfe_test.log - exit -1 - fi - working-directory: ./build/exe/cpu1/ diff --git a/.github/workflows/build-cfs-multitarget.yml b/.github/workflows/build-cfs-multitarget.yml new file mode 100644 index 000000000..78cbbc6e9 --- /dev/null +++ b/.github/workflows/build-cfs-multitarget.yml @@ -0,0 +1,120 @@ +name: Build and Test using multitarget makefile + +# Run every time a new commit pushed or for pull requests +on: + workflow_call: + inputs: + config-name: + description: 'configuration to build' + type: string + required: false + default: 'native_eds' + run-local-tests: + description: 'whether to execute test procedures locally' + type: boolean + required: false + default: false + omit-deprecated: + description: 'whether to use OMIT_DEPRECATED flag' + type: boolean + required: false + default: false + container-image: + description: 'container image to use for build' + type: string + required: false + default: 'aetd-dockerlab.gsfc.nasa.gov/gsfc-cfs/github-actions-ci-cd/cfsbuildenv-ubuntu22' + check-coverage: + description: 'whether to execute gcov locally' + type: boolean + required: false + default: false + compression-type: + description: 'which compression type to use, xz or gz is supported' + type: string + required: false + default: xz + +env: + OMIT_DEPRECATED: ${{ 
inputs.omit-deprecated }} + +# Force bash to apply pipefail option so pipeline failures aren't masked +defaults: + run: + shell: bash + +jobs: + build-cfs: + runs-on: ubuntu-22.04 + container: + image: ${{ inputs.container-image }} + # fakeroot runs extremely slow without this limit + # See https://github.com/moby/moby/issues/38814 + options: --ulimit "nofile=1024:1048576" + + steps: + - name: Check Environment + run: | + env + echo "WORK_PATH=$GITHUB_WORKSPACE" >> $GITHUB_ENV + + - name: Checkout cFS with submodules + uses: actions/checkout@v4 + with: + submodules: true + token: ${{ secrets.GH_PAT || github.token }} + + - name: Configure CFS + run: make ${{ inputs.config-name }}.prep + + - name: Build CFS + run: make ${{ inputs.config-name }}.install + + - name: Configure xz compression + if: inputs.compression-type == 'xz' + run: | + echo "SUFFIX=xz" >> $GITHUB_ENV + echo "COMPRESSOR=xz -z -c" >> $GITHUB_ENV + + - name: Configure gz compression + if: inputs.compression-type == 'gz' + run: | + echo "SUFFIX=gz" >> $GITHUB_ENV + echo "COMPRESSOR=gzip -c" >> $GITHUB_ENV + + - name: Archive binaries + run: | + cd $GITHUB_WORKSPACE/build-${{ inputs.config-name }}/exe + find -maxdepth 1 -mindepth 1 -type d | while read dir + do + inst=$(basename ${dir}) + tar cvf - -C ${inst} . | ${COMPRESSOR} > ${GITHUB_WORKSPACE}/${inst}-bin.tar.${SUFFIX} + done + + - name: Build target images + run: make IMAGE_TYPE=ext4 ${{ inputs.config-name }}.image + + - name: Archive target images + run: | + if cd $GITHUB_WORKSPACE/build-${{ inputs.config-name }}/deploy + then + find -maxdepth 1 -mindepth 1 -type d | while read dir + do + inst=$(basename ${dir}) + tar cvf - -C ${inst} . 
| ${COMPRESSOR} > ${GITHUB_WORKSPACE}/${inst}-target-img.tar.${SUFFIX} + done + fi + + - name: Upload all artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.config-name }}-bin + path: ./*.tar.${{ inputs.compression-type }} + + - name: Run Local Tests + if: ${{ inputs.run-local-tests }} + run: make ${{ inputs.config-name }}.runtest + + - name: Generate Coverage Report + if: ${{ inputs.check-coverage }} + run: make ${{ inputs.config-name }}.lcov \ No newline at end of file diff --git a/.github/workflows/build-cfs-rtems5.yml b/.github/workflows/build-cfs-rtems5.yml deleted file mode 100644 index 33eba69ca..000000000 --- a/.github/workflows/build-cfs-rtems5.yml +++ /dev/null @@ -1,140 +0,0 @@ -name: Build and Test rtems 5 [OMIT_DEPRECATED=true] - -# Run every time a new commit pushed or for pull requests -on: - push: - branches: - - dev - - main - pull_request: - types: - - opened - - reopened - - synchronize - workflow_dispatch: - -env: - OMIT_DEPRECATED: true - CTEST_OUTPUT_ON_FAILURE: true - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - #Checks for duplicate actions. Skips push actions if there is a matching or duplicate pull-request action. - check-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - build-cfs: - #Continue if check-for-duplicates found no duplicates. Always runs for pull-requests. 
- needs: check-for-duplicates - if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} - name: Build - runs-on: ubuntu-22.04 - container: ghcr.io/core-flight-system/qemu-rtems-5:latest - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - # Set home to where rtems is located - HOME: /root - - steps: - # Check out the cfs bundle - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - # Setup the build system - - name: Copy Files - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - ln -s /root/rtems-5 /opt/rtems-5 - - # Setup the build system - - name: Make Prep - run: make SIMULATION=i686-rtems5 prep - - - name: Make - run: make - - test-cfs: - name: Test - runs-on: ubuntu-22.04 - container: ghcr.io/core-flight-system/qemu-rtems-5:latest - - needs: build-cfs - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - ENABLE_UNIT_TESTS: true - # Set home to where rtems is located - HOME: /root - # Disable mcopy check otherwise disk image build fails - MTOOLS_SKIP_CHECK: 1 - - steps: - # Checks out a copy of your repository on the ubuntu-latest machine - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - # Setup the build system - - name: Copy Files - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - ln -s /root/rtems-5 /opt/rtems-5 - - # Setup the build system - - name: Make - run: | - make SIMULATION=i686-rtems5 prep - make install - - - name: Test - #run: .github/scripts/qemu_test.sh && .github/scripts/log_failed_tests.sh - run: make O=build SKIP_NET_TESTS=true RTEMS_VERSION=i686-rtems5 -f .github/scripts/rtems-test.mk all_tests -k - - - name: Output Failed Tests - run: | - # Check if failed-tests is empty or 
not - if [ -s ./build/exe/cpu1/failed-tests.log ]; then - echo "Failing tests found:" - cat ./build/exe/cpu1/failed-tests.log - fi - - # Always archive test logs - - name: Archive cFS Test Artifacts - uses: actions/upload-artifact@v4 - # Runs even if previous steps have failed - if: always() - with: - name: cFS-rtems-log-summary-${{ matrix.buildtype }} - path: ./build/exe/cpu1/*.log diff --git a/.github/workflows/build-cfs.yml b/.github/workflows/build-cfs.yml deleted file mode 100644 index 5024b02c5..000000000 --- a/.github/workflows/build-cfs.yml +++ /dev/null @@ -1,227 +0,0 @@ -name: Build, Test, and Run [OMIT_DEPRECATED = true] - -# Run every time a new commit pushed or for pull requests -on: - push: - branches: - - dev - - main - pull_request: - types: - - opened - - reopened - - synchronize - workflow_dispatch: - -env: - SIMULATION: native - OMIT_DEPRECATED: true - ENABLE_UNIT_TESTS: true - CTEST_OUTPUT_ON_FAILURE: true - REPO_NAME: ${{ github.event.repository.name }} - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - #Checks for duplicate actions. Skips push actions if there is a matching or duplicate pull-request action. 
- check-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - build-cfs-omit-deprecated-true: - name: Build - needs: check-for-duplicates - if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - - steps: - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - name: Checkout cFS - if: steps.cache-src-bld.outputs.cache-hit != 'true' - uses: actions/checkout@v4 - with: - submodules: true - - - name: Check versions - run: git submodule - - - name: Copy Makefile - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - - name: Prep Build - run: make prep - - - name: Make - run: make install - - tests-and-coverage-omit-deprecated-true: - name: Run Unit Tests and Check Coverage - needs: build-cfs-omit-deprecated-true - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - ENABLE_UNIT_TESTS: true - - steps: - - name: Install Dependencies - run: sudo apt-get install lcov -y - - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - 
- - name: Run Tests - run: make test - - - name: Check Coverage - run: make lcov - - run-cfs-omit-deprecated-true: - name: Run - needs: build-cfs-omit-deprecated-true - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - # Set the type of machine to run on - env: - BUILDTYPE: ${{ matrix.buildtype }} - - steps: - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - - name: List cpu1 - run: ls build/exe/cpu1/ - - - name: Run cFS - run: | - ./core-cpu1 > cFS_startup_cpu1.txt & - sleep 30 - ../host/cmdUtil --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 - working-directory: ./build/exe/cpu1/ - - - name: Archive cFS Startup Artifacts - uses: actions/upload-artifact@v4 - with: - name: cFS-startup-log-omit-deprecate-true-${{ matrix.buildtype }} - path: ./build/exe/cpu1/cFS_startup_cpu1.txt - - - name: Check for cFS Warnings - run: | - if [[ -n $(grep -i "warn\|err\|fail" cFS_startup_cpu1.txt) ]]; then - echo "Must resolve warn|err|fail in cFS startup before submitting a pull request" - echo "" - grep -i 'warn\|err\|fail' cFS_startup_cpu1.txt - exit -1 - fi - working-directory: ./build/exe/cpu1/ - - run-functional-test-app-omit-deprecated-true: - #Continue if check-for-duplicates found no duplicates. Always runs for pull-requests. 
- needs: run-cfs-omit-deprecated-true - runs-on: ubuntu-22.04 - timeout-minutes: 15 - - strategy: - fail-fast: false - matrix: - buildtype: [debug, release] - - steps: - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ env.REPO_NAME }}/${{ env.REPO_NAME }}/* - key: build-${{ github.run_id }}-${{ github.run_attempt }}-${{ matrix.buildtype }} - - - name: List cpu1 - run: ls build/exe/cpu1/ - - # Run cFS, send commands to set perf trigger and start perf data, and run functional tests - - name: Run cFS Functional Tests - run: | - ./core-cpu1 & - sleep 10 - ../host/cmdUtil --pktid=0x1806 --cmdcode=17 --endian=LE --uint32=3 --uint32=0x40000000 - ../host/cmdUtil --pktid=0x1806 --cmdcode=14 --endian=LE --uint32=2 - ../host/cmdUtil --pktid=0x1806 --cmdcode=4 --endian=LE --string="20:CFE_TEST" --string="20:CFE_TestMain" --string="64:cfe_testcase" --uint64=16384 --uint8=0 --uint8=0 --uint16=100 --uint32=0 - sleep 30 - counter=0 - while [[ ! -f cf/cfe_test.log ]]; do - temp=$(grep -c "BEGIN" cf/cfe_test.tmp) - - if [ $temp -eq $counter ]; then - echo "Test is frozen. 
Quiting" - break - fi - counter=$(grep -c "BEGIN" cf/cfe_test.tmp) - echo "Waiting for CFE Tests" - sleep 120 - done - - ../host/cmdUtil --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 - working-directory: ./build/exe/cpu1/ - - - name: Archive cFS Startup Artifacts - uses: actions/upload-artifact@v4 - with: - name: cFS-functional-test-log-omit-deprecate-true-${{ matrix.buildtype }} - path: ./build/exe/cpu1/cf/cfe_test.log - - - name: Check for cFS Warnings - run: | - if [[ -z $(grep -i "SUMMARY.*FAIL::0.*TSF::0.*TTF::0" cf/cfe_test.log) ]]; then - echo "Must resolve Test Failures in cFS Test App before submitting a pull request" - echo "" - grep -i '\[ FAIL]\|\[ TSF]\|\[ TTF]' cf/cfe_test.log - exit -1 - fi - working-directory: ./build/exe/cpu1/ diff --git a/.github/workflows/build-deploy-doc.yml b/.github/workflows/build-doc-reusable.yml similarity index 63% rename from .github/workflows/build-deploy-doc.yml rename to .github/workflows/build-doc-reusable.yml index 7f94e710f..9bc5b32fe 100644 --- a/.github/workflows/build-deploy-doc.yml +++ b/.github/workflows/build-doc-reusable.yml @@ -1,4 +1,4 @@ -name: Build Document +name: Build Document Reusable Workflow on: workflow_call: @@ -23,17 +23,20 @@ on: description: Build the PDF type: boolean required: false - default: true - deploy: - description: Deploy archived PDF to gh-pages - type: boolean - required: false - default: true + default: false needs_osal_api: description: Whether this depends on the osal public api (compatibility bridge) type: boolean required: false default: true + defs: + description: '*_defs directory' + type: string + required: false + default: 'sample_defs' + +env: + WORK_PATH: ${{ github.workspace }} # Force bash to apply pipefail option so pipeline failures aren't masked defaults: @@ -58,9 +61,10 @@ jobs: build-doc: needs: checks-for-duplicates - if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') }} + if: ${{ 
needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') || contains(github.ref, 'dev') }} name: Build Documentation runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-doxygen:latest strategy: fail-fast: false @@ -68,52 +72,17 @@ jobs: target: ${{ fromJson(inputs.target) }} steps: - - name: Reject non-compatible deployment settings - if: ${{ inputs.deploy == true && inputs.cache-key != '' }} - run: | - echo "Deployment when using cache not supported due to password fail issue" - exit -1 - - name: Get cache if supplied id: cache-src-bld if: ${{ inputs.cache-key != '' }} uses: actions/cache@v4 with: - path: /home/runner/work/${{ github.event.repository.name }}/${{ github.event.repository.name }}/* + path: ${{ env.WORK_PATH }}/ key: ${{ inputs.cache-key }} - - name: Checkout Bundle Main - if: ${{ inputs.app-name != '' }} - uses: actions/checkout@v4 - with: - submodules: true - repository: nasa/cFS - - - name: Checkout Repo - if: ${{ inputs.app-name != '' }} - uses: actions/checkout@v4 - with: - path: apps/${{ inputs.app-name }} - - - name: Copy Files - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - - - name: Add Repo To Build - if: ${{ inputs.app-name != '' }} - run: echo 'set(MISSION_GLOBAL_APPLIST ${{ inputs.app-name }})' >> sample_defs/targets.cmake - - - name: Make Prep - run: make prep - - - name: Install Doxygen Dependencies - run: sudo apt-get update && sudo apt-get install doxygen graphviz -y - - - name: Install PDF Generation Dependencies - if: ${{ inputs.buildpdf == true }} - run: | - sudo apt-get install texlive-latex-base texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra + - name: Set up app source + if: steps.cache-src-bld.outputs.cache-hit != 'true' + uses: nasa/cFS/actions/setup-app@dev - name: Generate OSAL header list if: ${{ inputs.needs_osal_api == true }} @@ -137,21 +106,25 @@ jobs: run: | if [[ -s ${{ matrix.target }}_stderr.txt 
]]; then cat ${{ matrix.target }}_stderr.txt - exit -1 + exit 1 fi - name: Check For Document Warnings run: | if [[ -s ${{ matrix.target }}-warnings.log ]]; then cat ${{ matrix.target }}-warnings.log - exit -1 + exit 1 fi - name: Generate PDF if: ${{ inputs.buildpdf == true }} run: | - make -C ./build/docs/${{ matrix.target }}/latex - mkdir deploy + if ! make LATEX_CMD="pdflatex -file-line-error -halt-on-error" -C ./build/docs/${{ matrix.target }}/latex > pdflatex.log; then + echo "Errors reported, tail of latex output follows" + tail -100 pdflatex.log + exit -1 + fi + mkdir -p deploy mv ./build/docs/${{ matrix.target }}/latex/refman.pdf ./deploy/${{ matrix.target }}.pdf # Could add pandoc and convert to github markdown # pandoc ${{ matrix.target }}.pdf -t gfm @@ -161,13 +134,4 @@ jobs: uses: actions/upload-artifact@v4 with: name: ${{ matrix.target }}_pdf - path: ./deploy/${{ matrix.target }}.pdf - - - name: Deploy to GitHub - if: ${{ inputs.deploy == true }} - uses: JamesIves/github-pages-deploy-action@v4 - with: - token: ${{ secrets.GITHUB_TOKEN }} - branch: gh-pages - folder: deploy - single-commit: true + path: ./deploy/${{ matrix.target }}.pdf \ No newline at end of file diff --git a/.github/workflows/build-documentation.yml b/.github/workflows/build-documentation.yml deleted file mode 100644 index ab68bda86..000000000 --- a/.github/workflows/build-documentation.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: cFS Documentation and Guides - -on: - push: - branches: - - dev - - main - pull_request: - types: - - opened - - reopened - - synchronize - workflow_dispatch: - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - # Checks for duplicate actions. Skips push actions if there is a matching or - # duplicate pull-request action. 
- checks-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - checkout-and-cache: - name: Custom checkout and cache for cFS documents - needs: checks-for-duplicates - if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') }} - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - - name: Cache Source and Build - id: cache-src-bld - uses: actions/cache@v4 - with: - path: /home/runner/work/${{ github.event.repository.name }}/${{ github.event.repository.name }}/* - key: cfs-doc-${{ github.run_id }}-${{ github.run_attempt }} - - build-cfs-documentation: - needs: checkout-and-cache - name: Build cFS documents - uses: nasa/cFS/.github/workflows/build-deploy-doc.yml@main - with: - target: "[\"cfe-usersguide\", \"osal-apiguide\"]" - cache-key: cfs-doc-${{ github.run_id }}-${{ github.run_attempt }} - deploy: false - - deploy-documentation: - needs: build-cfs-documentation - if: ${{ github.event_name == 'push' && contains(github.ref, 'main') }} - name: Deploy documentation to gh-pages - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - uses: actions/download-artifact@v4 - - - name: Display structure of downloaded files - run: ls -R - - - name: Move pdfs to deployment directory - run: mkdir deploy; mv */*.pdf deploy - - - name: Deploy to GitHub - uses: JamesIves/github-pages-deploy-action@v4 - with: - token: ${{ secrets.GITHUB_TOKEN }} - branch: gh-pages - folder: deploy - single-commit: true diff --git a/.github/workflows/build-run-app-reusable.yml b/.github/workflows/build-run-app-reusable.yml new file mode 100644 index 
000000000..260bd1c4d --- /dev/null +++ b/.github/workflows/build-run-app-reusable.yml @@ -0,0 +1,186 @@ +name: Build And Run Reusable Workflow + +on: + workflow_call: + inputs: + # Optional inputs + app-name: + description: Application name, if different from repo name + type: string + required: false + default: ${{ github.event.repository.name }} + startup-string: + description: Startup string to confirm, default will use " Initialized." + type: string + required: false + default: '' + # Currently CFS apps have at most one dependency, so this only handles one for now + dependency: + description: Additional module/library that this app depends on + type: string + required: false + default: '' + app-entrypoint-suffix: + description: Symbol suffix to use as app entry point + type: string + required: false + default: 'AppMain' + +# Force bash to apply pipefail option so pipeline failures aren't masked +defaults: + run: + shell: bash + +jobs: + # Checks for duplicate actions. Skips push actions if there is a matching or + # duplicate pull-request action. 
+ checks-for-duplicates: + runs-on: ubuntu-latest + # Map a step output to a job output + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + concurrent_skipping: 'same_content' + skip_after_successful_duplicate: 'true' + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + + build-app: + needs: checks-for-duplicates + if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') || contains(github.ref, 'dev') }} + name: Build CFE with app + runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-linux:latest + + steps: + # Note this also sets the APP_UPPER and APP_LOWER environment variables + - name: Set up app source + uses: nasa/cFS/actions/setup-app@dev + with: + app-name: ${{ inputs.app-name }} + dependency: ${{ inputs.dependency }} + + - name: Set up start string for verification + run: | + if [[ "${{ inputs.startup-string }}" == '' ]]; then + echo "START_STRING=$APP_UPPER Initialized." 
>> $GITHUB_ENV + else + echo "START_STRING=${{ inputs.startup-string }}" >> $GITHUB_ENV + fi + + - name: Make install + run: make -C build mission-install + + - name: Generate Startup Link + run: ln -s core-cpu1 ./build/exe/cpu1/container-start + + - name: Replace startup script + run: | + truncate -s 0 ./build/exe/cpu1/cf/cfe_es_startup.scr + if [ "x$APP_DEP_LOWER" != "x" ] + then + echo "CFE_LIB, $APP_DEP_LOWER, ${APP_DEP_UPPER}_Init, $APP_DEP_UPPER, 0, 0, 0x0, 0;" >> ./build/exe/cpu1/cf/cfe_es_startup.scr + fi + echo "CFE_APP, $APP_LOWER, ${APP_UPPER}_${{ inputs.app-entrypoint-suffix }}, $APP_UPPER, 80, 16384, 0x0, 0;" >> ./build/exe/cpu1/cf/cfe_es_startup.scr + cat ./build/exe/cpu1/cf/cfe_es_startup.scr + + - name: Archive binaries + run: | + cd $GITHUB_WORKSPACE/build/exe + find -maxdepth 1 -mindepth 1 -type d | while read dir + do + inst=$(basename ${dir}) + tar Jcv -f $GITHUB_WORKSPACE/${inst}-bin.tar.xz -C ${inst} . + done + + - name: Upload all artifacts + uses: actions/upload-artifact@v4 + with: + name: ${{ inputs.app-name }}-bin + path: ./*.tar.xz + + run-app: + needs: build-app + name: Run CFE with app, check for startup messages + runs-on: ubuntu-22.04 + + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.app-name }}-bin + path: ${{ inputs.app-name }}-bin + + - name: List Files 1 + run: ls -lR . + + - name: Unpack artifacts + run: | + for i in cpu1 host + do + mkdir -p "$i" + tar Jxv -C "$i" -f "$GITHUB_WORKSPACE/${{ inputs.app-name }}-bin/$i-bin.tar.xz" + done + + - name: List Files 2 + run: | + pwd + ls -lR . 
+ + - name: Start CPU1 container + id: start-cpu1 + uses: nasa/cFS/actions/start-cfs-container@dev + with: + binary-dir: ${{ github.workspace }}/cpu1 + + - name: Check CPU1 container + id: check-cpu1 + uses: nasa/cFS/actions/healthcheck-logs@dev + with: + container-id: ${{ steps.start-cpu1.outputs.container-id }} + healthcheck-regex: 'CFE_ES_Main entering OPERATIONAL state$' + + - name: Shut down CFE + if: ${{ steps.check-cpu1.outputs.ip-addr != '' }} + working-directory: ./host + run: | + ./cmd_send -v --host=${{ steps.check-cpu1.outputs.ip-addr }} --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 + sleep 2 + + - name: Capture Logs + if: ${{ always() && steps.start-cpu1.outputs.container-id != '' }} + run: docker logs ${{ steps.start-cpu1.outputs.container-id }} > cFS_startup_cpu1.txt + + - name: Stop CPU1 Container + uses: nasa/cFS/actions/stop-cfs-container@dev + with: + container-id: ${{ steps.start-cpu1.outputs.container-id }} + + - name: Archive results + if: success() || failure() + uses: actions/upload-artifact@v4 + with: + name: cFS_startup_log + path: cFS_startup_cpu1.txt + + - name: Confirm startup string + run: | + if [[ -z $(grep "$START_STRING" cFS_startup_cpu1.txt) ]]; then + echo "Startup verification string not found in log: $START_STRING" + echo "" + echo "Possible related event messages:" + grep "/$APP_UPPER " cFS_startup_cpu1.txt + exit -1 + fi + + - name: Check for cFS Warnings + if: success() || failure() + run: | + if [[ -n $(grep -i "warn\|err\|fail" cFS_startup_cpu1.txt) ]]; then + echo "cFS startup warn|err|fail:" + echo "" + grep -i 'warn\|err\|fail' cFS_startup_cpu1.txt + exit -1 + fi \ No newline at end of file diff --git a/.github/workflows/build-run-app.yml b/.github/workflows/build-run-app.yml deleted file mode 100644 index e6249c5ad..000000000 --- a/.github/workflows/build-run-app.yml +++ /dev/null @@ -1,119 +0,0 @@ -name: Build And Run - -on: - workflow_call: - inputs: - # Optional inputs - app-name: - description: Application 
name, if different from repo name - type: string - required: false - default: ${{ github.event.repository.name }} - startup-string: - description: Startup string to confirm, default will use " Initialized." - type: string - required: false - default: '' - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - # Checks for duplicate actions. Skips push actions if there is a matching or - # duplicate pull-request action. - checks-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - build-and-run: - needs: checks-for-duplicates - if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') }} - name: Build and run app, confirm startup message - runs-on: ubuntu-22.04 - - steps: - - name: Set up environment variables - # Apps typically use lowercase targets and uppercase names, this logic is fragile but works - run: | - echo "APP_UPPER=$(echo ${{ inputs.app-name }} | sed 's/[a-z]/\U&/g')" >> $GITHUB_ENV - echo "APP_LOWER=$(echo ${{ inputs.app-name }} | sed 's/[A-Z]/\L&/g')" >> $GITHUB_ENV - - - name: Set up start string for verification - run: | - if [[ "${{ inputs.startup-string }}" == '' ]]; then - echo "START_STRING=$APP_UPPER Initialized." 
>> $GITHUB_ENV - else - echo "START_STRING=${{ inputs.startup-string }}" >> $GITHUB_ENV - fi - - - name: Checkout Bundle Main - uses: actions/checkout@v4 - with: - submodules: true - repository: nasa/cFS - - - name: Checkout Repo - uses: actions/checkout@v4 - with: - path: apps/${{ env.APP_LOWER }} - - - name: Copy Files - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs - - - name: Add To Build - run: | - sed -i "/list(APPEND MISSION_GLOBAL_APPLIST/a list(APPEND MISSION_GLOBAL_APPLIST $APP_LOWER)" sample_defs/targets.cmake - - - name: Add To Startup - run: | - sed -i "1i CFE_APP, $APP_LOWER, ${APP_UPPER}_AppMain, $APP_UPPER, 80, 16384, 0x0, 0;" sample_defs/cpu1_cfe_es_startup.scr - cat sample_defs/cpu1_cfe_es_startup.scr - - - name: Make install - run: make SIMULATION=native BUILDTYPE=release OMIT_DEPRECATED=true install - - - name: Run cFS - working-directory: ./build/exe/cpu1 - run: | - ./core-cpu1 > ../../../cFS_startup_cpu1.txt & - sleep 30 - ../host/cmdUtil --endian=LE --pktid=0x1806 --cmdcode=2 --half=0x0002 - - - name: Archive results - uses: actions/upload-artifact@v4 - with: - name: cFS_startup_log - path: cFS_startup_cpu1.txt - - - name: Confirm startup string - run: | - if [[ -z $(grep "$START_STRING" cFS_startup_cpu1.txt) ]]; then - echo "Startup verification string not found in log: $START_STRING" - echo "" - echo "Possible related event messages:" - grep "/$APP_UPPER " cFS_startup_cpu1.txt - exit -1 - fi - - - name: Check for cFS Warnings - if: success() || failure() - run: | - if [[ -n $(grep -i "warn\|err\|fail" cFS_startup_cpu1.txt) ]]; then - echo "cFS startup warn|err|fail:" - echo "" - grep -i 'warn\|err\|fail' cFS_startup_cpu1.txt - exit -1 - fi diff --git a/.github/workflows/format-check.yml b/.github/workflows/format-check.yml index f0bf34c67..6ab99dbe1 100644 --- a/.github/workflows/format-check.yml +++ b/.github/workflows/format-check.yml @@ -1,12 +1,13 @@ name: Format Check -# Run on all push 
and pull requests on: + workflow_call: pull_request: types: - opened - reopened - synchronize + workflow_dispatch: # Force bash to apply pipefail option so pipeline failures aren't masked defaults: @@ -35,29 +36,30 @@ jobs: if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} runs-on: ubuntu-22.04 timeout-minutes: 15 + container: ghcr.io/core-flight-system/cfsbuildenv-linux:latest steps: - - name: Install format checker - run: | - sudo apt-get update && sudo apt-get install clang-format - - - name: Checkout bundle - uses: actions/checkout@v4 - with: - repository: nasa/cFS - - name: Checkout uses: actions/checkout@v4 with: path: repo - - name: Generate format differences - run: | - cd repo - find . -name "*.[ch]" -exec clang-format -i -style=file {} + - git diff > $GITHUB_WORKSPACE/style_differences.txt + - name: Get style from bundle + run: curl -fsL ${{ github.server_url }}/nasa/cFS/raw/refs/heads/dev/.clang-format > $GITHUB_WORKSPACE/clang-format.yml - - name: Archive Static Analysis Artifacts + - name: Check configuration + run: clang-format-19 -style=file:$GITHUB_WORKSPACE/clang-format.yml --dump-config + + - name: Execute clang-format + run: find ./repo -type f -name "*.[ch]" -print0 | xargs -0 clang-format-19 -style=file:$GITHUB_WORKSPACE/clang-format.yml -i + + - name: Check for differences + run: (cd repo && git diff --exit-code HEAD) | tee $GITHUB_WORKSPACE/style_differences.txt + + # If there are no diffs, this will be a 0 byte file and no need to archive it + # Otherwise if there was a failure the file should be non-empty + - name: Archive difference artifacts + if: failure() uses: actions/upload-artifact@v4 with: name: style_differences @@ -65,9 +67,8 @@ jobs: check-commit-message: name: Check Commit Message - needs: check-for-duplicates # Only run for pull-requests. 
- if: ${{ github.event_name == 'pull_request' }} + if: ${{ github.event_name == 'pull_request' && !startsWith(github.head_ref, 'ic-') }} runs-on: ubuntu-22.04 timeout-minutes: 15 steps: @@ -76,9 +77,9 @@ jobs: uses: gsactions/commit-message-checker@v2 if: always() with: - pattern: '^((Fix|HotFix|Part)\s\#[0-9]+,\s[a-zA-Z0-9]+|Merge\spull\srequest\s\#[0-9]+\s[a-zA-Z0-9]+|IC:\s[a-zA-Z0-9]+)' - error: 'You need at least one "Fix|HotFix|Part #, " line in the commit message.' + pattern: ^((Fix|HotFix|Part|Issue|Ticket)\s([a-zA-Z0-9_-]+/)?[a-zA-Z0-9_-]*\#[0-9]+[,:]\s.+|Merge\spull\srequest\s\#[0-9]+\s.+|IC:\s.+) + error: 'You need at least one "Fix|HotFix|Part|Issue|Ticket #<,|:> " line in the commit message.' excludeDescription: 'true' excludeTitle: 'true' checkAllCommitMessages: 'true' - accessToken: ${{ secrets.GITHUB_TOKEN }} + accessToken: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/mcdc-reusable.yml b/.github/workflows/mcdc-reusable.yml new file mode 100644 index 000000000..809f01f43 --- /dev/null +++ b/.github/workflows/mcdc-reusable.yml @@ -0,0 +1,208 @@ +name: MCDC Reusable Workflow + +on: + workflow_call: + inputs: + # Optional inputs + app-name: + description: Application name, if different from repo name + type: string + required: false + default: '' + +env: + SIMULATION: native + ENABLE_UNIT_TESTS: true + OMIT_DEPRECATED: false + BUILDTYPE: debug + TESTS_RAN: false + LOCAL_BRANCH: dev + +# Force bash to apply pipefail option so pipeline failures aren't masked +defaults: + run: + shell: bash + +jobs: + # Checks for duplicate actions. Skips push actions if there is a matching or + # duplicate pull-request action. 
+ checks-for-duplicates: + runs-on: ubuntu-latest + # Map a step output to a job output + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@master + with: + concurrent_skipping: 'same_content' + skip_after_successful_duplicate: 'false' + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + + mcdc: + needs: checks-for-duplicates + if: needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'dev') || github.event_name == 'pull_request' + name: Build and Run MCDC + runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-mcdc:latest + + steps: + # Note this also sets up the environment variables + - name: Set up app source + uses: nasa/cFS/actions/setup-app@dev + + # This builds the whole bundle for cpu1 + - name: Set up local environment (bundle) + if: ${{ inputs.app-name == '' }} + run: | + echo "BUILD_SUBDIR=build/native/default_cpu1" >> $GITHUB_ENV + + # This isolates the build to a single app subdirectory (faster) + - name: Set up local environment (app) + if: ${{ inputs.app-name != '' }} + run: | + echo "BUILD_SUBDIR=build/native/default_cpu1/apps/$APP_LOWER" >> $GITHUB_ENV + + - name: Fetch MCDC Check Script + run: wget -nv -O mcdc-analyze.sh ${{ github.server_url }}/arielswalker/cFS/raw/refs/heads/test-cfs%2Fworkflows122/.github/scripts/mcdc-analyze.sh + + - name: Include conditional coverage flags + run: | + mkdir -p ./sample_defs/cpu1 + echo "target_compile_options(ut_coverage_compile INTERFACE -fcondition-coverage -fprofile-abs-path)" >> ./sample_defs/cpu1/install_custom.cmake + + - name: Reconfigure CMake with condition coverage flags + run: cmake build + + - name: Build dependencies + run: make -C build mission-prebuild + + - name: Build unit under test + run: make -C $BUILD_SUBDIR all + + - name: Generate test list as JSON + run: | + (cd $BUILD_SUBDIR && ctest --show-only=json-v1) | tee test_list.json + + - name: 
Make test + run: | + (cd $BUILD_SUBDIR && ctest --verbose || true) | tee test_results.txt + echo "TESTS_RAN=true" >> $GITHUB_ENV + + - name: Grab test modules + # Grab test modules if tests ran, even if step failed + if: ${{ env.TESTS_RAN == 'true' }} + run: | + cat test_list.json | jq -rc '.tests[] | .name | @sh' | xargs echo | tee modules.txt + echo "Got modules.txt" + echo "MODULES=$(cat modules.txt)" >> $GITHUB_ENV + + - name: Run MCDC analysis + # Run MCDC analysis if tests ran, even if step failed + if: ${{ env.TESTS_RAN == 'true' }} + run: bash ./mcdc-analyze.sh + + - name: Save PR number + if: always() && (github.event_name == 'pull_request' || github.event_name == 'pull_request_target') + env: + PR_NUMBER: ${{ github.event.number }} + run: echo $PR_NUMBER > pr_number + + - name: Archive unit test results + # Archive unit test results if tests ran, even if step failed + if: ${{ env.TESTS_RAN == 'true' }} + uses: actions/upload-artifact@v4 + with: + name: Unit test results + path: | + test_results.txt + + - name: Archive MCDC results + # Archive MCDC results if tests ran, even if step failed + if: success() || failure() + uses: actions/upload-artifact@v4 + with: + name: MCDC results + path: | + **/*.gcov.json.gz + mcdc_results.txt + pr_number + modules.txt + + summary-mcdc: + needs: mcdc + if: always() && (github.event_name == 'pull_request' ) && needs.mcdc.result != 'skipped' + name: Generate MCDC Comparison Summary + runs-on: ubuntu-22.04 + + steps: + - name: Checkout MCDC Script + uses: actions/checkout@v4 + with: + repository: arielswalker/cFS + path: workflows + + - name: Download latest main branch artifact + continue-on-error: true + uses: dawidd6/action-download-artifact@v2 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + workflow: mcdc-internal.yml + search_artifacts: true + branch: dev + name: MCDC results + path: ./main-branch-results + + - name: Downloads PR artifacts + uses: actions/download-artifact@v4 + with: + name: MCDC results + + - 
name: Compare main and PR artifacts + run: | + if [ -f "main-branch-results/mcdc_results.txt" ]; then + echo "Main branch artifact found. Running comparison." + bash workflows/.github/scripts/mcdc-compare.sh main-branch-results/mcdc_results.txt mcdc_results.txt main-branch-results/modules.txt + else + echo "Main branch artifact not found. Skipping comparison step." + fi + + - name: Output summary to workflow + run: | + if [ -s "mcdc_comment.txt" ]; then + echo "### MC/DC Results (Comparison with dev branch)" >> $GITHUB_STEP_SUMMARY + echo '```plaintext' >> $GITHUB_STEP_SUMMARY + cat mcdc_comment.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + elif [ -s "mcdc_results.txt" ]; then + echo "### MC/DC Results (Current PR)" >> $GITHUB_STEP_SUMMARY + echo '```plaintext' >> $GITHUB_STEP_SUMMARY + cat mcdc_results.txt >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + else + echo "No MCDC results found." >> $GITHUB_STEP_SUMMARY + fi + + # Output uncovered branches if the file exists and is not empty + if [ -s "uncovered.json" ]; then + echo "" >> $GITHUB_STEP_SUMMARY + echo "
<details>" >> $GITHUB_STEP_SUMMARY + echo "<summary>Click to view uncovered branches (uncovered.json)</summary>" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo '```json' >> $GITHUB_STEP_SUMMARY + cat uncovered.json >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + echo "</details>
" >> $GITHUB_STEP_SUMMARY + echo "" >> $GITHUB_STEP_SUMMARY + fi + + - name: Archive mcdc comparison + # Upload if success or failure which supports skipping, unlike always() + if: success() || failure() + uses: actions/upload-artifact@v4 + with: + name: MCDC main branch comparison + path: | + mcdc_comment.txt + mcdc_compare.txt \ No newline at end of file diff --git a/.github/workflows/mcdc.yml b/.github/workflows/mcdc.yml new file mode 100644 index 000000000..4b9d05045 --- /dev/null +++ b/.github/workflows/mcdc.yml @@ -0,0 +1,18 @@ +name: MCDC Analysis + +on: + push: + branches: + - dev + - main + workflow_dispatch: + pull_request: + types: + - opened + - reopened + - synchronize + +jobs: + mcdc: + name: Run MCDC Analysis + uses: arielswalker/cFS/.github/workflows/mcdc-reusable.yml@test-cfs/workflows122 \ No newline at end of file diff --git a/.github/workflows/static-analysis-misra.yml b/.github/workflows/static-analysis-misra.yml deleted file mode 100644 index 928d33a6d..000000000 --- a/.github/workflows/static-analysis-misra.yml +++ /dev/null @@ -1,131 +0,0 @@ -name: Static Analysis with MISRA - -# Run this workflow manually from the Actions tab -on: - workflow_dispatch: - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - #Checks for duplicate actions. Skips push actions if there is a matching or duplicate pull-request action. - check-for-duplicates: - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - misra-analysis: - #Continue if check-for-duplicates found no duplicates. Always runs for pull-requests. 
- needs: check-for-duplicates - if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} - name: Run cppcheck with misra - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - cppcheck: [bundle, cfe, osal, psp] - - steps: - - name: Install cppcheck - run: sudo apt-get install cppcheck -y - - # Checks out a copy of the cfs bundle - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - - name: Get MISRA addon - run: | - sudo apt-get install git -y - git clone https://github.com/danmar/cppcheck.git - cp cppcheck/addons/misra.py misra.py - cp cppcheck/addons/cppcheckdata.py cppcheckdata.py - cp cppcheck/addons/misra_9.py misra_9.py - - - name: Run bundle cppcheck - if: ${{matrix.cppcheck =='bundle'}} - run: | - cppcheck --addon=misra --force --inline-suppr --quiet . --xml 2> ${{matrix.cppcheck}}_cppcheck_err.xml - cppcheck --addon=misra --force --inline-suppr --quiet . 2> ${{matrix.cppcheck}}_cppcheck_err.txt - - # Run strict static analysis for embedded portions of cfe, osal, and psp - - name: cfe strict cppcheck - if: ${{matrix.cppcheck =='cfe'}} - run: | - cd ${{matrix.cppcheck}} - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./modules/core_api/fsw ./modules/core_private/fsw ./modules/es/fsw ./modules/evs/fsw ./modules/fs/fsw ./modules/msg/fsw ./modules/resourceid/fsw ./modules/sb/fsw ./modules/sbr/fsw ./modules/tbl/fsw ./modules/time/fsw -UCFE_PLATFORM_TIME_CFG_CLIENT -DCFE_PLATFORM_TIME_CFG_SERVER --xml 2> ${{matrix.cppcheck}}_cppcheck_err.xml - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./modules/core_api/fsw ./modules/core_private/fsw ./modules/es/fsw ./modules/evs/fsw ./modules/fs/fsw ./modules/msg/fsw ./modules/resourceid/fsw ./modules/sb/fsw ./modules/sbr/fsw ./modules/tbl/fsw 
./modules/time/fsw -UCFE_PLATFORM_TIME_CFG_CLIENT -DCFE_PLATFORM_TIME_CFG_SERVER 2> ${{matrix.cppcheck}}_cppcheck_err.txt - - - name: osal strict cppcheck - if: ${{matrix.cppcheck =='osal'}} - run: | - cd ${{matrix.cppcheck}} - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./src/bsp ./src/os --xml 2> ${{matrix.cppcheck}}_cppcheck_err.xml - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./src/bsp ./src/os 2> ${{matrix.cppcheck}}_cppcheck_err.txt - - - name: psp strict cppcheck - if: ${{matrix.cppcheck =='psp'}} - run: | - cd ${{matrix.cppcheck}} - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./fsw --xml 2> ${{matrix.cppcheck}}_cppcheck_err.xml - cppcheck --addon=misra --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive ./fsw 2> ${{matrix.cppcheck}}_cppcheck_err.txt - - - name: Convert bundle cppcheck to sarif - uses: airtower-luna/convert-to-sarif@v0.2.0 - if: ${{matrix.cppcheck =='bundle'}} - with: - tool: 'CppCheck' - input_file: '${{matrix.cppcheck}}_cppcheck_err.xml' - sarif_file: '${{matrix.cppcheck}}_cppcheck_err.sarif' - - - name: Convert cfe, osal, psp cppcheck to sarif - uses: airtower-luna/convert-to-sarif@v0.2.0 - if: ${{matrix.cppcheck !='bundle'}} - with: - tool: 'CppCheck' - input_file: '${{matrix.cppcheck}}/${{matrix.cppcheck}}_cppcheck_err.xml' - sarif_file: '${{matrix.cppcheck}}_cppcheck_err.sarif' - - - name: Define workspace - run: | - echo "CONTAINER_WORKSPACE=${PWD}" >> ${GITHUB_ENV} - - - name: Archive bundle static analysis artifacts - uses: actions/upload-artifact@v4 - if: ${{matrix.cppcheck =='bundle'}} - with: - name: 
${{matrix.cppcheck}}-cppcheck-err - path: ./*cppcheck_err.* - - - name: Archive osal, cfe, and psp static analysis artifacts - uses: actions/upload-artifact@v4 - if: ${{matrix.cppcheck !='bundle'}} - with: - name: ${{matrix.cppcheck}}-cppcheck-err - path: ./${{matrix.cppcheck}}/*cppcheck_err.* - - - name: Upload sarif results - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: '${{matrix.cppcheck}}_cppcheck_err.sarif' - checkout_path: ${{ env.CONTAINER_WORKSPACE }} - - - name: Check for errors - run: | - if [[ -s ${{matrix.cppcheck}}_cppcheck_err.txt ]]; - then - cat ${{matrix.cppcheck}}_cppcheck_err.txt - exit -1 - fi diff --git a/.github/workflows/static-analysis-reuse.yml b/.github/workflows/static-analysis-reuse.yml deleted file mode 100644 index 225b4a249..000000000 --- a/.github/workflows/static-analysis-reuse.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: Bundle Static Analysis - -on: - workflow_dispatch: - -jobs: - static-analysis: - name: Static Analysis - uses: nasa/cFS/.github/workflows/static-analysis.yml@main diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml deleted file mode 100644 index ec2a8c90e..000000000 --- a/.github/workflows/static-analysis.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: Static Analysis - -on: - workflow_call: - inputs: - strict-dir-list: - description: 'Directory List' - type: string - default: '' - cmake-project-options: - description: 'Command line options to pass to CMake' - type: string - default: '' - cppcheck-xslt-path: - description: 'Path to XSLT file for translating cppcheck XML output' - type: string - default: 'nasa/cFS/main/.github/scripts' - -# Force bash to apply pipefail option so pipeline failures aren't masked -defaults: - run: - shell: bash - -jobs: - #Checks for duplicate actions. Skips push actions if there is a matching or duplicate pull-request action. 
- check-for-duplicates: - name: Check for Duplicates - runs-on: ubuntu-latest - # Map a step output to a job output - outputs: - should_skip: ${{ steps.skip_check.outputs.should_skip }} - steps: - - id: skip_check - uses: fkirc/skip-duplicate-actions@master - with: - concurrent_skipping: 'same_content' - skip_after_successful_duplicate: 'true' - do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' - - static-analysis: - #Continue if check-for-duplicates found no duplicates. Always runs for pull-requests. - needs: check-for-duplicates - if: ${{ needs.check-for-duplicates.outputs.should_skip != 'true' }} - name: Run cppcheck - runs-on: ubuntu-22.04 - - strategy: - fail-fast: false - - steps: - - name: Install cppcheck - run: | - sudo apt-get update - sudo apt-get install cppcheck xsltproc -y - - - name: Install sarif tool - run: npm install @microsoft/sarif-multitool - - - name: Fetch conversion XSLT - run: | - wget -O cppcheck-xml2text.xslt https://raw.githubusercontent.com/${{ inputs.cppcheck-xslt-path }}/cppcheck-xml2text.xslt - wget -O cppcheck-merge.xslt https://raw.githubusercontent.com/${{ inputs.cppcheck-xslt-path }}/cppcheck-merge.xslt - - # Checks out a copy of the reference repository - - name: Checkout subject repository - uses: actions/checkout@v4 - with: - path: source - submodules: true - - # For a CMake-based project, get the list of files by setting up a build with CMAKE_EXPORT_COMPILE_COMMANDS=ON and - # referencing the compile_commands.json file produced by the tool. This will capture the correct include paths and - # compile definitions based on how the source is actually compiled. 
- - name: CMake Setup - if: ${{ inputs.cmake-project-options != '' }} - run: | - cmake -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/staging -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_BUILD_TYPE=debug ${{ inputs.cmake-project-options }} -S source -B build - echo CPPCHECK_OPTS=--project="$GITHUB_WORKSPACE/build/compile_commands.json" >> $GITHUB_ENV - - # For a Non-CMake project, just pass the base source directory of the repo. This will examine all .c files in the repo, - # but it will not see the macro definitions, and thus may not correctly interpret macro usage. - - name: Non-CMake Setup - if: ${{ inputs.cmake-project-options == '' }} - run: | - echo CPPCHECK_OPTS="$GITHUB_WORKSPACE/source" >> $GITHUB_ENV - - - name: Run general cppcheck - run: cppcheck --force --inline-suppr --xml $CPPCHECK_OPTS 2> cppcheck_err.xml - - # Run strict static analysis for selected portions of source code - - name: Run Strict cppcheck - if: ${{ inputs.strict-dir-list !='' }} - working-directory: ${{ github.workspace }}/source - run: cppcheck --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive --xml ${{ inputs.strict-dir-list }} 2> ../strict_cppcheck_err.xml - - - name: Merge cppcheck results - if: ${{ inputs.strict-dir-list !='' }} - run: | - mv cppcheck_err.xml general_cppcheck_err.xml - xsltproc --stringparam merge_file strict_cppcheck_err.xml cppcheck-merge.xslt general_cppcheck_err.xml > cppcheck_err.xml - - - name: Convert cppcheck results to SARIF - run: npx "@microsoft/sarif-multitool" convert "cppcheck_err.xml" --tool "CppCheck" --output "cppcheck_err.sarif" - - - name: Convert cppcheck results to Markdown - run: xsltproc cppcheck-xml2text.xslt cppcheck_err.xml | tee $GITHUB_STEP_SUMMARY cppcheck_err.txt - - - name: Upload SARIF results - uses: github/codeql-action/upload-sarif@v3 - with: - sarif_file: ${{ github.workspace }}/cppcheck_err.sarif - checkout_path: ${{ github.workspace }}/source - 
category: 'cppcheck' - - - name: Archive static analysis artifacts - uses: actions/upload-artifact@v4 - with: - name: cppcheck-errors - path: ./*cppcheck_err.* - - - name: Check for reported errors - run: tail -n 1 cppcheck_err.txt | grep -q '^\*\*0 error(s) reported\*\*$' diff --git a/.github/workflows/test-cfs-qemu.yml b/.github/workflows/test-cfs-qemu.yml new file mode 100644 index 000000000..835f65095 --- /dev/null +++ b/.github/workflows/test-cfs-qemu.yml @@ -0,0 +1,206 @@ +name: Build and execute CFS with multiple configurations + +# Run on all pull requests, and pushes in dev and main branches +on: + push: + branches: + - dev + - main + pull_request: + types: + - opened + - reopened + - synchronize + workflow_dispatch: + +# Force bash to apply pipefail option so pipeline failures aren't masked +defaults: + run: + shell: bash + +jobs: + build-docs: + strategy: + fail-fast: false + matrix: + config: [ native_std ] + doctype: [ detaildesign, usersguide ] + include: + - doctype: detaildesign + path: mission-doc + - doctype: usersguide + path: cfe-usersguide + + name: Build ${{ matrix.config }}.${{ matrix.doctype }} Documentation + runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-doxygen:latest + steps: + - name: Checkout Bundle + uses: actions/checkout@v4 + with: + submodules: true + + - name: Prepare ${{ matrix.config }} Build + run: make ${{ matrix.config }}.prep + + - name: Build ${{ matrix.doctype }} documentation + run: make ${{ matrix.config }}.${{ matrix.doctype }} + + - name: Set environment + run: | + echo "FULL_WARNING_LOG=build-${{ matrix.config }}/docs/${{ matrix.path }}/${{ matrix.path }}-warnings.log" >> $GITHUB_ENV + echo "SCRUBBED_WARNING_LOG=build-${{ matrix.config }}/${{ matrix.path }}-internal-warnings.log" >> $GITHUB_ENV + echo "OTHER_WARNING_LOG=build-${{ matrix.config }}/${{ matrix.path }}-other-warnings.log" >> $GITHUB_ENV + + + # the intent of this is to separate the warnings into those caused by submodule 
problems vs those caused + # by problems in the configuration files within this bundle repo + - name: Scrub warnings + run: | + grep -Ev "^$GITHUB_WORKSPACE/(apps|libs|cfe|osal|psp|tools)" $FULL_WARNING_LOG > $SCRUBBED_WARNING_LOG || /bin/true + grep -E "^$GITHUB_WORKSPACE/(apps|libs|cfe|osal|psp|tools)" $FULL_WARNING_LOG > $OTHER_WARNING_LOG || /bin/true + + # warnings reported in submodules are likely a problem in that submodule. + # These can be reported for information but do not fail here. + - name: Check for submodule warnings + run: | + if [ -s "${OTHER_WARNING_LOG}" ] + then + echo '

<details><summary>Doxygen Warnings in other submodules</summary>' >> $GITHUB_STEP_SUMMARY
+            echo '<pre>' >> $GITHUB_STEP_SUMMARY
+            cat "${OTHER_WARNING_LOG}" >> $GITHUB_STEP_SUMMARY
+            echo '</pre></details>
' >> $GITHUB_STEP_SUMMARY + fi + + - name: Check for bundle errors + run: | + if [ -s "${SCRUBBED_WARNING_LOG}" ] + then + echo '

<details><summary>Doxygen Errors</summary>' >> $GITHUB_STEP_SUMMARY
+            echo '<pre>' >> $GITHUB_STEP_SUMMARY
+            cat "${SCRUBBED_WARNING_LOG}" >> $GITHUB_STEP_SUMMARY
+            echo '</pre></details>
' >> $GITHUB_STEP_SUMMARY + /bin/false + fi + + build-cfs: + strategy: + matrix: + include: + # - config: qemu_yocto_linux + # build-image: 'ghcr.io/core-flight-system/cfsbuildenv-yocto:latest' + - config: native_std + build-image: 'ghcr.io/core-flight-system/cfsbuildenv-linux:latest' + - config: native_eds + build-image: 'ghcr.io/core-flight-system/cfsbuildenv-linux:latest' + - config: pc686_rtems5 + build-image: 'ghcr.io/core-flight-system/cfsbuildenv-rtems5:latest' + - config: rpi_linux + build-image: 'ghcr.io/core-flight-system/cfsbuildenv-arm-linux:latest' + - config: gr712_rtems5 + build-image: 'ghcr.io/core-flight-system/cfsbuildenv-gaisler-sparc-rcc:latest' + name: Build CFS with ${{ matrix.config }} configuration + uses: arielswalker/cFS/.github/workflows/build-cfs-multitarget.yml@test-cfs/workflows122 + with: + config-name: ${{ matrix.config }} + container-image: ${{ matrix.build-image }} + compression-type: 'gz' + secrets: inherit +# continue-on-error: ${{ matrix.omit-deprecated }} + + execute-containers: + name: Execute CFS containers + permissions: + contents: read + actions: read + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_HOST: github.com + GH_REPO: nasa/cFS + needs: build-cfs + runs-on: ubuntu-22.04 + strategy: + matrix: + include: + - config: native_std + healthcheck-regex: 'CFE_ES_Main entering OPERATIONAL state$' + exec-image: ghcr.io/core-flight-system/cfsbuildenv-linux:latest + cpu1-test-procedure: trad_le + cpu2-test-procedure: trad_le + - config: native_eds + healthcheck-regex: 'CFE_ES_Main entering OPERATIONAL state$' + exec-image: ghcr.io/core-flight-system/cfsbuildenv-linux:latest + cpu1-test-procedure: eds + cpu2-test-procedure: eds + - config: qemu_yocto_linux + healthcheck-regex: '^Poky (Yocto Project Reference Distro)' + exec-image: ghcr.io/core-flight-system/cfsexec-qemu:latest + cpu1-test-procedure: trad_le + cpu2-test-procedure: trad_be + - config: pc686_rtems5 + healthcheck-regex: 'CFE_ES_Main entering OPERATIONAL state' + 
exec-image: ghcr.io/core-flight-system/cfsexec-qemu:latest + cpu1-test-procedure: trad_le + cpu2-test-procedure: trad_le + + steps: + - name: Download artifact from build +# This alternate download fetches the artifacts from a different run +# It is useful to skip the build step and go straight to execution- saving lots of time +# run: | +# echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token +# gh run download 58922 -p ${{ matrix.config }}-bin +# ls -lR . + uses: actions/download-artifact@v4 + with: + name: ${{ matrix.config }}-bin + path: ${{ matrix.config }}-bin + + - name: Unpack target runtime images + run: | + ls -lR + for tarball in $GITHUB_WORKSPACE/${{ matrix.config }}-bin/*.tar.gz + do + inst=$(basename ${tarball}) + echo tarball=${tarball} inst=${inst} + inst=${inst%%-*} + echo inst=${inst} + mkdir -p $GITHUB_WORKSPACE/${inst} + gunzip -c ${tarball} | tar xvf - -C $GITHUB_WORKSPACE/${inst} + done + ls -lR + + - name: Start CPU1 container + id: start-cpu1 + uses: nasa/cFS/actions/start-cfs-container@dev + with: + binary-dir: ${{ github.workspace }}/cpu1 + exec-image: ${{ matrix.exec-image }} + + - name: Check CPU1 container + id: check-cpu1 + uses: nasa/cFS/actions/healthcheck-logs@dev + with: + container-id: ${{ steps.start-cpu1.outputs.container-id }} + healthcheck-regex: ${{ matrix.healthcheck-regex }} + + - name: Execute cpu1 test (traditional little endian) + if: matrix.cpu1-test-procedure == 'trad_le' + run: | + ./host/cmd_send --host=${{ steps.check-cpu1.outputs.ip-addr }} --endian=LE --pktid=0x1806 --cmdcode=0 + + - name: Execute cpu1 test (traditional big endian) + if: matrix.cpu1-test-procedure == 'trad_be' + run: | + ./host/cmd_send --host=${{ steps.check-cpu1.outputs.ip-addr }} --endian=BE --pktid=0x1806 --cmdcode=0 + + - name: Execute test procedure (EDS) + if: matrix.cpu1-test-procedure == 'eds' + run: | + ./host/cmd_send --host=${{ steps.check-cpu1.outputs.ip-addr }} -D CFE_ES/Application/CMD.NoopCmd + + - name: Stop CPU1 
Container + if: always() + uses: nasa/cFS/actions/stop-cfs-container@dev + with: + container-id: ${{ steps.start-cpu1.outputs.container-id }} \ No newline at end of file diff --git a/.github/workflows/unit-test-coverage.yml b/.github/workflows/unit-test-coverage-reusable.yml similarity index 68% rename from .github/workflows/unit-test-coverage.yml rename to .github/workflows/unit-test-coverage-reusable.yml index c3e0753fe..2328ebbdb 100644 --- a/.github/workflows/unit-test-coverage.yml +++ b/.github/workflows/unit-test-coverage-reusable.yml @@ -1,4 +1,4 @@ -name: Unit Test and Coverage +name: Unit Test and Coverage Reusable Workflow on: workflow_call: @@ -19,6 +19,12 @@ on: type: number required: false default: 0 + # Currently CFS apps have at most one dependency, so this only handles one for now + dependency: + description: Additional module/library that this app depends on + type: string + required: false + default: '' # Force bash to apply pipefail option so pipeline failures aren't masked defaults: @@ -43,45 +49,71 @@ jobs: unit-test-coverage: needs: checks-for-duplicates - if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') }} + if: ${{ needs.checks-for-duplicates.outputs.should_skip != 'true' || contains(github.ref, 'main') || contains(github.ref, 'dev') }} name: Build, run unit tests and enforce coverage runs-on: ubuntu-22.04 + container: ghcr.io/core-flight-system/cfsbuildenv-linux:latest steps: - - name: Install coverage tools - run: sudo apt-get install lcov -y - - name: Set up environment variables # Apps typically use lowercase targets and uppercase names, this logic is fragile but works run: | echo "APP_UPPER=$(echo ${{ inputs.app-name }} | sed 's/[a-z]/\U&/g')" >> $GITHUB_ENV echo "APP_LOWER=$(echo ${{ inputs.app-name }} | sed 's/[A-Z]/\L&/g')" >> $GITHUB_ENV + APP_DEP="${{ inputs.dependency }}" + echo "APP_DEP_UPPER=$(echo ${APP_DEP##*/} | sed 's/[a-z]/\U&/g')" >> $GITHUB_ENV + echo "APP_DEP_LOWER=$(echo 
${APP_DEP##*/} | sed 's/[A-Z]/\L&/g')" >> $GITHUB_ENV - - name: Checkout Bundle Main + - name: Checkout CFE uses: actions/checkout@v4 with: - submodules: true - repository: nasa/cFS + repository: nasa/cFE + path: cfe + ref: dev + + - name: Set up Dependencies + uses: ./cfe/.github/actions/setup-cfe + with: + source-dir: . + preferred-ref: ${{ github.head_ref }} + org: nasa - name: Checkout Repo uses: actions/checkout@v4 with: path: apps/${{ env.APP_LOWER }} - - name: Copy Files - run: | - cp ./cfe/cmake/Makefile.sample Makefile - cp -r ./cfe/cmake/sample_defs sample_defs + - name: Checkout Dependency + if: ${{ inputs.dependency != '' }} + uses: actions/checkout@v4 + with: + repository: ${{ inputs.dependency }} + path: apps/${{ inputs.dependency }} + ref: dev - - name: Add Repo To Build + - name: Set up basic targets.cmake config + run: | + cat > ./sample_defs/targets.cmake << EOF + SET(MISSION_NAME GithubActions) + SET(SPACECRAFT_ID 0x42) + SET(MISSION_CPUNAMES cpu1) + SET(cpu1_PROCESSORID 1) + SET(MISSION_GLOBAL_APPLIST $APP_LOWER) + EOF + + - name: Add Dependencies to targets run: | - sed -i "/list(APPEND MISSION_GLOBAL_APPLIST/a list(APPEND MISSION_GLOBAL_APPLIST $APP_LOWER)" sample_defs/targets.cmake + if [ "x$APP_DEP_LOWER" != "x" ] + then + echo "LIST(APPEND MISSION_GLOBAL_APPLIST ${APP_DEP_LOWER})" >> ./sample_defs/targets.cmake + fi + cat ./sample_defs/targets.cmake - name: Make Prep - run: make SIMULATION=native ENABLE_UNIT_TESTS=true OMIT_DEPRECATED=true prep + run: make SIMULATION=native ENABLE_UNIT_TESTS=true OMIT_DEPRECATED=false prep - name: Build app build dependencies - run: make -C build/tools/elf2cfetbl + run: make -C build mission-prebuild - name: Build app target run: | @@ -122,4 +154,4 @@ jobs: path: | test_results.txt lcov_out.txt - lcov + lcov \ No newline at end of file diff --git a/actions/cppcheck/action.yml b/actions/cppcheck/action.yml new file mode 100644 index 000000000..953537f54 --- /dev/null +++ b/actions/cppcheck/action.yml @@ -0,0 
+1,82 @@ +name: Static Analysis +description: 'Performs static analysis of source code using cppcheck' + +inputs: + strict-dir-list: + description: 'Directory List' + type: string + default: '' + compile-commands-json: + description: 'Reference to compile command JSON file' + type: string + default: '' + source-dir: + description: 'List of source directories' + type: string + default: '' + cppcheck-xslt-path: + description: 'Path to XSLT file for translating cppcheck XML output' + type: string + default: 'nasa/cFS/raw/refs/heads/main/.github/scripts' + module-path: + description: 'Path to subdirectory containing subject source files' + type: string + default: '/' + +runs: + using: 'composite' + steps: + - name: Fetch conversion XSLT + shell: bash + run: | + wget -O cppcheck-xml2text.xslt ${{ github.server_url }}/${{ inputs.cppcheck-xslt-path }}/cppcheck-xml2text.xslt + wget -O cppcheck-merge.xslt ${{ github.server_url }}/${{ inputs.cppcheck-xslt-path }}/cppcheck-merge.xslt + + # If source dirs are specified, just pass them through. This will examine all .c files in the repo, + # but it will not see the macro definitions, and thus may not correctly interpret macro usage. + - name: Source Directory Setup + if: ${{ inputs.source-dir != '' }} + shell: bash + run: | + echo CPPCHECK_OPTS=${{ inputs.source-dir }} >> $GITHUB_ENV + + # Get the list of files by setting up a build with CMAKE_EXPORT_COMPILE_COMMANDS=ON and + # referencing the compile_commands.json file produced by the tool. This will capture the + # correct include paths and compile definitions based on how the source is actually compiled. + - name: Compile Commands Setup + if: ${{ inputs.compile-commands-json != '' }} + shell: bash + run: | + MODULE_PATH=$(realpath $GITHUB_WORKSPACE/${{ inputs.module-path }}) + echo filtering on $MODULE_PATH + jq '[.[] | if (.file | startswith("'$MODULE_PATH'")) then . 
else empty end]' ${{ inputs.compile-commands-json }} > local-cppcheck-compile-commands.json
+        echo CPPCHECK_OPTS=--project=local-cppcheck-compile-commands.json >> $GITHUB_ENV
+
+    - name: Run general cppcheck
+      shell: bash
+      run: |
+        echo Command options: $CPPCHECK_OPTS
+        cppcheck --force --inline-suppr --xml $CPPCHECK_OPTS 2> cppcheck_err.xml
+
+    # Run strict static analysis for selected portions of source code
+    - name: Run Strict cppcheck
+      if: ${{ inputs.strict-dir-list != '' }}
+      shell: bash
+      run: |
+        mv cppcheck_err.xml general_cppcheck_err.xml
+        cppcheck --force --inline-suppr --std=c99 --language=c --enable=warning,performance,portability,style --suppress=variableScope --inconclusive --xml ${{ inputs.strict-dir-list }} 2> strict_cppcheck_err.xml
+        xsltproc --stringparam merge_file strict_cppcheck_err.xml cppcheck-merge.xslt general_cppcheck_err.xml > cppcheck_err.xml
+
+    - name: Convert cppcheck results to Markdown
+      shell: bash
+      run: xsltproc cppcheck-xml2text.xslt cppcheck_err.xml | tee $GITHUB_STEP_SUMMARY cppcheck_err.txt
+
+    - name: Archive static analysis artifacts
+      uses: actions/upload-artifact@v4
+      with:
+        name: cppcheck-errors
+        path: ./*cppcheck_err.*
+
+    - name: Check for reported errors
+      shell: bash
+      run: tail -n 1 cppcheck_err.txt | grep -q '^\*\*0 error(s) reported\*\*$'
\ No newline at end of file
diff --git a/actions/healthcheck-logs/action.yml b/actions/healthcheck-logs/action.yml
new file mode 100644
index 000000000..154ba2519
--- /dev/null
+++ b/actions/healthcheck-logs/action.yml
@@ -0,0 +1,59 @@
+name: Check QEMU cFS Virtual Machine
+description: 'Checks boot completion and obtains IP address of virtual machine'
+
+inputs:
+  container-id:
+    description: "Docker container ID"
+    required: true
+  healthcheck-regex:
+    description: 'String to grep for to identify successful boot'
+    required: true
+  ssh-keyscan:
+    description: 'Whether to execute ssh-keyscan, enable if VM has an SSH server'
+    required: false
+    default: 'false'
+
+outputs:
+  ip-addr:
+    description: "Docker container IP address"
+    value: ${{ steps.query-ip-addr.outputs.addr }}
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Check for boot completion
+      shell: bash
+      run: |
+        count=0
+        docker logs "${{ inputs.container-id }}" > ${{ inputs.container-id }}_logs.txt
+        while [ $count -lt 30 ] && ! grep -q '${{ inputs.healthcheck-regex }}' ${{ inputs.container-id }}_logs.txt
+        do
+          sleep 2
+          count=$((count+1))
+          docker logs "${{ inputs.container-id }}" > ${{ inputs.container-id }}_logs.txt
+        done
+        echo "Boot log after ${count} iterations:"
+        cat ${{ inputs.container-id }}_logs.txt
+        grep -q '${{ inputs.healthcheck-regex }}' ${{ inputs.container-id }}_logs.txt
+
+    - name: Get IP address of container
+      shell: bash
+      id: query-ip-addr
+      run: |
+        IP_ADDR=$(docker inspect "${{ inputs.container-id }}" \
+          --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}')
+        echo "addr=$IP_ADDR" >> $GITHUB_OUTPUT
+        echo "Container IP: $IP_ADDR"
+
+    - name: Get SSH key
+      if: inputs.ssh-keyscan == 'true'
+      shell: bash
+      run: |
+        mkdir -p ~/.ssh
+        chmod 0700 ~/.ssh
+        touch ~/.ssh/known_hosts
+        chmod 0600 ~/.ssh/known_hosts
+        # Reuse the address already computed by the query-ip-addr step (avoids a second docker inspect)
+        IP_ADDR="${{ steps.query-ip-addr.outputs.addr }}"
+        ssh-keyscan -p 2222 "$IP_ADDR" | tee -a ~/.ssh/known_hosts
+        cat ~/.ssh/known_hosts
\ No newline at end of file
diff --git a/actions/setup-app/action.yml b/actions/setup-app/action.yml
new file mode 100644
index 000000000..609bffe8f
--- /dev/null
+++ b/actions/setup-app/action.yml
@@ -0,0 +1,87 @@
+name: Set up app source Workflow
+description: 'Sets up application source'
+
+inputs:
+  app-name:
+    description: Application name
+    type: string
+    required: false
+    default: ${{ github.event.repository.name }}
+  dependency:
+    description: Additional module/library that this app depends on
+    type: string
+    required: false
+    default: ''
+  cmake-options:
+    description: Options to pass to the cmake command
+    type: string
+    required: false
+    default: '-DENABLE_UNIT_TESTS=TRUE -DSIMULATION=native -DMISSIONCONFIG=sample -DCMAKE_BUILD_TYPE=debug -DCMAKE_INSTALL_PREFIX=/exe'
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Set up environment variables
+      # Apps typically use lowercase targets and uppercase names, this logic is fragile but works
+      shell: bash
+      run: |
+        echo "APP_UPPER=$(echo ${{ inputs.app-name }} | tr '[:lower:]' '[:upper:]')" >> $GITHUB_ENV
+        echo "APP_LOWER=$(echo ${{ inputs.app-name }} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV
+        APP_DEP="${{ inputs.dependency }}"
+        echo "APP_DEP_UPPER=$(echo ${APP_DEP##*/} | tr '[:lower:]' '[:upper:]')" >> $GITHUB_ENV
+        echo "APP_DEP_LOWER=$(echo ${APP_DEP##*/} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV
+
+    - name: Checkout cFE
+      uses: actions/checkout@v4
+      with:
+        repository: nasa/cFE
+        path: ./cfe
+        ref: dev
+
+    - name: Set up Dependencies
+      uses: ./cfe/.github/actions/setup-cfe
+      with:
+        source-dir: .
+        preferred-ref: ${{ github.head_ref }}
+        org: 'nasa'
+
+    - name: Checkout Dependency
+      if: ${{ inputs.dependency != '' }}
+      uses: actions/checkout@v4
+      with:
+        repository: ${{ inputs.dependency }}
+        path: ./apps/${{ inputs.dependency }}
+        ref: dev
+
+    - name: Checkout App Repo
+      if: inputs.app-name != 'cFS' && inputs.app-name != 'cFE'
+      uses: actions/checkout@v4
+      with:
+        path: ./apps/${{ env.APP_LOWER }}
+
+    - name: Set up basic targets.cmake config
+      shell: bash
+      run: |
+        cat > ./sample_defs/targets.cmake << EOF
+        SET(MISSION_NAME GithubActions)
+        SET(SPACECRAFT_ID 0x42)
+        SET(MISSION_CPUNAMES cpu1)
+        SET(cpu1_PROCESSORID 1)
+        SET(MISSION_GLOBAL_APPLIST)
+        EOF
+
+    - name: Add Dependencies to targets
+      if: inputs.app-name != 'cFS' && inputs.app-name != 'cFE'
+      shell: bash
+      run: echo "LIST(APPEND MISSION_GLOBAL_APPLIST $APP_DEP_LOWER $APP_LOWER)" >> ./sample_defs/targets.cmake
+
+    - name: Check targets.cmake
+      shell: bash
+      run: cat ./sample_defs/targets.cmake
+
+    - name: Set up build tree
+      shell: bash
+      run: |
+        mkdir -p $GITHUB_WORKSPACE/build
+        echo DESTDIR=$GITHUB_WORKSPACE/build >> $GITHUB_ENV
+        cd $GITHUB_WORKSPACE/build && cmake ${{ inputs.cmake-options }} "$GITHUB_WORKSPACE/cfe"
\ No newline at end of file
diff --git a/actions/start-cfs-container/action.yml b/actions/start-cfs-container/action.yml
new file mode 100644
index 000000000..496ab036e
--- /dev/null
+++ b/actions/start-cfs-container/action.yml
@@ -0,0 +1,41 @@
+name: Start QEMU cFS Virtual Machine
+description: 'Boots a QEMU virtual machine to execute a CFS build'
+
+inputs:
+  binary-dir:
+    description: 'Directory containing image files'
+    required: true
+  exec-image:
+    description: 'Docker image name to execute CFS'
+    required: false
+    default: ghcr.io/core-flight-system/cfsexec-qemu:latest
+
+outputs:
+  container-id:
+    description: "Docker container ID"
+    value: ${{ steps.start-container.outputs.id }}
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Pull CFS container
+      shell: bash
+      run: docker pull ${{ inputs.exec-image }}
+
+    - name: Start cFS execution container
+      shell: bash
+      id: start-container
+      run: |
+        DOCKER_ID=$(docker run -d \
+          -v ${{ inputs.binary-dir }}:${{ inputs.binary-dir }} \
+          --sysctl fs.mqueue.msg_max=64 \
+          -w ${{ inputs.binary-dir }} \
+          ${{ inputs.exec-image }} \
+          ./container-start)
+        echo "id=$DOCKER_ID" >> $GITHUB_OUTPUT
+        echo "Started Container: $DOCKER_ID"
+        sleep 2
+
+    - name: List Containers
+      shell: bash
+      run: docker ps -a
\ No newline at end of file
diff --git a/actions/stop-cfs-container/action.yml b/actions/stop-cfs-container/action.yml
new file mode 100644
index 000000000..adf380f9a
--- /dev/null
+++ b/actions/stop-cfs-container/action.yml
@@ -0,0 +1,24 @@
+name: Stop QEMU cFS Virtual Machine
+description: 'Stops a QEMU virtual machine'
+
+inputs:
+  container-id:
+    description: "Docker container ID"
+    required: true
+
+runs:
+  using: 'composite'
+  steps:
+    - name: Stop container
+      if: inputs.container-id != ''
+      shell: bash
+      run: docker stop "${{ inputs.container-id }}"
+
+    - name: Remove container
+      if: inputs.container-id != ''
+      shell: bash
+      run: docker rm "${{ inputs.container-id }}"
+
+    - name: List Containers
+      shell: bash
+      run: docker ps -a
\ No newline at end of file