Merge branch 'master' into DaAwesomeP-debian-systemd-and-config-dir
DaAwesomeP committed Jun 18, 2023
2 parents 24d2cec + e398f6d commit 977dca8
Showing 7 changed files with 275 additions and 5 deletions.
15 changes: 15 additions & 0 deletions .github/problem-matcher-build-verify-trees.json
@@ -0,0 +1,15 @@
{
  "problemMatcher": [
    {
      "owner": "build-verify-trees",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(Missing from tarball)(\\s+)(.+)$",
          "message": 1,
          "file": 3
        }
      ]
    }
  ]
}
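
The matcher's regular expression captures the annotation message in group 1 and the offending file path in group 3. As a quick standalone check (a Python sketch for illustration, not part of the commit; the sample output line is hypothetical), the pattern can be exercised like this:

import re

# The same pattern as in .github/problem-matcher-build-verify-trees.json
PATTERN = re.compile(r"^(Missing from tarball)(\s+)(.+)$")

# Hypothetical verify-trees output line, for illustration only
line = "Missing from tarball    tools/example/missing_file.cpp"

match = PATTERN.match(line)
if match:
    # The matcher maps group 1 to the annotation message and group 3 to the file
    print("message:", match.group(1))
    print("file:", match.group(3))
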
199 changes: 199 additions & 0 deletions .github/workflows/build.yaml
@@ -0,0 +1,199 @@
name: build
on: [push, pull_request]
jobs:
  build:
    name: "${{ matrix.id }}"
    runs-on: ubuntu-latest
    container: ${{ matrix.container }}
    timeout-minutes: 360
    strategy:
      fail-fast: false
      matrix:
        include:
          - id: "coverage-debian-stable-amd64-gcc"
            task: "coverage"
            configure-args: "--enable-ja-rule --enable-e133 --enable-unittests --enable-gcov"
            # TODO(Perry): Fix Debian 12 OOM issue on GitHub Actions
            container: "debian:stable"
            compiler:
              CC: "/usr/bin/gcc"
              CXX: "/usr/bin/g++"
              pkg: "gcc g++"
          - id: "distcheck-debian-stable-amd64-gcc"
            task: "distcheck"
            configure-args: "--enable-ja-rule --enable-e133 --enable-rdm-tests --enable-java-libs"
            # TODO(Perry): Fix Debian 12 OOM issue on GitHub Actions
            container: "debian:stable"
            compiler:
              CC: "/usr/bin/gcc"
              CXX: "/usr/bin/g++"
              pkg: "gcc g++"
          - id: "distcheck-debian-stable-amd64-clang"
            task: "distcheck"
            configure-args: "--enable-ja-rule --enable-e133 --enable-rdm-tests --enable-java-libs"
            # TODO(Perry): Fix Debian 12 OOM issue on GitHub Actions
            container: "debian:stable"
            compiler:
              CC: "/usr/bin/clang"
              CXX: "/usr/bin/clang++"
              pkg: "clang"
    env:
      CC: "${{ matrix.compiler.CC }}"
      CXX: "${{ matrix.compiler.CXX }}"
    steps:
      - name: Get number of CPU cores
        id: num-cpu-cores
        # TODO(Perry): Parallelization causes GH Actions to hang -j${{ steps.num-cpu-cores.outputs.NUM_CPU_CORES }}
        # run: echo "NUM_CPU_CORES=$(grep -c processor /proc/cpuinfo)" >> $GITHUB_OUTPUT
        run: echo "NUM_CPU_CORES=1" >> $GITHUB_OUTPUT
      - name: Update package database
        run: apt-get update -y
      # See comments beginning at
      # https://github.com/actions/runner/issues/763#issuecomment-1435474884
      # Without Git, actions/checkout@v3 will resort to REST and will not
      # create a .git folder or .git.config. The Problem Matcher looks for
      # .git/config to find where the root of the repo is, so it must be
      # present.
      - name: Install Git
        run: apt-get -y install git
      - uses: actions/checkout@v3
      - name: Install build tools
        shell: bash
        run: |
          apt-get -y install adduser sudo pkg-config libtool autoconf \
            automake g++ bison flex make bash-completion dh-autoreconf \
            debhelper devscripts wget python3-full python3-pip
      - name: Setup Python venv
        shell: bash
        run: |
          python3 -m venv --system-site-packages ../venv
          source ../venv/bin/activate
          echo "PATH=$PATH" >> $GITHUB_ENV
      - name: Install Python build tools
        run: python3 -m pip install --no-input gcovr
      - name: Install build dependencies
        shell: bash
        run: |
          apt-get -y install libcppunit-dev uuid-dev libncurses5-dev \
            libmicrohttpd-dev protobuf-compiler python3-protobuf \
            libprotobuf-dev libprotoc-dev zlib1g-dev libftdi-dev \
            libusb-1.0-0-dev liblo-dev libavahi-client-dev python3-numpy \
            default-jdk-headless maven
      - name: Install compiler
        shell: bash
        run: apt-get -y install ${{ matrix.compiler.pkg }}
      - name: Set up build user # CredentialsTest cannot run as root
        run: |
          adduser --disabled-password --gecos "" builduser
          chown -R builduser:builduser .
          chown builduser:builduser ..
      - name: Autoreconf
        run: sudo --preserve-env -u builduser env "PATH=$PATH" autoreconf -i
      - name: Set configure arguments
        run: |
          echo "GH_OLA_CONFIGURE_ARGS=${{ matrix.configure-args }}" >> $GITHUB_ENV
      - name: Set additional Linux configure arguments
        if: runner.os == 'Linux'
        # Silence all deprecated declarations on Linux due to auto_ptr making the build log too long
        run: |
          echo "GH_OLA_CONFIGURE_ARGS=$GH_OLA_CONFIGURE_ARGS CPPFLAGS=-Wno-deprecated-declarations" >> $GITHUB_ENV
      - name: Print configure command
        run: echo "./configure $GH_OLA_CONFIGURE_ARGS"
      - name: Configure
        run: sudo --preserve-env -u builduser env "PATH=$PATH" ./configure $GH_OLA_CONFIGURE_ARGS
      - name: ${{ matrix.task }}
        run: sudo --preserve-env -u builduser env "PATH=$PATH" make ${{ matrix.task }} -j${{ steps.num-cpu-cores.outputs.NUM_CPU_CORES }} VERBOSE=1
      - name: Display structure of the built files
        if: always() && env.ACTIONS_STEP_DEBUG == 'true'
        run: ls -alR
      - name: Archive artifacts to speed up slow GH Actions upload/download
        if: always()
        shell: bash
        # If the file does not exist when tar excludes it, then it will not
        # actually exclude it, so it must first be touched
        run: |
          touch ola-${{ matrix.id }}-source-tree.tar.gz
          tar --exclude=ola-${{ matrix.id }}-source-tree.tar.gz -cvzf ola-${{ matrix.id }}-source-tree.tar.gz .
      - name: SHA256 artifact archives
        if: always()
        run: sha256sum ola-*.tar.gz
      - name: Upload source tree artifact
        uses: actions/upload-artifact@v3
        if: always()
        with:
          name: ola-${{ matrix.id }}-source-tree
          path: ola-${{ matrix.id }}-source-tree.tar.gz
      - name: Upload built artifact
        if: matrix.task == 'distcheck' || matrix.task == 'dist'
        uses: actions/upload-artifact@v3
        with:
          name: ola-${{ matrix.id }}-dist
          path: |
            ola-*.tar.gz
            !ola-${{ matrix.id }}-source-tree.tar.gz
      - name: Install coverage tools
        if: matrix.task == 'coverage'
        run: apt-get -y install curl
      - name: Upload coverage to Coveralls
        if: matrix.task == 'coverage'
        uses: coverallsapp/github-action@v2
        with:
          # Coveralls GitHub action does not support its own format
          # see: https://github.com/coverallsapp/github-action/issues/104
          # file: coverage/coverage.coveralls.json
          file: coverage/coverage.cobertura.xml
          format: cobertura
          flag-name: ${{ matrix.id }}
      - name: Upload coverage artifacts
        if: always() && matrix.task == 'coverage'
        uses: actions/upload-artifact@v3
        with:
          name: ola-${{ matrix.id }}-coverage
          path: coverage/
  verify-trees:
    name: 'Verify trees for ${{ matrix.id }}'
    needs: build
    if: "always()" # Run if some builds fail but ensure they all complete first
    container: debian:stable
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - id: "distcheck-debian-stable-amd64-gcc"
          - id: "distcheck-debian-stable-amd64-clang"
    steps:
      - name: Download built source tree archive
        uses: actions/download-artifact@v3
        with:
          name: ola-${{ matrix.id }}-source-tree
          path: .
      - name: SHA256 artifact archive
        run: sha256sum ola-${{ matrix.id }}-source-tree.tar.gz
      - name: Unarchive artifacts and delete archive
        shell: bash
        run: |
          tar -xvzf ola-${{ matrix.id }}-source-tree.tar.gz .
          rm ola-${{ matrix.id }}-source-tree.tar.gz
      - name: Display structure of extracted files
        if: env.ACTIONS_STEP_DEBUG == 'true'
        run: ls -alR
      - name: Update package database
        run: apt-get update -y
      - name: Install Python
        run: apt-get -y install python3 python-is-python3
      # TODO(Perry): Disable problem matcher for now until verify trees is fixed
      # - name: Enable Problem Matcher for GitHub annotations
      #   run: echo "::add-matcher::.github/problem-matcher-build-verify-trees.json"
      - name: Find dist build tarball
        run: |
          echo "GH_OLA_VERIFY_TREES_TARBALL=$(ls -t --time=birth ola*.tar.gz| head -1)" >> $GITHUB_ENV
      - name: Print dist build tarball name
        run: echo "$GH_OLA_VERIFY_TREES_TARBALL"
      - name: Extract dist build
        run: tar -xvzf $GH_OLA_VERIFY_TREES_TARBALL
      - name: Verify trees
        shell: bash
        # TODO(Perry): Always succeed for now until verify trees is fixed
        # run: ./scripts/verify_trees.py ./ $(echo $GH_OLA_VERIFY_TREES_TARBALL | sed 's/.tar.gz$//')
        run: "./scripts/verify_trees.py ./ $(echo $GH_OLA_VERIFY_TREES_TARBALL | sed 's/.tar.gz$//') || true"
28 changes: 24 additions & 4 deletions .github/workflows/lint.yaml
@@ -22,7 +22,13 @@ jobs:
         run: |
           apt-get -y install pkg-config libtool autoconf \
             automake g++ bison flex make bash-completion dh-autoreconf \
-            debhelper devscripts wget python3-pip
+            debhelper devscripts wget python3-full python3-pip
+      - name: Setup Python venv
+        shell: bash
+        run: |
+          python3 -m venv --system-site-packages ../venv
+          source ../venv/bin/activate
+          echo "PATH=$PATH" >> $GITHUB_ENV
       - name: Install Python lint tools
         run: python3 -m pip install --no-input cpplint flake8
       - name: Install build dependencies
@@ -134,9 +140,13 @@ jobs:
       - name: Update package database
         run: apt-get update -y
       - name: Install build tools
+        run: apt-get -y install make python3-full python3-pip
+      - name: Setup Python venv
         shell: bash
         run: |
-          apt-get -y install make python3-pip
+          python3 -m venv --system-site-packages ../venv
+          source ../venv/bin/activate
+          echo "PATH=$PATH" >> $GITHUB_ENV
       - name: Install Python lint tools
         run: python3 -m pip install --no-input cpplint
       - name: Enable Problem Matcher for GitHub annotations
@@ -167,9 +177,13 @@ jobs:
       - name: Update package database
         run: apt-get update -y
       - name: Install build tools
+        run: apt-get -y install make python3-full python3-pip
+      - name: Setup Python venv
         shell: bash
         run: |
-          apt-get -y install make python3-pip
+          python3 -m venv --system-site-packages ../venv
+          source ../venv/bin/activate
+          echo "PATH=$PATH" >> $GITHUB_ENV
       - name: Install Python lint tools
         run: python3 -m pip install --no-input flake8
       - name: Setup flake8 annotations
@@ -229,7 +243,13 @@ jobs:
       - name: Update package database
         run: apt-get update -y
       - name: Install lint tools
-        run: apt-get -y install python3-pip git moreutils
+        run: apt-get -y install python3-full python3-pip git moreutils
+      - name: Setup Python venv
+        shell: bash
+        run: |
+          python3 -m venv --system-site-packages ../venv
+          source ../venv/bin/activate
+          echo "PATH=$PATH" >> $GITHUB_ENV
       - name: Install Python lint tools
         run: python3 -m pip install --no-input git+https://github.com/codespell-project/codespell.git
       - name: Setup codespell annotations
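
Each of the new Setup Python venv steps relies on the GITHUB_ENV mechanism: any KEY=value line appended to the file named by $GITHUB_ENV becomes an environment variable for later steps in the same job, which is how the venv's bin directory stays on PATH after the activating shell exits. For illustration only (a sketch, not part of the workflow), the same export could be written from Python:

import os

# Append a KEY=value line to the file GitHub Actions exposes via $GITHUB_ENV;
# subsequent job steps will then see the updated PATH (illustrative sketch only).
with open(os.environ["GITHUB_ENV"], "a") as env_file:
    env_file.write("PATH={}\n".format(os.environ["PATH"]))
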
1 change: 1 addition & 0 deletions .gitignore
@@ -50,6 +50,7 @@ config.h.in~
config.log
config.status
configure
coverage/
cpplint.py
doxygen_entrydb_*.tmp
doxygen_objdb_*.tmp
29 changes: 29 additions & 0 deletions Makefile.am
@@ -264,3 +264,32 @@ if FOUND_CPPLINT
else
	$(error cpplint not found. Install the forked cpplint (e.g. via pip for the latest version) and re-run configure.)
endif

# coverage
COVERAGE_OUTPUTS = --txt coverage/coverage.txt \
                   --csv coverage/coverage.csv \
                   --json coverage/coverage.json \
                   --cobertura coverage/coverage.cobertura.xml \
                   --html-details coverage/details.html/coverage.details.html \
                   --coveralls coverage/coverage.coveralls.json
COVERAGE_GCOV_EXE=--gcov-executable /usr/bin/gcov
COVERAGE_FILTERS=-e '.*Test\.cpp$$' \
                 -e '.*\.pb\.cc$$' \
                 -e '.*\.pb\.cpp$$' \
                 -e '.*\.pb\.h$$' \
                 -e '.*\.yy\.cpp$$' \
                 -e '.*\.tab\.cpp$$' \
                 -e '.*\.tab\.h$$' \
                 -e '.*/doxygen/examples.*$$'
.PHONY : coverage
coverage: Makefile.am check
if !BUILD_GCOV
	$(error Generating coverage requires configuring with --enable-gcov)
else
if FOUND_GCOVR
	mkdir -p coverage/details.html/
	gcovr --print-summary $(COVERAGE_OUTPUTS) $(COVERAGE_GCOV_EXE) --root . $(COVERAGE_FILTERS)
else
	$(error gcovr not found. Install gcovr (e.g. via pip for the latest version) and re-run configure.)
endif
endif
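
The -e options collected in COVERAGE_FILTERS tell gcovr to drop generated and test sources from the report. As a rough self-check (a standalone Python sketch, not part of the build; the sample file names are only illustrative), the same patterns can be applied to a few paths to see what would be filtered:

import re

# Exclusion patterns from COVERAGE_FILTERS (the Makefile's $$ unescapes to $)
EXCLUDES = [
    r".*Test\.cpp$",
    r".*\.pb\.cc$",
    r".*\.pb\.cpp$",
    r".*\.pb\.h$",
    r".*\.yy\.cpp$",
    r".*\.tab\.cpp$",
    r".*\.tab\.h$",
    r".*/doxygen/examples.*$",
]


def is_excluded(path):
    """Return True if a path matches one of the gcovr -e exclusion patterns."""
    return any(re.match(pattern, path) for pattern in EXCLUDES)


# Hypothetical source paths, for illustration only
for path in ["common/rdm/RDMCommandTest.cpp",
             "common/protocol/Ola.pb.cc",
             "common/rdm/RDMCommand.cpp"]:
    print(path, "->", "excluded" if is_excluded(path) else "included")
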
2 changes: 1 addition & 1 deletion common/rdm/RDMCommand.cpp
@@ -208,7 +208,7 @@ RDMStatusCode RDMCommand::VerifyData(const uint8_t *data,
                               sizeof(*command_header));
 
   if (command_header->sub_start_code != SUB_START_CODE) {
-    OLA_WARN << "Sub start code mis match, was "
+    OLA_WARN << "Sub start code mismatch, was "
              << ToHex(command_header->sub_start_code) << ", required "
              << ToHex(SUB_START_CODE);
     return RDM_WRONG_SUB_START_CODE;
6 changes: 6 additions & 0 deletions configure.ac
@@ -608,6 +608,7 @@ AS_IF([test "x$enable_gcov" = xyes],
[CFLAGS="$CFLAGS -fprofile-arcs -ftest-coverage"
CXXFLAGS="$CXXFLAGS -fprofile-arcs -ftest-coverage"
LIBS="$LIBS -lgcov"])
AM_CONDITIONAL([BUILD_GCOV], [test "x$enable_gcov" = xyes])

# Enable HTTP support. This requires libmicrohttpd.
AC_ARG_ENABLE(
@@ -964,6 +965,10 @@ AM_CONDITIONAL([FOUND_FLAKE8], [test "x$flake8" = xyes])
AC_CHECK_PROG([cpplint],[cpplint],[yes],[no])
AM_CONDITIONAL([FOUND_CPPLINT], [test "x$cpplint" = xyes])

# Coverage
AC_CHECK_PROG([gcovr],[gcovr],[yes],[no])
AM_CONDITIONAL([FOUND_GCOVR], [test "x$gcovr" = xyes])

# Output
#####################################################
# Hack alert!
@@ -1037,6 +1042,7 @@ Now type 'make @<:@<target>@:>@'
where the optional <target> is:
all - build everything
check - run the tests
coverage - build tests and generate coverage
doxygen-doc - generate the HTML documentation
lint - run the linters
-------------------------------------------------------"
