Skip to content

Commit

Permalink
[Build] Support to collect the test coverage in cobertura format
Browse files Browse the repository at this point in the history
  • Loading branch information
xumia committed Jan 14, 2024
1 parent 7702b8a commit 7759c39
Show file tree
Hide file tree
Showing 9 changed files with 92 additions and 46 deletions.
2 changes: 2 additions & 0 deletions .artifactignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
**/*
!*.deb
!coverage.info
!build.info
8 changes: 6 additions & 2 deletions .azure-pipelines/build-docker-sonic-vs-template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -141,12 +141,16 @@ jobs:
find $(Build.ArtifactStagingDirectory)/download/sairedis -name '*.deb' -exec cp "{}" .azure-pipelines/docker-sonic-vs/debs \;
cp -v $(Build.ArtifactStagingDirectory)/download/*.deb .azure-pipelines/docker-sonic-vs/debs
if [ -f $(Build.ArtifactStagingDirectory)/download/coverage.info ]; then
cp -v $(Build.ArtifactStagingDirectory)/download/coverage.info $(Build.ArtifactStagingDirectory)/
fi
pushd .azure-pipelines
build_args=""
build_dir=$(grep BUILD_DIR $(Build.ArtifactStagingDirectory)/download/build.info | cut -d= -f2)
build_args="--build-arg build_dir=$build_dir"
if [ '${{ parameters.asan }}' == True ]; then
build_args="--build-arg need_dbg=y"
build_args="$build_args --build-arg need_dbg=y"
fi
docker build $build_args --no-cache -t docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber).asan-${{ parameters.asan }} docker-sonic-vs
Expand Down
1 change: 1 addition & 0 deletions .azure-pipelines/build-template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,7 @@ jobs:
cp -r pytest.tgz $(Build.ArtifactStagingDirectory)/
if [ '${{ parameters.archive_gcov }}' == True ]; then
export ENABLE_GCOV=y
echo BUILD_DIR=$(pwd) > build.info
fi
if [ '${{ parameters.asan }}' == True ]; then
export ENABLE_ASAN=y
Expand Down
6 changes: 6 additions & 0 deletions .azure-pipelines/docker-sonic-vs/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ FROM docker-sonic-vs

ARG docker_container_name
ARG need_dbg
ARG build_dir
ENV BUILD_DIR=$build_dir

COPY ["debs", "/debs"]

Expand All @@ -25,3 +27,7 @@ RUN if [ "$need_dbg" = "y" ] ; then dpkg -i /debs/swss-dbg_1.0.0_amd64.deb ; fi
RUN apt-get update

RUN apt-get -y install lcov

RUN pip3 install lcov_cobertura

RUN if [ -n "$BUILD_DIR" ]; then mkdir -p $BUILD_DIR && tar -xf /tmp/gcov/gcov-source.tar -C $BUILD_DIR; fi
83 changes: 43 additions & 40 deletions .azure-pipelines/test-docker-sonic-vs-template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,11 +44,16 @@ jobs:
- job:
displayName: vstest
timeoutInMinutes: ${{ parameters.timeout }}
${{ if parameters.archive_gcov }}:
variables:
DIFF_COVER_CHECK_THRESHOLD: 80
DIFF_COVER_ENABLE: 'true'

pool: sonic-common

steps:
- script: |
ip a show dev eth0 || true
ls -A1 | xargs -I{} sudo rm -rf {}
displayName: "Clean workspace"
- checkout: self
Expand Down Expand Up @@ -78,6 +83,15 @@ jobs:
path: $(Build.ArtifactStagingDirectory)/download
displayName: "Download sonic buildimage ubuntu20.04 deb packages"

- script: |
set -ex
# Install .NET Core
curl -sSL https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
sudo apt-add-repository https://packages.microsoft.com/ubuntu/20.04/prod
sudo apt-get update
sudo apt-get install -y dotnet-sdk-7.0
displayName: "Install .NET Core"
- script: |
set -ex
sudo .azure-pipelines/build_and_install_module.sh
Expand All @@ -92,6 +106,7 @@ jobs:
sudo apt-get install -y net-tools bridge-utils vlan
sudo apt-get install -y python3-pip
sudo pip3 install pytest==4.6.2 attrs==19.1.0 exabgp==4.0.10 distro==1.5.0 docker>=4.4.1 redis==3.3.4 flaky==3.7.0
sudo pip3 install lcov_cobertura
displayName: "Install dependencies"
- script: |
Expand All @@ -106,7 +121,7 @@ jobs:
params=""
if [ '${{ parameters.archive_gcov }}' == True ]; then
params=" ${params} --keeptb "
params=" ${params} --collect-coverage --force-recreate-dvs "
fi
if [ '${{ parameters.asan }}' == True ]; then
params=" ${params} --graceful-stop "
Expand All @@ -115,44 +130,44 @@ jobs:
params=" ${params} --num-ports=${{ parameters.num_ports }} "
fi
all_tests=$(ls test_*.py)
all_tests=$(ls test_*.py | xargs)
all_tests="${all_tests} p4rt"
if [ -n '${{ parameters.run_tests_pattern }}' ]; then
all_tests=" $(ls ${{ parameters.run_tests_pattern }}) "
all_tests=" $(ls ${{ parameters.run_tests_pattern }} | xargs) "
fi
# Run the tests in parallel
echo $all_tests | xargs -n 1 | xargs -P 4 -I TEST_MODULE sudo sh -c 'py.test -v --force-flaky --junitxml="$(echo TEST_MODULE | cut -d "." -f1)_tr.xml" '"$params --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber).asan-${{ parameters.asan }} TEST_MODULE"
test_set=()
# Run 20 tests as a set.
for test in ${all_tests}; do
test_set+=("${test}")
if [ ${#test_set[@]} -ge 20 ]; then
test_name=$(echo "${test_set[0]}" | cut -d "." -f 1)
echo "${test_set[*]}" | xargs sudo py.test -v --force-flaky --junitxml="${test_name}_tr.xml" $params --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber).asan-${{ parameters.asan }}
container_count=$(docker ps -q -a | wc -l)
if [ '${{ parameters.archive_gcov }}' == True ] && [ ${container_count} -gt 0 ]; then
./gcov_support.sh set_environment $(Build.ArtifactStagingDirectory)
docker stop $(docker ps -q -a)
docker rm $(docker ps -q -a)
fi
test_set=()
fi
done
if [ ${#test_set[@]} -gt 0 ]; then
test_name=$(echo "${test_set[0]}" | cut -d "." -f 1)
echo "${test_set[*]}" | xargs sudo py.test -v $params --force-flaky --junitxml="${test_name}_tr.xml" $params --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber).asan-${{ parameters.asan }}
container_count=$(docker ps -q -a | wc -l)
if [ '${{ parameters.archive_gcov }}' == True ] && [ ${container_count} -gt 0 ]; then
./gcov_support.sh set_environment $(Build.ArtifactStagingDirectory)
docker stop $(docker ps -q -a)
docker rm $(docker ps -q -a)
fi
if [ '${{ parameters.archive_gcov }}' == True ]; then
cp $(Build.ArtifactStagingDirectory)/download/coverage.info ./
docker run --rm -v $(System.DefaultWorkingDirectory):/tmp/s --entrypoint bash $image_name -c 'cd $BUILD_DIR; cat /tmp/s/tests/*coverage.info > coverage.info; lcov_cobertura coverage.info -o /tmp/s/coverage.xml; genhtml coverage.info -o /tmp/s/htmlcov/'
cp *coverage.info ../coverage.xml $(Build.ArtifactStagingDirectory)/
fi
rm -rf $(Build.ArtifactStagingDirectory)/download
displayName: "Run vs tests"
continueOnError: ${{ parameters.asan }}
- script: |
set -ex
image_name=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber).asan-${{ parameters.asan }}
docker run --rm -v $(pwd):/tmp/s --entrypoint bash $image_name -c 'cd $BUILD_DIR; cat /tmp/s/*coverage.info > coverage.info; lcov_cobertura coverage.info -o /tmp/s/coverage.xml'
cp $(Build.ArtifactStagingDirectory)/download/coverage.info ./tests/
docker run --rm -v $(pwd):/tmp/s --entrypoint bash $image_name -c 'cd $BUILD_DIR; cat /tmp/s/tests/*coverage.info > coverage.info; lcov_cobertura coverage.info -o /tmp/s/coverage.xml'
cp coverage.xml $(Build.ArtifactStagingDirectory)/
cp tests/*coverage.info $(Build.ArtifactStagingDirectory)/
rm -rf $(Build.ArtifactStagingDirectory)/download
condition: true
displayName: "Generate coverage.xml"
- task: PublishCodeCoverageResults@1
condition: true
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/coverage.xml'
displayName: 'Publish test coverage'

- task: PublishTestResults@2
inputs:
testResultsFiles: '**/*_tr.xml'
Expand All @@ -165,21 +180,9 @@ jobs:
if [ '${{ parameters.asan }}' == True ]; then
cp -vr tests/log/*/log/asan $(Build.ArtifactStagingDirectory)/
fi
if [ '${{ parameters.archive_gcov }}' == True ]; then
sudo apt-get install -y lcov
cd $(Build.ArtifactStagingDirectory)/gcov_tmp/
tar -zcvf sonic-gcov.tar.gz sonic-gcov/
rm -rf sonic-gcov
fi
displayName: "Collect logs"
condition: always()
- publish: $(Build.ArtifactStagingDirectory)/gcov_tmp
artifact: ${{ parameters.gcov_artifact_name }}
displayName: "Publish gcov output"
condition: and(succeeded(), eq('${{ parameters.archive_gcov }}', true))

- publish: $(Build.ArtifactStagingDirectory)/
artifact: ${{ parameters.log_artifact_name }}@$(System.JobAttempt)
displayName: "Publish logs"
Expand Down
2 changes: 1 addition & 1 deletion azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,8 @@ stages:
asan: true

- stage: Gcov
condition: false
dependsOn: Test
condition: in(dependencies.Test.result, 'Succeeded', 'SucceededWithIssues')
jobs:
- template: .azure-pipelines/gcov.yml
parameters:
Expand Down
6 changes: 4 additions & 2 deletions debian/rules
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ ifeq ($(ENABLE_ASAN), y)
endif

ifeq ($(ENABLE_GCOV), y)
configure_opts += --enable-gcov CFLAGS="-g -O0" CXXFLAGS="-g -O0"
configure_opts += --enable-gcov --enable-code-coverage CFLAGS="-g -O0" CXXFLAGS="-g -O0"
endif

override_dh_auto_configure:
Expand All @@ -43,7 +43,9 @@ override_dh_auto_install:
dh_auto_install --destdir=debian/swss
ifeq ($(ENABLE_GCOV), y)
mkdir -p debian/swss/tmp/gcov
sh ./tests/gcov_support.sh collect swss
lcov -c --directory . --no-external --output-file coverage.info
find ./ -type f -regex '.*\.\(h\|cpp\|gcno\|info\)' | tar -cf debian/swss/tmp/gcov/gcov-source.tar -T -
#sh ./tests/gcov_support.sh collect swss
endif

override_dh_strip:
Expand Down
2 changes: 1 addition & 1 deletion gcovpreload/gcovpreload.c
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ void ctor()
int sigs[] = {
SIGILL, SIGFPE, SIGABRT, SIGBUS,
SIGSEGV, SIGHUP, SIGINT, SIGQUIT,
SIGTERM
SIGTERM, SIGKILL, SIGUSR1
};
int i;
struct sigaction sa;
Expand Down
28 changes: 28 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,11 @@ def pytest_addoption(parser):
type=int,
help="number of ports")

parser.addoption("--collect-coverage",
action="store_true",
default=False,
help="Collect the test coverage information")


def random_string(size=4, chars=string.ascii_uppercase + string.digits):
return "".join(random.choice(chars) for x in range(size))
Expand Down Expand Up @@ -1795,6 +1800,10 @@ def update_dvs(log_path, new_dvs_env=[]):
curr_dvs_env = new_dvs_env

else:
# Workaround to generate GCDA files for GCov
cmd = " --help;".join(dvs.swssd)
subprocess.getstatusoutput(cmd)
time.sleep(1)
# First generate GCDA files for GCov
dvs.runcmd('killall5 -15')
# If not re-creating the DVS, restart container
Expand All @@ -1808,6 +1817,25 @@ def update_dvs(log_path, new_dvs_env=[]):

yield update_dvs

if collect_coverage:
cmd = " --help;".join(dvs.swssd)
subprocess.getstatusoutput(cmd)
time.sleep(1)
dvs.runcmd('killall5 -10')
time.sleep(1)
# Generate the coverage info by lcov and copy to the host
cmd = f"docker exec {dvs.ctn.short_id} sh -c 'cd $BUILD_DIR; lcov -c --directory . --no-external --output-file /tmp/coverage.info'"
rc, output = subprocess.getstatusoutput(cmd)
if rc:
raise RuntimeError(f"Failed to run lcov command. rc={rc}. output: {output}")
coverage_info_name = dvs.ctn.short_id + '.coverage.info'
if name:
coverage_info_name = name + '.coverage.info'
cmd = f"docker cp {dvs.ctn.short_id}:/tmp/coverage.info {coverage_info_name}"
rc, output = subprocess.getstatusoutput(cmd)
if rc:
raise RuntimeError(f"Failed to run command: {cmd}. rc={rc}. output: {output}")

if graceful_stop:
dvs.stop_swss()
dvs.stop_syncd()
Expand Down

0 comments on commit 7759c39

Please sign in to comment.