# NOTE: the following header lines are GitHub web-UI residue from copy/pasting
# the "Workflow file for this run" view; kept as comments so the file parses.
# Skip to content
# fix codecov Job
# fix codecov Job #254
# Workflow file for this run
name: CI/CD Pipeline
# Continuous Integration / Continuous Delivery
# Triggers on all branches, and on tags starting with 'v'
# Full Job Matrix for Stress Testing is activated on 'master', 'dev' and tags
## We Test factoring Platforms and Python versions
# For other branches only one Job is spawned for Running (Unit) Tests
# PyPI publish on v* tags on 'master' branch
# Test PyPI publish on v* 'pre-release' tags on 'release' branch
# Dockerhub publish on all branches and tags
on:
  push:
    branches:
      - "*"
    tags:
      - "v*"
# from gh docs: "A map of variables that are available to the steps of all jobs in the workflow."
env:
  ## Pipeline Constants ##
  # Job Matrices as env vars: JSON strings consumed via fromJSON() in job 'strategy' sections
  FULL_MATRIX_STRATEGY: '{"platform": ["ubuntu-latest", "macos-latest", "windows-latest"], "python-version": ["3.8", "3.9", "3.10", "3.11"]}'
  UBUNTU_PY38_STRATEGY: '{"platform":["ubuntu-latest"], "python-version":["3.8"]}'
  # UBUNTU_PY38_STRATEGY: '{"platform":["ubuntu-latest"], "python-version":["3.8"], "distro": ["sdist", "wheel"]}'
  TEST_STRATEGY: '{"platform":["ubuntu-latest", "macos-latest", "windows-latest"], "python-version":["3.9"]}'
  ####### Pipeline Settings #######
  ##### JOB ON/OFF SWITCHES #####
  # Values are strings ("true"/"false"), read back in the set_github_outputs job
  RUN_UNIT_TESTS: "true"
  RUN_LINT_CHECKS: "false"
  PUBLISH_ON_PYPI: "true"
  DRAW_DEPENDENCIES: "false"
  PREVENT_CODECOV_TEST_COVERAGE: "false"
  DOCKER_JOB_ON: "false"
  ###############################
  #### DOCKER Job Policy #####
  # Override Docker Policy-dependent decision-making and
  # Accept any ALL (branch/build) to Publish to Dockerhub
  # if true, will push image and ignore below policy
  # NOTE(review): variable name carries a typo (PUBLSIH); kept as-is since
  # every shell reference below uses the same spelling
  ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false"
  # CDeployment : Builds and Publishes only if Tests ran and passed
  # CDelivery   : Builds and Publishes if Tests Passed or if Tests were Skipped
  DOCKER_JOB_POLICY: "CDeployment"
  # DOCKER_JOB_POLICY: "CDelivery"
  ############################
  #### STATIC CHECK Job ####
  # Python Runtime version to set the Job runner with
  STATIC_ANALYSIS_PY: "3.8"  # since our pyproject is tested to support 3.8 builds
  # Pylint Score Threshold; if pylint rates the code below this value the Job fails
  PYLINT_SCORE_THRESHOLD: "8.2"
  ##########################
jobs:
  # we use the below to read the workflow env vars and be able to use in "- if:" Job conditionals
  # now we can do -> if: ${{ needs.set_github_outputs.outputs.TESTS_ENABLED == 'true' }}
  # github does not have a way to simply do "- if: ${{ env.RUN_UNIT_TESTS == 'true' }}" !!
  set_github_outputs:
    name: Read Workflow Env Section Vars and set Github Outputs
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.pass-env-to-output.outputs.matrix }}
      TESTS_ENABLED: ${{ steps.pass-env-to-output.outputs.TESTS_ENABLED }}
      DOCKER_POLICY: ${{ steps.pass-env-to-output.outputs.DOCKER_POLICY }}
      DRAW_DEPS_SVG_GRAPHS: ${{ steps.pass-env-to-output.outputs.DRAW_DEPS_SVG_GRAPHS }}
      RUN_LINT: ${{ steps.pass-env-to-output.outputs.RUN_LINT }}
      PUBLISH_ON_PYPI: ${{ steps.pass-env-to-output.outputs.PUBLISH_ON_PYPI }}
      PREVENT_CODECOV_TEST_COVERAGE: ${{ steps.pass-env-to-output.outputs.PREVENT_CODECOV_TEST_COVERAGE }}
    steps:
      - name: Pass 'env' section variables to GITHUB_OUTPUT
        id: pass-env-to-output
        run: |
          # Set the matrix strategy to Full Matrix Stress Test if on master/main
          # or stress-test branch, or on any tag; single-job strategy otherwise.
          # BUGFIX: both branches previously emitted UBUNTU_PY38_STRATEGY,
          # which made the conditional dead and FULL_MATRIX_STRATEGY unused.
          BRANCH_NAME=${GITHUB_REF_NAME}
          if [[ $BRANCH_NAME == "master" || $BRANCH_NAME == "main" || $BRANCH_NAME == "stress-test" || $GITHUB_REF == refs/tags/* ]]; then
            echo "matrix=$FULL_MATRIX_STRATEGY" >> $GITHUB_OUTPUT
          else
            echo "matrix=$UBUNTU_PY38_STRATEGY" >> $GITHUB_OUTPUT
          fi
          # Forward the workflow-level ON/OFF switches as job outputs
          echo "DRAW_DEPS_SVG_GRAPHS=$DRAW_DEPENDENCIES" >> $GITHUB_OUTPUT
          echo "RUN_LINT=$RUN_LINT_CHECKS" >> $GITHUB_OUTPUT
          echo "TESTS_ENABLED=$RUN_UNIT_TESTS" >> $GITHUB_OUTPUT
          echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT
          echo "PREVENT_CODECOV_TEST_COVERAGE=$PREVENT_CODECOV_TEST_COVERAGE" >> $GITHUB_OUTPUT
          # Derive Docker Strategy/Policy
          echo "=== Current Docker High level Settings ==="
          echo "Docker Job ON: $DOCKER_JOB_ON"
          echo "Docker Publish All force override: $ALWAYS_BUILD_N_PUBLSIH_DOCKER"
          echo "Docker Job Policy: $DOCKER_JOB_POLICY"
          echo "=========================================="
          if [[ $DOCKER_JOB_ON == "true" ]]; then
            if [[ $ALWAYS_BUILD_N_PUBLSIH_DOCKER == "true" ]]; then
              echo "Setting DOCKER_POLICY to 1"
              DOCKER_POLICY=1
            elif [[ $DOCKER_JOB_POLICY == "CDeployment" ]]; then
              echo "Setting DOCKER_POLICY to 0"
              DOCKER_POLICY=0
            elif [[ $DOCKER_JOB_POLICY == "CDelivery" ]]; then
              echo "Setting DOCKER_POLICY to 2"
              DOCKER_POLICY=2
            fi
          else
            echo "Setting DOCKER_POLICY to 3"
            DOCKER_POLICY=3
          fi
          ## Lower level config ##
          # 2 bit state machine
          # 0 0 = 0: pure CI/CD mode, aka Admit if Pass, Require Pass, guarantee quality
          # 0 1 = 1: Always build and publish, aka Admit All
          # 1 0 = 2: CI/CD with Bypass Opt, aka Admit Tested and when Test is OFF, Admit when Test OFF
          # 1 1 = 3: Never build and publish, aka No Admitance, guarantee NO Dockerhub publish
          echo "DOCKER_POLICY=$DOCKER_POLICY" >> $GITHUB_OUTPUT
          echo "=== Derived Docker Lower level Settings ==="
          echo "DOCKER_POLICY: $DOCKER_POLICY"
          echo "============================================"
# RUN TEST SUITE ON ALL PLATFORMS
sdist_test:
runs-on: ${{ matrix.platform }}
needs: set_github_outputs
if: ${{ needs.set_github_outputs.outputs.TESTS_ENABLED == 'true' }}
strategy:
matrix: ${{fromJSON(needs.set_github_outputs.outputs.matrix)}}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- run: python -m pip install --upgrade pip && python -m pip install tox==3.28 tox-gh-actions
## TEST SUITE: By Default executes only unit-tests (ie no integration, or network-dependent tests)
- run: |
tox -vv -s false -e check --notest
.tox/check/bin/pyroma --directory .
# BUILD SDIST
- name: Create .tar.gz Source Distribution (aka pip install / python setup.py sdist)
env:
PLATFORM: ${{ matrix.platform }}
run: tox -vv -s false --notest -e sdist
# todo: read .tox/dist from env var (see tox docs for 'distdir' property)
- run: echo TAR_GZ_LOCATION=".tox/dist" >> $GITHUB_ENV
- run: 'echo SDIST_NAME=$(basename $(find ${{ env.TAR_GZ_LOCATION }} -type f -name "artificial_artwork*.tar.gz")) >> $GITHUB_OUTPUT'
id: produced_sdist
- run: .tox/check/bin/pyroma --file "${{ env.TAR_GZ_LOCATION }}/${{ steps.produced_sdist.outputs.SDIST_NAME }}"
- run: .tox/check/bin/twine check "${{ env.TAR_GZ_LOCATION }}/${{ steps.produced_sdist.outputs.SDIST_NAME }}"
# TEST installed SDIST, by running Unit Tests against it
- name: Run Unit Tests on Source Distribution (aka .tar.gz package)
run: tox -vv -s false -e sdist
env:
PLATFORM: ${{ matrix.platform }}
# ARTIFACTS: SDIST FILE
- name: Upload Source & Wheel distributions as Artefacts
uses: actions/upload-artifact@v3
with:
name: ${{ vars.ARTIFACTS_DIR_SDIST_FILES }}
path: ${{ env.TAR_GZ_LOCATION }}/${{ steps.produced_sdist.outputs.SDIST_NAME }}
if-no-files-found: error
# ARTIFACTS: RAW COVERAGE FILE
# .coverage.sdist
# coverage.sdist.xml
# - run: ls -l .tox/| grep coverage
# - run: echo CI_COVERAGE_RAW="coverage-${{ matrix.platform }}-${{ matrix.python-version }}" >> $GITHUB_ENV
# - run: mv ./.tox/coverage "${CI_COVERAGE_XML}"
# - name: "Upload RAW Coverage Data as Artefact"
# uses: actions/upload-artifact@v3
# with:
# name: ${{ vars.ARTIFACTS_DIR_RAW_COVERAGE_FILES }}
# path: ${{ env.CI_COVERAGE_XML }}
# if-no-files-found: error
# XML COVERAGE ARTIFACTS
- run: echo CI_COVERAGE_XML="coverage-${{ matrix.platform }}-${{ matrix.python-version }}-sdist.xml" >> $GITHUB_OUTPUT
id: set_coverage_xml_name
- name: "Aggregate Code Coverage & make XML Reports"
run: tox -e coverage --sitepackages -vv -s false
- run: mv ./.tox/coverage.xml "${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}"
- name: "Upload Test Coverage as Artifacts"
uses: actions/upload-artifact@v3
with:
name: ${{ vars.COVERAGE_XML_ARTIFACTS }}
path: ${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}
if-no-files-found: error
outputs:
SDIST_NAME: ${{ steps.produced_sdist.outputs.SDIST_NAME }}
CI_COVERAGE_XML: ${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}
## WHEEL ##
wheel_test:
runs-on: ${{ matrix.platform }}
needs: set_github_outputs
if: ${{ needs.set_github_outputs.outputs.TESTS_ENABLED == 'true' }}
strategy:
matrix: ${{fromJSON(needs.set_github_outputs.outputs.matrix)}}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- run: python -m pip install --upgrade pip && python -m pip install tox==3.28 tox-gh-actions
# BUILD wheel(s), install and TEST, by running Unit Tests against the installed distro
- run: echo WHEELS_LOCATION=wheels >> $GITHUB_ENV
- run: echo PY_VER="${MY_PY_VER//./}" >> $GITHUB_OUTPUT
id: set_py_ver
env:
MY_PY_VER: ${{ matrix.python-version }}
- name: Build Wheel Distribution (aka .whl package)
run: 'tox -vv -s false -e "py${{ steps.set_py_ver.outputs.PY_VER }}-pip_wheel-linux"'
- run: 'echo WHEEL_NAME=$(basename $(find "${WHEELS_LOCATION}" -type f -name "artificial_artwork*.whl")) >> $GITHUB_OUTPUT'
id: produced_wheel
- run: cp "${{ env.WHEELS_LOCATION }}/${{ steps.produced_wheel.outputs.WHEEL_NAME }}" ./
- run: |
tox -vv -s false -e check --notest
.tox/check/bin/twine check "${{ steps.produced_wheel.outputs.WHEEL_NAME }}"
- name: Run Unit Tests on Wheel Distribution (aka .whl package)
run: 'tox -vv -s false -e "py${{ steps.set_py_ver.outputs.PY_VER }}-wheel-linux"'
env:
WHEEL_FOR_INSTALL: ${{ steps.produced_wheel.outputs.WHEEL_NAME }}
# ARTIFACTS: WHEEL FILE
- name: Upload Wheel distribution in Artefacts
uses: actions/upload-artifact@v3
with:
name: ${{ vars.ARTIFACTS_DIR_WHEEL_FILES }}
path: ${{ steps.produced_wheel.outputs.WHEEL_NAME }}
if-no-files-found: error
# XML COVERAGE ARTIFACTS
- run: echo CI_COVERAGE_XML="coverage-${{ matrix.platform }}-${{ matrix.python-version }}-wheel.xml" >> $GITHUB_OUTPUT
id: set_coverage_xml_name
- name: "Aggregate Code Coverage & make XML Reports"
run: tox -e coverage --sitepackages -vv -s false
- run: mv ./.tox/coverage.xml "${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}"
- name: "Upload Test Coverage as Artifacts"
uses: actions/upload-artifact@v3
with:
name: ${{ vars.COVERAGE_XML_ARTIFACTS }}
path: ${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}
if-no-files-found: error
outputs:
WHEEL_NAME: ${{ steps.produced_wheel.outputs.WHEEL_NAME }}
CI_COVERAGE_XML: ${{ steps.set_coverage_xml_name.outputs.CI_COVERAGE_XML }}
# - run: tox -e check -vv -s false
# - name: Install documentation test dependencies
# if: ${{ matrix.platform == 'macos-latest' && matrix.python-version != '3.6' }}
# run: brew install enchant
# - name: Run Documentation Tests
# if: ${{ matrix.platform == 'ubuntu-latest' || matrix.python-version != '3.6' }}
# run: tox -e docs --sitepackages -vv -s false
### JOB: UPLOAD COVERAGE REPORTS TO CODECOV ###
codecov_coverage_host:
runs-on: ubuntu-latest
needs: [sdist_test, wheel_test, set_github_outputs]
if: ${{ needs.set_github_outputs.outputs.PREVENT_CODECOV_TEST_COVERAGE == 'false' }}
steps:
- uses: actions/checkout@v3
- name: Get Codecov binary
run: curl -Os https://uploader.codecov.io/latest/linux/codecov
- run: chmod +x codecov
- name: Download XML Test Coverage Results, from CI Artifacts
uses: actions/download-artifact@v3
with:
name: ${{ vars.COVERAGE_XML_ARTIFACTS }}
- name: Push to Codecov each XML Coverage Report
run: |
for file in coverage*.xml; do
OS_NAME=$(echo $file | sed -E "s/coverage-(\w\+)-/\1/")
PY_VERSION=$(echo $file | sed -E "s/coverage-\w\+-(\d\.)\+/\1/")
./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose
echo "Sent to Codecov: $file !"
done
## DEPLOY to PYPI: PROD and STAGING
# Automated Upload of Builds (.tar.gz, .whl), triggered by git push (aka git ops)
# Deployment happens only IF
# - PUBLISH_ON_PYPI == 'true'
# - we are on 'master' or 'dev' branch
# - the pushed git ref is a tag starting with 'v' ie v1.0.0
# For Production deployment we use the public pypi.org server.
# be on master branch, when you push a tag
# For Staging deployment we use the test.pypi.org server.
# be on release branch, when you push a tag
# - first make sure PUBLISH_ON_PYPI = true in Worklow test.yaml file
# To trigger automatically building your source code and deploying/uploading to PyPI, so that is becomes pip installable, you need to:
# - to trigger the automated deployment, push a git tag, that starts with 'v' (eg: v1.0.0, v1.2.3-dev, etc)
# PUBLISH DISTRIBUTIONS ON PYPI
check_which_git_branch_we_are_on:
runs-on: ubuntu-latest
needs: set_github_outputs
if: ${{ startsWith(github.event.ref, 'refs/tags/v') && needs.set_github_outputs.outputs.PUBLISH_ON_PYPI == 'true' }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: rickstaa/action-contains-tag@v1
id: main_contains_tag
with:
reference: "master"
tag: "${{ github.ref }}"
- uses: rickstaa/action-contains-tag@v1
id: release_contains_tag
with:
reference: "release"
tag: "${{ github.ref }}"
- name: Pick Production or Test Environment, if tag on master or release branch respectively
id: set_environment_name
run: |
DEPLOY=true
if [[ "${{ steps.main_contains_tag.outputs.retval }}" == "true" ]]; then
# Github Environment designed for Deploying to Production: DEPLOYMENT_PYPI_PROD
ENVIRONMENT_NAME=DEPLOYMENT_PYPI_PROD
elif [[ "${{ steps.release_contains_tag.outputs.retval }}" == "true" ]]; then
# Github Environment designed for Deploying to Staging: DEPLOYMENT_PYPI_STAGING
ENVIRONMENT_NAME=DEPLOYMENT_PYPI_STAGING
else
echo "A tag was pushed but not on master or release branch. No deployment will be done."
echo "[DEBUG] Branch name: ${GITHUB_REF_NAME}"
echo "[DEBUG] ${{ github.ref }}"
DEPLOY=false
fi
echo "SELECTED_ENVIRONMENT=$ENVIRONMENT_NAME" >> $GITHUB_OUTPUT
echo "AUTOMATED_DEPLOY=$DEPLOY" >> $GITHUB_OUTPUT
- run: echo "ENVIRONMENT_NAME=${{ steps.set_environment_name.outputs.SELECTED_ENVIRONMENT }}" >> $GITHUB_OUTPUT
id: select_pypi_env
- run: echo "AUTOMATED_DEPLOY=${{ steps.set_environment_name.outputs.AUTOMATED_DEPLOY }}" >> $GITHUB_OUTPUT
id: auto_pypi_deploy
outputs:
ENVIRONMENT_NAME: ${{ steps.select_pypi_env.outputs.ENVIRONMENT_NAME }}
AUTOMATED_DEPLOY: ${{ steps.auto_pypi_deploy.outputs.AUTOMATED_DEPLOY }}
### PYPI UPLOAD JOB ###
pypi_publ:
needs: [sdist_test, wheel_test, check_which_git_branch_we_are_on]
name: PyPI Upload
uses: boromir674/automated-workflows/.github/workflows/pypi_env.yml@v1.1.0
with:
distro_name: "artificial_artwork"
distro_version: "${{ needs.test_suite.outputs.SEMVER_PIP_FORMAT }}"
should_trigger: ${{ needs.check_which_git_branch_we_are_on.outputs.AUTOMATED_DEPLOY == 'true' }}
pypi_env: "${{ needs.check_which_git_branch_we_are_on.outputs.ENVIRONMENT_NAME }}"
artifacts_path: ${{ needs.test_suite.outputs.ARTIFACTS }}
require_wheel: true
allow_existing: true
secrets:
# This magically works, and the environment secret will be loaded
# it is really weird to pass a secret here because it feels that is comming from outside,
# from the repository secrets, not from the environment. But it doesn't!
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
# TAG="${GITHUB_REF_NAME}"
# TAG_SEM_VER="${TAG:1}" # remove the first character (v)
# # for now MUST match only the Major.Minor.Patch part
# # Extract Major.Minor.Patch parts from DISTRO_SEMVER and TAG_SEM_VER
# DISTRO_MMP=$(echo "$DISTRO_SEMVER" | grep -oE '^[0-9]+\.[0-9]+\.[0-9]+')
# TAG_MMP=$(echo "$TAG_SEM_VER" | grep -oE '^[0-9]+\.[0-9]+\.[0-9]+')
# if [ "$DISTRO_MMP" = "$TAG_MMP" ]; then
# echo "Major.Minor.Patch part of DISTRO_SEMVER matches TAG_SEM_VER"
# else
# echo "[ERROR] Major.Minor.Patch part of DISTRO_SEMVER does not match TAG_SEM_VER"
# echo "DISTRO_SEMVER=$DISTRO_SEMVER"
# echo "TAG_SEM_VER=$TAG_SEM_VER"
# echo "DISTRO_MMP=$DISTRO_MMP"
# echo "TAG_MMP=$TAG_MMP"
# exit 1
# fi
# echo "PACKAGE_DIST_VERSION=$DISTRO_SEMVER" >> $GITHUB_ENV
## AUTOMATED DOCKER BUILD and PUBLISH ON DOCKERHUB ##
read_docker_settings:
runs-on: ubuntu-latest
outputs:
CASE_POLICY: ${{ steps.derive_docker_policy.outputs.CASE_POLICY }}
steps:
- run: |
if [[ $DOCKER_JOB_ON == "true" ]]; then
if [[ $ALWAYS_BUILD_N_PUBLSIH_DOCKER == "true" ]]; then
DOCKER_POLICY=1
elif [[ $DOCKER_JOB_POLICY == "CDeployment" ]]; then
DOCKER_POLICY=2
elif [[ $DOCKER_JOB_POLICY == "CDelivery" ]]; then
DOCKER_POLICY=3
fi
else
DOCKER_POLICY=0
fi
echo "CASE_POLICY=$DOCKER_POLICY" >> $GITHUB_ENV
- run: echo "CASE_POLICY=$CASE_POLICY" >> $GITHUB_OUTPUT
id: derive_docker_policy
docker_build:
needs: [read_docker_settings, sdist_test, wheel_test]
uses: boromir674/automated-workflows/.github/workflows/docker.yml@v1.1.0
if: always()
with:
DOCKER_USER: ${{ vars.DOCKER_USER }}
acceptance_policy: ${{ needs.read_docker_settings.outputs.CASE_POLICY }}
tests_pass: ${{ needs.test_suite.result == 'success' }}
tests_run: ${{ !contains(fromJSON('["skipped", "cancelled"]'), needs.test_suite.result) }}
image_slug: "neural-style-transfer-cli"
target_stage: "prod_install"
secrets:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
# STATIC CODE ANALYSIS & LINTING
lint:
name: "Static Code Analysis & Lint"
runs-on: ubuntu-latest
needs: set_github_outputs
if: ${{ needs.set_github_outputs.outputs.RUN_LINT == 'true' }}
steps:
- uses: actions/checkout@v3
- name: Read Pipe Parameter STATIC_ANALYSIS_PY, to determine Python runtime to use for Static Code Analysis
run: echo "MY_STATIC_ANALYSIS_PY_VERSION=$STATIC_ANALYSIS_PY" >> $GITHUB_ENV # can be used in a with body of a next step in the Job, as eg: path: ${{ env.DIST_DIR }}
- name: Set up Python ${{ env.STATIC_ANALYSIS_PY }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.STATIC_ANALYSIS_PY }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install tox==3.28
## Isort ##
- name: "Isort\\: Require Semantic and Alphabetic order of the Python Imports"
if: ${{ matrix.platform != 'windows-latest' }}
run: tox -e isort -vv -s false
## Black ##
- name: "Black\\: Require Project Style to be followed by the Python Code"
if: ${{ matrix.platform != 'windows-latest' }}
run: tox -e black -vv -s false
## Pylint ##
- name: Run Pylint tool on Python Code Base
run: TOXPYTHON="python${STATIC_ANALYSIS_PY}" tox -e pylint -vv -s false | tee pylint-result.txt
- name: Show Pylint output in Terminal
run: cat pylint-result.txt
- name: "Accept Code if Pylint Score > 8.2/10"
if: ${{ matrix.platform != 'windows-latest' }}
run: |
SCORE=`sed -n 's/^Your code has been rated at \([-0-9.]*\)\/.*/\1/p' pylint-result.txt`
echo "SCORE -> $SCORE"
# threshold check
if awk "BEGIN {exit !($SCORE >= $PYLINT_SCORE_THRESHOLD)}"; then
echo "PyLint Passed! | Score: ${SCORE} out of 10 | Threshold: ${PYLINT_SCORE_THRESHOLD}"
else
echo "PyLint Failed! | Score: ${SCORE} out of 10 | Threshold: ${PYLINT_SCORE_THRESHOLD}"
exit 1
fi
## Pyflakes, Pyroma, McCabe, DodgyRun, Profile Validator ##
- name: Check for errors, potential problems, convention violations and complexity, by running tools Pyflakes, Pyroma, McCabe, and DodgyRun
if: ${{ matrix.platform != 'windows-latest' }}
run: tox -e prospector -vv -s false
# DRAW PYTHON DEPENDENCY GRAPHS
check_trigger_draw_dependency_graphs:
runs-on: ubuntu-latest
name: Draw Python Dependency Graphs ?
needs: set_github_outputs
if: needs.set_github_outputs.outputs.DRAW_DEPS_SVG_GRAPHS == 'true'
outputs:
SHOULD_DRAW_GRAPHS: ${{ steps.decide_if_should_draw_graphs.outputs.SHOULD_DRAW_GRAPHS }}
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Decide if should draw graphs
id: decide_if_should_draw_graphs
run: |
# if branch is master or dev; or if we are on tag starting with "v"
if [[ ${GITHUB_REF_NAME} == "master" || ${GITHUB_REF_NAME} == "dev" || "${GITHUB_REF}" =~ refs/tags/v.* ]]; then
SHOULD_DRAW_GRAPHS=true
else
echo "=============== list modified files ==============="
git diff --name-only HEAD^ HEAD
echo "========== check paths of modified files =========="
git diff --name-only HEAD^ HEAD > files.txt
SHOULD_DRAW_GRAPHS=false
while read file; do
echo $file
if [[ $file =~ ^src/ ]]; then
echo "This modified file is under the 'src' folder."
SHOULD_DRAW_GRAPHS=true
break
fi
done < files.txt
fi
echo "SHOULD_DRAW_GRAPHS=$SHOULD_DRAW_GRAPHS" >> $GITHUB_OUTPUT
draw-dependencies:
runs-on: ubuntu-latest
needs: check_trigger_draw_dependency_graphs
if: needs.check_trigger_draw_dependency_graphs.outputs.SHOULD_DRAW_GRAPHS == 'true'
name: Draw Python Dependencies as Graphs, in .svg
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: Install tox
run: |
python -m pip install --upgrade pip
python -m pip install tox==3.28
- name: Install dependencies (ie dot binary of graphviz)
run: |
sudo apt-get update -y --allow-releaseinfo-change
sudo apt-get install -y graphviz
- name: Draw Dependency Graphs as .svg files
run: TOXPYTHON=python tox -e pydeps -vv -s false
- name: Upload Dependency Graphs as artifacts
uses: actions/upload-artifact@v3
with:
name: dependency-graphs
path: pydeps/
if-no-files-found: warn # 'error' or 'ignore' are also available, defaults to `warn`