release(semantic_version): bump version to 1.4.1 #39

Workflow file for this run

name: CI/CD Pipeline
# Continuous Integration / Continuous Delivery
# Triggers on all branches and on tags starting with v
# The Full Job Matrix for Stress Testing is activated on:
# - branches 'master', 'main', 'stress-test'
# - tags matching v*
## We test across a matrix of Platforms and Python versions
# For other branches only one Job is spawned to run the (Unit) Tests
# PyPI publish happens on v* tags pushed to the 'master' branch
# Test PyPI publish happens on v* 'pre-release' tags pushed to the 'release' branch
# e.g. tag v1.7.4 on the 'master' branch triggers the stress-test and the PyPI publish
# e.g. tag v1.7.4-rc.0 or v1.7.4-rc.1 on the 'release' branch triggers the stress-test and the Test PyPI publish
# Dockerhub publish happens on all branches and tags
on:
push:
branches:
- "*"
tags:
- v*
env:
# Job Matrix as an env var!
# FULL_MATRIX_STRATEGY: "{\"platform\": [\"ubuntu-latest\", \"macos-latest\", \"windows-latest\"], \"python-version\": [\"3.7\", \"3.8\", \"3.9\", \"3.10\", \"3.11\"]}"
FULL_MATRIX_STRATEGY: "{\"platform\": [\"ubuntu-latest\", \"macos-latest\"], \"python-version\": [\"3.8\", \"3.9\", \"3.10\", \"3.11\"]}"
UBUNTU_PY310_STRATEGY: "{\"platform\":[\"ubuntu-latest\"], \"python-version\":[\"3.10\"]}"
TEST_STRATEGY: "{\"platform\": [\"macos-latest\"], \"python-version\": [\"3.8\", \"3.9\", \"3.10\", \"3.11\"]}"
# Lint Matrix
# LINT_MATRIX: "{\"platform\":[\"ubuntu-latest\", \"macos-latest\", \"windows-latest\"], \"python-version\":[\"3.10\"]}"
LINT_MATRIX: "{\"platform\":[\"ubuntu-latest\", \"macos-latest\"], \"python-version\":[\"3.10\"]}"
# On/Off switches for the various CI/CD steps
# If false the step is prevented from running
# If true the step is allowed to run (it may still be skipped by other runtime conditions)
RUN_UNIT_TESTS: "true"
RUN_LINT_CHECKS: "true"
BUILD_DOCKER: "true"
PUBLISH_ON_PYPI: "true"
DRAW_DEPENDENCIES: "true"
jobs:
# we use the Job below to read the workflow 'env' vars and expose them as outputs, so they can be used in "- if:" Job conditionals
# e.g. we can then do -> if: ${{ needs.set_github_outputs.outputs.TESTS_ENABLED == 'true' }}
# GitHub does not offer a way to simply write "- if: ${{ env.RUN_UNIT_TESTS == 'true' }}" at the Job level!
set_github_outputs:
name: Read Workflow Env Section Vars and set Github Outputs
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.pass-env-to-output.outputs.matrix }}
TESTS_ENABLED: ${{ steps.pass-env-to-output.outputs.TESTS_ENABLED }}
DOCKER_ENABLED: ${{ steps.pass-env-to-output.outputs.DOCKER_ENABLED }}
DRAW_DEPS_SVG_GRAPHS: ${{ steps.pass-env-to-output.outputs.DRAW_DEPS_SVG_GRAPHS }}
RUN_LINT: ${{ steps.pass-env-to-output.outputs.RUN_LINT }}
PUBLISH_ON_PYPI: ${{ steps.pass-env-to-output.outputs.PUBLISH_ON_PYPI }}
LINT_MATRIX: ${{ steps.pass-env-to-output.outputs.LINT_MATRIX }}
steps:
- name: Pass 'env' section variables to GITHUB_OUTPUT
id: pass-env-to-output
run: |
# set the matrix strategy to Full Matrix Stress Test if on master/main or stress-test branch or any tag
BRANCH_NAME=${GITHUB_REF_NAME}
if [[ $BRANCH_NAME == "master" || $BRANCH_NAME == "main" || $BRANCH_NAME == "stress-test" || $GITHUB_REF == refs/tags/* ]]; then
echo "matrix=$FULL_MATRIX_STRATEGY" >> $GITHUB_OUTPUT
else
echo "matrix=$UBUNTU_PY310_STRATEGY" >> $GITHUB_OUTPUT
fi
echo "DRAW_DEPS_SVG_GRAPHS=$DRAW_DEPENDENCIES" >> $GITHUB_OUTPUT
echo "RUN_LINT=$RUN_LINT_CHECKS" >> $GITHUB_OUTPUT
echo "TESTS_ENABLED=$RUN_UNIT_TESTS" >> $GITHUB_OUTPUT
echo "DOCKER_ENABLED=$BUILD_DOCKER" >> $GITHUB_OUTPUT
echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT
echo "LINT_MATRIX=$LINT_MATRIX" >> $GITHUB_OUTPUT
# RUN TEST SUITE ON ALL PLATFORMS
test_suite:
runs-on: ${{ matrix.platform }}
needs: set_github_outputs
if: ${{ needs.set_github_outputs.outputs.TESTS_ENABLED == 'true' }}
strategy:
matrix: ${{fromJSON(needs.set_github_outputs.outputs.matrix)}}
outputs:
SEMVER_PIP_FORMAT: ${{ steps.parse_version.outputs.SEMVER_PIP_FORMAT }}
steps:
- run: echo "Platform -> ${{ matrix.platform }} , Python -> ${{ matrix.python-version }}"
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- run: python -m pip install --upgrade pip && python -m pip install tox==3.28 tox-gh-actions
# - name: Do Type Checking
# run: tox -e type -vv -s false
- name: Parse package version from __init__.py to assist building
shell: bash
id: parse_version
run: |
PARSER="scripts/parse_version.py"
PARSED_VERSION=$(python "${PARSER}")
# transform the version (e.g. 1.7.4-rc.1) to match the version string in the name of the wheel that gets produced (e.g. 1.7.4rc1)
WHEEL_VERSION=$(echo $PARSED_VERSION | sed -E 's/([^.]*)\.([^.]*)\.([^-]*)-(rc)\.?(.*)/\1.\2.\3\4\5/')
# if the version ends in a bare "rc", manually append a 0 to index the release candidate;
# this accounts for the wheel build, which appends it automatically
last_two=${WHEEL_VERSION: -2}
if [[ $last_two == "rc" ]]; then
WHEEL_VERSION="${WHEEL_VERSION}0"
fi
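# illustrative end-to-end examples: "1.7.4-rc.1" -> "1.7.4rc1", "1.7.4-rc" -> "1.7.4rc0", a plain "1.7.4" stays "1.7.4"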
echo "==== $PARSED_VERSION --> $WHEEL_VERSION"
echo "PKG_VERSION=$WHEEL_VERSION" >> $GITHUB_ENV # to be used in the next step
echo "SEMVER_PIP_FORMAT=$WHEEL_VERSION" >> $GITHUB_OUTPUT # to be used in other jobs
# TODO: provision these external dependencies at build time through a custom docker image used as the runner
- name: Install external deps on MacOS
if: ${{ matrix.platform == 'macos-latest' }}
run: brew install ffmpeg
- name: Install external deps on Ubuntu
if: ${{ matrix.platform == 'ubuntu-latest' }}
run: sudo apt-get update && sudo apt-get install -y --install-recommends ffmpeg
- name: Run Unit Tests
run: tox -vv -s false
env:
PLATFORM: ${{ matrix.platform }}
- name: "Combine Coverage (dev, sdist, wheel) & make Reports"
run: tox -e coverage --sitepackages -vv -s false
- name: Rename Coverage Files
shell: bash
run: |
mv ./.tox/coverage.xml ./coverage-${{ matrix.platform }}-${{ matrix.python-version }}.xml
- name: "Upload Test Coverage as Artifacts"
uses: actions/upload-artifact@v3
with:
name: all_coverage_raw
path: coverage-${{ matrix.platform }}-${{ matrix.python-version }}.xml
if-no-files-found: error
- name: Check for compliance with Python Best Practices
shell: bash
run: |
DIST_DIR=dist
echo "DIST_DIR=dist" >> $GITHUB_ENV
mkdir ${DIST_DIR}
mv ".tox/${DIST_DIR}/music_album_creation-${PKG_VERSION}.tar.gz" "${DIST_DIR}"
mv ".tox/${DIST_DIR}/music_album_creation-${PKG_VERSION}-py3-none-any.whl" "${DIST_DIR}"
tox -e check -vv -s false
# - name: Install documentation test dependencies
# if: ${{ matrix.platform == 'macos-latest' && matrix.python-version != '3.6' }}
# run: brew install enchant
# - name: Run Documentation Tests
# if: ${{ matrix.platform == 'ubuntu-latest' || matrix.python-version != '3.6' }}
# run: tox -e docs --sitepackages -vv -s false
- name: Upload Source & Wheel distributions as Artefacts
uses: actions/upload-artifact@v3
with:
name: dist-${{ matrix.platform }}-${{ matrix.python-version }}
path: ${{ env.DIST_DIR }}
if-no-files-found: error
codecov_coverage_host:
runs-on: ubuntu-latest
needs: test_suite
steps:
- uses: actions/checkout@v3
- name: Get Codecov binary
run: |
curl -Os https://uploader.codecov.io/latest/linux/codecov
chmod +x codecov
- name: Download Raw Coverage Data Artefacts
uses: actions/download-artifact@v3
with:
name: all_coverage_raw
- name: Upload Coverage Reports to Codecov
run: |
for file in coverage*.xml; do
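# coverage file names follow coverage-<platform>-<python-version>.xml (see the Rename Coverage Files step),
# e.g. coverage-ubuntu-latest-3.10.xml -> OS_NAME=ubuntu-latest, PY_VERSION=3.10 (illustrative)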
OS_NAME=$(echo $file | sed -E "s/coverage-(.+)-([0-9.]+)\.xml/\1/")
PY_VERSION=$(echo $file | sed -E "s/coverage-(.+)-([0-9.]+)\.xml/\2/")
./codecov -f $file -e "OS=$OS_NAME,PYTHON=$PY_VERSION" --flags unittests --verbose
echo "Sent to Codecov: $file !"
done
# DOCKER BUILD AND PUBLISH ON DOCKERHUB
docker_build:
runs-on: ubuntu-latest
needs: [test_suite, set_github_outputs]
if: needs.set_github_outputs.outputs.DOCKER_ENABLED == 'true'
env:
DOCKER_USER: ${{ secrets.DOCKER_USER }}
IMAGE_NAME: "create-album"
steps:
- uses: actions/checkout@v3
- name: Build Docker Image (for DockerHub)
run: |
# this Job runs inside a workflow that is triggered for all branches and v* tags
IMAGE_TAG="${GITHUB_REF_NAME}" # this is branch name or tag name
echo "IMAGE_REF=${IMAGE_NAME}:${IMAGE_TAG}" >> $GITHUB_ENV
docker build -t "${DOCKER_USER}/${IMAGE_NAME}:${IMAGE_TAG}" .
- name: Publish Docker Image to DockerHub
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
run: |
echo "${DOCKER_PASSWORD}" | docker login --username "${DOCKER_USER}" --password-stdin
docker push "${DOCKER_USER}/${IMAGE_REF}"
docker logout
echo "Published in Dockerhub :)"
# PUBLISH DISTRIBUTIONS ON PYPI
# we only publish on tags starting with "v"
check_which_git_branch_we_are_on:
runs-on: ubuntu-latest
needs: set_github_outputs
if: ${{ startsWith(github.event.ref, 'refs/tags/v') && needs.set_github_outputs.outputs.PUBLISH_ON_PYPI == 'true' }}
outputs:
ENVIRONMENT_NAME: ${{ steps.set_environment_name.outputs.ENVIRONMENT_NAME }}
AUTOMATED_DEPLOY: ${{ steps.set_environment_name.outputs.AUTOMATED_DEPLOY }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: rickstaa/action-contains-tag@v1
id: main_contains_tag
with:
reference: "master"
tag: "${{ github.ref }}"
- run: echo "ON_MAIN_BRANCH=${{ steps.main_contains_tag.outputs.retval }}" >> $GITHUB_OUTPUT
- uses: rickstaa/action-contains-tag@v1
id: release_contains_tag
with:
reference: "release"
tag: "${{ github.ref }}"
- run: echo "ON_RELEASE_BRANCH=${{ steps.release_contains_tag.outputs.retval }}" >> $GITHUB_OUTPUT
- name: Pick Production or Test Environment, depending on whether the tag is on the master or the release branch
id: set_environment_name
run: |
DEPLOY=true
if [[ "${{ steps.main_contains_tag.outputs.retval }}" == "true" ]]; then
echo "ENVIRONMENT_NAME=PROD_DEPLOYMENT" >> $GITHUB_OUTPUT
elif [[ "${{ steps.release_contains_tag.outputs.retval }}" == "true" ]]; then
echo "ENVIRONMENT_NAME=TEST_DEPLOYMENT" >> $GITHUB_OUTPUT
else
echo "A tag was pushed but not on master or release branch. No deployment will be done."
DEPLOY=false
fi
echo "AUTOMATED_DEPLOY=$DEPLOY" >> $GITHUB_OUTPUT
pypi_publish:
needs: [test_suite, check_which_git_branch_we_are_on]
runs-on: ubuntu-latest
# deploy only if a v* tag was pushed and the tag is contained in the master or release branch
if: ${{ startsWith(github.event.ref, 'refs/tags/v') && needs.check_which_git_branch_we_are_on.outputs.AUTOMATED_DEPLOY == 'true' }}
environment:
name: ${{ needs.check_which_git_branch_we_are_on.outputs.ENVIRONMENT_NAME }}
env:
DIST_DIR: dist
PACKAGE_DIST_VERSION: ${{ needs.test_suite.outputs.SEMVER_PIP_FORMAT }}
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
PYPI_SERVER: ${{ vars.PYPI_SERVER }}
steps:
- uses: actions/checkout@v3
- name: Download Source & Wheel distributions
uses: actions/download-artifact@v3
with:
path: downloaded-artifacts
- name: Verify that the package version in __init__.py matches the pushed git tag
run: |
TAG="${GITHUB_REF_NAME}"
SEM_VER="${TAG:1}" # remove the first character (v)
PARSER="scripts/parse_version.py"
PARSED_VERSION=$(python "${PARSER}")
if [[ "${PARSED_VERSION}" != "${SEM_VER}" ]]; then
echo "ERROR: Version in __init__.py (${PARSED_VERSION}) does not match tag (${SEM_VER})"
exit 1
fi
- run: mkdir ${DIST_DIR}
- run: |
# Get Source Distribution (tar.gz of source code)
source_distributions=$(find downloaded-artifacts -type f -name 'music_album_creation*.tar.gz')
source_distributions_array=($source_distributions)
source_distribution=${source_distributions_array[0]} # a *.tar.gz file path
# Extract the base name (without the path)
source_distribution_name=$(basename "$source_distribution")
# Check if all source distribution (.tar.gz) files have the same name
for file in "${source_distributions_array[@]}"; do
if [ "$source_distribution_name" != "$(basename "$file")" ]; then
echo "Error: Not all Source Distribution .tar.gz files have the same name!"
exit 1
fi
done
echo "source_distribution=$source_distribution" >> $GITHUB_ENV
- run: cp "$source_distribution" ${DIST_DIR}
- run: |
# Get all built Wheels and copy to dist folder
for f in $(find downloaded-artifacts -type f -name 'music_album_creation*.whl'); do
echo "F: $f";
# TODO check for duplicates, which would mean that our build matrix produces the same wheel (we would need a different compiler than CPython, such as pypy, cython, etc)
cp $f ${DIST_DIR}
done
- name: Install Dependencies
run: pip install tox==3.28
- run: echo "Publishing $PACKAGE_DIST_VERSION to $PYPI_SERVER PyPI"
- name: Publish to PyPI
run: tox -vv -s false -e deploy -- upload --non-interactive --skip-existing
- run: echo "Published :\)"
# STATIC CODE ANALYSIS & LINTING
lint:
name: "Static Code Analysis & Lint"
runs-on: ${{ matrix.platform }}
needs: set_github_outputs
if: ${{ needs.set_github_outputs.outputs.RUN_LINT == 'true' }}
strategy:
matrix: ${{fromJson(needs.set_github_outputs.outputs.LINT_MATRIX)}}
steps:
- uses: actions/checkout@v3
- run: echo "Platform is ${{ matrix.platform }}, Ptython is ${{ matrix.python-version }}"
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install tox==3.28
- name: Lint check code with Isort and Black
if: ${{ matrix.platform != 'windows-latest' }}
run: tox -e lint -vv -s false
- name: Run Pylint on Production Code
run: tox -vv -s false -e pylint | tee /tmp/pylint-result-prod-code.txt
- name: Run Pylint on Test Code
run: tox -vv -s false -e pylint -- tests | tee /tmp/pylint-result-test-code.txt
- name: Compute Average Pylint Score
run: |
PYLINT_PROD_SCORE=`sed -n 's/^Your code has been rated at \([-0-9.]*\)\/.*/\1/p' /tmp/pylint-result-prod-code.txt`
PYLINT_TEST_SCORE=`sed -n 's/^Your code has been rated at \([-0-9.]*\)\/.*/\1/p' /tmp/pylint-result-test-code.txt`
PYLINT_AVERAGE_SCORE=`echo "scale=2; ($PYLINT_PROD_SCORE + $PYLINT_TEST_SCORE) / 2" | bc`
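# e.g. a production-code score of 9.00 and a test-code score of 7.00 yield an average of 8.00 (illustrative)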
echo "PYLINT_AVERAGE_SCORE=$PYLINT_AVERAGE_SCORE" >> $GITHUB_ENV
- name: "Check Pylint Score > 7.0/10"
if: ${{ matrix.platform != 'windows-latest' }}
env:
PYLINT_THRESHOLD: 7.0 # TODO: increase to 8.2
run: |
echo "SCORE: $PYLINT_AVERAGE_SCORE"
if awk "BEGIN {exit !($PYLINT_AVERAGE_SCORE >= $PYLINT_THRESHOLD)}"; then
echo "PyLint Passed -> Score: ${PYLINT_AVERAGE_SCORE}/10 >= Threshold: ${PYLINT_THRESHOLD}/10"
else
echo "PyLint Failed -> Score: ${PYLINT_AVERAGE_SCORE}/10 < Threshold: ${PYLINT_THRESHOLD}/10"
exit 1
fi
- name: Run tox -e prospector
if: ${{ matrix.platform != 'windows-latest' }}
run: |
# Manually emulate "allow_failure" for this step
exit_code=0
tox -e prospector -vv -s false || exit_code=$?
if [[ "$exit_code" -ne 0 ]]; then
echo "[ERROR] Prospector Failed"
exit 0 # TODO: fix prospector to get rid of this code
fi
# DRAW PYTHON DEPENDENCY GRAPHS
check_trigger_draw_dependency_graphs:
runs-on: ubuntu-latest
name: Draw Python Dependency Graphs?
needs: set_github_outputs
if: needs.set_github_outputs.outputs.DRAW_DEPS_SVG_GRAPHS == 'true'
outputs:
SHOULD_DRAW_GRAPHS: ${{ steps.decide_if_should_draw_graphs.outputs.SHOULD_DRAW_GRAPHS }}
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 2
- name: Decide if should draw graphs
id: decide_if_should_draw_graphs
run: |
# if branch is master or dev; or if we are on tag starting with "v"
if [[ ${GITHUB_REF_NAME} == "master" || ${GITHUB_REF_NAME} == "dev" || "${GITHUB_REF}" =~ refs/tags/v.* ]]; then
SHOULD_DRAW_GRAPHS=true
else
echo "=============== list modified files ==============="
git diff --name-only HEAD^ HEAD
echo "========== check paths of modified files =========="
git diff --name-only HEAD^ HEAD > files.txt
SHOULD_DRAW_GRAPHS=false
while read file; do
echo $file
if [[ $file =~ ^src/ ]]; then
echo "This modified file is under the 'src' folder."
SHOULD_DRAW_GRAPHS=true
break
fi
done < files.txt
fi
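# e.g. on a feature branch, a push touching only README or CI files leaves SHOULD_DRAW_GRAPHS=false, while any change under src/ sets it to true (illustrative)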
echo "SHOULD_DRAW_GRAPHS=$SHOULD_DRAW_GRAPHS" >> $GITHUB_OUTPUT
draw-dependencies:
runs-on: ubuntu-latest
needs: check_trigger_draw_dependency_graphs
if: needs.check_trigger_draw_dependency_graphs.outputs.SHOULD_DRAW_GRAPHS == 'true'
name: Draw Python Dependencies as Graphs, in .svg
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install tox
run: |
python -m pip install --upgrade pip
python -m pip install tox==3.28
- name: Install dependencies (i.e. the dot binary of graphviz)
run: |
sudo apt-get update -y --allow-releaseinfo-change
sudo apt-get install -y graphviz
- name: Draw Dependency Graphs as .svg files
run: tox -e pydeps -vv -s false
- name: Upload Dependency Graphs as artifacts
uses: actions/upload-artifact@v3
with:
name: dependency-graphs
path: pydeps/
if-no-files-found: warn # 'error' or 'ignore' are also available, defaults to `warn`