Update configurations (#51)
- updated GitHub Actions workflow: skip publishing existing versions
- included ruff pre-commit hook
- updated dependency lock
- included tests for Python 3.12
jannikmi committed Apr 8, 2024
1 parent 05145c6 commit 6aa182c
Showing 26 changed files with 1,588 additions and 1,134 deletions.
10 changes: 10 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,10 @@
version: 2
updates:
- package-ecosystem: "pip"
directory: "pyproject.toml"
schedule:
interval: "weekly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
50 changes: 32 additions & 18 deletions .github/workflows/build_test.yml
@@ -22,14 +22,19 @@ jobs:
- "3.9"
- "3.10"
- "3.11"
- "3.12"
env:
TOXENV: ${{ matrix.tox-env }}
TOX_SKIP_MISSING_INTERPRETERS: False

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

- name: Run pre-commit hook
uses: pre-commit/action@v3.0.1

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

@@ -42,16 +47,18 @@ jobs:
- name: Run tox
run: tox

tag_release:
name: tag current release
if: endsWith(github.ref, '/master')
needs: test
deploy:
runs-on: ubuntu-latest
needs: test
if: endsWith(github.ref, '/master')
permissions:
id-token: write
contents: write
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.8

@@ -61,26 +68,33 @@
- name: Fetch version
id: fetch_version
run: echo "::set-output name=version_nr::$(poetry version -s)"
run: echo "version_nr=$(poetry version -s)" >> $GITHUB_OUTPUT

- name: Build a binary wheel and a source tarball
run: |
poetry build
poetry build --no-interaction
- name: Test PyPI Publishing
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.TEST_PYPI_DEPLOYMENT_API_KEY }}
repository-url: https://test.pypi.org/legacy/
skip-existing: true

- name: Create Release
id: create_release
uses: actions/create-release@v1
- name: Create GitHub Release
id: create_gh_release
uses: ncipollo/release-action@v1
env:
# use token provided by Actions
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VERSION: ${{ steps.fetch_version.outputs.version_nr }}
with:
tag_name: ${{env.VERSION}}
release_name: Release ${{env.VERSION}}
tag: ${{env.VERSION}}
name: Release ${{env.VERSION}}
draft: false
prerelease: false
skipIfReleaseExists: true

- name: PyPI Publishing
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_DEPLOYMENT_API_KEY }}
skip-existing: true
138 changes: 65 additions & 73 deletions .pre-commit-config.yaml
@@ -1,82 +1,74 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: check-ast # Is it valid Python?
- id: debug-statements # no debugging statements used
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
# - id: name-tests-test
- id: check-docstring-first
- id: requirements-txt-fixer
- id: detect-private-key
# - id: check-executables-have-shebangs
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-ast # Is it valid Python?
- id: debug-statements # no debugging statements used
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: check-case-conflict
- id: check-merge-conflict
# - id: name-tests-test
- id: check-docstring-first
- id: requirements-txt-fixer
- id: detect-private-key
# - id: check-executables-have-shebangs

- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
args:
- --profile=black
- --filter-files
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.5
hooks:
# linter.
- id: ruff
args: [ --fix ]
- id: ruff-format

- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
language_version: python3
args:
- --line-length=120
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
- id: blacken-docs
additional_dependencies: [ black==22.1 ]

- repo: https://github.com/asottile/blacken-docs
rev: 1.13.0
hooks:
- id: blacken-docs
additional_dependencies: [ black==22.1 ]
- repo: https://github.com/pycqa/flake8
rev: 7.0.0
hooks:
- id: flake8
exclude: (docs|tests)
args:
- --max-line-length=120
- --ignore=E501,W503
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-tidy-imports

- repo: https://github.com/pycqa/flake8
rev: 6.0.0
hooks:
- id: flake8
exclude: (docs|tests)
args:
- --max-line-length=120
- --ignore=E501,W503
additional_dependencies:
- flake8-bugbear
- flake8-comprehensions
- flake8-tidy-imports
# - repo: https://github.com/mgedmin/check-manifest
# rev: "0.49"
# hooks:
# - id: check-manifest
# args: [ "--no-build-isolation", "--ignore", "*.png,docs/*,paper/*,scripts/*,setup.py,publish.py,readthedocs.yml,poetry.lock" ]
# additional_dependencies: [ numpy, poetry==1.3.2]

# - repo: https://github.com/mgedmin/check-manifest
# rev: "0.49"
# hooks:
# - id: check-manifest
# args: [ "--no-build-isolation", "--ignore", "*.png,docs/*,paper/*,scripts/*,setup.py,publish.py,readthedocs.yml,poetry.lock" ]
# additional_dependencies: [ numpy, poetry==1.3.2]
# TODO sphinx check
# - repo: https://github.com/myint/rstcheck
# rev: 'v3.3.1'
# hooks:
# - id: rstcheck

# TODO sphinx check
# - repo: https://github.com/myint/rstcheck
# rev: 'v3.3.1'
# hooks:
# - id: rstcheck
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.2
hooks:
- id: pyupgrade

- repo: https://github.com/asottile/pyupgrade
rev: v3.3.1
hooks:
- id: pyupgrade
# # very detailed linting:
# - repo: https://github.com/pycqa/pylint
# rev: pylint-2.6.0
# hooks:
# - id: pylint

# # very detailed linting:
# - repo: https://github.com/pycqa/pylint
# rev: pylint-2.6.0
# hooks:
# - id: pylint

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.0.0
hooks:
- id: mypy
exclude: ^((tests|scripts)/)
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.9.0
hooks:
- id: mypy
exclude: ^((tests|scripts)/)
12 changes: 12 additions & 0 deletions CHANGELOG.rst
@@ -54,6 +54,18 @@ idea:
- the evaluation of subtrees is independent and could theoretically be done in parallel
probably not worth the effort. more reasonable to just evaluate multiple polynomials in parallel

3.x.x (TBA)
__________________


internal:

- updated GitHub Actions workflow: skip publishing existing versions
- included ruff pre-commit hook
- updated dependency lock
- included tests for Python 3.12


3.1.0 (2023-02-15)
__________________

11 changes: 9 additions & 2 deletions Makefile
@@ -6,7 +6,7 @@ update:
poetry update

install:
poetry install --all-extras
poetry install --with docs,dev --all-extras --sync

VENV_NAME=multivar_horner
venv:
@@ -17,9 +17,16 @@ hook:
pre-commit install
pre-commit run --all-files

hook2:
hookup:
pre-commit autoupdate

test:
poetry run pytest

tox:
tox


clean:
rm -rf .pytest_cache .coverage coverage.xml tests/__pycache__ src/__pycache__ mlruns/ .mypyp_cache/

6 changes: 3 additions & 3 deletions docs/conf.py
@@ -25,15 +25,15 @@
sys.path.insert(0, os.path.join(project_root))

# needed for auto document, ATTENTION: must then be installed during online build!
import multivar_horner
import multivar_horner # noqa: E402 Module level import not at top of file

print(multivar_horner)

# -- Project information -----------------------------------------------------

project = "multivar_horner"
copyright = "2018, Jannik Michelfeit"
author = "Jannik Michelfeit"
copyright = "2018, Jannik Kissinger"
author = "Jannik Kissinger"

# The full version, including alpha/beta/rc tags.
release = subprocess.getoutput("poetry version -s")
12 changes: 10 additions & 2 deletions multivar_horner/__init__.py
@@ -1,5 +1,13 @@
from multivar_horner.classes.abstract_poly import load_pickle
from multivar_horner.classes.horner_poly import HornerMultivarPolynomial, HornerMultivarPolynomialOpt
from multivar_horner.classes.horner_poly import (
HornerMultivarPolynomial,
HornerMultivarPolynomialOpt,
)
from multivar_horner.classes.regular_poly import MultivarPolynomial

__all__ = ["HornerMultivarPolynomial", "MultivarPolynomial", "HornerMultivarPolynomialOpt", "load_pickle"]
__all__ = [
"HornerMultivarPolynomial",
"MultivarPolynomial",
"HornerMultivarPolynomialOpt",
"load_pickle",
]
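For orientation, the names re-exported in the reformatted __all__ above are the package's public entry points. A minimal usage sketch follows; the coefficient/exponent array shapes and the p(x) call syntax are assumptions based on the project README, not part of this diff:

    import numpy as np

    from multivar_horner import HornerMultivarPolynomial, MultivarPolynomial

    # p(x) = 5.0 + 1.0 x1^3 x2 + 2.0 x1^2 x3 + 3.0 x1 x2 x3
    coefficients = np.array([[5.0], [1.0], [2.0], [3.0]], dtype=np.float64)  # shape (N, 1)
    exponents = np.array([[0, 0, 0], [3, 1, 0], [2, 0, 1], [1, 1, 1]], dtype=np.uint32)  # shape (N, dim)

    # rectify_input=True converts and validates the input arrays (see abstract_poly.py below)
    horner_poly = HornerMultivarPolynomial(coefficients, exponents, rectify_input=True)
    naive_poly = MultivarPolynomial(coefficients, exponents, rectify_input=True)

    x = np.array([-2.0, 3.0, 1.0])
    print(horner_poly(x), naive_poly(x))  # both representations evaluate to the same value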
2 changes: 1 addition & 1 deletion multivar_horner/_numba_replacements.py
@@ -1,4 +1,4 @@
""" 'transparent' numba functionality replacements
"""'transparent' numba functionality replacements
njit decorator
data types
4 changes: 3 additions & 1 deletion multivar_horner/c_evaluation.py
@@ -36,7 +36,9 @@ def write_c_file(
instr = "#include <math.h>\n"
# NOTE: the coefficient array will be used to store intermediary results
# -> copy to use independent instance of array (NO pointer to external array!)
func_def = f"{DOUBLE} {EVAL_FCT}({DOUBLE} x[{nr_dims}], {DOUBLE} {COEFFS}[{nr_coeffs}])"
func_def = (
f"{DOUBLE} {EVAL_FCT}({DOUBLE} x[{nr_dims}], {DOUBLE} {COEFFS}[{nr_coeffs}])"
)
# declare function ("header")
instr += f"{func_def};\n"
# function definition
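For illustration, the reformatted f-string above only builds the C function signature that write_c_file emits; a small sketch of how it expands (the concrete values of DOUBLE, EVAL_FCT and COEFFS are assumptions for this example, not taken from the diff):

    # assumed constant values, for illustration only
    DOUBLE, EVAL_FCT, COEFFS = "double", "eval", "coeffs"
    nr_dims, nr_coeffs = 3, 4
    func_def = f"{DOUBLE} {EVAL_FCT}({DOUBLE} x[{nr_dims}], {DOUBLE} {COEFFS}[{nr_coeffs}])"
    print(func_def)  # -> double eval(double x[3], double coeffs[4])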
21 changes: 16 additions & 5 deletions multivar_horner/classes/abstract_poly.py
@@ -58,16 +58,22 @@ def __init__(
self.compute_representation: bool = compute_representation

if rectify_input:
coefficients, exponents = rectify_construction_parameters(coefficients, exponents)
coefficients, exponents = rectify_construction_parameters(
coefficients, exponents
)
validate_construction_parameters(coefficients, exponents)
self.coefficients: np.ndarray = coefficients
self.exponents: np.ndarray = exponents

self.num_monomials: int = self.exponents.shape[0]
self.dim: int = self.exponents.shape[1]
self.unused_variables = np.where(~np.any(self.exponents.astype(BOOL_DTYPE), axis=1))[0]
self.unused_variables = np.where(
~np.any(self.exponents.astype(BOOL_DTYPE), axis=1)
)[0]
self.total_degree: int = np.max(np.sum(self.exponents, axis=0))
self.euclidean_degree: float = np.max(np.linalg.norm(self.exponents, ord=2, axis=0))
self.euclidean_degree: float = np.max(
np.linalg.norm(self.exponents, ord=2, axis=0)
)
self.maximal_degree: int = np.max(self.exponents)
self.num_ops: int = 0
self.representation: str
@@ -158,7 +164,9 @@ def get_partial_derivative(self, i: int, *args, **kwargs) -> "AbstractPolynomial
# multiply the coefficients with the exponent of the i-th coordinate
# f(x) = a x^b
# f'(x) = ab x^(b-1)
new_coefficients = np.multiply(new_coefficients.flatten(), new_exponents[:, coord_index])
new_coefficients = np.multiply(
new_coefficients.flatten(), new_exponents[:, coord_index]
)
new_coefficients = new_coefficients.reshape(-1, 1)

# reduce the exponent of the i-th coordinate by 1
@@ -175,7 +183,10 @@ def get_gradient(self, *args, **kwargs) -> List["AbstractPolynomial"]:
Returns:
the list of all partial derivatives
"""
return [self.get_partial_derivative(i, *args, **kwargs) for i in range(1, self.dim + 1)]
return [
self.get_partial_derivative(i, *args, **kwargs)
for i in range(1, self.dim + 1)
]

def change_coefficients(
self,
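To illustrate the reformatted derivative code above: get_partial_derivative(i, ...) multiplies every coefficient by the exponent of the i-th variable and then decrements that exponent (f(x) = a x^b, f'(x) = a b x^(b-1)); get_gradient collects these polynomials for i = 1..dim. A minimal sketch of the expected behaviour, with the evaluation call syntax assumed from the project README rather than taken from this diff:

    import numpy as np

    from multivar_horner import MultivarPolynomial

    # p(x1, x2) = 2 x1^2 x2  ->  dp/dx1 = 4 x1 x2
    p = MultivarPolynomial(np.array([[2.0]]), np.array([[2, 1]]), rectify_input=True)
    dp_dx1 = p.get_partial_derivative(1)  # variable indices are 1-based, as in the loop above
    gradient = p.get_gradient()  # [dp/dx1, dp/dx2]

    x = np.array([3.0, 5.0])
    print(dp_dx1(x))  # expected: 4 * 3 * 5 = 60 (assuming the instance is callable)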
