From 022a2571532ffe796f73dc8020304716ba7a267d Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 10:55:03 +0200 Subject: [PATCH 1/7] Prevent SciPy deprecation warning for `estimate_gradients_2d_global` (#475) * Prevent SciPy deprecation warning Currently, I see: ``` ~/pipefunc/.venv/lib/python3.13/site-packages/adaptive/learner/learner2D.py:52: DeprecationWarning: `scipy.interpolate.interpnd.estimate_gradients_2d_global` is deprecated along with the `scipy.interpolate.interpnd` namespace. `scipy.interpolate.interpnd.estimate_gradients_2d_global` will be removed in SciPy 1.16.0, and the `scipy.interpolate.interpnd` namespace will be removed in SciPy 2.0.0. gradients = interpolate.interpnd.estimate_gradients_2d_global( ``` * Use CloughTocher2DInterpolator --- adaptive/learner/learner2D.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index 017a9da3..15864a80 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -11,7 +11,7 @@ import cloudpickle import numpy as np from scipy import interpolate -from scipy.interpolate import LinearNDInterpolator +from scipy.interpolate import CloughTocher2DInterpolator, LinearNDInterpolator from adaptive.learner.base_learner import BaseLearner from adaptive.learner.triangulation import simplex_volume_in_embedding @@ -49,9 +49,7 @@ def deviations(ip: LinearNDInterpolator) -> list[np.ndarray]: The deviation per triangle. """ values = ip.values / (np.ptp(ip.values, axis=0).max() or 1) - gradients = interpolate.interpnd.estimate_gradients_2d_global( - ip.tri, values, tol=1e-6 - ) + gradients = CloughTocher2DInterpolator(ip.tri, values, tol=1e-6).grad simplices = ip.tri.simplices p = ip.tri.points[simplices] From 5167fe0c5cfafd85aa5cc3d6a7c9018b978aacae Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 11:16:58 +0200 Subject: [PATCH 2/7] Use `uv` as Nox backend and several related improvements (#476) --- .github/workflows/coverage.yml | 8 +++--- .github/workflows/matchers/pytest.json | 18 ------------ .github/workflows/nox.yml | 12 ++++---- .github/workflows/typeguard.yml | 8 +++--- README.md | 4 +-- noxfile.py | 40 +++++++++++++++++--------- pyproject.toml | 11 ++++++- 7 files changed, 52 insertions(+), 49 deletions(-) delete mode 100644 .github/workflows/matchers/pytest.json diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index ab393048..4c177878 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -11,10 +11,10 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: - python-version: 3.11 - - name: Install dependencies - run: pip install nox + python-version: 3.13 + - name: Install uv + uses: astral-sh/setup-uv@v6 - name: Test with nox - run: nox -e coverage + run: uv run --group nox nox -e coverage - name: Upload coverage to Codecov uses: codecov/codecov-action@v4 diff --git a/.github/workflows/matchers/pytest.json b/.github/workflows/matchers/pytest.json deleted file mode 100644 index 3e5d8d5b..00000000 --- a/.github/workflows/matchers/pytest.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "problemMatcher": [ - { - "owner": "python", - "pattern": [ - { - "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$", - "file": 1, - "line": 2 - }, - { - "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$", - "message": 2 - } - ] - } - ] -} diff --git a/.github/workflows/nox.yml 
b/.github/workflows/nox.yml index 499b48c4..f021d3b2 100644 --- a/.github/workflows/nox.yml +++ b/.github/workflows/nox.yml @@ -12,7 +12,7 @@ jobs: fail-fast: false matrix: platform: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 @@ -20,11 +20,9 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Register Python problem matcher - run: echo "::add-matcher::.github/workflows/matchers/pytest.json" - - name: Install dependencies - run: pip install nox pytest-github-actions-annotate-failures + - name: Install uv + uses: astral-sh/setup-uv@v6 - name: Test with nox using minimal dependencies - run: nox -e "pytest-${{ matrix.python-version }}(all_deps=False)" + run: uv run --group nox nox -e "pytest_min_deps-${{ matrix.python-version }}" - name: Test with nox with all dependencies - run: nox -e "pytest-${{ matrix.python-version }}(all_deps=True)" + run: uv run --group nox nox -e "pytest_all_deps-${{ matrix.python-version }}" diff --git a/.github/workflows/typeguard.yml b/.github/workflows/typeguard.yml index a95ac4f2..3689442b 100644 --- a/.github/workflows/typeguard.yml +++ b/.github/workflows/typeguard.yml @@ -12,8 +12,8 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: - python-version: "3.11" - - name: Install dependencies - run: pip install nox + python-version: "3.13" + - name: Install uv + uses: astral-sh/setup-uv@v6 - name: Test with nox - run: nox -e pytest_typeguard + run: uv run --group nox nox -e pytest_typeguard diff --git a/README.md b/README.md index c7b2fcad..dcfd4ff8 100644 --- a/README.md +++ b/README.md @@ -160,12 +160,12 @@ jupyter labextension install @pyviz/jupyterlab_pyviz ## :wrench: Development -Clone the repository and run `pip install -e ".[notebook,testing,other]"` to add a link to the cloned repo into your Python path: +Clone the repository and run `pip install -e ".[notebook,test,other]"` to add a link to the cloned repo into your Python path: ```bash git clone git@github.com:python-adaptive/adaptive.git cd adaptive -pip install -e ".[notebook,testing,other]" +pip install -e ".[notebook,test,other]" ``` We recommend using a Conda environment or a virtualenv for package management during Adaptive development. 
diff --git a/noxfile.py b/noxfile.py index 55544435..6a6114bb 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,31 +1,45 @@ """Nox configuration file.""" +import os + import nox +nox.options.default_venv_backend = "uv" + +python = ["3.9", "3.10", "3.11", "3.12", "3.13"] +num_cpus = os.cpu_count() or 1 +xdist = ("-n", "auto") if num_cpus > 2 else () + + +@nox.session(python=python) +def pytest_min_deps(session: nox.Session) -> None: + """Run pytest with no optional dependencies.""" + session.install(".[test]") + session.run("coverage", "erase") + session.run("pytest", *xdist) + -@nox.session(python=["3.9", "3.10", "3.11", "3.12"]) -@nox.parametrize("all_deps", [True, False]) -def pytest(session: nox.Session, all_deps: bool) -> None: - """Run pytest with optional dependencies.""" - session.install(".[testing,other]" if all_deps else ".[testing]") +@nox.session(python=python) +def pytest_all_deps(session: nox.Session) -> None: + """Run pytest with "other" optional dependencies.""" + session.install(".[test,other]") session.run("coverage", "erase") - session.run("pytest") + session.run("pytest", *xdist) -@nox.session(python="3.11") +@nox.session(python="3.13") def pytest_typeguard(session: nox.Session) -> None: """Run pytest with typeguard.""" - session.install(".[testing,other]") + session.install(".[test,other]") session.run("coverage", "erase") - session.run("pytest", "--typeguard-packages=adaptive") + session.run("pytest", "--typeguard-packages=adaptive", *xdist) -@nox.session(python="3.11") +@nox.session(python="3.13") def coverage(session: nox.Session) -> None: """Generate coverage report.""" - session.install("coverage") - session.install(".[testing,other]") - session.run("pytest") + session.install(".[test,other]") + session.run("pytest", *xdist) session.run("coverage", "report") session.run("coverage", "xml") diff --git a/pyproject.toml b/pyproject.toml index 8dbe2c89..5e59cad3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,15 +47,18 @@ notebook = [ "matplotlib", "plotly", ] -testing = [ +test = [ "flaky", "pytest", "pytest-cov", "pytest-randomly", "pytest-timeout", + "pytest-xdist", "pre_commit", "typeguard", + "coverage", ] +dev = ["adaptive[test,nox,notebook,other]"] [project.urls] homepage = "https://adaptive.readthedocs.io/" @@ -66,6 +69,12 @@ repository = "https://github.com/python-adaptive/adaptive" content-type = "text/markdown" file = "README.md" +[dependency-groups] +nox = [ + "nox", + "pytest-github-actions-annotate-failures", +] + [tool.setuptools.packages.find] include = ["adaptive.*", "adaptive"] From d25e7e7656c6cabcd6978ca0cb4de358e745bf60 Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 12:40:51 +0200 Subject: [PATCH 3/7] Follow SPEC 0 and drop support for Python 3.9 and 3.10 (#477) --- .github/workflows/nox.yml | 2 +- adaptive/learner/average_learner.py | 2 +- adaptive/learner/average_learner1D.py | 3 +- adaptive/learner/balancing_learner.py | 20 ++--- adaptive/learner/base_learner.py | 3 +- adaptive/learner/data_saver.py | 3 +- adaptive/learner/integrator_learner.py | 3 +- adaptive/learner/learner1D.py | 44 ++++------ adaptive/learner/learner2D.py | 3 +- adaptive/learner/sequence_learner.py | 10 +-- adaptive/runner.py | 89 +++++++-------------- adaptive/tests/algorithm_4.py | 2 +- adaptive/tests/test_average_learner1d.py | 2 +- adaptive/tests/test_notebook_integration.py | 8 +- adaptive/types.py | 16 ++-- adaptive/utils.py | 4 +- docs/environment.yml | 2 +- docs/source/algorithms_and_examples.md | 2 +- 
docs/source/tutorial/tutorial.LearnerND.md | 2 +- environment.yml | 2 +- noxfile.py | 2 +- pyproject.toml | 11 ++- 22 files changed, 81 insertions(+), 154 deletions(-) diff --git a/.github/workflows/nox.yml b/.github/workflows/nox.yml index f021d3b2..a03b2d7b 100644 --- a/.github/workflows/nox.yml +++ b/.github/workflows/nox.yml @@ -12,7 +12,7 @@ jobs: fail-fast: false matrix: platform: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.11", "3.12", "3.13"] steps: - uses: actions/checkout@v4 diff --git a/adaptive/learner/average_learner.py b/adaptive/learner/average_learner.py index c3d4892b..3252530b 100644 --- a/adaptive/learner/average_learner.py +++ b/adaptive/learner/average_learner.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import Callable from math import sqrt -from typing import Callable import cloudpickle import numpy as np diff --git a/adaptive/learner/average_learner1D.py b/adaptive/learner/average_learner1D.py index 9678b4f6..cb844f01 100644 --- a/adaptive/learner/average_learner1D.py +++ b/adaptive/learner/average_learner1D.py @@ -3,10 +3,9 @@ import math import sys from collections import defaultdict -from collections.abc import Iterable, Sequence +from collections.abc import Callable, Iterable, Sequence from copy import deepcopy from math import hypot -from typing import Callable import numpy as np import scipy.stats diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py index e9a4a661..b8a340f9 100644 --- a/adaptive/learner/balancing_learner.py +++ b/adaptive/learner/balancing_learner.py @@ -1,13 +1,12 @@ from __future__ import annotations import itertools -import sys from collections import defaultdict -from collections.abc import Iterable, Sequence +from collections.abc import Callable, Iterable, Sequence from contextlib import suppress from functools import partial from operator import itemgetter -from typing import Any, Callable, Union, cast +from typing import Any, Literal, TypeAlias, cast import numpy as np @@ -16,13 +15,6 @@ from adaptive.types import Int, Real from adaptive.utils import cache_latest, named_product, restore -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -from typing import Literal - try: import pandas @@ -38,11 +30,9 @@ def dispatch(child_functions: list[Callable], arg: Any) -> Any: STRATEGY_TYPE: TypeAlias = Literal["loss_improvements", "loss", "npoints", "cycle"] -CDIMS_TYPE: TypeAlias = Union[ - Sequence[dict[str, Any]], - tuple[Sequence[str], Sequence[tuple[Any, ...]]], - None, -] +CDIMS_TYPE: TypeAlias = ( + Sequence[dict[str, Any]] | tuple[Sequence[str], Sequence[tuple[Any, ...]]] | None +) class BalancingLearner(BaseLearner): diff --git a/adaptive/learner/base_learner.py b/adaptive/learner/base_learner.py index 73720dd5..f5ef73ec 100644 --- a/adaptive/learner/base_learner.py +++ b/adaptive/learner/base_learner.py @@ -1,8 +1,9 @@ from __future__ import annotations import abc +from collections.abc import Callable from contextlib import suppress -from typing import TYPE_CHECKING, Any, Callable, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar import cloudpickle diff --git a/adaptive/learner/data_saver.py b/adaptive/learner/data_saver.py index a6980738..644691a5 100644 --- a/adaptive/learner/data_saver.py +++ b/adaptive/learner/data_saver.py @@ -2,7 +2,8 @@ import functools from collections import OrderedDict -from typing import 
Any, Callable +from collections.abc import Callable +from typing import Any from adaptive.learner.base_learner import BaseLearner, LearnerType from adaptive.utils import copy_docstring_from diff --git a/adaptive/learner/integrator_learner.py b/adaptive/learner/integrator_learner.py index 0fc97df6..d6ee9ef1 100644 --- a/adaptive/learner/integrator_learner.py +++ b/adaptive/learner/integrator_learner.py @@ -3,9 +3,10 @@ import sys from collections import defaultdict +from collections.abc import Callable from math import sqrt from operator import attrgetter -from typing import TYPE_CHECKING, Callable +from typing import TYPE_CHECKING import cloudpickle import numpy as np diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py index bf04743b..6cd88b4a 100644 --- a/adaptive/learner/learner1D.py +++ b/adaptive/learner/learner1D.py @@ -3,10 +3,9 @@ import collections.abc import itertools import math -import sys -from collections.abc import Sequence +from collections.abc import Callable, Sequence from copy import copy, deepcopy -from typing import TYPE_CHECKING, Any, Callable, Optional, Union +from typing import TYPE_CHECKING, Any, TypeAlias import cloudpickle import numpy as np @@ -24,12 +23,6 @@ partial_function_from_dataframe, ) -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - - try: import pandas @@ -42,28 +35,21 @@ # -- types -- # Commonly used types - Interval: TypeAlias = Union[tuple[float, float], tuple[float, float, int]] - NeighborsType: TypeAlias = SortedDict[float, list[Optional[float]]] + Interval: TypeAlias = tuple[float, float] | tuple[float, float, int] + NeighborsType: TypeAlias = SortedDict[float, list[float | None]] # Types for loss_per_interval functions XsType0: TypeAlias = tuple[float, float] - YsType0: TypeAlias = Union[tuple[float, float], tuple[np.ndarray, np.ndarray]] - XsType1: TypeAlias = tuple[ - Optional[float], Optional[float], Optional[float], Optional[float] - ] - YsType1: TypeAlias = Union[ - tuple[Optional[float], Optional[float], Optional[float], Optional[float]], - tuple[ - Optional[np.ndarray], - Optional[np.ndarray], - Optional[np.ndarray], - Optional[np.ndarray], - ], - ] - XsTypeN: TypeAlias = tuple[Optional[float], ...] - YsTypeN: TypeAlias = Union[ - tuple[Optional[float], ...], tuple[Optional[np.ndarray], ...] - ] + YsType0: TypeAlias = tuple[float, float] | tuple[np.ndarray, np.ndarray] + XsType1: TypeAlias = tuple[float | None, float | None, float | None, float | None] + YsType1: TypeAlias = ( + tuple[float | None, float | None, float | None, float | None] + | tuple[ + np.ndarray | None, np.ndarray | None, np.ndarray | None, np.ndarray | None + ] + ) + XsTypeN: TypeAlias = tuple[float | None, ...] + YsTypeN: TypeAlias = tuple[float | None, ...] | tuple[np.ndarray | None, ...] 
__all__ = [ @@ -598,7 +584,7 @@ def tell(self, x: float, y: Float | Sequence[Float] | np.ndarray) -> None: ) # either it is a float/int, if not, try casting to a np.array - if not isinstance(y, (float, int)): + if not isinstance(y, float | int): y = np.asarray(y, dtype=float) # Add point to the real data dict diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index 15864a80..49b9cef5 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -3,10 +3,9 @@ import itertools import warnings from collections import OrderedDict -from collections.abc import Iterable +from collections.abc import Callable, Iterable from copy import copy from math import sqrt -from typing import Callable import cloudpickle import numpy as np diff --git a/adaptive/learner/sequence_learner.py b/adaptive/learner/sequence_learner.py index c307744f..e8d83af5 100644 --- a/adaptive/learner/sequence_learner.py +++ b/adaptive/learner/sequence_learner.py @@ -1,8 +1,7 @@ from __future__ import annotations -import sys from copy import copy -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, TypeAlias import cloudpickle from sortedcontainers import SortedDict, SortedSet @@ -16,8 +15,7 @@ ) if TYPE_CHECKING: - from collections.abc import Sequence - from typing import Callable + from collections.abc import Callable, Sequence try: import pandas @@ -27,10 +25,6 @@ except ModuleNotFoundError: with_pandas = False -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias PointType: TypeAlias = tuple[Int, Any] diff --git a/adaptive/runner.py b/adaptive/runner.py index 4f877096..592fca4a 100644 --- a/adaptive/runner.py +++ b/adaptive/runner.py @@ -8,77 +8,48 @@ import itertools import pickle import platform -import sys import time import traceback import warnings +from collections.abc import Callable from contextlib import suppress from datetime import datetime, timedelta from importlib.util import find_spec -from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union +from typing import TYPE_CHECKING, Any, Literal, TypeAlias import loky -from adaptive import ( - BalancingLearner, - DataSaver, - IntegratorLearner, - SequenceLearner, -) +from adaptive import BalancingLearner, DataSaver, IntegratorLearner, SequenceLearner from adaptive.learner.base_learner import LearnerType from adaptive.notebook_integration import in_ipynb, live_info, live_plot from adaptive.utils import SequentialExecutor -ExecutorTypes: TypeAlias = Union[ - concurrent.ProcessPoolExecutor, - concurrent.ThreadPoolExecutor, - SequentialExecutor, - loky.reusable_executor._ReusablePoolExecutor, -] -FutureTypes: TypeAlias = Union[concurrent.Future, asyncio.Future, asyncio.Task] - if TYPE_CHECKING: import holoviews -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - - with_ipyparallel = find_spec("ipyparallel") is not None with_distributed = find_spec("distributed") is not None with_mpi4py = find_spec("mpi4py") is not None if TYPE_CHECKING: - ExecutorTypes = Optional[()] - FutureTypes = Optional[()] - - if with_distributed: - import distributed - - ExecutorTypes = Optional[ - Union[ - ExecutorTypes, distributed.Client, distributed.cfexecutor.ClientExecutor - ] - ] - - if with_mpi4py: - import mpi4py.futures - - ExecutorTypes = Optional[Union[ExecutorTypes, mpi4py.futures.MPIPoolExecutor]] - - if with_ipyparallel: - import ipyparallel - from ipyparallel.client.asyncresult 
import AsyncResult + import distributed + import ipyparallel + import mpi4py.futures + + ExecutorTypes: TypeAlias = ( + concurrent.ProcessPoolExecutor + | concurrent.ThreadPoolExecutor + | SequentialExecutor + | loky.reusable_executor._ReusablePoolExecutor + | distributed.Client + | distributed.cfexecutor.ClientExecutor + | mpi4py.futures.MPIPoolExecutor + | ipyparallel.Client + | ipyparallel.client.view.ViewExecutor + ) + FutureTypes: TypeAlias = concurrent.Future | asyncio.Future - ExecutorTypes = Optional[ - Union[ - ExecutorTypes, ipyparallel.Client, ipyparallel.client.view.ViewExecutor - ] - ] - FutureTypes = Optional[Union[FutureTypes, AsyncResult]] with suppress(ModuleNotFoundError): import uvloop @@ -203,7 +174,7 @@ def __init__( self._max_tasks = ntasks - self._pending_tasks: dict[concurrent.Future, int] = {} + self._pending_tasks: dict[FutureTypes, int] = {} # if we instantiate our own executor, then we are also responsible # for calling 'shutdown' @@ -292,7 +263,8 @@ def _process_futures( pid = self._pending_tasks.pop(fut) try: y = fut.result() - t = time.time() - fut.start_time # total execution time + # total execution time + t = time.time() - fut.start_time # type: ignore[union-attr] except Exception as e: self._tracebacks[pid] = traceback.format_exc() self._to_retry[pid] = self._to_retry.get(pid, 0) + 1 @@ -508,12 +480,12 @@ def _run(self) -> None: try: while not self.goal(self.learner): futures = self._get_futures() - done, _ = concurrent.wait(futures, return_when=first_completed) - self._process_futures(done) + done, _ = concurrent.wait(futures, return_when=first_completed) # type: ignore[arg-type] + self._process_futures(done) # type: ignore[arg-type] finally: remaining = self._remove_unfinished() if remaining: - concurrent.wait(remaining) + concurrent.wait(remaining) # type: ignore[arg-type] # Some futures get their result set, despite being cancelled. 
# see https://github.com/python-adaptive/adaptive/issues/319 with_result = {f for f in remaining if not f.cancelled() and f.done()} @@ -835,13 +807,12 @@ async def _run(self) -> None: try: while not self.goal(self.learner): futures = self._get_futures() - kw = {"loop": self.ioloop} if sys.version_info[:2] < (3, 10) else {} - done, _ = await asyncio.wait(futures, return_when=first_completed, **kw) # type: ignore[arg-type] + done, _ = await asyncio.wait(futures, return_when=first_completed) # type: ignore[arg-type,type-var] self._process_futures(done) finally: remaining = self._remove_unfinished() if remaining: - await asyncio.wait(remaining) + await asyncio.wait(remaining) # type: ignore[type-var] self._cleanup() def elapsed_time(self) -> float: @@ -1062,9 +1033,7 @@ def _get_ncores( import mpi4py.futures if with_ipyparallel and isinstance(ex, ipyparallel.client.view.ViewExecutor): return len(ex.view) - elif isinstance( - ex, (concurrent.ProcessPoolExecutor, concurrent.ThreadPoolExecutor) - ): + elif isinstance(ex, concurrent.ProcessPoolExecutor | concurrent.ThreadPoolExecutor): return ex._max_workers # type: ignore[union-attr] elif isinstance(ex, loky.reusable_executor._ReusablePoolExecutor): return ex._max_workers # type: ignore[union-attr] @@ -1119,7 +1088,7 @@ def stop_after(*, seconds=0, minutes=0, hours=0) -> Callable[[LearnerType], bool class _TimeGoal: def __init__(self, dt: timedelta | datetime | int | float): - self.dt = dt if isinstance(dt, (timedelta, datetime)) else timedelta(seconds=dt) + self.dt = dt if isinstance(dt, timedelta | datetime) else timedelta(seconds=dt) self.start_time = None def __call__(self, _): diff --git a/adaptive/tests/algorithm_4.py b/adaptive/tests/algorithm_4.py index 27832298..8741fdc5 100644 --- a/adaptive/tests/algorithm_4.py +++ b/adaptive/tests/algorithm_4.py @@ -3,8 +3,8 @@ from __future__ import annotations from collections import defaultdict +from collections.abc import Callable from fractions import Fraction -from typing import Callable import numpy as np from numpy.testing import assert_allclose diff --git a/adaptive/tests/test_average_learner1d.py b/adaptive/tests/test_average_learner1d.py index c0148c5e..a9be7fce 100644 --- a/adaptive/tests/test_average_learner1d.py +++ b/adaptive/tests/test_average_learner1d.py @@ -21,7 +21,7 @@ def almost_equal_dicts(a, b): if ( v1 is None or v2 is None - or isinstance(v1, (tuple, list)) + or isinstance(v1, tuple | list) and any(x is None for x in chain(v1, v2)) ): assert v1 == v2 diff --git a/adaptive/tests/test_notebook_integration.py b/adaptive/tests/test_notebook_integration.py index 3e4ddb29..2fb266f2 100644 --- a/adaptive/tests/test_notebook_integration.py +++ b/adaptive/tests/test_notebook_integration.py @@ -1,7 +1,5 @@ from __future__ import annotations -import os -import sys from typing import TYPE_CHECKING import pytest @@ -16,13 +14,9 @@ except ImportError: with_notebook_dependencies = False -# XXX: remove when is fixed https://github.com/ipython/ipykernel/issues/468 -skip_because_of_bug = os.name == "nt" and sys.version_info[:2] == (3, 8) - @pytest.mark.skipif( - not with_notebook_dependencies or skip_because_of_bug, - reason="notebook dependencies are not installed", + not with_notebook_dependencies, reason="notebook dependencies are not installed" ) def test_private_api_used_in_live_info(): """We are catching all errors in diff --git a/adaptive/types.py b/adaptive/types.py index 367445ee..e2869d46 100644 --- a/adaptive/types.py +++ b/adaptive/types.py @@ -1,17 +1,11 @@ -import sys -from 
typing import Union +from typing import TypeAlias import numpy as np -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -Float: TypeAlias = Union[float, np.float64] -Bool: TypeAlias = Union[bool, np.bool_] -Int: TypeAlias = Union[int, np.int_] -Real: TypeAlias = Union[Float, Int] +Float: TypeAlias = float | np.float64 +Bool: TypeAlias = bool | np.bool_ +Int: TypeAlias = int | np.int_ +Real: TypeAlias = Float | Int __all__ = ["Float", "Bool", "Int", "Real"] diff --git a/adaptive/utils.py b/adaptive/utils.py index ff80f62f..2a1680ca 100644 --- a/adaptive/utils.py +++ b/adaptive/utils.py @@ -7,11 +7,11 @@ import os import pickle import warnings -from collections.abc import Awaitable, Iterator, Sequence +from collections.abc import Awaitable, Callable, Iterator, Sequence from contextlib import contextmanager from functools import wraps from itertools import product -from typing import TYPE_CHECKING, Any, Callable, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar import cloudpickle diff --git a/docs/environment.yml b/docs/environment.yml index 3e99ae62..caa8badc 100644 --- a/docs/environment.yml +++ b/docs/environment.yml @@ -4,7 +4,7 @@ channels: - conda-forge dependencies: - - python=3.10 + - python=3.11 - sortedcollections=2.1.0 - scipy=1.10.1 - holoviews=1.18.3 diff --git a/docs/source/algorithms_and_examples.md b/docs/source/algorithms_and_examples.md index eda3c2ff..0aff9f1f 100644 --- a/docs/source/algorithms_and_examples.md +++ b/docs/source/algorithms_and_examples.md @@ -4,7 +4,7 @@ jupytext: extension: .md format_name: myst format_version: 0.13 - jupytext_version: 1.14.5 + jupytext_version: 1.17.1 kernelspec: display_name: python3 name: python3 diff --git a/docs/source/tutorial/tutorial.LearnerND.md b/docs/source/tutorial/tutorial.LearnerND.md index 46f94870..37705b79 100644 --- a/docs/source/tutorial/tutorial.LearnerND.md +++ b/docs/source/tutorial/tutorial.LearnerND.md @@ -4,7 +4,7 @@ jupytext: extension: .md format_name: myst format_version: 0.13 - jupytext_version: 1.14.5 + jupytext_version: 1.17.1 kernelspec: display_name: python3 name: python3 diff --git a/environment.yml b/environment.yml index a31560f1..1dfb8c86 100644 --- a/environment.yml +++ b/environment.yml @@ -4,7 +4,7 @@ channels: - conda-forge dependencies: - - python=3.9 + - python=3.13 - sortedcontainers - sortedcollections - scipy diff --git a/noxfile.py b/noxfile.py index 6a6114bb..71a2217a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -6,7 +6,7 @@ nox.options.default_venv_backend = "uv" -python = ["3.9", "3.10", "3.11", "3.12", "3.13"] +python = ["3.11", "3.12", "3.13"] num_cpus = os.cpu_count() or 1 xdist = ("-n", "auto") if num_cpus > 2 else () diff --git a/pyproject.toml b/pyproject.toml index 5e59cad3..5c7186e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,15 +8,14 @@ dynamic = ["version"] description = "Parallel active learning of mathematical functions" maintainers = [{ name = "Adaptive authors" }] license = { text = "BSD" } -requires-python = ">=3.9" +requires-python = ">=3.11" classifiers = [ "Development Status :: 4 - Beta", "License :: OSI Approved :: BSD License", "Intended Audience :: Science/Research", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] dependencies = [ "scipy", @@ -24,7 +23,6 @@ dependencies = [ "sortedcontainers >= 2.0", "cloudpickle", "loky >= 
2.9", - "typing_extensions; python_version < '3.10'", "versioningit", ] @@ -100,11 +98,11 @@ output = ".coverage.xml" [tool.mypy] ignore_missing_imports = true -python_version = "3.9" +python_version = "3.11" [tool.ruff] line-length = 88 -target-version = "py39" +target-version = "py311" [tool.ruff.lint] select = ["B", "C", "E", "F", "W", "T", "B9", "I", "UP"] @@ -118,6 +116,7 @@ ignore = [ "PLW0603", # Using the global statement to update `X` is discouraged "D401", # First line of docstring should be in imperative mood "E501", # Line too long + "B905", # `zip()` without an explicit `strict=` parameter ] [tool.ruff.lint.mccabe] From 240fb89f3e6713d2224db281b17ef2704484cd47 Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 13:21:21 +0200 Subject: [PATCH 4/7] Enable runtime type checking in tests with typeguard (#478) Re-enables the `typeguard` job that I disabled 2 years ago in https://github.com/python-adaptive/adaptive/pull/415. --- .github/workflows/typeguard.yml | 7 +-- adaptive/_types.py | 24 +++++++++++ adaptive/learner/balancing_learner.py | 8 ++-- adaptive/learner/learner1D.py | 48 ++++++++++----------- adaptive/learner/sequence_learner.py | 4 +- adaptive/runner.py | 24 +++-------- adaptive/tests/test_average_learner.py | 4 -- adaptive/tests/test_average_learner1d.py | 4 -- adaptive/tests/test_balancing_learner.py | 2 +- adaptive/tests/test_learner1d.py | 4 -- adaptive/tests/test_notebook_integration.py | 4 -- 11 files changed, 62 insertions(+), 71 deletions(-) create mode 100644 adaptive/_types.py diff --git a/.github/workflows/typeguard.yml b/.github/workflows/typeguard.yml index 3689442b..c89a4442 100644 --- a/.github/workflows/typeguard.yml +++ b/.github/workflows/typeguard.yml @@ -1,8 +1,9 @@ name: typeguard -# TODO: enable this once typeguard=4 is released and issues are fixed. 
-# on: -# - push +on: + pull_request: + push: + branches: [main] jobs: typeguard: diff --git a/adaptive/_types.py b/adaptive/_types.py new file mode 100644 index 00000000..3f56bf59 --- /dev/null +++ b/adaptive/_types.py @@ -0,0 +1,24 @@ +# Only used for static type checkers, should only be imported in `if TYPE_CHECKING` block +# Workaround described in https://github.com/agronholm/typeguard/issues/456 + +import concurrent.futures as concurrent +from typing import TypeAlias + +import distributed +import ipyparallel +import loky +import mpi4py.futures + +from adaptive.utils import SequentialExecutor + +ExecutorTypes: TypeAlias = ( + concurrent.ProcessPoolExecutor + | concurrent.ThreadPoolExecutor + | SequentialExecutor + | loky.reusable_executor._ReusablePoolExecutor + | distributed.Client + | distributed.cfexecutor.ClientExecutor + | mpi4py.futures.MPIPoolExecutor + | ipyparallel.Client + | ipyparallel.client.view.ViewExecutor +) diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py index b8a340f9..1b59d2b3 100644 --- a/adaptive/learner/balancing_learner.py +++ b/adaptive/learner/balancing_learner.py @@ -269,17 +269,17 @@ def ask( return self._ask_and_tell(n) def tell(self, x: tuple[Int, Any], y: Any) -> None: - index, x = x + index, x_ = x self._ask_cache.pop(index, None) self._loss.pop(index, None) self._pending_loss.pop(index, None) - self.learners[index].tell(x, y) + self.learners[index].tell(x_, y) def tell_pending(self, x: tuple[Int, Any]) -> None: - index, x = x + index, x_ = x self._ask_cache.pop(index, None) self._loss.pop(index, None) - self.learners[index].tell_pending(x) + self.learners[index].tell_pending(x_) def _losses(self, real: bool = True) -> list[float]: losses = [] diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py index 6cd88b4a..d805f396 100644 --- a/adaptive/learner/learner1D.py +++ b/adaptive/learner/learner1D.py @@ -5,7 +5,7 @@ import math from collections.abc import Callable, Sequence from copy import copy, deepcopy -from typing import TYPE_CHECKING, Any, TypeAlias +from typing import Any, TypeAlias import cloudpickle import numpy as np @@ -31,25 +31,21 @@ except ModuleNotFoundError: with_pandas = False -if TYPE_CHECKING: - # -- types -- - # Commonly used types - Interval: TypeAlias = tuple[float, float] | tuple[float, float, int] - NeighborsType: TypeAlias = SortedDict[float, list[float | None]] +# Commonly used types +Interval: TypeAlias = tuple[float, float] | tuple[float, float, int] +NeighborsType: TypeAlias = SortedDict[float, list[float | None]] - # Types for loss_per_interval functions - XsType0: TypeAlias = tuple[float, float] - YsType0: TypeAlias = tuple[float, float] | tuple[np.ndarray, np.ndarray] - XsType1: TypeAlias = tuple[float | None, float | None, float | None, float | None] - YsType1: TypeAlias = ( - tuple[float | None, float | None, float | None, float | None] - | tuple[ - np.ndarray | None, np.ndarray | None, np.ndarray | None, np.ndarray | None - ] - ) - XsTypeN: TypeAlias = tuple[float | None, ...] - YsTypeN: TypeAlias = tuple[float | None, ...] | tuple[np.ndarray | None, ...] 
+# Types for loss_per_interval functions +XsType0: TypeAlias = tuple[float, float] +YsType0: TypeAlias = tuple[float, float] | tuple[np.ndarray, np.ndarray] +XsType1: TypeAlias = tuple[float | None, float | None, float | None, float | None] +YsType1: TypeAlias = ( + tuple[float | None, float | None, float | None, float | None] + | tuple[np.ndarray | None, np.ndarray | None, np.ndarray | None, np.ndarray | None] +) +XsTypeN: TypeAlias = tuple[float | None, ...] +YsTypeN: TypeAlias = tuple[float | None, ...] | tuple[np.ndarray | None, ...] __all__ = [ @@ -110,18 +106,18 @@ def abs_min_log_loss(xs: XsType0, ys: YsType0) -> Float: @uses_nth_neighbors(1) def triangle_loss(xs: XsType1, ys: YsType1) -> Float: assert len(xs) == 4 - xs = [x for x in xs if x is not None] # type: ignore[assignment] - ys = [y for y in ys if y is not None] # type: ignore[assignment] + x = [x for x in xs if x is not None] + y = [y for y in ys if y is not None] - if len(xs) == 2: # we do not have enough points for a triangle - return xs[1] - xs[0] # type: ignore[operator] + if len(x) == 2: # we do not have enough points for a triangle + return x[1] - x[0] # type: ignore[operator] - N = len(xs) - 2 # number of constructed triangles - if isinstance(ys[0], collections.abc.Iterable): - pts = [(x, *y) for x, y in zip(xs, ys)] # type: ignore[misc] + N = len(x) - 2 # number of constructed triangles + if isinstance(y[0], collections.abc.Iterable): + pts = [(x, *y) for x, y in zip(x, y)] # type: ignore[misc] vol = simplex_volume_in_embedding else: - pts = list(zip(xs, ys)) + pts = list(zip(x, y)) vol = volume return sum(vol(pts[i : i + 3]) for i in range(N)) / N diff --git a/adaptive/learner/sequence_learner.py b/adaptive/learner/sequence_learner.py index e8d83af5..2209ff87 100644 --- a/adaptive/learner/sequence_learner.py +++ b/adaptive/learner/sequence_learner.py @@ -134,13 +134,13 @@ def remove_unfinished(self) -> None: self.pending_points = set() def tell(self, point: PointType, value: Any) -> None: - index, point = point + index, _ = point self.data[index] = value self.pending_points.discard(index) self._to_do_indices.discard(index) def tell_pending(self, point: PointType) -> None: - index, point = point + index, _ = point self.pending_points.add(index) self._to_do_indices.discard(index) diff --git a/adaptive/runner.py b/adaptive/runner.py index 592fca4a..53ed79a1 100644 --- a/adaptive/runner.py +++ b/adaptive/runner.py @@ -24,32 +24,18 @@ from adaptive.notebook_integration import in_ipynb, live_info, live_plot from adaptive.utils import SequentialExecutor +FutureTypes: TypeAlias = concurrent.Future | asyncio.Future + if TYPE_CHECKING: import holoviews + from ._types import ExecutorTypes + with_ipyparallel = find_spec("ipyparallel") is not None with_distributed = find_spec("distributed") is not None with_mpi4py = find_spec("mpi4py") is not None -if TYPE_CHECKING: - import distributed - import ipyparallel - import mpi4py.futures - - ExecutorTypes: TypeAlias = ( - concurrent.ProcessPoolExecutor - | concurrent.ThreadPoolExecutor - | SequentialExecutor - | loky.reusable_executor._ReusablePoolExecutor - | distributed.Client - | distributed.cfexecutor.ClientExecutor - | mpi4py.futures.MPIPoolExecutor - | ipyparallel.Client - | ipyparallel.client.view.ViewExecutor - ) - FutureTypes: TypeAlias = concurrent.Future | asyncio.Future - with suppress(ModuleNotFoundError): import uvloop @@ -906,7 +892,7 @@ def _info_text(runner, separator: str = "\n"): info.append(("# of samples", runner.learner.nsamples)) with suppress(Exception): - 
info.append(("latest loss", f'{runner.learner._cache["loss"]:.3f}')) + info.append(("latest loss", f"{runner.learner._cache['loss']:.3f}")) width = 30 formatted_info = [f"{k}: {v}".ljust(width) for i, (k, v) in enumerate(info)] diff --git a/adaptive/tests/test_average_learner.py b/adaptive/tests/test_average_learner.py index d9493339..d0176858 100644 --- a/adaptive/tests/test_average_learner.py +++ b/adaptive/tests/test_average_learner.py @@ -1,5 +1,4 @@ import random -from typing import TYPE_CHECKING import flaky import numpy as np @@ -7,9 +6,6 @@ from adaptive.learner import AverageLearner from adaptive.runner import simple -if TYPE_CHECKING: - pass - def f_unused(seed): raise NotImplementedError("This function shouldn't be used.") diff --git a/adaptive/tests/test_average_learner1d.py b/adaptive/tests/test_average_learner1d.py index a9be7fce..619358e8 100644 --- a/adaptive/tests/test_average_learner1d.py +++ b/adaptive/tests/test_average_learner1d.py @@ -1,5 +1,4 @@ from itertools import chain -from typing import TYPE_CHECKING import numpy as np @@ -10,9 +9,6 @@ simple_run, ) -if TYPE_CHECKING: - pass - def almost_equal_dicts(a, b): assert a.keys() == b.keys() diff --git a/adaptive/tests/test_balancing_learner.py b/adaptive/tests/test_balancing_learner.py index 905a55e0..c50b2105 100644 --- a/adaptive/tests/test_balancing_learner.py +++ b/adaptive/tests/test_balancing_learner.py @@ -35,7 +35,7 @@ def test_distribute_first_points_over_learners(strategy): learner = BalancingLearner(learners, strategy=strategy) points = learner.ask(initial_points)[0] - learner.tell_many(points, points) + learner.tell_many(points, [x for i, x in points]) points, _ = learner.ask(100) i_learner, xs = zip(*points) diff --git a/adaptive/tests/test_learner1d.py b/adaptive/tests/test_learner1d.py index 1f28d7a1..e83629f3 100644 --- a/adaptive/tests/test_learner1d.py +++ b/adaptive/tests/test_learner1d.py @@ -2,7 +2,6 @@ import random import time -from typing import TYPE_CHECKING import flaky import numpy as np @@ -11,9 +10,6 @@ from adaptive.learner.learner1D import curvature_loss_function from adaptive.runner import BlockingRunner, simple -if TYPE_CHECKING: - pass - def flat_middle(x): x *= 1e7 diff --git a/adaptive/tests/test_notebook_integration.py b/adaptive/tests/test_notebook_integration.py index 2fb266f2..45ee1c8c 100644 --- a/adaptive/tests/test_notebook_integration.py +++ b/adaptive/tests/test_notebook_integration.py @@ -1,11 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING - import pytest -if TYPE_CHECKING: - pass try: import ipykernel.iostream import zmq From 462c531b2ff1299993e0e3c4344160f4a470c4c0 Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 13:41:52 +0200 Subject: [PATCH 5/7] Bump `mypy` and `ruff` in `pre-commit` (#479) --- .pre-commit-config.yaml | 8 ++++---- adaptive/learner/average_learner1D.py | 6 ++---- adaptive/learner/balancing_learner.py | 4 +--- adaptive/learner/data_saver.py | 2 +- adaptive/learner/learner1D.py | 3 ++- adaptive/learner/learner2D.py | 2 +- adaptive/learner/learnerND.py | 3 +-- adaptive/learner/triangulation.py | 3 +-- adaptive/notebook_integration.py | 11 ++++------- adaptive/runner.py | 2 +- example-notebook.ipynb | 14 ++++++-------- 11 files changed, 24 insertions(+), 34 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5a0a4184..2715666e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: 
https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -9,13 +9,13 @@ repos: - id: debug-statements - id: check-ast - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.3.5" + rev: "v0.11.9" hooks: - id: ruff args: ["--fix"] - id: ruff-format - repo: https://github.com/nbQA-dev/nbQA - rev: 1.8.5 + rev: 1.9.1 hooks: - id: nbqa-black additional_dependencies: [jupytext, black] @@ -23,7 +23,7 @@ repos: args: ["ruff", "--fix", "--ignore=E402,B018,F704"] additional_dependencies: [jupytext, ruff] - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.9.0" + rev: "v1.15.0" hooks: - id: mypy exclude: ipynb_filter.py|docs/source/conf.py diff --git a/adaptive/learner/average_learner1D.py b/adaptive/learner/average_learner1D.py index cb844f01..82c76f5d 100644 --- a/adaptive/learner/average_learner1D.py +++ b/adaptive/learner/average_learner1D.py @@ -499,8 +499,7 @@ def tell_many( # type: ignore[override] # but ignore it going forward. if not np.prod([x >= self.bounds[0] and x <= self.bounds[1] for _, x in xs]): raise ValueError( - "x value out of bounds, " - "remove x or enlarge the bounds of the learner" + "x value out of bounds, remove x or enlarge the bounds of the learner" ) # Create a mapping of points to a list of samples @@ -533,8 +532,7 @@ def tell_many_at_point(self, x: Real, seed_y_mapping: dict[int, Real]) -> None: # Check x is within the bounds if not np.prod(x >= self.bounds[0] and x <= self.bounds[1]): raise ValueError( - "x value out of bounds, " - "remove x or enlarge the bounds of the learner" + "x value out of bounds, remove x or enlarge the bounds of the learner" ) # If x is a new point: diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py index 1b59d2b3..43f7dc1f 100644 --- a/adaptive/learner/balancing_learner.py +++ b/adaptive/learner/balancing_learner.py @@ -106,9 +106,7 @@ def __init__( self._cdims_default = cdims if len({learner.__class__ for learner in self.learners}) > 1: - raise TypeError( - "A BalacingLearner can handle only one type" " of learners." 
- ) + raise TypeError("A BalacingLearner can handle only one type of learners.") self.strategy: STRATEGY_TYPE = strategy diff --git a/adaptive/learner/data_saver.py b/adaptive/learner/data_saver.py index 644691a5..2deafe2c 100644 --- a/adaptive/learner/data_saver.py +++ b/adaptive/learner/data_saver.py @@ -162,7 +162,7 @@ def _set_data( self.learner._set_data(learner_data) def __getstate__(self) -> tuple[LearnerType, Callable, OrderedDict]: - return ( + return ( # type: ignore[return-value] self.learner, self.arg_picker, self.extra_data, diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py index d805f396..68583422 100644 --- a/adaptive/learner/learner1D.py +++ b/adaptive/learner/learner1D.py @@ -761,8 +761,9 @@ def _ask_points_without_adding(self, n: int) -> tuple[list[float], list[float]]: ival is not None and self._loss(self.losses_combined, ival) >= self._loss(quals, qual) ): + assert ival is not None i += 1 - quals[(*ival, 2)] = loss_ival / 2 + quals[(ival[0], ival[1], 2)] = loss_ival / 2 else: quals.pop(qual, None) *xs, n = qual diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index 49b9cef5..a4b2a812 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -448,7 +448,7 @@ def __init__( self.aspect_ratio = 1 self._bounds_points = list(itertools.product(*bounds)) - self._stack.update({p: np.inf for p in self._bounds_points}) + self._stack.update(dict.fromkeys(self._bounds_points, np.inf)) self.function = function # type: ignore self._ip = self._ip_combined = None diff --git a/adaptive/learner/learnerND.py b/adaptive/learner/learnerND.py index c4af7ddc..d0300143 100644 --- a/adaptive/learner/learnerND.py +++ b/adaptive/learner/learnerND.py @@ -1098,8 +1098,7 @@ def _get_iso(self, level=0.0, which="surface"): if which == "surface": if self.ndim != 3 or self.vdim != 1: raise Exception( - "Isosurface plotting is only supported" - " for a 3D input and 1D output" + "Isosurface plotting is only supported for a 3D input and 1D output" ) get_surface = True get_line = False diff --git a/adaptive/learner/triangulation.py b/adaptive/learner/triangulation.py index 03455e3b..26a5ebc2 100644 --- a/adaptive/learner/triangulation.py +++ b/adaptive/learner/triangulation.py @@ -336,8 +336,7 @@ def __init__(self, coords): vectors = subtract(coords[1:], coords[0]) if matrix_rank(vectors) < dim: raise ValueError( - "Initial simplex has zero volumes " - "(the points are linearly dependent)" + "Initial simplex has zero volumes (the points are linearly dependent)" ) self.vertices = list(coords) diff --git a/adaptive/notebook_integration.py b/adaptive/notebook_integration.py index 782bc855..5eb5c6d2 100644 --- a/adaptive/notebook_integration.py +++ b/adaptive/notebook_integration.py @@ -16,8 +16,7 @@ def notebook_extension(*, _inline_js=True): """Enable ipywidgets, holoviews, and asyncio notebook integration.""" if not in_ipynb(): raise RuntimeError( - '"adaptive.notebook_extension()" may only be run ' - "from a Jupyter notebook." + '"adaptive.notebook_extension()" may only be run from a Jupyter notebook.' ) global _holoviews_enabled, _ipywidgets_enabled @@ -116,8 +115,7 @@ def live_plot(runner, *, plotter=None, update_interval=2, name=None, normalize=T """ if not _holoviews_enabled: raise RuntimeError( - "Live plotting is not enabled; did you run " - "'adaptive.notebook_extension()'?" + "Live plotting is not enabled; did you run 'adaptive.notebook_extension()'?" 
) import holoviews as hv @@ -202,8 +200,7 @@ def live_info(runner, *, update_interval=0.5): """ if not _holoviews_enabled: raise RuntimeError( - "Live plotting is not enabled; did you run " - "'adaptive.notebook_extension()'?" + "Live plotting is not enabled; did you run 'adaptive.notebook_extension()'?" ) import ipywidgets @@ -268,7 +265,7 @@ def _info_html(runner): info.append(("# of samples", runner.learner.nsamples)) with suppress(Exception): - info.append(("latest loss", f'{runner.learner._cache["loss"]:.3f}')) + info.append(("latest loss", f"{runner.learner._cache['loss']:.3f}")) table = "\n".join(_table_row(i, k, v) for i, (k, v) in enumerate(info)) diff --git a/adaptive/runner.py b/adaptive/runner.py index 53ed79a1..b9812505 100644 --- a/adaptive/runner.py +++ b/adaptive/runner.py @@ -1157,7 +1157,7 @@ def auto_goal( if isinstance(learner, DataSaver): assert learner is not None return auto_goal( - learner=learner.learner, + learner=learner.learner, # type: ignore[arg-type] loss=loss, npoints=npoints, end_time=end_time, diff --git a/example-notebook.ipynb b/example-notebook.ipynb index 4f49415f..429239c6 100644 --- a/example-notebook.ipynb +++ b/example-notebook.ipynb @@ -23,16 +23,15 @@ "metadata": {}, "outputs": [], "source": [ - "import adaptive\n", - "\n", - "adaptive.notebook_extension()\n", - "\n", "import random\n", "from functools import partial\n", "\n", - "# Import modules that are used in multiple cells\n", "import holoviews as hv\n", - "import numpy as np" + "import numpy as np\n", + "\n", + "import adaptive\n", + "\n", + "adaptive.notebook_extension()" ] }, { @@ -489,8 +488,7 @@ " print(\"WARINING: The runner hasn't reached it goal yet!\")\n", "\n", "print(\n", - " f\"The integral value is {learner.igral} \"\n", - " f\"with a corresponding error of {learner.err}\"\n", + " f\"The integral value is {learner.igral} with a corresponding error of {learner.err}\"\n", ")\n", "learner.plot()" ] From 1eae4ae7eb0f411a989d35644bcc4dad112a050f Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 13:49:58 +0200 Subject: [PATCH 6/7] Update changelog to v1.4.0 (#480) --- CHANGELOG.md | 59 +++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 47 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e497b099..339c9db0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,11 +1,46 @@ # 🗞️ Changelog -## [v1.3.0](https://github.com/python-adaptive/adaptive/tree/v1.3.0) (2024-04-10) +## [v1.4.0](https://github.com/python-adaptive/adaptive/tree/v1.3.0) (2025-05-13) + +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v1.3.2...v.1.4.0) + +**Merged pull requests:** + +- Bump `mypy` and `ruff` in `pre-commit` [\#479](https://github.com/python-adaptive/adaptive/pull/479) ([basnijholt](https://github.com/basnijholt)) +- Enable runtime type checking in tests with typeguard [\#478](https://github.com/python-adaptive/adaptive/pull/478) ([basnijholt](https://github.com/basnijholt)) +- Follow SPEC 0 and drop support for Python 3.9 and 3.10 [\#477](https://github.com/python-adaptive/adaptive/pull/477) ([basnijholt](https://github.com/basnijholt)) +- Use `uv` as Nox backend and several related improvements [\#476](https://github.com/python-adaptive/adaptive/pull/476) ([basnijholt](https://github.com/basnijholt)) +- Prevent SciPy deprecation warning for `estimate_gradients_2d_global` [\#475](https://github.com/python-adaptive/adaptive/pull/475) ([basnijholt](https://github.com/basnijholt)) + +## 
[v1.3.2](https://github.com/python-adaptive/adaptive/tree/v1.3.2) (2025-03-03) + +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v1.3.1...v1.3.2) + +**Closed issues:** + +- Runner slows down visual studio code on linux [\#471](https://github.com/python-adaptive/adaptive/issues/471) + +**Merged pull requests:** + +- Fix readthedocs.yml [\#474](https://github.com/python-adaptive/adaptive/pull/474) ([basnijholt](https://github.com/basnijholt)) +- Remove async activation magic in notebook\_integration.py [\#473](https://github.com/python-adaptive/adaptive/pull/473) ([basnijholt](https://github.com/basnijholt)) + +## [v1.3.1](https://github.com/python-adaptive/adaptive/tree/v1.3.1) (2025-01-07) + +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v1.3.0...v1.3.1) + +**Merged pull requests:** + +- Fix scipy deprecation warning for LinearNDInterpolator [\#465](https://github.com/python-adaptive/adaptive/pull/465) ([eendebakpt](https://github.com/eendebakpt)) +- Remove Azure Pipelines badge in README.md [\#462](https://github.com/python-adaptive/adaptive/pull/462) ([basnijholt](https://github.com/basnijholt)) + +## [v1.3.0](https://github.com/python-adaptive/adaptive/tree/v1.3.0) (2024-05-31) [Full Changelog](https://github.com/python-adaptive/adaptive/compare/v1.2.0...v1.3.0) **Merged pull requests:** +- Release v1.3.0 [\#459](https://github.com/python-adaptive/adaptive/pull/459) ([basnijholt](https://github.com/basnijholt)) - Replace deprecated numpy aliases [\#458](https://github.com/python-adaptive/adaptive/pull/458) ([eendebakpt](https://github.com/eendebakpt)) - Remove `SKOptLearner` because `scikit-optimize` is unmaintained [\#404](https://github.com/python-adaptive/adaptive/pull/404) ([basnijholt](https://github.com/basnijholt)) @@ -27,8 +62,8 @@ - \[pre-commit.ci\] pre-commit autoupdate [\#447](https://github.com/python-adaptive/adaptive/pull/447) ([pre-commit-ci[bot]](https://github.com/apps/pre-commit-ci)) - Use ruff-format instead of black [\#446](https://github.com/python-adaptive/adaptive/pull/446) ([basnijholt](https://github.com/basnijholt)) - Bump versions to compatible packages in `docs/environment.yml` [\#445](https://github.com/python-adaptive/adaptive/pull/445) ([basnijholt](https://github.com/basnijholt)) -- Add `AsyncRunner.block\_until\_done` [\#444](https://github.com/python-adaptive/adaptive/pull/444) ([basnijholt](https://github.com/basnijholt)) -- Add `live\_info\_terminal`, closes \#436 [\#441](https://github.com/python-adaptive/adaptive/pull/441) ([basnijholt](https://github.com/basnijholt)) +- Add `AsyncRunner.block_until_done` [\#444](https://github.com/python-adaptive/adaptive/pull/444) ([basnijholt](https://github.com/basnijholt)) +- Add `live_info_terminal`, closes \#436 [\#441](https://github.com/python-adaptive/adaptive/pull/441) ([basnijholt](https://github.com/basnijholt)) - \[pre-commit.ci\] pre-commit autoupdate [\#434](https://github.com/python-adaptive/adaptive/pull/434) ([pre-commit-ci[bot]](https://github.com/apps/pre-commit-ci)) - Add benchmarks page for Learner1D and Learner2D functions [\#405](https://github.com/python-adaptive/adaptive/pull/405) ([basnijholt](https://github.com/basnijholt)) @@ -51,7 +86,7 @@ ## [v1.0.0](https://github.com/python-adaptive/adaptive/tree/v1.0.0) (2023-05-15) -[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.15.1...v1.0.0) +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.15.0...v1.0.0) **Closed issues:** @@ -90,13 +125,13 @@ - Add 
nbQA for notebook and docs linting [\#361](https://github.com/python-adaptive/adaptive/pull/361) ([basnijholt](https://github.com/basnijholt)) - Fix HoloViews opts deprecation warnings [\#357](https://github.com/python-adaptive/adaptive/pull/357) ([basnijholt](https://github.com/basnijholt)) -## [v0.15.1](https://github.com/python-adaptive/adaptive/tree/v0.15.1) (2022-12-02) +## [v0.15.0](https://github.com/python-adaptive/adaptive/tree/v0.15.0) (2022-12-02) -[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.15.0...v0.15.1) +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.15.1...v0.15.0) -## [v0.15.0](https://github.com/python-adaptive/adaptive/tree/v0.15.0) (2022-12-02) +## [v0.15.1](https://github.com/python-adaptive/adaptive/tree/v0.15.1) (2022-12-02) -[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.14.2...v0.15.0) +[Full Changelog](https://github.com/python-adaptive/adaptive/compare/v0.14.2...v0.15.1) **Closed issues:** @@ -290,7 +325,7 @@ - bump pre-commit filter dependencies [\#293](https://github.com/python-adaptive/adaptive/pull/293) ([basnijholt](https://github.com/basnijholt)) - fix docs [\#291](https://github.com/python-adaptive/adaptive/pull/291) ([basnijholt](https://github.com/basnijholt)) - update to miniver 0.7.0 [\#290](https://github.com/python-adaptive/adaptive/pull/290) ([basnijholt](https://github.com/basnijholt)) -- add `runner.live\_plot\(\)` in README example [\#288](https://github.com/python-adaptive/adaptive/pull/288) ([basnijholt](https://github.com/basnijholt)) +- add `runner.live_plot()` in README example [\#288](https://github.com/python-adaptive/adaptive/pull/288) ([basnijholt](https://github.com/basnijholt)) - Update pre commit [\#287](https://github.com/python-adaptive/adaptive/pull/287) ([basnijholt](https://github.com/basnijholt)) - Use m2r2 [\#286](https://github.com/python-adaptive/adaptive/pull/286) ([basnijholt](https://github.com/basnijholt)) - temporarily pin scikit-learn\<=0.23.1 [\#285](https://github.com/python-adaptive/adaptive/pull/285) ([basnijholt](https://github.com/basnijholt)) @@ -382,6 +417,7 @@ - add \_RequireAttrsABCMeta and make the BaseLearner use it [\#222](https://github.com/python-adaptive/adaptive/pull/222) ([basnijholt](https://github.com/basnijholt)) - 2D: add triangle\_loss [\#221](https://github.com/python-adaptive/adaptive/pull/221) ([basnijholt](https://github.com/basnijholt)) - 2D: add interpolated\_on\_grid method [\#216](https://github.com/python-adaptive/adaptive/pull/216) ([basnijholt](https://github.com/basnijholt)) +- add scatter\_or\_line argument to Learner1D.plot [\#215](https://github.com/python-adaptive/adaptive/pull/215) ([basnijholt](https://github.com/basnijholt)) - WIP: raise an error when using a lambda and default executor [\#210](https://github.com/python-adaptive/adaptive/pull/210) ([basnijholt](https://github.com/basnijholt)) ## [v0.10.0-dev](https://github.com/python-adaptive/adaptive/tree/v0.10.0-dev) (2019-10-07) @@ -408,7 +444,6 @@ **Merged pull requests:** - pass value\_scale to the LearnerND's loss\_per\_simplex function [\#219](https://github.com/python-adaptive/adaptive/pull/219) ([basnijholt](https://github.com/basnijholt)) -- add scatter\_or\_line argument to Learner1D.plot [\#215](https://github.com/python-adaptive/adaptive/pull/215) ([basnijholt](https://github.com/basnijholt)) - remove MPI4PY\_MAX\_WORKERS where it's not used [\#209](https://github.com/python-adaptive/adaptive/pull/209) 
([basnijholt](https://github.com/basnijholt)) - use jupyter\_sphinx v0.2.0 from conda instead of my branch [\#204](https://github.com/python-adaptive/adaptive/pull/204) ([basnijholt](https://github.com/basnijholt)) - Authors [\#202](https://github.com/python-adaptive/adaptive/pull/202) ([basnijholt](https://github.com/basnijholt)) @@ -524,7 +559,7 @@ - Gracefully handle exceptions when evaluating the function to be learned [\#125](https://github.com/python-adaptive/adaptive/issues/125) - Allow BalancingLearner to return arbitrary number of points from 'choose\_points' [\#124](https://github.com/python-adaptive/adaptive/issues/124) - Increase the default refresh rate for 'live\_plot' [\#120](https://github.com/python-adaptive/adaptive/issues/120) -- remove default number of points to choose in `choose\_points` [\#118](https://github.com/python-adaptive/adaptive/issues/118) +- remove default number of points to choose in `choose_points` [\#118](https://github.com/python-adaptive/adaptive/issues/118) - Consider using Gaussian process optimization as a learner [\#115](https://github.com/python-adaptive/adaptive/issues/115) - Make `distributed.Client` work with automatic scaling of the cluster [\#104](https://github.com/python-adaptive/adaptive/issues/104) - Improve plotting for learners [\#83](https://github.com/python-adaptive/adaptive/issues/83) @@ -611,7 +646,7 @@ - Remove public 'fname' learner attribute [\#17](https://github.com/python-adaptive/adaptive/issues/17) - Release v0.7.0 [\#14](https://github.com/python-adaptive/adaptive/issues/14) - \(Learner1D\) improve time complexity [\#13](https://github.com/python-adaptive/adaptive/issues/13) -- Typo in documentation for` adaptive.learner.learner2D.uniform\_loss\(ip\)` [\#12](https://github.com/python-adaptive/adaptive/issues/12) +- Typo in documentation for` adaptive.learner.learner2D.uniform_loss(ip)` [\#12](https://github.com/python-adaptive/adaptive/issues/12) - \(LearnerND\) fix plotting of scaled domains [\#11](https://github.com/python-adaptive/adaptive/issues/11) - suggested points lie outside of domain [\#7](https://github.com/python-adaptive/adaptive/issues/7) - DEVELOPMENT IS ON GITLAB: https://gitlab.kwant-project.org/qt/adaptive [\#5](https://github.com/python-adaptive/adaptive/issues/5) From d0aab31298ffeb1c4825bad40b9ac3294ea4aa80 Mon Sep 17 00:00:00 2001 From: Bas Nijholt <bas@nijho.lt> Date: Tue, 13 May 2025 14:16:32 +0200 Subject: [PATCH 7/7] Fix several deprecation warnings (#481) --- adaptive/learner/learner2D.py | 6 +++++- adaptive/learner/learnerND.py | 7 ++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py index a4b2a812..cb179a22 100644 --- a/adaptive/learner/learner2D.py +++ b/adaptive/learner/learner2D.py @@ -287,6 +287,10 @@ def custom_loss(ip: LinearNDInterpolator) -> np.ndarray: return custom_loss +def _cross_2d(x, y): + return x[..., 0] * y[..., 1] - x[..., 1] * y[..., 0] + + def choose_point_in_triangle(triangle: np.ndarray, max_badness: int) -> np.ndarray: """Choose a new point in inside a triangle. @@ -310,7 +314,7 @@ def choose_point_in_triangle(triangle: np.ndarray, max_badness: int) -> np.ndarr The x and y coordinate of the suggested new point. 
""" a, b, c = triangle - area = 0.5 * np.cross(b - a, c - a) + area = 0.5 * _cross_2d(b - a, c - a) triangle_roll = np.roll(triangle, 1, axis=0) edge_lengths = np.linalg.norm(triangle - triangle_roll, axis=1) i = edge_lengths.argmax() diff --git a/adaptive/learner/learnerND.py b/adaptive/learner/learnerND.py index d0300143..33bbbbb0 100644 --- a/adaptive/learner/learnerND.py +++ b/adaptive/learner/learnerND.py @@ -726,9 +726,10 @@ def _compute_loss(self, simplex): if self.nth_neighbors == 0: # compute the loss on the scaled simplex - return float( - self.loss_per_simplex(vertices, values, self._output_multiplier) - ) + loss = self.loss_per_simplex(vertices, values, self._output_multiplier) + if isinstance(loss, np.ndarray): + return float(loss.item()) + return float(loss) # We do need the neighbors neighbors = self.tri.get_opposing_vertices(simplex)