Merge pull request #3144 from ericpre/drop_python3.7

Drop python 3.7

CSSFrancis committed May 6, 2023
2 parents e9461e1 + 7a5b1f1 commit bfafb1c
Showing 25 changed files with 81 additions and 195 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/release.yml
@@ -31,7 +31,7 @@ jobs:
fail-fast: false
matrix:
os: [windows-latest, macos-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
python-version: ['3.8', '3.9', '3.10', '3.11']

steps:
- uses: actions/checkout@v3
@@ -102,7 +102,7 @@ jobs:
- name: Build manylinux Python wheels
uses: RalfG/python-wheels-manylinux-build@ff8504699f7a33a08d3ff85b3c6d4e8f0e70462b
with:
python-versions: 'cp37-cp37m cp38-cp38 cp39-cp39 cp310-cp310 cp311-cp311'
python-versions: 'cp38-cp38 cp39-cp39 cp310-cp310 cp311-cp311'
build-requirements: 'cython'

- name: Display content dist folder
25 changes: 6 additions & 19 deletions .github/workflows/tests.yml
@@ -15,37 +15,31 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu, windows, macos]
PYTHON_VERSION: ['3.8', '3.9']
PYTHON_VERSION: ['3.9', '3.10']
PIP_SELECTOR: ['[all, tests, coverage]']
include:
# test oldest supported version of main dependencies on python 3.7
# test oldest supported version of main dependencies on python 3.8
- os: ubuntu
PYTHON_VERSION: '3.7'
PYTHON_VERSION: '3.8'
PIP_SELECTOR: '[all, tests, coverage]'
OLDEST_SUPPORTED_VERSION: true
DEPENDENCIES: matplotlib==3.1.3 numpy==1.19.0 scipy==1.4 imagecodecs==2020.1.31 tifffile==2020.2.16 dask==2.11.0 distributed==2.11.0 scikit-image==0.15 numba==0.52 scikit-learn==1.0.1
DEPENDENCIES: dask[array]==2021.3.1 matplotlib==3.1.3 numba==0.52 numpy==1.20.0 scipy==1.5 scikit-image==0.18 scikit-learn==1.0.1
LABEL: -oldest
# test minimum requirement
- os: ubuntu
PYTHON_VERSION: '3.8'
PIP_SELECTOR: '[tests, coverage]'
LABEL: -minimum
# Run coverage
- os: ubuntu
PYTHON_VERSION: '3.7'
PIP_SELECTOR: '[all, tests, coverage]'
- os: ubuntu
PYTHON_VERSION: '3.8'
PIP_SELECTOR: '[all, tests, coverage]'
- os: ubuntu
PYTHON_VERSION: '3.10'
PIP_SELECTOR: '[all, tests, coverage]'
- os: ubuntu
PYTHON_VERSION: '3.11'
PIP_SELECTOR: '[all, tests, coverage]'

steps:
- uses: actions/checkout@v3.3.0
- uses: actions/checkout@v3

- uses: actions/setup-python@v4
name: Install Python
@@ -57,17 +51,10 @@ jobs:
python --version
pip --version
- name: Install numba rc
if: contains(matrix.PYTHON_VERSION, '3.11')
# Required for python 3.11 support; remove when numba 0.57 is released
shell: bash
run: |
pip install numba --pre
- name: Install oldest supported version
if: ${{ matrix.OLDEST_SUPPORTED_VERSION }}
run: |
pip install ${{ matrix.DEPENDENCIES }}
pip install ${{ matrix.DEPENDENCIES }} -v
- name: Install
shell: bash
26 changes: 12 additions & 14 deletions azure-pipelines.yml
@@ -25,31 +25,29 @@ resources:

strategy:
matrix:
# Needs to update matplotlib reference image to add python3.9 build
# matplotlib 3.1 doesn't have a python 3.9 build
Linux_Python38:
Linux_Python39:
vmImage: 'ubuntu-latest'
PYTHON_VERSION: '3.8'
PYTHON_VERSION: '3.9'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)/mambaforge
Linux_Python37:
Linux_Python310:
vmImage: 'ubuntu-latest'
PYTHON_VERSION: '3.7'
PYTHON_VERSION: '3.10'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)/mambaforge
MacOS_Python38:
MacOS_Python39:
vmImage: 'macOS-latest'
PYTHON_VERSION: '3.8'
PYTHON_VERSION: '3.9'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)/mambaforge
MacOS_Python37:
MacOS_Python310:
vmImage: 'macOS-latest'
PYTHON_VERSION: '3.7'
PYTHON_VERSION: '3.10'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)/mambaforge
Windows_Python38:
Windows_Python39:
vmImage: 'windows-latest'
PYTHON_VERSION: '3.8'
PYTHON_VERSION: '3.9'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)\mambaforge
Windows_Python37:
Windows_Python310:
vmImage: 'windows-latest'
PYTHON_VERSION: '3.7'
PYTHON_VERSION: '3.10'
MAMBAFORGE_PATH: $(Agent.BuildDirectory)\mambaforge

pool:
26 changes: 12 additions & 14 deletions conda_environment.yml
@@ -2,28 +2,26 @@ name: test_env
channels:
- conda-forge
dependencies:
- dask-core >=2.11
- dask-core >=2021.3.1
- dill
# dask-core < 2021.3.1 doesn't have fsspec as a dependency
- fsspec
- importlib-metadata
- ipyparallel
- ipython !=8.0.*
- jinja2
- matplotlib-base
- matplotlib-base >=3.1.3
- natsort
- numba
- numba >=0.52
- numexpr
- numpy
- numpy >=1.20
- packaging
- pint
- pint >=0.10
- prettytable
- python-dateutil
- python-dateutil >=2.5.0
- pyyaml
- requests
- scikit-image
- scikit-learn
- scipy
- sympy
- tqdm
- traits
- scikit-image >=0.18
- scikit-learn >=1.0.1
- scipy >=1.5.0
- sympy >=1.6.0
- tqdm >=4.9.0
- traits >=4.5.0
5 changes: 0 additions & 5 deletions hyperspy/_components/error_function.py
@@ -17,8 +17,6 @@
# along with HyperSpy. If not, see <https://www.gnu.org/licenses/#GPL>.

from hyperspy._components.expression import Expression
from packaging.version import Version
import sympy


class Erf(Expression):
@@ -54,9 +52,6 @@ class Erf(Expression):

def __init__(self, A=1., sigma=1., origin=0., module=["numpy", "scipy"],
**kwargs):
if Version(sympy.__version__) < Version("1.3"):
raise ImportError("The `ErrorFunction` component requires "
"SymPy >= 1.3")
super().__init__(
expression="A * erf((x - origin) / sqrt(2) / sigma) / 2",
name="Erf",
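The version guard removed above became dead code once the minimum sympy was raised (conda_environment.yml in this commit pins sympy >= 1.6, well past 1.3). A minimal sketch, assuming only sympy and scipy from the new pins, of the expression machinery the component relies on:

```python
# Hedged sketch, not HyperSpy code: sympy >= 1.6 (the new minimum)
# provides erf and lambdifies it against numpy/scipy, which is all the
# Erf component's expression needs.
import sympy

x = sympy.symbols("x")
expr = sympy.erf((x - 0) / sympy.sqrt(2) / 1) / 2  # A=1, sigma=1, origin=0
f = sympy.lambdify(x, expr, modules=["numpy", "scipy"])
print(f(1.0))  # ~0.3413
```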
7 changes: 2 additions & 5 deletions hyperspy/_components/power_law.py
@@ -22,8 +22,6 @@

from hyperspy._components.expression import Expression

from hyperspy.misc.utils import get_numpy_kwargs

_logger = logging.getLogger(__name__)


@@ -138,9 +136,8 @@ def estimate_parameters(self, signal, x1, x2, only_current=False,
else:
from hyperspy.signal import BaseSignal
shape = s.data.shape[:-1]
kw = get_numpy_kwargs(s.data)
I1_s = BaseSignal(np.empty(shape, dtype='float', **kw))
I2_s = BaseSignal(np.empty(shape, dtype='float', **kw))
I1_s = BaseSignal(np.empty(shape, dtype='float', like=s.data))
I2_s = BaseSignal(np.empty(shape, dtype='float', like=s.data))
# Use the `out` parameters to avoid doing the deepcopy
s.isig[i1:i3].integrate1D(2j, out=I1_s)
s.isig[i3:i2].integrate1D(2j, out=I2_s)
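The `get_numpy_kwargs` helper (deleted later in this commit, see hyperspy/misc/utils.py) is replaced by passing `like=` directly, which is safe now that numpy >= 1.20 is the floor. A sketch of the NEP-35 dispatch this relies on, assuming a plain numpy backend (a cupy reference would yield a cupy array):

```python
# Sketch of NEP-35 array creation (numpy >= 1.20): `like=` hands the
# allocation to the reference array's library, so the new array lives on
# the same backend (numpy, cupy, ...) as the signal data.
import numpy as np

reference = np.zeros((3, 3))                        # stand-in for s.data
out = np.empty((4,), dtype="float", like=reference)
print(type(out))  # numpy.ndarray here; cupy.ndarray for a cupy reference
```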
5 changes: 0 additions & 5 deletions hyperspy/_components/skew_normal.py
@@ -18,8 +18,6 @@

import dask.array as da
import numpy as np
from packaging.version import Version
import sympy

from hyperspy.component import _get_scaling_factor
from hyperspy._components.expression import Expression
@@ -142,9 +140,6 @@ class SkewNormal(Expression):

def __init__(self, x0=0., A=1., scale=1., shape=0.,
module=['numpy', 'scipy'], **kwargs):
if Version(sympy.__version__) < Version("1.3"):
raise ImportError("The `SkewNormal` component requires "
"SymPy >= 1.3")
# We use `_shape` internally because `shape` is already taken in sympy
# https://github.com/sympy/sympy/pull/20791
super().__init__(
5 changes: 0 additions & 5 deletions hyperspy/_components/voigt.py
@@ -17,8 +17,6 @@
# along with HyperSpy. If not, see <https://www.gnu.org/licenses/#GPL>.

import math
from packaging.version import Version
import sympy

from hyperspy.component import _get_scaling_factor
from hyperspy._components.expression import Expression
@@ -91,9 +89,6 @@ def __init__(self, centre=10., area=1., gamma=0.2, sigma=0.1,
# To avoid breaking scripts once the legacy Voigt is removed
if "legacy" in kwargs:
del kwargs["legacy"]
if Version(sympy.__version__) < Version("1.3"):
raise ImportError("The `Voigt` component requires "
"SymPy >= 1.3")
# We use `gamma_` internally to workaround the use of the `gamma`
# function in sympy
super().__init__(
13 changes: 5 additions & 8 deletions hyperspy/_signals/lazy.py
@@ -161,7 +161,7 @@ class LazySignal(BaseSignal):

"""Lazy general signal class."""

_lazy = True
_lazy = True
__doc__ += LAZYSIGNAL_DOC.replace("__BASECLASS__", "BaseSignal")

def __init__(self, *args, **kwargs):
@@ -176,7 +176,7 @@ def __init__(self, *args, **kwargs):
self._cache_dask_chunk_slice = None
if not self._clear_cache_dask_data in self.events.data_changed.connected:
self.events.data_changed.connect(self._clear_cache_dask_data)

__init__.__doc__ = BaseSignal.__init__.__doc__.replace(
":py:class:`numpy.ndarray`", ":py:class:`dask.array.Array`"
)
@@ -1253,12 +1253,9 @@ def compute_navigator(self, index=None, chunks_number=None,
# Needs to reverse the chunks list to match dask chunking order
signal_chunks = list(signal_chunks)[::-1]
navigation_chunks = ['auto'] * len(self.axes_manager.navigation_shape)
if Version(dask.__version__ ) >= Version("2.30.0"):
kwargs = {'balance':True}
else:
kwargs = {}
chunks = self.data.rechunk([*navigation_chunks, *signal_chunks],
**kwargs).chunks
chunks = self.data.rechunk(
[*navigation_chunks, *signal_chunks], balance=True,
).chunks

# Get the slice of the corresponding chunk
signal_size = len(signal_shape)
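With dask pinned at >= 2021.3.1, the `balance` keyword (added to `rechunk` in dask 2.30.0) is always present, so the version branch collapses into a single call. A hedged sketch of the pattern with made-up shapes:

```python
# Sketch, assuming dask >= 2021.3.1: 'auto' chunking over navigation axes,
# single whole-axis chunks over signal axes; balance=True evens out chunk
# sizes along the automatically chunked dimensions.
import dask.array as da

data = da.random.random((64, 256, 256), chunks=(8, 64, 64))
navigation_chunks = ["auto"]   # one navigation axis
signal_chunks = [-1, -1]       # keep each signal axis in a single chunk
chunks = data.rechunk([*navigation_chunks, *signal_chunks], balance=True).chunks
print(chunks)
```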
4 changes: 1 addition & 3 deletions hyperspy/learn/mva.py
@@ -39,7 +39,6 @@
stack,
is_hyperspy_signal,
is_cupy_array,
get_numpy_kwargs,
)
from hyperspy.external.progressbar import progressbar

@@ -2595,8 +2594,7 @@ def estimate_elbow_position(self, explained_variance_ratio=None,log=True,
y1 = curve_values_adj[0]
y2 = curve_values_adj[max_points]

kw = get_numpy_kwargs(self.data)
xs = np.arange(max_points, **kw)
xs = np.arange(max_points, like=self.data)
if log:
ys = np.log(curve_values_adj[:max_points])
else:
3 changes: 0 additions & 3 deletions hyperspy/learn/svd_pca.py
@@ -150,9 +150,6 @@ def svd_solve(
)
U, S, V = randomized_svd(data, n_components=output_dimension, **kwargs)
elif svd_solver == "arpack":
if Version(scipy.__version__) < Version("1.4.0"): # pragma: no cover
raise ValueError('`svd_solver="arpack"` requires scipy >= 1.4.0')

if output_dimension >= min(m, n):
raise ValueError(
"svd_solver='arpack' requires output_dimension "
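The surviving `output_dimension >= min(m, n)` check mirrors ARPACK's own constraint: scipy's `svds` requires k strictly smaller than the smaller matrix dimension. A small illustrative sketch (not HyperSpy's exact call), assuming scipy >= 1.5 per the new pins:

```python
# svds (ARPACK) needs k < min(m, n) and returns singular values in
# ascending order, so results are typically flipped afterwards.
import numpy as np
from scipy.sparse.linalg import svds

data = np.random.default_rng(0).random((50, 40))
U, S, Vt = svds(data, k=5)                    # k must be < min(50, 40)
U, S, Vt = U[:, ::-1], S[::-1], Vt[::-1, :]   # descending order
print(S)
```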
21 changes: 1 addition & 20 deletions hyperspy/misc/utils.py
@@ -1362,8 +1362,7 @@ def process_function_blockwise(data,
chunk_nav_shape = tuple([data.shape[i] for i in sorted(nav_indexes)])
output_shape = chunk_nav_shape + tuple(output_signal_size)
# Pre-allocating the output array
kw = get_numpy_kwargs(data)
output_array = np.empty(output_shape, dtype=dtype, **kw)
output_array = np.empty(output_shape, dtype=dtype, like=data)
if len(args) == 0:
# There aren't any BaseSignals for iterating
for nav_index in np.ndindex(chunk_nav_shape):
@@ -1647,21 +1646,3 @@ def get_array_module(array):
pass

return module


def get_numpy_kwargs(array):
"""
Convenience function to return a dictionary containing the `like` keyword
if numpy >= 1.20.
Note
----
The `like` keyword is an experimental feature introduced in numpy 1.20 and is
pending acceptance of NEP 35.
"""
kw = {}
if Version(np.__version__) >= Version("1.20"):
kw['like'] = array

return kw
12 changes: 0 additions & 12 deletions hyperspy/model.py
@@ -23,15 +23,12 @@
import tempfile
import warnings
from contextlib import contextmanager
from packaging.version import Version
from functools import partial

import dill
import numpy as np
import dask
import dask.array as da
from dask.diagnostics import ProgressBar
import scipy
import scipy.odr as odr
from IPython.display import display, display_pretty
from scipy.linalg import svd
@@ -1079,12 +1076,6 @@ def _linear_fit(self, optimizer="lstsq", calculate_errors=False,
**kw)
coefficient_array = result.T

if self.signal._lazy and not only_current and (
Version(dask.__version__) < Version("2020.12.0")):
# Dask pre 2020.12 didn't support residuals on 2D input,
# we calculate them later.
residual = None # pragma: no cover

elif optimizer == "ridge_regression":
if self.signal._lazy:
raise ValueError(
@@ -1305,9 +1296,6 @@ def fit(
}

if optimizer in ["Dual Annealing", "SHGO"]:
if Version(scipy.__version__) < Version("1.2.0"):
raise ValueError(f"`optimizer='{optimizer}'` requires scipy >= 1.2.0")

from scipy.optimize import dual_annealing, shgo

_supported_global.update({"Dual Annealing": dual_annealing, "SHGO": shgo})
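Both optimizers imported here have shipped with scipy since 1.2, so the removed guard was unreachable once scipy >= 1.5 became the minimum. A self-contained sketch of the two calls on a toy objective (illustration only, not HyperSpy's fit wiring):

```python
# Minimize a 2-D Rosenbrock function with the two global optimizers that
# back the "Dual Annealing" and "SHGO" options.
import numpy as np
from scipy.optimize import dual_annealing, shgo

def rosenbrock(x):
    return np.sum(100.0 * (x[1:] - x[:-1] ** 2) ** 2 + (1.0 - x[:-1]) ** 2)

bounds = [(-5.0, 5.0), (-5.0, 5.0)]
print(dual_annealing(rosenbrock, bounds, seed=0).x)  # ~ [1, 1]
print(shgo(rosenbrock, bounds).x)                    # ~ [1, 1]
```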
6 changes: 1 addition & 5 deletions hyperspy/signal.py
@@ -3675,11 +3675,7 @@ def _make_sure_data_is_contiguous(self):
_logger.info("{0!r} data is replaced by its optimized copy, see "
"optimize parameter of ``Basesignal.transpose`` "
"for more information.".format(self))
# `like` keyword is necessary to support cupy array (NEP-35)
kw = {}
if Version(np.__version__) >= Version("1.20"):
kw['like'] = self.data
self.data = np.ascontiguousarray(self.data, **kw)
self.data = np.ascontiguousarray(self.data, like=self.data)

def _iterate_signal(self, iterpath=None):
"""Iterates over the signal data. It is faster than using the signal
