5 changes: 5 additions & 0 deletions requirements.txt
@@ -3,12 +3,17 @@ dask-image>=0.5.0
dask[array]>=2021.02.0
docrep>=0.3.1
leidenalg>=0.8.2
networkx>=2.6.0
numba>=0.52.0
numpy>=1.18.0
omnipath>=1.0.5
pandas>=1.2.0
Pillow
scanpy>=1.8.0
scikit-image>=0.17.1
scikit-learn>=0.24.0
statsmodels>=0.12.0
tifffile
tqdm>=4.50.2
typing_extensions
xarray>=0.16.1
2 changes: 1 addition & 1 deletion setup.py
@@ -44,7 +44,7 @@
for l in (Path("docs") / "requirements.txt").read_text("utf-8").splitlines()
if not l.startswith("-r")
],
interactive=["PyQt5>=5.15.0", "napari>=0.4.15,<0.5"],
interactive=["PyQt5>=5.15.0", "napari>=0.4.14,<0.5"],
),
classifiers=[
"Development Status :: 5 - Production/Stable",
2 changes: 1 addition & 1 deletion squidpy/gr/_ligrec.py
@@ -457,7 +457,7 @@ def _trim_data(self) -> None:
assert isinstance(self.interactions, pd.DataFrame)

logg.debug("DEBUG: Removing genes not in any interaction")
self._filtered_data = self._data.loc[:, set(self.interactions[SOURCE]) | set(self.interactions[TARGET])]
self._filtered_data = self._data.loc[:, list(set(self.interactions[SOURCE]) | set(self.interactions[TARGET]))]

def _filter_interactions_by_genes(self) -> None:
"""Subset :attr:`interactions` to only those for which we have the data."""
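For context, recent pandas releases warn on (and newer ones reject) a ``set`` used as a ``.loc`` column indexer, which is why the union of source and target genes is materialized as a ``list`` above. A minimal sketch of the same pattern, with toy column names rather than squidpy's actual data:

```python
import pandas as pd

df = pd.DataFrame({"gene_a": [1, 2], "gene_b": [3, 4], "gene_c": [5, 6]})
wanted = {"gene_a", "gene_c"} | {"gene_b"}

# df.loc[:, wanted] is deprecated/rejected with a set indexer; a list works everywhere.
subset = df.loc[:, list(wanted)]
```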
2 changes: 1 addition & 1 deletion squidpy/gr/_nhood.py
@@ -244,7 +244,7 @@ def centrality_scores(

centralities = [Centrality(c) for c in centrality]

graph = nx.from_scipy_sparse_matrix(adata.obsp[connectivity_key])
graph = nx.Graph(adata.obsp[connectivity_key])

cat = adata.obs[cluster_key].cat.categories.values
clusters = adata.obs[cluster_key].values
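As a side note, ``nx.from_scipy_sparse_matrix`` is deprecated in newer networkx releases and removed in 3.0; passing the sparse connectivity matrix straight to the ``Graph`` constructor, as done above, works across versions. A small sketch with a toy adjacency matrix standing in for ``adata.obsp[connectivity_key]``:

```python
import networkx as nx
import numpy as np
from scipy.sparse import csr_matrix

# Toy symmetric adjacency matrix (3 nodes, edges 0-1 and 1-2).
adj = csr_matrix(np.array([[0, 1, 0], [1, 0, 1], [0, 1, 0]], dtype=float))

graph = nx.Graph(adj)  # the constructor accepts scipy sparse input directly
print(graph.number_of_nodes(), graph.number_of_edges())  # 3 2
```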
9 changes: 6 additions & 3 deletions squidpy/im/_container.py
@@ -576,9 +576,8 @@ def _post_process(
**_: Any,
) -> xr.Dataset:
def _rescale(arr: xr.DataArray) -> xr.DataArray:
# TODO(michalk8): in skimage==0.19.0, multichannel is deprecated
scaling_fn = partial(
rescale, scale=[scale, scale, 1], preserve_range=True, order=1, multichannel=True, cval=cval
rescale, scale=[scale, scale, 1], preserve_range=True, order=1, channel_axis=-1, cval=cval
)
dtype = arr.dtype
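For reference, ``multichannel=True`` was deprecated in scikit-image 0.19 in favour of ``channel_axis``, which is what the rescale call above switches to. A minimal, self-contained illustration of the renamed keyword (toy image, not the container's data):

```python
import numpy as np
from skimage.transform import rescale

img = np.random.rand(64, 64, 3)  # y, x, channels

# multichannel=True (pre-0.19) becomes channel_axis=-1 (0.19+).
half = rescale(img, 0.5, order=1, preserve_range=True, channel_axis=-1)
print(half.shape)  # (32, 32, 3)
```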

@@ -1151,7 +1150,8 @@ def apply(
kwargs
Keyword arguments for :func:`dask.array.map_overlap` or :func:`dask.array.map_blocks`, depending on whether
``depth`` is present in ``fn_kwargs``. Only used when ``chunks != None``.
Use ``depth`` to control boundary artifacts if ``func`` requires data from neighboring chunks.
Use ``depth`` to control boundary artifacts if ``func`` requires data from neighboring chunks;
by default, ``boundary='reflect'`` is used.

Returns
-------
@@ -1173,6 +1173,9 @@ def apply_func(func: Callable[..., NDArrayA], arr: xr.DataArray) -> NDArrayA | d
else da.map_blocks(func, arr, **fn_kwargs, **kwargs, dtype=arr.dtype)
)

if "depth" in kwargs:
kwargs.setdefault("boundary", "reflect")

layer = self._get_layer(layer)
if new_layer is None:
new_layer = layer
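To illustrate the default set above: when ``depth`` is supplied, each chunk needs a halo of pixels from its neighbours, and ``boundary='reflect'`` controls how the outermost chunks are padded. A rough sketch with a toy array and :func:`scipy.ndimage.gaussian_filter` (the array shape, chunk size, and filter are illustrative only):

```python
import dask.array as da
import numpy as np
from scipy.ndimage import gaussian_filter

arr = da.from_array(np.random.rand(256, 256), chunks=64)

# Each chunk receives an 8-pixel halo from its neighbours; edges are mirrored,
# matching the boundary="reflect" default this change applies when depth is set.
smoothed = da.map_overlap(
    gaussian_filter, arr, sigma=2, depth=8, boundary="reflect", dtype=arr.dtype
)
result = smoothed.compute()
```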
18 changes: 9 additions & 9 deletions squidpy/im/_feature_mixin.py
@@ -7,7 +7,7 @@
import xarray as xr

from skimage.util import img_as_ubyte
from skimage.feature import greycoprops, greycomatrix
from skimage.feature import graycoprops, graycomatrix
import skimage.measure

from squidpy._docs import d
@@ -196,12 +196,12 @@ def features_texture(
"""
Calculate texture features.

A grey level co-occurrence matrix (`GLCM <https://en.wikipedia.org/wiki/Co-occurrence_matrix>`_) is computed
A gray level co-occurrence matrix (`GLCM <https://en.wikipedia.org/wiki/Co-occurrence_matrix>`_) is computed
for different combinations of distance and angle.

The distance defines the pixel difference of co-occurrence. The angle defines the direction along which
we check for co-occurrence. The GLCM includes the number of times that grey-level :math:`j` occurs at a distance
:math:`d` and at an angle theta from grey-level :math:`i`.
we check for co-occurrence. The GLCM includes the number of times that gray-level :math:`j` occurs at a distance
:math:`d` and at an angle theta from gray-level :math:`i`.

Parameters
----------
@@ -210,11 +210,11 @@ def features_texture(
%(feature_name)s
%(channels)s
props
Texture features that are calculated, see the `prop` argument in :func:`skimage.feature.greycoprops`.
Texture features that are calculated, see the `prop` argument in :func:`skimage.feature.graycoprops`.
distances
The `distances` argument in :func:`skimage.feature.greycomatrix`.
The `distances` argument in :func:`skimage.feature.graycomatrix`.
angles
The `angles` argument in :func:`skimage.feature.greycomatrix`.
The `angles` argument in :func:`skimage.feature.graycomatrix`.

Returns
-------
@@ -244,9 +244,9 @@ def features_texture(

features = {}
for c in channels:
comatrix = greycomatrix(arr[..., c], distances=distances, angles=angles, levels=256)
comatrix = graycomatrix(arr[..., c], distances=distances, angles=angles, levels=256)
for p in props:
tmp_features = greycoprops(comatrix, prop=p)
tmp_features = graycoprops(comatrix, prop=p)
for d_idx, dist in enumerate(distances):
for a_idx, a in enumerate(angles):
features[f"{feature_name}_ch-{c}_{p}_dist-{dist}_angle-{a:.2f}"] = tmp_features[d_idx, a_idx]
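The rename above follows scikit-image 0.19, where ``greycomatrix``/``greycoprops`` became ``graycomatrix``/``graycoprops``. A minimal standalone use of the renamed functions on a random test image (not the container's data):

```python
import numpy as np
from skimage.feature import graycomatrix, graycoprops

img = np.random.randint(0, 256, size=(32, 32), dtype=np.uint8)
glcm = graycomatrix(img, distances=[1, 2], angles=[0, np.pi / 2], levels=256)
contrast = graycoprops(glcm, prop="contrast")
print(contrast.shape)  # (n_distances, n_angles) == (2, 2)
```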
2 changes: 1 addition & 1 deletion squidpy/im/_process.py
@@ -7,7 +7,7 @@
from scanpy import logging as logg

from dask import delayed
from scipy.ndimage.filters import gaussian_filter as scipy_gf
from scipy.ndimage import gaussian_filter as scipy_gf
import numpy as np
import dask.array as da

4 changes: 2 additions & 2 deletions squidpy/pl/_graph.py
@@ -173,7 +173,7 @@ def interaction_matrix(
_assert_categorical_obs(adata, key=cluster_key)
array = _get_data(adata, cluster_key=cluster_key, func_name="interaction_matrix")

ad = AnnData(X=array, obs={cluster_key: pd.Categorical(adata.obs[cluster_key].cat.categories)})
ad = AnnData(X=array, obs={cluster_key: pd.Categorical(adata.obs[cluster_key].cat.categories)}, dtype=array.dtype)
_maybe_set_colors(source=adata, target=ad, key=cluster_key, palette=palette)
if title is None:
title = "Interaction matrix"
@@ -236,7 +236,7 @@ def nhood_enrichment(
_assert_categorical_obs(adata, key=cluster_key)
array = _get_data(adata, cluster_key=cluster_key, func_name="nhood_enrichment")[mode]

ad = AnnData(X=array, obs={cluster_key: pd.Categorical(adata.obs[cluster_key].cat.categories)})
ad = AnnData(X=array, obs={cluster_key: pd.Categorical(adata.obs[cluster_key].cat.categories)}, dtype=array.dtype)
_maybe_set_colors(source=adata, target=ad, key=cluster_key, palette=palette)
if title is None:
title = "Neighborhood enrichment"
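These ``dtype=`` additions (here and in the files below) make the element type of ``X`` explicit, avoiding anndata's implicit conversion to ``float32`` and the warnings newer releases emit about it. A tiny sketch of the pattern with toy data:

```python
import numpy as np
import pandas as pd
from anndata import AnnData

X = np.eye(3)
# Passing dtype explicitly keeps X's dtype instead of anndata's implicit float32 conversion.
ad = AnnData(X=X, obs={"cluster": pd.Categorical(list("aab"))}, dtype=X.dtype)
```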
2 changes: 1 addition & 1 deletion squidpy/pl/_ligrec.py
@@ -311,7 +311,7 @@ def get_dendrogram(adata: AnnData, linkage: str = "complete") -> Mapping[str, An
var = pd.DataFrame(pvals.columns)
var = var.set_index(var.columns[0])

adata = AnnData(pvals.values, obs={"groups": pd.Categorical(pvals.index)}, var=var)
adata = AnnData(pvals.values, obs={"groups": pd.Categorical(pvals.index)}, var=var, dtype=pvals.values.dtype)
adata.obs_names = pvals.index
minn = np.nanmin(adata.X)
delta = np.nanmax(adata.X) - minn
Binary file modified tests/_data/test_data.h5ad
9 changes: 5 additions & 4 deletions tests/conftest.py
@@ -63,7 +63,7 @@ def nhood_data(adata: AnnData) -> AnnData:
@pytest.fixture()
def dummy_adata() -> AnnData:
r = np.random.RandomState(100)
adata = AnnData(r.rand(200, 100), obs={"cluster": r.randint(0, 3, 200)})
adata = AnnData(r.rand(200, 100), obs={"cluster": r.randint(0, 3, 200)}, dtype=float)

adata.obsm[Key.obsm.spatial] = np.stack([r.randint(0, 500, 200), r.randint(0, 500, 200)], axis=1)
sp.gr.spatial_neighbors(adata, spatial_key=Key.obsm.spatial, n_rings=2)
@@ -88,6 +88,7 @@ def adata_intmat() -> AnnData:
np.zeros((5, 5)),
obs={"cat": pd.Categorical.from_codes([0, 0, 0, 1, 1], ("a", "b"))},
obsp={"spatial_connectivities": graph},
dtype=float,
)


@@ -108,7 +109,7 @@ def adata_squaregrid() -> AnnData:
coord = rng.integers(0, 10, size=(400, 2))
coord = np.unique(coord, axis=0)
counts = rng.integers(0, 10, size=(coord.shape[0], 10))
adata = AnnData(counts)
adata = AnnData(counts, dtype=counts.dtype)
adata.obsm["spatial"] = coord
sc.pp.scale(adata)
return adata
@@ -275,7 +276,7 @@ def visium_adata():
[4400, 7729],
]
)
adata = AnnData(X=np.ones((visium_coords.shape[0], 3)))
adata = AnnData(X=np.ones((visium_coords.shape[0], 3)), dtype=float)
adata.obsm[Key.obsm.spatial] = visium_coords
adata.uns[Key.uns.spatial] = {}
return adata
@@ -284,7 +285,7 @@ def visium_adata():
@pytest.fixture()
def non_visium_adata():
non_visium_coords = np.array([[1, 0], [3, 0], [5, 6], [0, 4]])
adata = AnnData(X=non_visium_coords)
adata = AnnData(X=non_visium_coords, dtype=int)
adata.obsm[Key.obsm.spatial] = non_visium_coords
return adata

34 changes: 20 additions & 14 deletions tests/datasets/test_dataset.py
@@ -2,8 +2,9 @@
from pathlib import Path
from http.client import RemoteDisconnected
import pytest
import warnings

from anndata import AnnData
from anndata import AnnData, OldFormatWarning

import squidpy as sq

@@ -17,21 +18,26 @@ def test_import(self, func):
assert isinstance(fn, FunctionType)


# TODO(michalk8): parse the code and xfail iff server issue
class TestDatasetsDownload:
def test_download_imc(self, tmp_path: Path):
try:
adata = sq.datasets.imc(tmp_path / "foo")
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=OldFormatWarning)
try:
adata = sq.datasets.imc(tmp_path / "foo")

assert isinstance(adata, AnnData)
assert adata.shape == (4668, 34)
except RemoteDisconnected as e:
pytest.skip(str(e))
assert isinstance(adata, AnnData)
assert adata.shape == (4668, 34)
except RemoteDisconnected as e:
pytest.xfail(str(e))

def test_download_visium_hne_image_crop(self, tmp_path: Path):
try:
img = sq.datasets.visium_hne_image_crop(tmp_path / "foo")

assert isinstance(img, sq.im.ImageContainer)
assert img.shape == (3527, 3527)
except RemoteDisconnected as e:
pytest.skip(str(e))
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=OldFormatWarning)
try:
img = sq.datasets.visium_hne_image_crop(tmp_path / "foo")

assert isinstance(img, sq.im.ImageContainer)
assert img.shape == (3527, 3527)
except RemoteDisconnected as e:
pytest.xfail(str(e))
15 changes: 8 additions & 7 deletions tests/image/test_container.py
@@ -1,11 +1,11 @@
from typing import Any, Set, List, Tuple, Union, Optional, Sequence
from imageio import imread, imsave
from pathlib import Path
from itertools import permutations
from collections import defaultdict
from html.parser import HTMLParser
from pytest_mock import MockerFixture
import pytest
import imageio

from anndata import AnnData
import anndata as ad
@@ -60,7 +60,7 @@ def test_lazy_load(self, on_init: bool, tmpdir):
img_orig = np.random.randint(low=0, high=255, size=(100, 100, 1), dtype=np.uint8)
if on_init:
fname = str(tmpdir / "tmp.tiff")
tifffile.imsave(fname, img_orig)
tifffile.imwrite(fname, img_orig)
img = ImageContainer(fname, lazy=True)
else:
img = ImageContainer(da.from_array(img_orig), lazy=True)
@@ -143,9 +143,9 @@ def test_add_img_invalid_zdim(self, cont: ImageContainer):
def test_load_ext(self, shape: Tuple[int, ...], ext: str, tmpdir):
img_orig = np.random.randint(low=0, high=255, size=shape, dtype=np.uint8)
fname = tmpdir / f"tmp.{ext}"
imsave(str(fname), img_orig)
imageio.imsave(str(fname), img_orig)

gt = imread(str(fname)) # because of compression, we load again
gt = imageio.imread(str(fname)) # because of compression, we load again
cont = ImageContainer(str(fname))

np.testing.assert_array_equal(cont["image"].values.squeeze(), gt.squeeze())
@@ -154,7 +154,7 @@ def test_load_ext(self, shape: Tuple[int, ...], ext: str, tmpdir):
def test_load_tiff(self, shape: Tuple[int, ...], tmpdir):
img_orig = np.random.randint(low=0, high=255, size=shape, dtype=np.uint8)
fname = tmpdir / "tmp.tiff"
tifffile.imsave(fname, img_orig)
tifffile.imwrite(fname, img_orig)

cont = ImageContainer(str(fname))

@@ -750,8 +750,7 @@ def func(arr: Union[np.ndarray, da.Array]):
assert isinstance(res["image"].data, da.Array)
else:
assert isinstance(res["image"].data, np.ndarray)

assert not np.shares_memory(cont["image"].data, res["image"].data)
assert not np.shares_memory(cont["image"].data, res["image"].data)

def test_apply_wrong_number_of_dim(self):
def func(arr: np.ndarray) -> float:
@@ -863,12 +862,14 @@ def test_generate_spot_crops(self):
np.zeros((len(crop_coords), 1)),
uns={"spatial": {"1": {"scalefactors": {"spot_diameter_fullres": 5}}}},
obsm={"spatial": crop_coords},
dtype=float,
)
# for library_id 2 (with larger scalefactor)
adata2 = AnnData(
np.zeros((len(crop_coords), 1)),
uns={"spatial": {"2": {"scalefactors": {"spot_diameter_fullres": 7}}}},
obsm={"spatial": crop_coords},
dtype=float,
)
# concatenate
adata = ad.concat({"1": adata1, "2": adata2}, uns_merge="unique", label="library_id")
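The ``tifffile.imsave`` → ``tifffile.imwrite`` switches above track the rename in tifffile, where ``imsave`` is a deprecated alias. A short round-trip sketch (the temporary path is illustrative):

```python
import numpy as np
import tifffile

img = np.random.randint(0, 255, size=(64, 64), dtype=np.uint8)
tifffile.imwrite("/tmp/example.tiff", img)  # imsave is the older, deprecated spelling
assert np.array_equal(tifffile.imread("/tmp/example.tiff"), img)
```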
6 changes: 4 additions & 2 deletions tests/image/test_io.py
@@ -14,10 +14,12 @@


class TestIO:
def _create_image(self, path: str, shape: Tuple[int, ...]):
@staticmethod
def _create_image(path: str, shape: Tuple[int, ...]):
dtype = np.uint8 if len(shape) <= 3 else np.float32
img = np.random.randint(0, 255, size=shape).astype(dtype)
tifffile.imsave(path, img)
# set `photometric` to remove warnings
tifffile.imwrite(path, img, photometric=tifffile.TIFF.PHOTOMETRIC.MINISBLACK)

return img

4 changes: 4 additions & 0 deletions tox.ini
@@ -65,6 +65,10 @@ filterwarnings =
ignore:Using or importing the ABCs from 'collections':DeprecationWarning
ignore:Converting `np.inexact`:DeprecationWarning
ignore:the matrix subclass is:PendingDeprecationWarning
ignore:Please use:DeprecationWarning:dask_image.*
ignore:Auto-removal of grids by:DeprecationWarning:
ignore:Support for passing numbers through unit converters:DeprecationWarning
ignore:distutils Version classes are deprecated:DeprecationWarning
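Each new entry uses pytest's ``action:message:category:module`` filter syntax; at run time it behaves roughly like a standard warnings filter. A hedged Python equivalent of the first new entry (approximate — the message is matched as a prefix):

```python
import warnings

# Roughly what "ignore:Please use:DeprecationWarning:dask_image.*" does:
warnings.filterwarnings(
    "ignore", message="Please use", category=DeprecationWarning, module="dask_image.*"
)
```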

[coverage:run]
branch = true