diff --git a/.cruft.json b/.cruft.json
index ae416b57..7c4e0e5e 100644
--- a/.cruft.json
+++ b/.cruft.json
@@ -1,7 +1,7 @@
{
"template": "https://github.com/scverse/cookiecutter-scverse",
- "commit": "87a407a65408d75a949c0b54b19fd287475a56f8",
- "checkout": "v0.4.0",
+ "commit": "d383d94fadff9e4e6fdb59d77c68cb900d7cedec",
+ "checkout": "v0.6.0",
"context": {
"cookiecutter": {
"project_name": "spatialdata-plot",
@@ -10,19 +10,33 @@
"author_full_name": "scverse",
"author_email": "scverse",
"github_user": "scverse",
- "project_repo": "https://github.com/scverse/spatialdata-plot",
+ "github_repo": "spatialdata-plot",
"license": "BSD 3-Clause License",
+ "ide_integration": true,
"_copy_without_render": [
".github/workflows/build.yaml",
".github/workflows/test.yaml",
"docs/_templates/autosummary/**.rst"
],
+ "_exclude_on_template_update": [
+ "CHANGELOG.md",
+ "LICENSE",
+ "README.md",
+ "docs/api.md",
+ "docs/index.md",
+ "docs/notebooks/example.ipynb",
+ "docs/references.bib",
+ "docs/references.md",
+ "src/**",
+ "tests/**"
+ ],
"_render_devdocs": false,
"_jinja2_env_vars": {
"lstrip_blocks": true,
"trim_blocks": true
},
- "_template": "https://github.com/scverse/cookiecutter-scverse"
+ "_template": "https://github.com/scverse/cookiecutter-scverse",
+ "_commit": "d383d94fadff9e4e6fdb59d77c68cb900d7cedec"
}
},
"directory": null
diff --git a/.editorconfig b/.editorconfig
index 050f9118..66678e37 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -8,10 +8,7 @@ charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
-[*.{yml,yaml}]
-indent_size = 2
-
-[.cruft.json]
+[{*.{yml,yaml,toml},.cruft.json}]
indent_size = 2
[Makefile]
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 00000000..3ca1ccbd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,94 @@
+name: Bug report
+description: Report something that is broken or incorrect
+labels: bug
+body:
+ - type: markdown
+ attributes:
+ value: |
+ **Note**: Please read [this guide](https://matthewrocklin.com/blog/work/2018/02/28/minimal-bug-reports)
+ detailing how to provide the necessary information for us to reproduce your bug. In brief:
+ * Please provide the exact steps to reproduce the bug in a clean Python environment.
+ * In case it's not clear what's causing this bug, please provide the data or the data generation procedure.
+ * Sometimes it is not possible to share the data, but usually it is possible to replicate problems on publicly
+ available datasets or to share a subset of your data.
+
+ - type: textarea
+ id: report
+ attributes:
+ label: Report
+ description: A clear and concise description of what the bug is.
+ validations:
+ required: true
+
+ - type: textarea
+ id: versions
+ attributes:
+ label: Versions
+ description: |
+ Which versions of which packages were used.
+
+ Please install `session-info2`, run the following command in a notebook,
+ click the “Copy as Markdown” button, then paste the results into the text box below.
+
+ ```python
+ In[1]: import session_info2; session_info2.session_info(dependencies=True)
+ ```
+
+ Alternatively, run this in a console:
+
+ ```python
+ >>> import session_info2; print(session_info2.session_info(dependencies=True)._repr_mimebundle_()["text/markdown"])
+ ```
+ render: python
+ placeholder: |
+ anndata 0.11.3
+ ---- ----
+ charset-normalizer 3.4.1
+ coverage 7.7.0
+ psutil 7.0.0
+ dask 2024.7.1
+ jaraco.context 5.3.0
+ numcodecs 0.15.1
+ jaraco.functools 4.0.1
+ Jinja2 3.1.6
+ sphinxcontrib-jsmath 1.0.1
+ sphinxcontrib-htmlhelp 2.1.0
+ toolz 1.0.0
+ session-info2 0.1.2
+ PyYAML 6.0.2
+ llvmlite 0.44.0
+ scipy 1.15.2
+ pandas 2.2.3
+ sphinxcontrib-devhelp 2.0.0
+ h5py 3.13.0
+ tblib 3.0.0
+ setuptools-scm 8.2.0
+ more-itertools 10.3.0
+ msgpack 1.1.0
+ sparse 0.15.5
+ wrapt 1.17.2
+ jaraco.collections 5.1.0
+ numba 0.61.0
+ pyarrow 19.0.1
+ pytz 2025.1
+ MarkupSafe 3.0.2
+ crc32c 2.7.1
+ sphinxcontrib-qthelp 2.0.0
+ sphinxcontrib-serializinghtml 2.0.0
+ zarr 2.18.4
+ asciitree 0.3.3
+ six 1.17.0
+ sphinxcontrib-applehelp 2.0.0
+ numpy 2.1.3
+ cloudpickle 3.1.1
+ sphinxcontrib-bibtex 2.6.3
+ natsort 8.4.0
+ jaraco.text 3.12.1
+ setuptools 76.1.0
+ Deprecated 1.2.18
+ packaging 24.2
+ python-dateutil 2.9.0.post0
+ ---- ----
+ Python 3.13.2 | packaged by conda-forge | (main, Feb 17 2025, 14:10:22) [GCC 13.3.0]
+ OS Linux-6.11.0-109019-tuxedo-x86_64-with-glibc2.39
+ Updated 2025-03-18 15:47
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 00000000..5b62547f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: false
+contact_links:
+ - name: Scverse Community Forum
+ url: https://discourse.scverse.org/
+ about: If you have questions about “How to do X”, please ask them here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 00000000..ae9ca05b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,11 @@
+name: Feature request
+description: Propose a new feature for spatialdata-plot
+labels: enhancement
+body:
+ - type: textarea
+ id: description
+ attributes:
+ label: Description of feature
+ description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered.
+ validations:
+ required: true
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
new file mode 100644
index 00000000..83e01a1e
--- /dev/null
+++ b/.github/workflows/build.yaml
@@ -0,0 +1,33 @@
+name: Check Build
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+defaults:
+ run:
+ # to fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u).
+ shell: bash -euo pipefail {0}
+
+jobs:
+ package:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ filter: blob:none
+ fetch-depth: 0
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ cache-dependency-glob: pyproject.toml
+ - name: Build package
+ run: uv build
+ - name: Check package
+ run: uvx twine check --strict dist/*.whl
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 9bd6c545..6c23760f 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -4,28 +4,31 @@ on:
release:
types: [published]
+defaults:
+ run:
+ # to fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u).
+ shell: bash -euo pipefail {0}
+
+# Use "trusted publishing", see https://docs.pypi.org/trusted-publishers/
jobs:
- package_and_release:
+ release:
+ name: Upload release to PyPI
runs-on: ubuntu-latest
- if: startsWith(github.ref, 'refs/tags/v')
+ environment:
+ name: pypi
+ url: https://pypi.org/p/spatialdata_plot
+ permissions:
+ id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
steps:
- - uses: actions/checkout@v3
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
+ - uses: actions/checkout@v4
with:
- python-version: "3.12"
- cache: pip
- - name: Install build dependencies
- run: python -m pip install --upgrade pip wheel twine build
+ filter: blob:none
+ fetch-depth: 0
+ - name: Install uv
+ uses: astral-sh/setup-uv@v5
+ with:
+ cache-dependency-glob: pyproject.toml
- name: Build package
- run: python -m build
- - name: Check package
- run: twine check --strict dist/*.whl
- - name: Install hatch
- run: pip install hatch
- - name: Build project for distribution
- run: hatch build
- - name: Publish a Python distribution to PyPI
+ run: uv build
+ - name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
- with:
- password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index e4c8ef13..78859aa0 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -3,73 +3,140 @@ name: Test
on:
push:
branches: [main]
- tags: ["v*"] # Push events to matching v*, i.e. v1.0, v20.15.10
pull_request:
- branches: ["*"]
+ branches: [main]
+ schedule:
+ - cron: "0 5 1,15 * *"
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+env:
+ FORCE_COLOR: "1"
+ MPLBACKEND: agg
+ UV_COMPILE_BYTECODE: "1"
+ COVERAGE_FILE: ${{ github.workspace }}/.coverage
+
+defaults:
+ run:
+ # fail on error in multiline statements (-e), in pipes (-o pipefail), and on unset variables (-u)
+ shell: bash -euo pipefail {0}
jobs:
- test:
+ # Discover hatch test environments from pyproject.toml, similar to squidpy.
+ get-environments:
runs-on: ubuntu-latest
+ outputs:
+ envs: ${{ steps.get-envs.outputs.envs }}
+ steps:
+ - uses: actions/checkout@v5
+ with:
+ filter: blob:none
+ fetch-depth: 0
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v7
+ with:
+ enable-cache: true
+ cache-dependency-glob: pyproject.toml
+
+ - name: Get test environments
+ id: get-envs
+ run: |
+ ENVS_JSON=$(NO_COLOR=1 uvx --quiet hatch env show --json | jq -c 'to_entries
+ | map(
+ select(.key | startswith("hatch-test"))
+ | {
+ name: .key,
+ label: (if (.key | contains("pre")) then .key + " (PRE-RELEASE DEPENDENCIES)" else .key end),
+ python: .value.python,
+ test_type: (if (.key | contains("py3.13-stable")) then "coverage" else null end)
+ }
+ )')
+ echo "envs=${ENVS_JSON}" | tee "$GITHUB_OUTPUT"
+
+ # Run tests through hatch for each discovered environment.
+ test:
+ needs: [get-environments]
strategy:
fail-fast: false
matrix:
- env: ["dev-py311", "dev-py313"]
-
- # Configure pytest-xdist
+ os: [ubuntu-latest]
+ env: ${{ fromJSON(needs.get-environments.outputs.envs) }}
+ name: ${{ matrix.env.label }} (${{ matrix.os }})
+ runs-on: ${{ matrix.os }}
+ # Configure pytest-xdist and BLAS threading as before
env:
OMP_NUM_THREADS: "1"
OPENBLAS_NUM_THREADS: "1"
MKL_NUM_THREADS: "1"
NUMEXPR_MAX_THREADS: "1"
- MPLBACKEND: "agg"
- DISPLAY: ":42"
PYTEST_ADDOPTS: "-n auto --dist=load --durations=10"
-
steps:
- - uses: actions/checkout@v4
-
- # Cache rattler's shared package cache (speeds up downloads)
- - name: Restore rattler cache
- uses: actions/cache@v4
+ - uses: actions/checkout@v5
with:
- path: ~/.cache/rattler
- key: rattler-${{ runner.os }}-${{ matrix.env }}-${{ hashFiles('pyproject.toml') }}
- restore-keys: |
- rattler-${{ runner.os }}-${{ matrix.env }}-
- rattler-${{ runner.os }}-
-
- # Install pixi and the requested environment
- - uses: prefix-dev/setup-pixi@v0.9.0
+ filter: blob:none
+ fetch-depth: 0
+
+ - name: Install uv
+ uses: astral-sh/setup-uv@v7
with:
- environments: ${{ matrix.env }}
- # We're not comitting the pixi-lock file
- locked: false
- cache: false
- activate-environment: ${{ matrix.env }}
+ python-version: ${{ matrix.env.python }}
+ cache-dependency-glob: pyproject.toml
- - name: Show versions
- run: |
- python --version
- pixi --version
+ - name: Ensure figure directory exists
+ run: mkdir -p "$GITHUB_WORKSPACE/tests/figures"
+
+ - name: create hatch environment
+ run: uvx hatch env create ${{ matrix.env.name }}
+
+ - name: run tests using hatch
+ if: matrix.env.test_type == null
+ env:
+ PLATFORM: ${{ matrix.os }}
+ DISPLAY: :42
+ run: uvx hatch run ${{ matrix.env.name }}:run -v --color=yes
- - name: Run tests
+ - name: run tests using hatch (coverage)
+ if: matrix.env.test_type == 'coverage'
env:
- MPLBACKEND: agg
- DISPLAY: ":42"
+ PLATFORM: ${{ matrix.os }}
+ DISPLAY: :42
+ run: uvx hatch run ${{ matrix.env.name }}:run-cov -v --color=yes
+
+ - name: generate coverage report
+ if: matrix.env.test_type == 'coverage'
run: |
- pytest -v --cov --color=yes --cov-report=xml
+ # See https://coverage.readthedocs.io/en/latest/config.html#run-patch
+ test -f .coverage || uvx hatch run ${{ matrix.env.name }}:cov-combine
+ uvx hatch run ${{ matrix.env.name }}:cov-report # report visibly
+ uvx hatch run ${{ matrix.env.name }}:coverage xml # create report for upload
- name: Archive figures generated during testing
if: always()
uses: actions/upload-artifact@v4
with:
- name: visual_test_results_${{ matrix.env }}
- path: /home/runner/work/spatialdata-plot/spatialdata-plot/tests/figures/*
+ name: visual_test_results_${{ runner.os }}_${{ matrix.env.name }}
+ path: ${{ github.workspace }}/tests/figures/*
+ if-no-files-found: ignore
- - name: Upload coverage to Codecov
- uses: codecov/codecov-action@v4
+ - name: Upload coverage
+ if: matrix.env.test_type == 'coverage'
+ uses: codecov/codecov-action@v5
with:
- name: coverage
- verbose: true
- env:
- CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+ token: ${{ secrets.CODECOV_TOKEN }}
+ fail_ci_if_error: true
+
+ # Single “required” check that aggregates all test jobs, as in squidpy.
+ check:
+ name: Tests pass in all hatch environments
+ if: always()
+ needs:
+ - get-environments
+ - test
+ runs-on: ubuntu-latest
+ steps:
+ - uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
diff --git a/.gitignore b/.gitignore
index 580789d6..8585fcc2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,6 +8,7 @@ buck-out/
__pycache__/
.mypy_cache/
.ruff_cache/
+.*cache/
/node_modules
# Distribution / packaging
@@ -31,7 +32,6 @@ __pycache__/
format.sh
-
# test
tests/figures/
@@ -44,3 +44,7 @@ tests/figures/
# other
_version.py
/temp/
+
+# pixi
+/pixi/
+pixi.lock
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ced59a92..23828b79 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,22 +5,40 @@ default_stages:
- pre-commit
- pre-push
minimum_pre_commit_version: 2.16.0
-ci:
- skip: []
repos:
- - repo: https://github.com/rbubley/mirrors-prettier
- rev: v3.6.2
+ - repo: https://github.com/biomejs/pre-commit
+ rev: v2.2.4
hooks:
- - id: prettier
+ - id: biome-format
+ exclude: ^\.cruft\.json$ # inconsistent indentation with cruft - file never to be modified manually.
+ - repo: https://github.com/tox-dev/pyproject-fmt
+ rev: v2.6.0
+ hooks:
+ - id: pyproject-fmt
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.0
hooks:
- - id: ruff
+ - id: ruff-check
+ types_or: [python, pyi, jupyter]
args: [--fix, --exit-non-zero-on-fix]
- id: ruff-format
+ types_or: [python, pyi, jupyter]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.18.2
hooks:
- id: mypy
additional_dependencies: [numpy, types-requests]
exclude: tests/|docs/
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v6.0.0
+ hooks:
+ - id: detect-private-key
+ - id: check-ast
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ args: [--fix=lf]
+ - id: trailing-whitespace
+ - id: check-case-conflict
+ # Check that there are no merge conflicts (could be generated by template sync)
+ - id: check-merge-conflict
+ args: [--assume-in-merge]
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index dd21449c..b698bd16 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,15 +1,10 @@
+# https://docs.readthedocs.io/en/stable/config-file/v2.html
version: 2
build:
os: ubuntu-24.04
tools:
python: "3.12"
commands:
- - asdf plugin add uv
- - asdf install uv latest
- - asdf global uv latest
- - uv venv
- - uv pip install .[docs,pre]
- - .venv/bin/python -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html
-sphinx:
- configuration: docs/conf.py
- fail_on_warning: false
+ - python -m pip install uv
+ - uvx hatch run docs:build
+ - mv docs/_build $READTHEDOCS_OUTPUT
diff --git a/biome.jsonc b/biome.jsonc
new file mode 100644
index 00000000..9f8f2208
--- /dev/null
+++ b/biome.jsonc
@@ -0,0 +1,17 @@
+{
+ "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
+ "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true },
+ "formatter": { "useEditorconfig": true },
+ "overrides": [
+ {
+ "includes": ["./.vscode/*.json", "**/*.jsonc"],
+ "json": {
+ "formatter": { "trailingCommas": "all" },
+ "parser": {
+ "allowComments": true,
+ "allowTrailingCommas": true,
+ },
+ },
+ },
+ ],
+}
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 0e22bef4..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,23 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = .
-BUILDDIR = _build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-clean:
- rm -r "$(BUILDDIR)"
diff --git a/docs/_templates/autosummary/class.rst b/docs/_templates/autosummary/class.rst
index ee6d05f5..7b4a0cf8 100644
--- a/docs/_templates/autosummary/class.rst
+++ b/docs/_templates/autosummary/class.rst
@@ -9,14 +9,11 @@
{% block attributes %}
{% if attributes %}
Attributes table
-~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~
.. autosummary::
-
{% for item in attributes %}
-
- ~{{ fullname }}.{{ item }}
-
+ ~{{ name }}.{{ item }}
{%- endfor %}
{% endif %}
{% endblock %}
@@ -27,13 +24,10 @@ Methods table
~~~~~~~~~~~~~
.. autosummary::
-
{% for item in methods %}
-
{%- if item != '__init__' %}
- ~{{ fullname }}.{{ item }}
+ ~{{ name }}.{{ item }}
{%- endif -%}
-
{%- endfor %}
{% endif %}
{% endblock %}
@@ -41,15 +35,11 @@ Methods table
{% block attributes_documentation %}
{% if attributes %}
Attributes
-~~~~~~~~~~~
+~~~~~~~~~~
{% for item in attributes %}
-{{ item }}
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
.. autoattribute:: {{ [objname, item] | join(".") }}
-
{%- endfor %}
{% endif %}
@@ -63,11 +53,7 @@ Methods
{% for item in methods %}
{%- if item != '__init__' %}
-{{ item }}
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
.. automethod:: {{ [objname, item] | join(".") }}
-
{%- endif -%}
{%- endfor %}
diff --git a/docs/conf.py b/docs/conf.py
index d1573a31..7d625e46 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -23,9 +23,8 @@
author = info["Author"]
copyright = f"{datetime.now():%Y}, {author}"
version = info["Version"]
-
-# repository_url = f"https://github.com/scverse/{project_name}"
-
+urls = dict(pu.split(", ") for pu in info.get_all("Project-URL"))
+repository_url = urls["Source"]
# The full version, including alpha/beta/rc tags
release = info["Version"]
@@ -56,14 +55,15 @@
"sphinx.ext.napoleon",
"sphinxcontrib.bibtex",
"sphinx_autodoc_typehints",
+ "sphinx_design",
+ "sphinx_tabs.tabs",
"sphinx.ext.mathjax",
"IPython.sphinxext.ipython_console_highlighting",
- "sphinx_design",
+ "sphinxext.opengraph",
*[p.stem for p in (HERE / "extensions").glob("*.py")],
]
autosummary_generate = True
-autodoc_process_signature = True
autodoc_member_order = "groupwise"
default_role = "literal"
napoleon_google_docstring = False
@@ -71,7 +71,7 @@
napoleon_include_init_with_doc = False
napoleon_use_rtype = True # having a separate entry generally helps readability
napoleon_use_param = True
-myst_heading_anchors = 3 # create anchors for h1-h3
+myst_heading_anchors = 6 # create anchors for h1-h6
myst_enable_extensions = [
"amsmath",
"colon_fence",
@@ -85,6 +85,9 @@
nb_execution_mode = "off"
nb_merge_streams = True
typehints_defaults = "braces"
+autodoc_type_aliases = {
+ "ColorLike": "spatialdata_plot.pl.basic.ColorLike",
+}
source_suffix = {
".rst": "restructuredtext",
@@ -93,33 +96,23 @@
}
intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
"anndata": ("https://anndata.readthedocs.io/en/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
+ "pandas": ("https://pandas.pydata.org/docs/", None),
+ "matplotlib": ("https://matplotlib.org/stable/", None),
"geopandas": ("https://geopandas.org/en/stable/", None),
"xarray": ("https://docs.xarray.dev/en/stable/", None),
"datatree": ("https://datatree.readthedocs.io/en/latest/", None),
"dask": ("https://docs.dask.org/en/latest/", None),
+ "spatialdata": ("https://spatialdata.scverse.org/en/stable/", None),
+ "scanpy": ("https://scanpy.readthedocs.io/en/stable/", None),
}
-
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = [
- "_build",
- "Thumbs.db",
- "**.ipynb_checkpoints",
- "tutorials/notebooks/index.md",
- "tutorials/notebooks/README.md",
- "tutorials/notebooks/references.md",
- "tutorials/notebooks/notebooks/paper_reproducibility/*",
-]
-# Ignore warnings.
-nitpicky = False # TODO: solve upstream.
-# nitpick_ignore = [
-# ("py:class", "spatial_image.SpatialImage"),
-# ("py:class", "multiscale_spatial_image.multiscale_spatial_image.MultiscaleSpatialImage"),
-# ]
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "**.ipynb_checkpoints"]
# -- Options for HTML output -------------------------------------------------
@@ -128,36 +121,29 @@
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"
-# html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
+html_css_files = ["css/custom.css"]
+
html_title = project_name
-html_logo = "_static/img/spatialdata_horizontal.png"
-# html_theme_options = {
-# "repository_url": repository_url,
-# "use_repository_button": True,
-# }
+html_theme_options = {
+ "repository_url": repository_url,
+ "use_repository_button": True,
+ "path_to_docs": "docs/",
+ "navigation_with_keys": False,
+}
pygments_style = "default"
nitpick_ignore = [
# If building the documentation fails because of a missing link that is outside your control,
# you can add an exception to this list.
- ("py:class", "igraph.Graph"),
+ ("py:class", "Colormap"),
+ ("py:class", "Normalize"),
+ ("py:class", "ColorLike"),
+]
+nitpick_ignore_regex = [
+ ("py:class", r"default .+"),
+ ("py:class", r"gets set to .+"),
+ ("py:class", r"optional"),
]
-
-
-def setup(app):
- """App setup hook."""
- app.add_config_value(
- "recommonmark_config",
- {
- "auto_toc_tree_section": "Contents",
- "enable_auto_toc_tree": True,
- "enable_math": True,
- "enable_inline_math": False,
- "enable_eval_rst": True,
- },
- True,
- )
- app.add_css_file("css/custom.css")
diff --git a/docs/contributing.md b/docs/contributing.md
index 02bc7757..53aab1f9 100644
--- a/docs/contributing.md
+++ b/docs/contributing.md
@@ -1,13 +1,330 @@
# Contributing guide
-Please refer to the [contribution guide from the `spatialdata` repository](https://github.com/scverse/spatialdata/blob/main/docs/contributing.md).
+This document aims to summarize the most important information for getting you started on contributing to this project.
+We assume that you are already familiar with git and with making pull requests on GitHub.
-## Testing the correctness of the plots
+For more extensive tutorials, that also cover the absolute basics,
+please refer to other resources such as the [pyopensci tutorials][],
+the [scientific Python tutorials][], or the [scanpy developer guide][].
-Many tests will produce plots and check that they are correct by comparing them with a previously saved and serialized version of the same plots. The ground truth images are located in `tests/_images`. Different OS/versions may produce similar but not identical plots (for instance the ticks/padding could vary). To take into account for this please consider the following:
+[pyopensci tutorials]: https://www.pyopensci.org/learn.html
+[scientific Python tutorials]: https://learn.scientific-python.org/development/tutorials/
+[scanpy developer guide]: https://scanpy.readthedocs.io/en/latest/dev/index.html
-- you should not use locally generated plots as ground truth images, but you should commit images that have been generated by a GitHub Action. The recommended workflow is to go to the ["actions" page for the repo](https://github.com/scverse/spatialdata-plot/actions/workflows/test.yaml), download the artifacts, and upload them as ground truth (after having reviewed them).
-- the ground truth images need to be updated when a new test is passing, or when a test starts producing a slightly different (but consistent) plot.
-- please never replace the ground truth images without having manually reviewed them.
-- if you run the tests locally in macOS or Windows they will likely fail because the ground truth images are generated using Ubuntu. To overcome this you can use `act`, which will generate a Docker reproducing the environment used in the GitHub Action. After the Docker container is generated you can use it within IDEs to run tests and debug code.
-- in the case of PyCharm, it is easier to create a container from a `Dockerfile` instead of using `act`. Please in such case use the `Dockerfile` made availabel in the repository. If you encountering problems with `act` or `docker`, please [get in touch with the developers via Zulip](https://scverse.zulipchat.com/#narrow/channel/443514-spatialdata-dev) and we will help troubleshoot the issue. See also additional details [here](https://github.com/scverse/spatialdata-plot/pull/397).
+:::{tip} The *hatch* project manager
+
+We highly recommend to familiarize yourself with [`hatch`][hatch].
+Hatch is a Python project manager that
+
+- manages virtual environments, separately for development, testing and building the documentation.
+ Separating the environments is useful to avoid dependency conflicts.
+- allows to run tests locally in different environments (e.g. different python versions)
+- allows to run tasks defined in `pyproject.toml`, e.g. to build documentation.
+
+While the project is setup with `hatch` in mind,
+it is still possible to use different tools to manage dependencies, such as `uv` or `pip`.
+
+:::
+
+[hatch]: https://hatch.pypa.io/latest/
+
+## Installing dev dependencies
+
+In addition to the packages needed to _use_ this package,
+you need additional python packages to [run tests](#writing-tests) and [build the documentation](#docs-building).
+
+:::::{tabs}
+::::{group-tab} Hatch
+
+On the command line, you typically interact with hatch through its command line interface (CLI).
+Running one of the following commands will automatically resolve the environments for testing and
+building the documentation in the background:
+
+```bash
+hatch test # defined in the table [tool.hatch.envs.hatch-test] in pyproject.toml
+hatch run docs:build # defined in the table [tool.hatch.envs.docs]
+```
+
+When using an IDE such as VS Code,
+you’ll have to point the editor at the paths to the virtual environments manually.
+The environment you typically want to use as your main development environment is the `hatch-test`
+environment with the latest Python version.
+
+To get a list of all environments for your projects, run
+
+```bash
+hatch env show -i
+```
+
+This will list “Standalone” environments and a table of “Matrix” environments like the following:
+
+```
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+| Name | Type | Envs | Features | Dependencies | Scripts |
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+| hatch-test | virtual | hatch-test.py3.10-stable | dev | coverage-enable-subprocess==1.0 | cov-combine |
+| | | hatch-test.py3.13-stable | test | coverage[toml]~=7.4 | cov-report |
+| | | hatch-test.py3.13-pre | | pytest-mock~=3.12 | run |
+| | | | | pytest-randomly~=3.15 | run-cov |
+| | | | | pytest-rerunfailures~=14.0 | |
+| | | | | pytest-xdist[psutil]~=3.5 | |
+| | | | | pytest~=8.1 | |
++------------+---------+--------------------------+----------+---------------------------------+-------------+
+```
+
+From the `Envs` column, select the environment name you want to use for development.
+In this example, it would be `hatch-test.py3.13-stable`.
+
+Next, create the environment with
+
+```bash
+hatch env create hatch-test.py3.13-stable
+```
+
+Then, obtain the path to the environment using
+
+```bash
+hatch env find hatch-test.py3.13-stable
+```
+
+In case you are using VScode, now open the command palette (Ctrl+Shift+P) and search for `Python: Select Interpreter`.
+Choose `Enter Interpreter Path` and paste the path to the virtual environment from above.
+
+In the future, this may become easier through a hatch vscode extension.
+
+::::
+
+::::{group-tab} uv
+
+A popular choice for managing virtual environments is [uv][].
+The main disadvantage compared to hatch is that it supports only a single environment per project at a time,
+which requires you to mix the dependencies for running tests and building docs.
+This can have undesired side-effects,
+such as requiring to install a lower version of a library your project depends on,
+only because an outdated sphinx plugin pins an older version.
+
+To initialize a virtual environment in the `.venv` directory of your project, simply run
+
+```bash
+uv sync --all-extras
+```
+
+The `.venv` directory is typically automatically discovered by IDEs such as VS Code.
+
+::::
+
+::::{group-tab} Pip
+
+Pip is nowadays mostly superseded by environment managers such as [hatch][].
+However, for the sake of completeness, and since it’s ubiquitously available,
+we describe how you can manage environments manually using `pip`:
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate
+pip install -e ".[dev,test,docs]"
+```
+
+The `.venv` directory is typically automatically discovered by IDEs such as VS Code.
+
+::::
+:::::
+
+[hatch environments]: https://hatch.pypa.io/latest/tutorials/environment/basic-usage/
+[uv]: https://docs.astral.sh/uv/
+
+## Code-style
+
+This package uses [pre-commit][] to enforce consistent code-styles.
+On every commit, pre-commit checks will either automatically fix issues with the code, or raise an error message.
+
+To enable pre-commit locally, simply run
+
+```bash
+pre-commit install
+```
+
+in the root of the repository.
+Pre-commit will automatically download all dependencies when it is run for the first time.
+
+Alternatively, you can rely on the [pre-commit.ci][] service enabled on GitHub.
+If you didn’t run `pre-commit` before pushing changes to GitHub it will automatically commit fixes to your pull request, or show an error message.
+
+If pre-commit.ci added a commit on a branch you still have been working on locally, simply use
+
+```bash
+git pull --rebase
+```
+
+to integrate the changes into yours.
+While the [pre-commit.ci][] is useful, we strongly encourage installing and running pre-commit locally first to understand its usage.
+
+Finally, most editors have an _autoformat on save_ feature.
+Consider enabling this option for [ruff][ruff-editors] and [biome][biome-editors].
+
+[pre-commit]: https://pre-commit.com/
+[pre-commit.ci]: https://pre-commit.ci/
+[ruff-editors]: https://docs.astral.sh/ruff/integrations/
+[biome-editors]: https://biomejs.dev/guides/integrate-in-editor/
+
+(writing-tests)=
+
+## Writing tests
+
+This package uses [pytest][] for automated testing.
+Please write {doc}`scanpy:dev/testing` for every function added to the package.
+
+Most IDEs integrate with pytest and provide a GUI to run tests.
+Just point yours to one of the environments returned by
+
+```bash
+hatch env create hatch-test # create test environments for all supported versions
+hatch env find hatch-test # list all possible test environment paths
+```
+
+Alternatively, you can run all tests from the command line by executing
+
+:::::{tabs}
+::::{group-tab} Hatch
+
+```bash
+hatch test # test with the highest supported Python version
+# or
+hatch test --all # test with all supported Python versions
+```
+
+::::
+
+::::{group-tab} uv
+
+```bash
+uv run pytest
+```
+
+::::
+
+::::{group-tab} Pip
+
+```bash
+source .venv/bin/activate
+pytest
+```
+
+::::
+:::::
+
+in the root of the repository.
+
+[pytest]: https://docs.pytest.org/
+
+### Continuous integration
+
+Continuous integration via GitHub actions will automatically run the tests on all pull requests and test
+against the minimum and maximum supported Python version.
+
+Additionally, there’s a CI job that tests against pre-releases of all dependencies (if there are any).
+The purpose of this check is to detect incompatibilities of new package versions early on and
+give you time to fix the issue or reach out to the developers of the dependency before the package
+is released to a wider audience.
+
+The CI job is defined in `.github/workflows/test.yaml`,
+however the single point of truth for CI jobs is the Hatch test matrix defined in `pyproject.toml`.
+This means that local testing via hatch and remote testing on CI test against the same Python versions and use the same environments.
+
+## Publishing a release
+
+### Updating the version number
+
+Before making a release, you need to update the version number in the `pyproject.toml` file.
+Please adhere to [Semantic Versioning][semver], in brief
+
+> Given a version number MAJOR.MINOR.PATCH, increment the:
+>
+> 1. MAJOR version when you make incompatible API changes,
+> 2. MINOR version when you add functionality in a backwards compatible manner, and
+> 3. PATCH version when you make backwards compatible bug fixes.
+>
+> Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.
+
+Once you are done, commit and push your changes and navigate to the "Releases" page of this project on GitHub.
+Specify `vX.X.X` as a tag name and create a release.
+For more information, see [managing GitHub releases][].
+This will automatically create a git tag and trigger a GitHub workflow that creates a release on [PyPI][].
+
+[semver]: https://semver.org/
+[managing GitHub releases]: https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository
+[pypi]: https://pypi.org/
+
+## Writing documentation
+
+Please write documentation for new or changed features and use-cases.
+This project uses [sphinx][] with the following features:
+
+- The [myst][] extension allows writing documentation in markdown/Markedly Structured Text
+- [Numpy-style docstrings][numpydoc] (through the [napoleon][numpydoc-napoleon] extension).
+- Jupyter notebooks as tutorials through [myst-nb][] (See [Tutorials with myst-nb](#tutorials-with-myst-nb-and-jupyter-notebooks))
+- [sphinx-autodoc-typehints][], to automatically reference annotated input and output types
+- Citations (like {cite:p}`Virshup_2023`) can be included with [sphinxcontrib-bibtex](https://sphinxcontrib-bibtex.readthedocs.io/)
+
+See scanpy’s {doc}`scanpy:dev/documentation` for more information on how to write your own.
+
+[sphinx]: https://www.sphinx-doc.org/en/master/
+[myst]: https://myst-parser.readthedocs.io/en/latest/intro.html
+[myst-nb]: https://myst-nb.readthedocs.io/en/latest/
+[numpydoc-napoleon]: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html
+[numpydoc]: https://numpydoc.readthedocs.io/en/latest/format.html
+[sphinx-autodoc-typehints]: https://github.com/tox-dev/sphinx-autodoc-typehints
+
+### Tutorials with myst-nb and jupyter notebooks
+
+The documentation is set-up to render jupyter notebooks stored in the `docs/notebooks` directory using [myst-nb][].
+Currently, only notebooks in `.ipynb` format are supported; they will be included with both their input and output cells.
+It is your responsibility to update and re-run the notebook whenever necessary.
+
+If you are interested in automatically running notebooks as part of the continuous integration,
+please check out [this feature request][issue-render-notebooks] in the `cookiecutter-scverse` repository.
+
+[issue-render-notebooks]: https://github.com/scverse/cookiecutter-scverse/issues/40
+
+#### Hints
+
+- If you refer to objects from other packages, please add an entry to `intersphinx_mapping` in `docs/conf.py`.
+ Only if you do so can sphinx automatically create a link to the external documentation.
+- If building the documentation fails because of a missing link that is outside your control,
+ you can add an entry to the `nitpick_ignore` list in `docs/conf.py`
+
+(docs-building)=
+
+### Building the docs locally
+
+:::::{tabs}
+::::{group-tab} Hatch
+
+```bash
+hatch run docs:build
+hatch run docs:open
+```
+
+::::
+
+::::{group-tab} uv
+
+```bash
+cd docs
+uv run sphinx-build -M html . _build -W
+(xdg-)open _build/html/index.html
+```
+
+::::
+
+::::{group-tab} Pip
+
+```bash
+source .venv/bin/activate
+cd docs
+sphinx-build -M html . _build -W
+(xdg-)open _build/html/index.html
+```
+
+::::
+:::::
diff --git a/docs/extensions/typed_returns.py b/docs/extensions/typed_returns.py
index 0fbffefe..1a3e43ab 100644
--- a/docs/extensions/typed_returns.py
+++ b/docs/extensions/typed_returns.py
@@ -11,7 +11,8 @@
def _process_return(lines: Iterable[str]) -> Generator[str, None, None]:
for line in lines:
-        if m := re.fullmatch(r"(?P<param>\w+)\s+:\s+(?P<type>[\w.]+)", line):
+        m = re.fullmatch(r"(?P<param>\w+)\s+:\s+(?P<type>[\w.]+)", line)
+ if m:
yield f"-{m['param']} (:class:`~{m['type']}`)"
else:
yield line
diff --git a/pyproject.toml b/pyproject.toml
index dd7b9633..49ed6e90 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,89 +1,84 @@
[build-system]
build-backend = "hatchling.build"
-requires = ["hatchling", "hatch-vcs"]
+requires = [ "hatch-vcs", "hatchling" ]
[project]
name = "spatialdata-plot"
description = "Static plotting for spatial data."
-authors = [
- {name = "scverse"},
-]
+readme = "README.md"
+license = "BSD-3-Clause"
maintainers = [
- {name = "Tim Treis", email = "tim.treis@helmholtz-munich.de"},
+ { name = "Tim Treis", email = "tim.treis@helmholtz-munich.de" },
+]
+authors = [
+ { name = "scverse" },
]
-urls.Documentation = "https://spatialdata.scverse.org/projects/plot/en/latest/index.html"
-urls.Source = "https://github.com/scverse/spatialdata-plot.git"
-urls.Home-page = "https://github.com/scverse/spatialdata-plot.git"
requires-python = ">=3.11"
-dynamic= [
- "version" # allow version to be set by git tags
+classifiers = [
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+]
+dynamic = [
+ "version", # allow version to be set by git tags
]
-license = {file = "LICENSE"}
-readme = "README.md"
dependencies = [
- "spatialdata>=0.3.0",
- "matplotlib",
- "scikit-learn",
- "scanpy",
- "matplotlib_scalebar",
-]
-
-[project.optional-dependencies]
-dev = [
- "jupyterlab",
- "notebook",
- "ipykernel",
- "ipywidgets",
- "jupytext",
- "pytest",
- "pytest-cov",
- "pooch",
- "ruff",
- "pre-commit",
-]
-docs = [
- "sphinx>=4.5",
- "sphinx-book-theme>=1.0.0",
- "sphinx_rtd_theme",
- "myst-nb",
- "sphinxcontrib-bibtex>=1.0.0",
- "sphinx-autodoc-typehints",
- "sphinx-design",
- # For notebooks
- "ipython>=8.6.0",
- "sphinx-copybutton",
-]
-test = [
- "pytest",
- "pytest-cov",
- "pytest-xdist",
- "pooch", # for scipy.datasets module
+ "matplotlib",
+ "matplotlib-scalebar",
+ "scanpy",
+ "scikit-learn",
+ # for debug logging (referenced from the issue template)
+ "session-info2",
+ "spatialdata>=0.3",
]
-[tool.coverage.run]
-source = ["spatialdata_plot"]
-omit = [
- "**/test_*.py",
+optional-dependencies.dev = [
+ "ipykernel",
+ "ipywidgets",
+ "jupyterlab",
+ "jupytext",
+ "notebook",
+ "pooch",
+ "pre-commit",
+ "pytest",
+ "pytest-cov",
+ "ruff",
+ "twine>=4.0.2",
]
-
-[tool.pytest.ini_options]
-testpaths = ["tests"]
-xfail_strict = true
-addopts = [
-# "-Werror", # if 3rd party libs raise DeprecationWarnings, just use filterwarnings below
- "--import-mode=importlib", # allow using test files with same name
- "-s" # print output from tests
+optional-dependencies.docs = [
+ "docutils>=0.8,!=0.18.*,!=0.19.*",
+ # For notebooks
+ "ipython>=8.6",
+ "myst-nb>=1.1",
+ "pandas",
+ # Until pybtex >0.24.0 releases: https://bitbucket.org/pybtex-devs/pybtex/issues/169/
+ "setuptools",
+ "sphinx>=8.1",
+ "sphinx-autodoc-typehints",
+ "sphinx-book-theme>=1",
+ "sphinx-copybutton",
+ "sphinx-design",
+ "sphinx-rtd-theme",
+ "sphinx-tabs",
+ "sphinxcontrib-bibtex>=1",
+ "sphinxext-opengraph",
]
-# info on how to use this https://stackoverflow.com/questions/57925071/how-do-i-avoid-getting-deprecationwarning-from-inside-dependencies-with-pytest
-filterwarnings = [
- # "ignore:.*U.*mode is deprecated:DeprecationWarning",
+optional-dependencies.test = [
+ "coverage>=7.10",
+ "pooch", # for scipy.datasets module
+ "pytest",
+ "pytest-cov",
+ "pytest-xdist",
]
-[tool.jupytext]
-formats = "ipynb,md"
+urls."Bug Tracker" = "https://github.com/scverse/spatialdata-plot/issues"
+urls.Documentation = "https://spatialdata.scverse.org/projects/plot/en/latest/index.html"
+urls.Home-page = "https://github.com/scverse/spatialdata-plot.git"
+urls.Source = "https://github.com/scverse/spatialdata-plot.git"
[tool.hatch.build.targets.wheel]
-packages = ['src/spatialdata_plot']
+packages = [ "src/spatialdata_plot" ]
[tool.hatch.version]
source = "vcs"
@@ -94,70 +89,126 @@ version-file = "_version.py"
[tool.hatch.metadata]
allow-direct-references = true
+[tool.hatch.envs.default]
+installer = "uv"
+features = [ "dev" ]
+
+[tool.hatch.envs.docs]
+features = [ "docs" ]
+scripts.build = "sphinx-build -M html docs docs/_build -W {args}"
+scripts.open = "python -m webbrowser -t docs/_build/html/index.html"
+scripts.clean = "git clean -fdX -- {args:docs}"
+
+# Test the lowest and highest supported Python versions with normal deps
+[[tool.hatch.envs.hatch-test.matrix]]
+deps = [ "stable" ]
+python = [ "3.11", "3.13" ]
+
+# Test the newest supported Python version also with pre-release deps
+[[tool.hatch.envs.hatch-test.matrix]]
+deps = [ "pre" ]
+python = [ "3.13" ]
+
+[tool.hatch.envs.hatch-test]
+features = [ "dev", "test" ]
+scripts.run = "pytest {args:tests}"
+scripts.run-cov = "pytest --cov --cov-report=term-missing {args:tests}"
+scripts.cov = "coverage combine"
+scripts.cov-combine = "coverage combine"
+scripts.cov-report = "coverage report"
+scripts.coverage = "python -m coverage {args}"
+
+[tool.hatch.envs.hatch-test.overrides]
+# If the matrix variable `deps` is set to "pre",
+# set the environment variable `UV_PRERELEASE` to "allow".
+matrix.deps.env-vars = [
+ { key = "UV_PRERELEASE", value = "allow", if = [ "pre" ] },
+]
+
[tool.ruff]
line-length = 120
+src = [ "src" ]
+extend-include = [ "*.ipynb" ]
exclude = [
- ".git",
- ".tox",
- "__pycache__",
- "build",
- "docs/_build",
- "dist",
- "setup.py",
-]
-[tool.ruff.lint]
-ignore = [
- # Do not assign a lambda expression, use a def -> lambda expression assignments are convenient
- "E731",
- # allow I, O, l as variable names -> I is the identity matrix, i, j, k, l is reasonable indexing notation
- "E741",
- # Missing docstring in public package
- "D104",
- # Missing docstring in public module
- "D100",
- # Missing docstring in __init__
- "D107",
- # Missing docstring in magic method
- "D105",
- # Do not perform function calls in argument defaults.
- "B008",
- # Missing docstring in magic method
- "D105",
-]
-select = [
- "D", # flake8-docstrings
- "I", # isort
- "E", # pycodestyle
- "F", # pyflakes
- "W", # pycodestyle
- "Q", # flake8-quotes
- "SIM", # flake8-simplify
- "TID", # flake-8-tidy-imports
- "NPY", # NumPy-specific rules
- "PT", # flake8-pytest-style
- "B", # flake8-bugbear
- "UP", # pyupgrade
- "C4", # flake8-comprehensions
- "BLE", # flake8-blind-except
- "T20", # flake8-print
- "RET", # flake8-raise
- "PGH", # pygrep-hooks
-]
-unfixable = ["B", "UP", "C4", "BLE", "T20", "RET"]
-
-[tool.ruff.lint.per-file-ignores]
- "tests/*" = ["D", "PT", "B024"]
- "*/__init__.py" = ["F401", "D104", "D107", "E402"]
- "docs/*" = ["D","B","E","A"]
- "tests/conftest.py"= ["E402", "RET504"]
- "src/spatialdata_plot/pl/utils.py"= ["PGH003"]
-
-[tool.ruff.lint.pydocstyle]
-convention = "numpy"
+ ".git",
+ ".tox",
+ "__pycache__",
+ "build",
+ "dist",
+ "docs/_build",
+ "setup.py",
+]
+
+format.docstring-code-format = true
+
+lint.select = [
+ "B", # flake8-bugbear
+ "BLE", # flake8-blind-except
+ "C4", # flake8-comprehensions
+ "D", # flake8-docstrings
+ "E", # Error detected by Pycodestyle
+ "F", # Errors detected by Pyflakes
+ "I", # isort
+ "NPY", # NumPy-specific rules
+ "PGH", # pygrep-hooks
+ "PT", # flake8-pytest-style
+ "Q", # flake8-quotes
+ "RET", # flake8-raise
+ "RUF100", # Report unused noqa directives
+ "SIM", # flake8-simplify
+ "T20", # flake8-print
+ "TID", # flake8-tidy-imports
+ "UP", # pyupgrade
+ "W", # Warning detected by Pycodestyle
+]
+lint.ignore = [
+ "B008", # Errors from function calls in argument defaults. These are fine when the result is immutable.
+ "D100", # Missing docstring in public module
+ "D104", # Missing docstring in public package
+ "D105", # __magic__ methods are often self-explanatory, allow missing docstrings
+ "D107", # Missing docstring in __init__
+ # Disable one in each pair of mutually incompatible rules
+ "D203", # We don't want a blank line before a class docstring
+ "D213", # <> We want docstrings to start immediately after the opening triple quote
+ "D400", # first line should end with a period [Bug: doesn't work with single-line docstrings]
+ "D401", # First line should be in imperative mood; try rephrasing
+ "E501", # line too long -> we accept long comment lines; formatter gets rid of long code lines
+ "E731", # Do not assign a lambda expression, use a def -> lambda expression assignments are convenient
+ "E741", # allow I, O, l as variable names -> I is the identity matrix
+]
+lint.per-file-ignores."*/__init__.py" = [ "D104", "D107", "E402", "F401" ]
+lint.per-file-ignores."docs/*" = [ "A", "B", "D", "E", "I" ]
+lint.per-file-ignores."src/spatialdata_plot/pl/utils.py" = [ "PGH003" ]
+lint.per-file-ignores."tests/*" = [ "B024", "D", "PT" ]
+lint.per-file-ignores."tests/conftest.py" = [ "E402", "RET504" ]
+lint.unfixable = [ "B", "BLE", "C4", "RET", "T20", "UP" ]
+lint.pydocstyle.convention = "numpy"
+
+[tool.pytest.ini_options]
+testpaths = [ "tests" ]
+xfail_strict = true
+addopts = [
+ # "-Werror", # if 3rd party libs raise DeprecationWarnings, just use filterwarnings below
+ "--import-mode=importlib", # allow using test files with same name
+ "-s", # print output from tests
+]
+# info on how to use this https://stackoverflow.com/questions/57925071/how-do-i-avoid-getting-deprecationwarning-from-inside-dependencies-with-pytest
+filterwarnings = [
+ # "ignore:.*U.*mode is deprecated:DeprecationWarning",
+]
+
+[tool.coverage.run]
+source = [ "spatialdata_plot" ]
+omit = [
+ "**/test_*.py",
+]
+
+[tool.jupytext]
+formats = "ipynb,md"
[tool.pixi.workspace]
-channels = ["conda-forge"]
-platforms = ["osx-arm64", "linux-64"]
+channels = [ "conda-forge" ]
+platforms = [ "osx-arm64", "linux-64" ]
[tool.pixi.dependencies]
python = ">=3.11"
@@ -174,14 +225,14 @@ python = "3.13.*"
[tool.pixi.environments]
# 3.11 lane (for gh-actions)
-dev-py311 = { features = ["dev", "test", "py311"], solve-group = "py311" }
-docs-py311 = { features = ["docs", "py311"], solve-group = "py311" }
+dev-py311 = { features = [ "dev", "test", "py311" ], solve-group = "py311" }
+docs-py311 = { features = [ "docs", "py311" ], solve-group = "py311" }
# 3.13 lane
-default = { features = ["py313"], solve-group = "py313" }
-dev-py313 = { features = ["dev", "test", "py313"], solve-group = "py313" }
-docs-py313 = { features = ["docs", "py313"], solve-group = "py313" }
-test-py313 = { features = ["test", "py313"], solve-group = "py313" }
+default = { features = [ "py313" ], solve-group = "py313" }
+dev-py313 = { features = [ "dev", "test", "py313" ], solve-group = "py313" }
+docs-py313 = { features = [ "docs", "py313" ], solve-group = "py313" }
+test-py313 = { features = [ "test", "py313" ], solve-group = "py313" }
[tool.pixi.tasks]
lab = "jupyter lab"
@@ -190,4 +241,16 @@ test = "pytest -v --color=yes --tb=short --durations=10"
lint = "ruff check ."
format = "ruff format ."
pre-commit-install = "pre-commit install"
-pre-commit-run = "pre-commit run --all-files"
\ No newline at end of file
+pre-commit-run = "pre-commit run --all-files"
+
+[tool.cruft]
+skip = [
+ "tests",
+ "src/**/__init__.py",
+ "src/**/basic.py",
+ "docs/api.md",
+ "docs/changelog.md",
+ "docs/references.bib",
+ "docs/references.md",
+ "docs/notebooks/example.ipynb",
+]
diff --git a/src/spatialdata_plot/pl/basic.py b/src/spatialdata_plot/pl/basic.py
index 029168be..afe5c4db 100644
--- a/src/spatialdata_plot/pl/basic.py
+++ b/src/spatialdata_plot/pl/basic.py
@@ -188,11 +188,15 @@ def render_shapes(
element : str | None, optional
The name of the shapes element to render. If `None`, all shapes elements in the `SpatialData` object will be
used.
- color : ColorLike | None, optional
+ color : ColorLike | str | None, optional
Can either be color-like (name of a color as string, e.g. "red", hex representation, e.g. "#000000" or
"#000000ff", or an RGB(A) array as a tuple or list containing 3-4 floats within [0, 1]. If an alpha value is
- indicated, the value of `fill_alpha` takes precedence if given) or a string representing a key in
- :attr:`sdata.table.obs`. The latter can be used to color by categorical or continuous variables. If
+ indicated, the value of `fill_alpha` takes precedence if given) or a string referencing stored annotations.
+ When the provided key matches a column on the shapes element itself, those values are used directly. When
+ the key references an AnnData table annotating the element, both ``obs`` columns and ``var_names`` entries
+ (optionally pulled from ``layers``) are supported; use `table_name` to disambiguate which table should be
+ consulted. The string form can therefore represent categorical or continuous measurements tied to the shapes
+            element. If
`element` is `None`, if possible the color will be broadcasted to all elements. For this, the table in which
the color key is found must annotate the respective element (region must be set to the specific element). If
the color column is found in multiple locations, please provide the table_name to be used for the elements.
@@ -242,8 +246,8 @@ def render_shapes(
spatial element to be plotted if the table annotates it. If you want to use different tables for particular
elements, as specified under element.
table_layer: str | None
- Layer of the table to use for coloring if `color` is in :attr:`sdata.table.var_names`. If None, the data in
- :attr:`sdata.table.X` is used for coloring.
+ Layer of the table to use for coloring if `color` is present in the ``var_names`` of the table. If None, the
+ data stored in ``X`` is used for coloring.
shape: Literal["circle", "hex", "visium_hex", "square"] | None
If None (default), the shapes are rendered as they are. Else, if either of "circle", "hex" or "square" is
specified, the shapes are converted to a circle/hexagon/square before rendering. If "visium_hex" is
@@ -370,8 +374,9 @@ def render_points(
color : str | None, optional
Can either be color-like (name of a color as string, e.g. "red", hex representation, e.g. "#000000" or
"#000000ff", or an RGB(A) array as a tuple or list containing 3-4 floats within [0, 1]. If an alpha value is
- indicated, the value of `fill_alpha` takes precedence if given) or a string representing a key in
- :attr:`sdata.table.obs`. The latter can be used to color by categorical or continuous variables. If
+ indicated, the value of `fill_alpha` takes precedence if given) or a string representing a key in the ``obs``
+ dataframe of the table providing annotations. The latter can be used to color by categorical or continuous
+ variables. If
`element` is `None`, if possible the color will be broadcasted to all elements. For this, the table in which
the color key is found must annotate the respective element (region must be set to the specific element). If
the color column is found in multiple locations, please provide the table_name to be used for the elements.
@@ -405,8 +410,8 @@ def render_points(
spatial element to be plotted if the table annotates it. If you want to use different tables for particular
elements, as specified under element.
table_layer: str | None
- Layer of the table to use for coloring if `color` is in :attr:`sdata.table.var_names`. If None, the data in
- :attr:`sdata.table.X` is used for coloring.
+ Layer of the table to use for coloring if `color` is present in the ``var_names`` of the table. If None, the
+ data stored in ``X`` is used for coloring.
**kwargs : Any
Additional arguments for customization. This can include:
@@ -631,8 +636,8 @@ def render_labels(
The name of the labels element to render. If `None`, all label
elements in the `SpatialData` object will be used and all parameters will be broadcasted if possible.
color : str | None
- Can either be string representing a color-like or key in :attr:`sdata.table.obs` or in the index of
- :attr:`sdata.table.var`. The latter can be used to color by categorical or continuous variables. If the
+ Can either be string representing a color-like or key in the ``obs`` dataframe or ``var`` index of the
+ associated table. The latter can be used to color by categorical or continuous variables. If the
color column is found in multiple locations, please provide the table_name to be used for the element if you
would like a specific table to be used. By default one table will automatically be choosen.
groups : list[str] | str | None
@@ -668,8 +673,8 @@ def render_labels(
table_name: str | None
Name of the table containing the color columns.
table_layer: str | None
- Layer of the AnnData table to use for coloring if `color` is in :attr:`sdata.table.var_names`. If None,
- :attr:`sdata.table.X` of the default table is used for coloring.
+ Layer of the AnnData table to use for coloring if `color` is present in the ``var_names`` of the default
+ table. If None, the ``X`` matrix of the default table is used for coloring.
kwargs
Additional arguments to be passed to cmap and norm.
diff --git a/src/spatialdata_plot/pl/utils.py b/src/spatialdata_plot/pl/utils.py
index cf21d212..47abb267 100644
--- a/src/spatialdata_plot/pl/utils.py
+++ b/src/spatialdata_plot/pl/utils.py
@@ -138,6 +138,27 @@ def _extract_scalar_value(value: Any, default: float = 0.0) -> float:
return default
+def _ensure_region_is_categorical(table: AnnData) -> None:
+ """Ensure the region column used for annotation remains categorical."""
+ attrs = table.uns.get("spatialdata_attrs", {})
+ region_key = attrs.get("region_key")
+
+ if not region_key or region_key not in table.obs:
+ return
+
+ if not isinstance(table.obs[region_key].dtype, CategoricalDtype):
+ table.obs[region_key] = table.obs[region_key].astype("category")
+
+
+def _ensure_tables_have_categorical_regions(sdata: SpatialData) -> None:
+ """Cast the region columns of all tables to categorical if necessary."""
+ if sdata.tables is None:
+ return
+
+ for table in sdata.tables.values():
+ _ensure_region_is_categorical(table)
+
+
def _verify_plotting_tree(sdata: SpatialData) -> SpatialData:
"""Verify that the plotting tree exists, and if not, create it."""
if not hasattr(sdata, "plotting_tree"):
@@ -2127,6 +2148,8 @@ def _validate_col_for_column_table(
table_name: str | None,
labels: bool = False,
) -> tuple[str | None, str | None]:
+ _ensure_tables_have_categorical_regions(sdata)
+
if not labels and col_for_color in sdata[element_name].columns:
table_name = None
elif table_name is not None:
diff --git a/tests/conftest.py b/tests/conftest.py
index b44d6d8e..ffe4e786 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -405,7 +405,7 @@ def __new__(cls, clsname, superclasses, attributedict):
return super().__new__(cls, clsname, superclasses, attributedict)
-class PlotTester(ABC): # noqa: B024
+class PlotTester(ABC):
@classmethod
def compare(cls, basename: str, tolerance: float | None = None):
ACTUAL.mkdir(parents=True, exist_ok=True)
diff --git a/tests/pl/test_get_extent.py b/tests/pl/test_get_extent.py
index 9d9f1195..14f65b3e 100644
--- a/tests/pl/test_get_extent.py
+++ b/tests/pl/test_get_extent.py
@@ -10,7 +10,7 @@
from spatialdata.models import PointsModel, ShapesModel
from spatialdata.transformations import Affine, set_transformation
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta
sc.pl.set_rcParams_defaults()
diff --git a/tests/pl/test_render_images.py b/tests/pl/test_render_images.py
index 5cba4e88..8dab039c 100644
--- a/tests/pl/test_render_images.py
+++ b/tests/pl/test_render_images.py
@@ -5,7 +5,7 @@
from spatial_image import to_spatial_image
from spatialdata import SpatialData
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta, _viridis_with_under_over
sc.pl.set_rcParams_defaults()
diff --git a/tests/pl/test_render_labels.py b/tests/pl/test_render_labels.py
index 8d96ec96..ffa6524f 100644
--- a/tests/pl/test_render_labels.py
+++ b/tests/pl/test_render_labels.py
@@ -10,7 +10,7 @@
from spatialdata import SpatialData, deepcopy, get_element_instances
from spatialdata.models import Labels2DModel, TableModel
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta, _viridis_with_under_over, get_standard_RNG
sc.pl.set_rcParams_defaults()
diff --git a/tests/pl/test_render_points.py b/tests/pl/test_render_points.py
index 71a6c4e3..abd3fcb1 100644
--- a/tests/pl/test_render_points.py
+++ b/tests/pl/test_render_points.py
@@ -21,7 +21,7 @@
)
from spatialdata.transformations._utils import _set_transformations
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta, _viridis_with_under_over, get_standard_RNG
sc.pl.set_rcParams_defaults()
@@ -473,7 +473,7 @@ def test_warns_when_table_does_not_annotate_element(sdata_blobs: SpatialData):
# Create a table that annotates a DIFFERENT element than the one we will render
other_table = sdata_blobs_local["table"].copy()
- other_table.obs["region"] = pd.Categorical(["blobs_labels"] * other_table.n_obs) # Different from blobs_points
+ other_table.obs["region"] = pd.Categorical(["blobs_labels"] * other_table.n_obs)
other_table.uns["spatialdata_attrs"]["region"] = "blobs_labels"
sdata_blobs_local["other_table"] = other_table
diff --git a/tests/pl/test_render_shapes.py b/tests/pl/test_render_shapes.py
index 8fbc5d4c..989e7160 100644
--- a/tests/pl/test_render_shapes.py
+++ b/tests/pl/test_render_shapes.py
@@ -16,7 +16,7 @@
from spatialdata.transformations import Affine, Identity, MapAxis, Scale, Sequence, Translation
from spatialdata.transformations._utils import _set_transformations
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta, _viridis_with_under_over, get_standard_RNG
sc.pl.set_rcParams_defaults()
@@ -670,7 +670,7 @@ def test_warns_when_table_does_not_annotate_element(sdata_blobs: SpatialData):
# Create a table that annotates a DIFFERENT element than the one we will render
other_table = sdata_blobs_local["table"].copy()
- other_table.obs["region"] = pd.Categorical(["blobs_points"] * other_table.n_obs) # Different region
+ other_table.obs["region"] = pd.Categorical(["blobs_points"] * other_table.n_obs)
other_table.uns["spatialdata_attrs"]["region"] = "blobs_points"
sdata_blobs_local["other_table"] = other_table
diff --git a/tests/pl/test_show.py b/tests/pl/test_show.py
index 2b7e444d..311dfc0e 100644
--- a/tests/pl/test_show.py
+++ b/tests/pl/test_show.py
@@ -2,7 +2,7 @@
import scanpy as sc
from spatialdata import SpatialData
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta
sc.pl.set_rcParams_defaults()
diff --git a/tests/pl/test_upstream_plots.py b/tests/pl/test_upstream_plots.py
index 65267bd7..cec48248 100644
--- a/tests/pl/test_upstream_plots.py
+++ b/tests/pl/test_upstream_plots.py
@@ -13,7 +13,7 @@
set_transformation,
)
-import spatialdata_plot # noqa: F401
+import spatialdata_plot
from tests.conftest import DPI, PlotTester, PlotTesterMeta
# sc.pl.set_rcParams_defaults()