From 649078e60daee11f3d94426cad5948db24fd04fb Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:12:45 +0100 Subject: [PATCH 1/8] DEV: Add isort pre-commit hook --- .pre-commit-config.yaml | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7000e45b..5527cec3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,14 +1,14 @@ repos: - - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.9.0 + - repo: https://github.com/timothycrosley/isort + rev: 5.9.1 hooks: - - id: python-check-blanket-noqa - - id: python-no-log-warn - - id: rst-backticks - - id: rst-directive-colons - types: [text] - - id: rst-inline-touching-normal - types: [text] + - id: isort + name: isort + args: ["--profile", "black"] + - id: isort + name: isort (cython) + types: [cython] + args: ["--profile", "black"] - repo: https://github.com/kynan/nbstripout rev: 0.4.0 @@ -20,6 +20,17 @@ repos: hooks: - id: black_nbconvert + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.9.0 + hooks: + - id: python-check-blanket-noqa + - id: python-no-log-warn + - id: rst-backticks + - id: rst-directive-colons + types: [text] + - id: rst-inline-touching-normal + types: [text] + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 hooks: From d0c275135c194b71552e67d05024ec25ec06a64a Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:12:45 +0100 Subject: [PATCH 2/8] STY: Fix import orders with isort --- docs/conf.py | 1 + examples/basic_usage.py | 1 - examples/basic_usage_func.py | 1 - examples/basic_usage_func_windows.py | 1 - examples/basic_usage_windows.py | 1 - examples/datahandler_custom.py | 4 ++-- fissa/ROI.py | 3 +-- fissa/__init__.py | 1 - fissa/core.py | 7 ++++--- fissa/readimagejrois.py | 7 +++---- fissa/tests/base_test.py | 11 ++++++----- fissa/tests/generate_downsampled_resources.py | 14 ++++++-------- fissa/tests/generate_tiffs.py | 1 - fissa/tests/test_ROI.py | 3 ++- fissa/tests/test_core.py | 5 ++--- fissa/tests/test_deltaf.py | 3 ++- fissa/tests/test_extraction.py | 6 ++---- fissa/tests/test_neuropil.py | 2 +- fissa/tests/test_readimagejrois.py | 2 +- fissa/tests/test_roitools.py | 3 ++- setup.py | 2 +- 21 files changed, 36 insertions(+), 43 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ac582ab5..0f2ae359 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -15,6 +15,7 @@ import datetime import os import sys + sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../')) diff --git a/examples/basic_usage.py b/examples/basic_usage.py index 60190d85..b6b5f490 100755 --- a/examples/basic_usage.py +++ b/examples/basic_usage.py @@ -12,7 +12,6 @@ import fissa - # Define the data to extract rois = 'exampleData/20150429.zip' images = 'exampleData/20150529' diff --git a/examples/basic_usage_func.py b/examples/basic_usage_func.py index 8c9e3f88..522c4778 100755 --- a/examples/basic_usage_func.py +++ b/examples/basic_usage_func.py @@ -12,7 +12,6 @@ import fissa - # Define the data to extract rois = "exampleData/20150429.zip" images = "exampleData/20150529" diff --git a/examples/basic_usage_func_windows.py b/examples/basic_usage_func_windows.py index 66b6c171..8cc78860 100755 --- a/examples/basic_usage_func_windows.py +++ b/examples/basic_usage_func_windows.py @@ -12,7 +12,6 @@ import fissa - # On Windows, it is necessary to wrap the script within a block that checks # for __name__ == "__main__", so that multiprocessing works 
correctly. if __name__ == "__main__": diff --git a/examples/basic_usage_windows.py b/examples/basic_usage_windows.py index 90ddefd4..2c749ea7 100755 --- a/examples/basic_usage_windows.py +++ b/examples/basic_usage_windows.py @@ -12,7 +12,6 @@ import fissa - # On Windows, it is necessary to wrap the script in a __name__ check, so # that multiprocessing works correctly. Multiprocessing is triggered by the # experiment.separate() step. diff --git a/examples/datahandler_custom.py b/examples/datahandler_custom.py index a66369f5..07d41e1c 100755 --- a/examples/datahandler_custom.py +++ b/examples/datahandler_custom.py @@ -10,10 +10,10 @@ """ -from past.builtins import basestring - import numpy as np import tifffile +from past.builtins import basestring + from fissa import roitools diff --git a/fissa/ROI.py b/fissa/ROI.py index e4cb674c..cdc0fcd6 100644 --- a/fissa/ROI.py +++ b/fissa/ROI.py @@ -22,11 +22,10 @@ """ from itertools import product -# from warnings import warn import numpy as np from scipy.sparse import lil_matrix -from shapely.geometry import MultiPolygon, Polygon, Point +from shapely.geometry import MultiPolygon, Point, Polygon def poly2mask(polygons, im_size): diff --git a/fissa/__init__.py b/fissa/__init__.py index 9a0c5dc6..bbcc1f6d 100644 --- a/fissa/__init__.py +++ b/fissa/__init__.py @@ -1,5 +1,4 @@ from . import __meta__ - from .core import Experiment, run_fissa __version__ = __meta__.version diff --git a/fissa/core.py b/fissa/core.py index 21939e1f..aafcb502 100644 --- a/fissa/core.py +++ b/fissa/core.py @@ -8,7 +8,6 @@ """ from __future__ import print_function -from past.builtins import basestring import collections import functools @@ -18,6 +17,9 @@ import os.path import sys import warnings + +from past.builtins import basestring + try: from collections import abc except ImportError: @@ -26,8 +28,7 @@ import numpy as np from scipy.io import savemat -from . import deltaf -from . import extraction +from . import deltaf, extraction from . import neuropil as npil from . import roitools diff --git a/fissa/readimagejrois.py b/fissa/readimagejrois.py index 8ccac229..06e3ce07 100644 --- a/fissa/readimagejrois.py +++ b/fissa/readimagejrois.py @@ -10,16 +10,15 @@ - 2015 by Scott Lowe (@scottclowe) and Sander Keemink (@swkeemink). 
''' -from __future__ import division -from __future__ import unicode_literals -from past.builtins import basestring +from __future__ import division, unicode_literals import sys +import zipfile from itertools import product import numpy as np +from past.builtins import basestring from skimage.draw import ellipse -import zipfile if sys.version_info >= (3, 0): import read_roi diff --git a/fissa/tests/base_test.py b/fissa/tests/base_test.py index 47c10338..36ab0ef6 100644 --- a/fissa/tests/base_test.py +++ b/fissa/tests/base_test.py @@ -15,12 +15,13 @@ from inspect import getsourcefile import numpy as np -from numpy.testing import (assert_almost_equal, - assert_array_equal, - assert_allclose, - assert_equal) import pytest - +from numpy.testing import ( + assert_allclose, + assert_almost_equal, + assert_array_equal, + assert_equal, +) # Check where the test directory is located, to be used when fetching # test resource files diff --git a/fissa/tests/generate_downsampled_resources.py b/fissa/tests/generate_downsampled_resources.py index 09c68283..a37cddbc 100755 --- a/fissa/tests/generate_downsampled_resources.py +++ b/fissa/tests/generate_downsampled_resources.py @@ -1,22 +1,20 @@ #!/usr/bin/env python -from __future__ import division -from __future__ import unicode_literals +from __future__ import division, unicode_literals import glob import os -from itertools import product -import sys import shutil +import sys +import zipfile +from itertools import product import numpy as np import scipy.ndimage -from skimage.draw import ellipse import tifffile -import zipfile +from skimage.draw import ellipse -from fissa import readimagejrois -from fissa import extraction +from fissa import extraction, readimagejrois def maybe_make_dir(dirname): diff --git a/fissa/tests/generate_tiffs.py b/fissa/tests/generate_tiffs.py index 94ac60bb..9d7cf7fc 100755 --- a/fissa/tests/generate_tiffs.py +++ b/fissa/tests/generate_tiffs.py @@ -6,7 +6,6 @@ import numpy as np import tifffile - TEST_DIRECTORY = os.path.dirname(os.path.abspath(getsourcefile(lambda: 0))) diff --git a/fissa/tests/test_ROI.py b/fissa/tests/test_ROI.py index 001566ad..42292d4b 100644 --- a/fissa/tests/test_ROI.py +++ b/fissa/tests/test_ROI.py @@ -26,10 +26,11 @@ # Tests follow conventions for NumPy/SciPy available at # https://github.com/numpy/numpy/blob/master/doc/TESTS.rst.txt +import numpy as np + # use assert_() and related functions over the built in assert to ensure tests # run properly, regardless of how python is started. from numpy.testing import assert_equal -import numpy as np from .. import ROI diff --git a/fissa/tests/test_core.py b/fissa/tests/test_core.py index a1160114..9a3fafe8 100644 --- a/fissa/tests/test_core.py +++ b/fissa/tests/test_core.py @@ -2,7 +2,7 @@ from __future__ import division -import os, os.path +import os import shutil import sys import types @@ -11,9 +11,8 @@ import numpy as np from scipy.io import loadmat +from .. import core, extraction from .base_test import BaseTestCase -from .. import core -from .. import extraction class ExperimentTestMixin: diff --git a/fissa/tests/test_deltaf.py b/fissa/tests/test_deltaf.py index 399ac63e..0ea14970 100644 --- a/fissa/tests/test_deltaf.py +++ b/fissa/tests/test_deltaf.py @@ -3,10 +3,11 @@ from __future__ import division import unittest + import numpy as np -from .base_test import BaseTestCase from .. 
import deltaf +from .base_test import BaseTestCase class TestFindBaseline(BaseTestCase): diff --git a/fissa/tests/test_extraction.py b/fissa/tests/test_extraction.py index b4bc5c4b..8f7ddb93 100755 --- a/fissa/tests/test_extraction.py +++ b/fissa/tests/test_extraction.py @@ -11,15 +11,13 @@ import tempfile import numpy as np +import pytest import tifffile from PIL import Image -import pytest +from .. import extraction, roitools from . import base_test from .base_test import BaseTestCase -from .. import extraction -from .. import roitools - RESOURCES_DIR = os.path.join(base_test.TEST_DIRECTORY, 'resources', 'tiffs') diff --git a/fissa/tests/test_neuropil.py b/fissa/tests/test_neuropil.py index 46eb29dd..367d962c 100644 --- a/fissa/tests/test_neuropil.py +++ b/fissa/tests/test_neuropil.py @@ -8,8 +8,8 @@ import numpy as np -from .base_test import BaseTestCase from .. import neuropil as npil +from .base_test import BaseTestCase class NeuropilMixin: diff --git a/fissa/tests/test_readimagejrois.py b/fissa/tests/test_readimagejrois.py index 87710f9b..d331cb86 100644 --- a/fissa/tests/test_readimagejrois.py +++ b/fissa/tests/test_readimagejrois.py @@ -9,8 +9,8 @@ import numpy as np -from .base_test import BaseTestCase from .. import readimagejrois +from .base_test import BaseTestCase class TestReadImageJRois(BaseTestCase): diff --git a/fissa/tests/test_roitools.py b/fissa/tests/test_roitools.py index 17adb72e..accd32c5 100644 --- a/fissa/tests/test_roitools.py +++ b/fissa/tests/test_roitools.py @@ -3,10 +3,11 @@ from __future__ import division import unittest + import numpy as np -from .base_test import BaseTestCase from .. import roitools +from .base_test import BaseTestCase class TestGetMaskCom(BaseTestCase): diff --git a/setup.py b/setup.py index 223eff16..d2f598cb 100644 --- a/setup.py +++ b/setup.py @@ -2,8 +2,8 @@ import os - from distutils.core import setup + from setuptools.command.test import test as TestCommand From 4564ec773945aa3629a4fc917f11bdf25d7fe174 Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:13:13 +0100 Subject: [PATCH 3/8] DEV: Add black to dev requirements and pre-commit hook --- .pre-commit-config.yaml | 19 +++++++++++++++++++ requirements-dev.txt | 1 + 2 files changed, 20 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5527cec3..76464ab4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,25 @@ repos: types: [cython] args: ["--profile", "black"] + - repo: https://github.com/psf/black + rev: 21.6b0 + hooks: + - id: black + args: + - "--target-version=py27" + - "--target-version=py35" + - "--target-version=py36" + - "--target-version=py37" + - "--target-version=py38" + - "--target-version=py39" + types: [python] + + - repo: https://github.com/asottile/blacken-docs + rev: v1.8.0 + hooks: + - id: blacken-docs + additional_dependencies: [black==21.6b0] + - repo: https://github.com/kynan/nbstripout rev: 0.4.0 hooks: diff --git a/requirements-dev.txt b/requirements-dev.txt index 416634f5..de02930e 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1 +1,2 @@ +black==21.6b0; python_version>='3.6' pre-commit From 8b3e4ebdd623007fdf2e77416f0b3054dc978c85 Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:13:13 +0100 Subject: [PATCH 4/8] STY: Automatically convert code style to black --- docs/conf.py | 87 ++++----- examples/basic_usage.py | 6 +- examples/basic_usage_windows.py | 8 +- examples/datahandler_custom.py | 3 +- fissa/ROI.py | 21 ++- fissa/__meta__.py | 4 +- 
fissa/core.py | 71 ++++---- fissa/deltaf.py | 8 +- fissa/extraction.py | 33 ++-- fissa/readimagejrois.py | 165 ++++++++++-------- fissa/roitools.py | 44 ++--- fissa/tests/base_test.py | 29 +-- fissa/tests/generate_downsampled_resources.py | 136 +++++++++------ fissa/tests/test_core.py | 42 ++--- fissa/tests/test_extraction.py | 73 ++++---- fissa/tests/test_neuropil.py | 9 +- fissa/tests/test_readimagejrois.py | 44 ++--- fissa/tests/test_roitools.py | 135 +++++++------- setup.py | 61 ++++--- 19 files changed, 520 insertions(+), 459 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0f2ae359..95b8abcb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,8 +16,8 @@ import os import sys -sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../')) +sys.path.insert(0, os.path.abspath(".")) +sys.path.insert(0, os.path.abspath("../")) # Can't import __meta__.py if the requirements aren't installed @@ -25,6 +25,7 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() + meta = {} exec(read("../fissa/__meta__.py"), meta) @@ -36,20 +37,21 @@ def read(fname): project = meta["name"].upper() project_path = meta["path"] author = meta["author"] -copyright = '{}, {}'.format(now.year, author) +copyright = "{}, {}".format(now.year, author) # The full version, including alpha/beta/rc tags release = meta["version"] # The short X.Y version -version = '.'.join(release.split('.')[0:2]) +version = ".".join(release.split(".")[0:2]) # -- Automatically generate API documentation -------------------------------- + def run_apidoc(_): ignore_paths = [ - os.path.join('..', project_path, 'tests'), + os.path.join("..", project_path, "tests"), ] argv = [ @@ -57,33 +59,36 @@ def run_apidoc(_): "--follow-links", # Follow symbolic links "--separate", # Put each module file in its own page "--module-first", # Put module documentation before submodule - "-o", "source/packages", # Output path + "-o", + "source/packages", # Output path os.path.join("..", project_path), ] + ignore_paths try: # Sphinx 1.7+ from sphinx.ext import apidoc + apidoc.main(argv) except ImportError: # Sphinx 1.6 (and earlier) from sphinx import apidoc + argv.insert(0, apidoc.__file__) apidoc.main(argv) def retitle_modules(_): - pth = 'source/packages/modules.rst' + pth = "source/packages/modules.rst" lines = open(pth).read().splitlines() # Overwrite the junk in the first two lines with a better title - lines[0] = 'API Reference' - lines[1] = '=============' - open(pth, 'w').write('\n'.join(lines)) + lines[0] = "API Reference" + lines[1] = "=============" + open(pth, "w").write("\n".join(lines)) def setup(app): - app.connect('builder-inited', run_apidoc) - app.connect('builder-inited', retitle_modules) + app.connect("builder-inited", run_apidoc) + app.connect("builder-inited", retitle_modules) # -- General configuration --------------------------------------------------- @@ -96,13 +101,13 @@ def setup(app): # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.mathjax", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", "numpydoc", # handle NumPy documentation formatted docstrings ] @@ -130,19 +135,19 @@ def setup(app): } # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. source_encoding = "utf-8" # The master toctree document. -master_doc = 'index' +master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -154,7 +159,7 @@ def setup(app): # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None @@ -187,7 +192,7 @@ def setup(app): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -203,7 +208,7 @@ def setup(app): # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = project + 'doc' +htmlhelp_basename = project + "doc" # -- Options for LaTeX output ------------------------------------------------ @@ -212,19 +217,15 @@ def setup(app): # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', - # Need to manually declare what the delta symbol (Δ) corresponds to. "preamble": """ \DeclareUnicodeCharacter{394}{$\Delta$} @@ -235,8 +236,13 @@ def setup(app): # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, project + '.tex', project + ' Documentation', - meta["author"], 'manual'), + ( + master_doc, + project + ".tex", + project + " Documentation", + meta["author"], + "manual", + ), ] @@ -244,10 +250,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [ - (master_doc, project, project + ' Documentation', - [author], 1) -] +man_pages = [(master_doc, project, project + " Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -256,9 +259,15 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, project, project + ' Documentation', - author, project, meta["description"], - 'Miscellaneous'), + ( + master_doc, + project, + project + " Documentation", + author, + project, + meta["description"], + "Miscellaneous", + ), ] @@ -277,7 +286,7 @@ def setup(app): # epub_uid = '' # A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] +epub_exclude_files = ["search.html"] # -- Extension configuration ------------------------------------------------- diff --git a/examples/basic_usage.py b/examples/basic_usage.py index b6b5f490..e6ec10b3 100755 --- a/examples/basic_usage.py +++ b/examples/basic_usage.py @@ -13,11 +13,11 @@ import fissa # Define the data to extract -rois = 'exampleData/20150429.zip' -images = 'exampleData/20150529' +rois = "exampleData/20150429.zip" +images = "exampleData/20150529" # Define the name of the experiment extraction location -output_dir = 'fissa_example' +output_dir = "fissa_example" # Make sure you use a different output path for each experiment you run. # Instantiate a fissa experiment object diff --git a/examples/basic_usage_windows.py b/examples/basic_usage_windows.py index 2c749ea7..079a0f0a 100755 --- a/examples/basic_usage_windows.py +++ b/examples/basic_usage_windows.py @@ -15,13 +15,13 @@ # On Windows, it is necessary to wrap the script in a __name__ check, so # that multiprocessing works correctly. Multiprocessing is triggered by the # experiment.separate() step. -if __name__ == '__main__': +if __name__ == "__main__": # Define the data to extract - rois = 'exampleData/20150429.zip' - images = 'exampleData/20150529' + rois = "exampleData/20150429.zip" + images = "exampleData/20150529" # Define the name of the experiment extraction location - output_dir = 'fissa_example' + output_dir = "fissa_example" # Make sure you use a different output path for each experiment you run. # Instantiate a fissa experiment object diff --git a/examples/datahandler_custom.py b/examples/datahandler_custom.py index 07d41e1c..77df2310 100755 --- a/examples/datahandler_custom.py +++ b/examples/datahandler_custom.py @@ -88,8 +88,7 @@ def rois2masks(rois, data): return rois else: - raise ValueError('Wrong rois input format') - + raise ValueError("Wrong rois input format") def extracttraces(data, masks): diff --git a/fissa/ROI.py b/fissa/ROI.py index cdc0fcd6..0591e676 100644 --- a/fissa/ROI.py +++ b/fissa/ROI.py @@ -68,18 +68,22 @@ def poly2mask(polygons, im_size): # assuming all points in the polygon share a z-coordinate z = int(np.array(poly.exterior.coords)[0][2]) # @swkeemink: Commented out to remove a warning message for FISSA. 
-# if z > im_size[0]: -# warn('Polygon with zero-coordinate {} '.format(z) + -# 'cropped using im_size = {}'.format(im_size)) -# continue + # if z > im_size[0]: + # warn('Polygon with zero-coordinate {} '.format(z) + + # 'cropped using im_size = {}'.format(im_size)) + # continue x_min, y_min, x_max, y_max = poly.bounds # Shift all points by 0.5 to move coordinates to corner of pixel shifted_poly = Polygon(np.array(poly.exterior.coords)[:, :2] - 0.5) - points = [Point(x, y) for x, y in - product(np.arange(int(x_min), np.ceil(x_max)), - np.arange(int(y_min), np.ceil(y_max)))] + points = [ + Point(x, y) + for x, y in product( + np.arange(int(x_min), np.ceil(x_max)), + np.arange(int(y_min), np.ceil(y_max)), + ) + ] points_in_poly = list(filter(shifted_poly.contains, points)) for point in points_in_poly: xx, yy = point.xy @@ -141,6 +145,5 @@ def _reformat_polygons(polygons): else: # warn('Polygon initialized without z-coordinate. ' + # 'Assigning to zeroth plane (z = 0)') - z_polygons.append( - Polygon([point + (0,) for point in poly.exterior.coords])) + z_polygons.append(Polygon([point + (0,) for point in poly.exterior.coords])) return MultiPolygon(z_polygons) diff --git a/fissa/__meta__.py b/fissa/__meta__.py index ab21badb..fbb12088 100644 --- a/fissa/__meta__.py +++ b/fissa/__meta__.py @@ -1,6 +1,6 @@ -name = 'fissa' +name = "fissa" path = name -version = '1.0.dev0' +version = "1.0.dev0" author = "Sander Keemink and Scott Lowe" author_email = "swkeemink@scimail.eu" description = "A Python Library estimating somatic signals in 2-photon data" diff --git a/fissa/core.py b/fissa/core.py index aafcb502..101ecac1 100644 --- a/fissa/core.py +++ b/fissa/core.py @@ -199,7 +199,7 @@ def _separate_wrapper(args): return separate_trials(*args) -class Experiment(): +class Experiment: r""" FISSA Experiment. @@ -417,32 +417,43 @@ class Experiment(): This field is only populated after :meth:`calc_deltaf` has been run; until then, it is set to ``None``. 
""" - def __init__(self, images, rois, folder=None, nRegions=4, - expansion=1, alpha=0.1, ncores_preparation=None, - ncores_separation=None, method='nmf', - lowmemory_mode=False, datahandler=None): + + def __init__( + self, + images, + rois, + folder=None, + nRegions=4, + expansion=1, + alpha=0.1, + ncores_preparation=None, + ncores_separation=None, + method="nmf", + lowmemory_mode=False, + datahandler=None, + ): # Initialise internal variables self.clear() if isinstance(images, basestring): - self.images = sorted(glob.glob(os.path.join(images, '*.tif*'))) + self.images = sorted(glob.glob(os.path.join(images, "*.tif*"))) elif isinstance(images, abc.Sequence): self.images = images else: - raise ValueError('images should either be string or list') + raise ValueError("images should either be string or list") if isinstance(rois, basestring): - if rois[-3:] == 'zip': + if rois[-3:] == "zip": self.rois = [rois] * len(self.images) else: - self.rois = sorted(glob.glob(os.path.join(rois, '*.zip'))) + self.rois = sorted(glob.glob(os.path.join(rois, "*.zip"))) elif isinstance(rois, abc.Sequence): self.rois = rois if len(rois) == 1: # if only one roiset is specified self.rois *= len(self.images) else: - raise ValueError('rois should either be string or list') + raise ValueError("rois should either be string or list") if datahandler is not None and lowmemory_mode: raise ValueError( @@ -650,7 +661,7 @@ def separation_prep(self, redo=False): # Wipe outputs self.clear() # Extract signals - print('Doing region growing and data extraction....') + print("Doing region growing and data extraction....") # Make a handle to the extraction function with parameters configured _extract_cfg = functools.partial( @@ -662,7 +673,7 @@ def separation_prep(self, redo=False): # Check whether we should use multiprocessing use_multiprocessing = ( - (self.ncores_preparation is None or self.ncores_preparation > 1) + self.ncores_preparation is None or self.ncores_preparation > 1 ) # Do the extraction if use_multiprocessing and sys.version_info < (3, 0): @@ -730,7 +741,7 @@ def save_prep(self, destination=None): "The folder attribute must be declared in order to save" " preparation outputs the cache." 
) - destination = os.path.join(self.folder, 'preparation.npz') + destination = os.path.join(self.folder, "preparation.npz") destdir = os.path.dirname(destination) if destdir and not os.path.isdir(destdir): os.makedirs(destdir) @@ -806,7 +817,7 @@ def separate(self, redo_prep=False, redo_sep=False): # Wipe outputs self.clear_separated() # Separate data - print('Doing signal separation....') + print("Doing signal separation....") # Check size of the input arrays n_roi = len(self.raw) @@ -821,7 +832,7 @@ def separate(self, redo_prep=False, redo_sep=False): # Check whether we should use multiprocessing use_multiprocessing = ( - (self.ncores_separation is None or self.ncores_separation > 1) + self.ncores_separation is None or self.ncores_separation > 1 ) # Do the extraction if use_multiprocessing and sys.version_info < (3, 0): @@ -937,9 +948,7 @@ def calc_deltaf(self, freq, use_raw_f0=True, across_trials=True): # calculate deltaf/f0 raw_f0 = deltaf.findBaselineF0(raw_conc, freq) raw_conc = (raw_conc - raw_f0) / raw_f0 - result_f0 = deltaf.findBaselineF0( - result_conc, freq, 1 - ).T[:, None] + result_f0 = deltaf.findBaselineF0(result_conc, freq, 1).T[:, None] if use_raw_f0: result_conc = (result_conc - result_f0) / raw_f0 else: @@ -963,9 +972,7 @@ def calc_deltaf(self, freq, use_raw_f0=True, across_trials=True): # calculate deltaf/fo raw_f0 = deltaf.findBaselineF0(raw_sig, freq) - result_f0 = deltaf.findBaselineF0( - result_sig, freq, 1 - ).T[:, None] + result_f0 = deltaf.findBaselineF0(result_sig, freq, 1).T[:, None] result_f0[result_f0 < 0] = 0 raw_sig = (raw_sig - raw_f0) / raw_f0 if use_raw_f0: @@ -1021,9 +1028,9 @@ def save_to_matlab(self, fname=None): if fname is None: if self.folder is None: raise ValueError( - 'fname must be provided if experiment folder is undefined' + "fname must be provided if experiment folder is undefined" ) - fname = os.path.join(self.folder, 'matlab.mat') + fname = os.path.join(self.folder, "matlab.mat") # initialize dictionary to save M = collections.OrderedDict() @@ -1033,23 +1040,23 @@ def reformat_dict_for_matlab(orig_dict): # loop over cells and trial for cell in range(self.nCell): # get current cell label - c_lab = 'cell' + str(cell) + c_lab = "cell" + str(cell) # update dictionary new_dict[c_lab] = collections.OrderedDict() for trial in range(self.nTrials): # get current trial label - t_lab = 'trial' + str(trial) + t_lab = "trial" + str(trial) # update dictionary new_dict[c_lab][t_lab] = orig_dict[cell][trial] return new_dict - M['ROIs'] = reformat_dict_for_matlab(self.roi_polys) - M['raw'] = reformat_dict_for_matlab(self.raw) - M['result'] = reformat_dict_for_matlab(self.result) - if getattr(self, 'deltaf_raw', None) is not None: - M['df_raw'] = reformat_dict_for_matlab(self.deltaf_raw) - if getattr(self, 'deltaf_result', None) is not None: - M['df_result'] = reformat_dict_for_matlab(self.deltaf_result) + M["ROIs"] = reformat_dict_for_matlab(self.roi_polys) + M["raw"] = reformat_dict_for_matlab(self.raw) + M["result"] = reformat_dict_for_matlab(self.result) + if getattr(self, "deltaf_raw", None) is not None: + M["df_raw"] = reformat_dict_for_matlab(self.deltaf_raw) + if getattr(self, "deltaf_result", None) is not None: + M["df_result"] = reformat_dict_for_matlab(self.deltaf_result) savemat(fname, M) diff --git a/fissa/deltaf.py b/fissa/deltaf.py index dc3afa67..949e5f6f 100644 --- a/fissa/deltaf.py +++ b/fissa/deltaf.py @@ -73,7 +73,7 @@ def findBaselineF0(rawF, fs, axis=0, keepdims=False): # Make a set of weights to use with our taps. 
# We use an FIR filter with a Hamming window. - b = scipy.signal.firwin(nfilt, cutoff=cutoff, window='hamming') + b = scipy.signal.firwin(nfilt, cutoff=cutoff, window="hamming") # The default padlen for filtfilt is 3 * nfilt, but in case our # dataset is small, we need to make sure padlen is not too big @@ -81,12 +81,10 @@ def findBaselineF0(rawF, fs, axis=0, keepdims=False): # Use filtfilt to filter with the FIR filter, both forwards and # backwards. - filtered_f = scipy.signal.filtfilt(b, [1.0], rawF, axis=axis, - padlen=padlen) + filtered_f = scipy.signal.filtfilt(b, [1.0], rawF, axis=axis, padlen=padlen) # Take a percentile of the filtered signal - baselineF0 = np.percentile(filtered_f, base_pctle, axis=axis, - keepdims=keepdims) + baselineF0 = np.percentile(filtered_f, base_pctle, axis=axis, keepdims=keepdims) # Ensure filtering doesn't take us below the minimum value which actually # occurs in the data. This can occur when the amount of data is very low. diff --git a/fissa/extraction.py b/fissa/extraction.py index c8db167d..2f6a0608 100755 --- a/fissa/extraction.py +++ b/fissa/extraction.py @@ -21,7 +21,7 @@ from . import roitools -class DataHandlerAbstract(): +class DataHandlerAbstract: """ Abstract class for a data handler. @@ -37,6 +37,7 @@ class DataHandlerAbstract(): -------- DataHandlerTifffile, DataHandlerPillow """ + def __repr__(self): return "{}.{}()".format(__name__, self.__class__.__name__) @@ -145,6 +146,7 @@ class DataHandlerTifffile(DataHandlerAbstract): """ Extract data from TIFF images using tifffile. """ + @staticmethod def image2array(image): """ @@ -177,18 +179,16 @@ def image2array(image): ) ) if ( - n_pages > 1 and - page.ndim > 2 and - (np.array(page.shape[:-2]) > 1).sum() > 0 + n_pages > 1 + and page.ndim > 2 + and (np.array(page.shape[:-2]) > 1).sum() > 0 ): warnings.warn( "Multipage TIFF {} with {} pages has at least one page" " with {} dimensions (page shaped {})." " All dimensions before the final two (height and" " width) will be treated as time-like and flattened." - "".format( - image, n_pages, page.ndim, page.shape - ) + "".format(image, n_pages, page.ndim, page.shape) ) elif page.ndim > 3 and (np.array(page.shape[:-2]) > 1).sum() > 1: warnings.warn( @@ -196,9 +196,7 @@ def image2array(image): " (page shaped {})." " All dimensions before the final two (height and" " width) will be treated as time-like and flattened." - "".format( - image, page.ndim, page.shape - ) + "".format(image, page.ndim, page.shape) ) shp = [-1] + list(page.shape[-2:]) frames.append(page.reshape(shp)) @@ -317,18 +315,16 @@ def getmean(data): for page in data.pages: page = page.asarray() if ( - n_pages > 1 and - page.ndim > 2 and - (np.array(page.shape[:-2]) > 1).sum() > 0 + n_pages > 1 + and page.ndim > 2 + and (np.array(page.shape[:-2]) > 1).sum() > 0 ): warnings.warn( "Multipage TIFF {} with {} pages has at least one page" " with {} dimensions (page shaped {})." " All dimensions before the final two (height and" " width) will be treated as time-like and flattened." - "".format( - "", n_pages, page.ndim, page.shape - ) + "".format("", n_pages, page.ndim, page.shape) ) elif page.ndim > 3 and (np.array(page.shape[:-2]) > 1).sum() > 1: warnings.warn( @@ -336,9 +332,7 @@ def getmean(data): " (page shaped {})." " All dimensions before the final two (height and" " width) will be treated as time-like and flattened." 
- "".format( - "", page.ndim, page.shape - ) + "".format("", page.ndim, page.shape) ) shp = [-1] + list(page.shape[-2:]) page = page.reshape(shp) @@ -418,6 +412,7 @@ class DataHandlerPillow(DataHandlerAbstract): Slower, but less memory-intensive than :class:`DataHandlerTifffile`. """ + @staticmethod def image2array(image): """ diff --git a/fissa/readimagejrois.py b/fissa/readimagejrois.py index 06e3ce07..e0bc58f4 100644 --- a/fissa/readimagejrois.py +++ b/fissa/readimagejrois.py @@ -1,4 +1,4 @@ -''' +""" Tools for reading ImageJ files. Based on code originally written by Luis Pedro Coelho , @@ -8,7 +8,7 @@ Modified - 2014 by Jeffrey Zaremba (@jzaremba), https://github.com/losonczylab/sima - 2015 by Scott Lowe (@scottclowe) and Sander Keemink (@swkeemink). -''' +""" from __future__ import division, unicode_literals @@ -83,7 +83,7 @@ def _get8(): pos[0] += 1 s = roi_obj.read(1) if not s: - raise IOError('read_imagej_roi: Unexpected EOF') + raise IOError("read_imagej_roi: Unexpected EOF") return ord(s) def _get16(): @@ -130,8 +130,8 @@ def _getcoords(z=0): return points magic = roi_obj.read(4) - if magic != b'Iout': - raise IOError('read_imagej_roi: Magic number not found') + if magic != b"Iout": + raise IOError("read_imagej_roi: Magic number not found") _get16() # version @@ -140,8 +140,12 @@ def _getcoords(z=0): _get8() if not (0 <= roi_type < 11): - raise ValueError('read_imagej_roi: \ - ROI type {} not supported'.format(roi_type)) + raise ValueError( + "read_imagej_roi: \ + ROI type {} not supported".format( + roi_type + ) + ) top = _get16signed() left = _get16signed() @@ -163,12 +167,17 @@ def _getcoords(z=0): subtype = _get16() if subtype == 5: raise ValueError( - 'read_imagej_roi: ROI subtype {} (rotated rectangle) not supported' - .format(subtype) + "read_imagej_roi: ROI subtype {} (rotated rectangle) not supported".format( + subtype + ) ) if subtype != 0 and subtype != 3: - raise ValueError('read_imagej_roi: \ - ROI subtype {} not supported (!= 0)'.format(subtype)) + raise ValueError( + "read_imagej_roi: \ + ROI subtype {} not supported (!= 0)".format( + subtype + ) + ) options = _get16() if subtype == 3 and roi_type == 7: # ellipse aspect ratio @@ -185,14 +194,18 @@ def _getcoords(z=0): if roi_type == 0: # Polygon coords = _getcoords(z) - coords = coords.astype('float') - return {'polygons': coords} + coords = coords.astype("float") + return {"polygons": coords} elif roi_type == 1: # Rectangle - coords = [[left, top, z], [right, top, z], [right, bottom, z], - [left, bottom, z]] - coords = np.array(coords).astype('float') - return {'polygons': coords} + coords = [ + [left, top, z], + [right, top, z], + [right, bottom, z], + [left, bottom, z], + ] + coords = np.array(coords).astype("float") + return {"polygons": coords} elif roi_type == 2: # Oval width = right - left @@ -202,23 +215,26 @@ def _getcoords(z=0): x_mid = (right + left) / 2.0 - 0.5 y_mid = (top + bottom) / 2.0 - 0.5 mask = np.zeros((z + 1, right, bottom), dtype=bool) - for y, x in product(np.arange(max(0, top), bottom), np.arange(max(0, left), right)): - mask[z, x, y] = ((x - x_mid) ** 2 / (width / 2.0) ** 2 + - (y - y_mid) ** 2 / (height / 2.0) ** 2 <= 1) - return {'mask': mask} + for y, x in product( + np.arange(max(0, top), bottom), np.arange(max(0, left), right) + ): + mask[z, x, y] = (x - x_mid) ** 2 / (width / 2.0) ** 2 + (y - y_mid) ** 2 / ( + height / 2.0 + ) ** 2 <= 1 + return {"mask": mask} elif roi_type == 7: if subtype == 3: if (x1 < 0 and x2 < 0) or (y1 < 0 and y2 < 0): raise ValueError("ROI is entirely 
offscreen.") # Ellipse # Radius of major and minor axes - r_radius = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2. + r_radius = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2.0 c_radius = r_radius * aspect_ratio # Centre coordinates # We subtract 0.5 because ImageJ's co-ordinate system has indices at # the pixel boundaries, and we are using indices at the pixel centers. - x_mid = (x1 + x2) / 2. - 0.5 - y_mid = (y1 + y2) / 2. - 0.5 + x_mid = (x1 + x2) / 2.0 - 0.5 + y_mid = (y1 + y2) / 2.0 - 0.5 orientation = np.arctan2(y2 - y1, x2 - x1) # We need to make a mask which is a bit bigger than this, because @@ -241,15 +257,15 @@ def _getcoords(z=0): raise ValueError("Ellipse ROI is empty.") # Trim mask down to only the points needed - mask = mask[:, :max(xx) + 1, :max(yy) + 1] + mask = mask[:, : max(xx) + 1, : max(yy) + 1] # Convert sparse ellipse representation to mask mask[z, xx, yy] = True - return {'mask': mask} + return {"mask": mask} else: # Freehand coords = _getcoords(z) - coords = coords.astype('float') - return {'polygons': coords} + coords = coords.astype("float") + return {"polygons": coords} elif roi_type == 10: raise ValueError("read_imagej_roi: point/mulipoint types are not supported") @@ -257,11 +273,12 @@ def _getcoords(z=0): else: try: coords = _getcoords(z) - coords = coords.astype('float') - return {'polygons': coords} + coords = coords.astype("float") + return {"polygons": coords} except BaseException: raise ValueError( - 'read_imagej_roi: ROI type {} not supported'.format(roi_type)) + "read_imagej_roi: ROI type {} not supported".format(roi_type) + ) def _parse_roi_file_py3(roi_source): @@ -298,45 +315,49 @@ def _parse_roi_file_py3(roi_source): roi = roi[keys[0]] # Convert the roi dictionary into either polygon or a mask - if 'x' in roi and 'y' in roi and 'n' in roi: + if "x" in roi and "y" in roi and "n" in roi: # ROI types "freehand", "freeline", "multipoint", "point", "polygon", # "polyline", and "trace" are loaded and returned as a set of polygon # co-ordinates. 
- coords = np.empty((roi['n'], 3), dtype=np.float64) - coords[:, 0] = roi['x'] - coords[:, 1] = roi['y'] - coords[:, 2] = roi.get('z', 0) + coords = np.empty((roi["n"], 3), dtype=np.float64) + coords[:, 0] = roi["x"] + coords[:, 1] = roi["y"] + coords[:, 2] = roi.get("z", 0) if np.all(coords[:, 0] < 0) or np.all(coords[:, 1] < 0): raise ValueError("ROI is entirely offscreen.") - return {'polygons': coords} + return {"polygons": coords} - if 'width' in roi and 'height' in roi and 'left' in roi and 'top' in roi: - width = roi['width'] - height = roi['height'] - left = roi['left'] - top = roi['top'] + if "width" in roi and "height" in roi and "left" in roi and "top" in roi: + width = roi["width"] + height = roi["height"] + left = roi["left"] + top = roi["top"] right = left + width bottom = top + height if right < 0 or bottom < 0: raise ValueError("ROI is entirely offscreen.") - z = roi.get('z', 0) + z = roi.get("z", 0) - if roi['type'] == 'rectangle': + if roi["type"] == "rectangle": # Rectangle is converted into polygon co-ordinates - coords = [[left, top, z], [right, top, z], [right, bottom, z], - [left, bottom, z]] - coords = np.array(coords).astype('float') - return {'polygons': coords} - - elif roi['type'] == 'oval': + coords = [ + [left, top, z], + [right, top, z], + [right, bottom, z], + [left, bottom, z], + ] + coords = np.array(coords).astype("float") + return {"polygons": coords} + + elif roi["type"] == "oval": # Oval mask = np.zeros((z + 1, right, bottom), dtype=bool) # We subtract 0.5 because ImageJ's co-ordinate system has indices at # the pixel boundaries, and we are using indices at the pixel centers. - x_mid = left + width / 2. - 0.5 - y_mid = top + height / 2. - 0.5 + x_mid = left + width / 2.0 - 0.5 + y_mid = top + height / 2.0 - 0.5 # Ensure we only make a mask of things which are inside the image left = max(0, left) @@ -346,32 +367,32 @@ def _parse_roi_file_py3(roi_source): # pixels within the extent of the oval. xx = np.arange(left, right) yy = np.arange(top, bottom) - xx = ((xx - x_mid) / (width / 2.)) ** 2 - yy = ((yy - y_mid) / (height / 2.)) ** 2 + xx = ((xx - x_mid) / (width / 2.0)) ** 2 + yy = ((yy - y_mid) / (height / 2.0)) ** 2 dd = np.expand_dims(xx, 1) + np.expand_dims(yy, 0) mask[z, left:, top:] = dd <= 1 - return {'mask': mask} + return {"mask": mask} - elif roi['type'] == 'ellipse' or ( - roi['type'] == 'freehand' and 'aspect_ratio' in roi and 'ex1' in roi + elif roi["type"] == "ellipse" or ( + roi["type"] == "freehand" and "aspect_ratio" in roi and "ex1" in roi ): # Ellipse # Co-ordinates of points at either end of major axis - x1 = roi['ex1'] - y1 = roi['ey1'] - x2 = roi['ex2'] - y2 = roi['ey2'] + x1 = roi["ex1"] + y1 = roi["ey1"] + x2 = roi["ex2"] + y2 = roi["ey2"] if (x1 < 0 and x2 < 0) or (y1 < 0 and y2 < 0): raise ValueError("ROI is entirely offscreen.") # Radius of major and minor axes - r_radius = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2. - c_radius = r_radius * roi['aspect_ratio'] + r_radius = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2.0 + c_radius = r_radius * roi["aspect_ratio"] # Centre coordinates # We subtract 0.5 because ImageJ's co-ordinate system has indices at # the pixel boundaries, and we are using indices at the pixel centers. - x_mid = (x1 + x2) / 2. - 0.5 - y_mid = (y1 + y2) / 2. 
- 0.5 + x_mid = (x1 + x2) / 2.0 - 0.5 + y_mid = (y1 + y2) / 2.0 - 0.5 orientation = np.arctan2(y2 - y1, x2 - x1) # We need to make a mask which is a bit bigger than this, because @@ -394,19 +415,19 @@ def _parse_roi_file_py3(roi_source): raise ValueError("Ellipse ROI is empty.") # Trim mask down to only the points needed - mask = mask[:, :max(xx) + 1, :max(yy) + 1] + mask = mask[:, : max(xx) + 1, : max(yy) + 1] # Convert sparse ellipse representation to mask mask[z, xx, yy] = True - return {'mask': mask} + return {"mask": mask} else: - raise ValueError( - 'ROI type {} not supported'.format(roi['type']) - ) + raise ValueError("ROI type {} not supported".format(roi["type"])) # Handle different functions on Python 2/3 -parse_roi_file = _parse_roi_file_py3 if sys.version_info >= (3, 0) else _parse_roi_file_py2 +parse_roi_file = ( + _parse_roi_file_py3 if sys.version_info >= (3, 0) else _parse_roi_file_py2 +) def read_imagej_roi_zip(filename): @@ -429,6 +450,6 @@ def read_imagej_roi_zip(filename): roi = parse_roi_file(zf.open(name)) if roi is None: continue - roi['label'] = str(name).rstrip('.roi') + roi["label"] = str(name).rstrip(".roi") roi_list.append(roi) return roi_list diff --git a/fissa/roitools.py b/fissa/roitools.py index 5bc36acb..2390f732 100644 --- a/fissa/roitools.py +++ b/fissa/roitools.py @@ -41,8 +41,8 @@ def get_mask_com(mask): if mask.ndim != 2: raise ValueError( - 'Mask must be two-dimensional. Received input with {} dimensions' - ''.format(mask.ndim) + "Mask must be two-dimensional. Received input with {} dimensions" + "".format(mask.ndim) ) # TODO: make this work for non-boolean masks too @@ -74,7 +74,7 @@ def split_npil(mask, centre, num_slices, adaptive_num=False): A list with `num_slices` many masks, each of which is a 2d boolean numpy array. """ - #TODO: This should yield an iterable instead. + # TODO: This should yield an iterable instead. # Ensure array_like input is a numpy.ndarray mask = np.asarray(mask) @@ -82,7 +82,7 @@ def split_npil(mask, centre, num_slices, adaptive_num=False): # Get the (x,y) co-ordinates of the pixels in the mask x, y = mask.nonzero() if x.size == 0 or y.size == 0: - raise ValueError('ROI mask must be not be empty') + raise ValueError("ROI mask must be not be empty") # Find the angle of the vector from the mask centre to each pixel theta = np.arctan2(x - centre[0], y - centre[1]) @@ -106,7 +106,7 @@ def split_npil(mask, centre, num_slices, adaptive_num=False): # Ensure num_slices is an integer number num_slices = int(num_slices) if num_slices < 1: - raise ValueError('Number of slices must be positive') + raise ValueError("Number of slices must be positive") # Change theta so it is the angle relative to a new zero-point, # the middle of the bin which is least populated by mask pixels. @@ -114,8 +114,9 @@ def split_npil(mask, centre, num_slices, adaptive_num=False): theta = (theta - theta_offset) % (2 * np.pi) - np.pi # get the boundaries - bounds = [np.percentile(theta, 100.0 * (i + 1) / num_slices) - for i in range(num_slices)] + bounds = [ + np.percentile(theta, 100.0 * (i + 1) / num_slices) for i in range(num_slices) + ] # predefine the masks masks = [] @@ -175,10 +176,7 @@ def shift_2d_array(a, shift=1, axis=0): elif shift < 0: out[:, shift:] = 0 else: - raise ValueError( - 'Axis must be 0 or 1, but {} was given.' - ''.format(axis) - ) + raise ValueError("Axis must be 0 or 1, but {} was given." 
"".format(axis)) # return shifted array return out @@ -231,8 +229,10 @@ def get_npil_mask(mask, totalexpansion=4): count = 0 # for count in range(iterations): - while area_current < totalexpansion * area_orig \ - and area_current < area_total - area_orig: + while ( + area_current < totalexpansion * area_orig + and area_current < area_total - area_orig + ): # Check which case to use. In current version, we alternate # between case 0 (cardinals) and case 1 (diagonals). case = count % 2 @@ -338,28 +338,28 @@ def readrois(roiset): # set frame number to 0 for every roi for i in range(len(rois)): - if 'polygons' in rois[i]: - rois[i] = rois[i]['polygons'][:, :2] + if "polygons" in rois[i]: + rois[i] = rois[i]["polygons"][:, :2] # check if we are looking at an oval roi - elif 'mask' in rois[i]: + elif "mask" in rois[i]: # this is an oval roi, which gets imported as a 3D mask. # First get the frame that has the mask in it by finding the # nonzero frame - mask_frame = np.nonzero(rois[i]['mask'])[0][0] + mask_frame = np.nonzero(rois[i]["mask"])[0][0] # get the mask - mask = rois[i]['mask'][mask_frame, :, :] + mask = rois[i]["mask"][mask_frame, :, :] # finally, get the outline coordinates rois[i] = find_roi_edge(mask)[0] else: raise ValueError( - 'ROI #{} contains neither a polygon nor mask representation' - ' of the region of interest.' - ''.format(i)) - + "ROI #{} contains neither a polygon nor mask representation" + " of the region of interest." + "".format(i) + ) return rois diff --git a/fissa/tests/base_test.py b/fissa/tests/base_test.py index 36ab0ef6..fb188021 100644 --- a/fissa/tests/base_test.py +++ b/fissa/tests/base_test.py @@ -1,7 +1,7 @@ -''' +""" Provides a general testing class which inherits from unittest.TestCase and also provides the numpy testing functions. -''' +""" import contextlib import datetime @@ -36,6 +36,7 @@ def assert_allclose_ragged(actual, desired): else: assert_allclose(actual_i, desired_i) + def assert_equal_list_of_array_perm_inv(actual, desired): assert_equal(len(actual), len(desired)) for desired_i in desired: @@ -45,11 +46,13 @@ def assert_equal_list_of_array_perm_inv(actual, desired): n_matches += 1 assert n_matches >= 0 + def assert_equal_dict_of_array(actual, desired): assert_equal(actual.keys(), desired.keys()) for k in desired.keys(): assert_equal(actual[k], desired[k]) + def assert_starts_with(actual, desired): """ Check that a string starts with a certain substring. 
@@ -64,20 +67,24 @@ def assert_starts_with(actual, desired): try: assert len(actual) >= len(desired) except BaseException as err: - print("Actual string too short ({} < {} characters)".format(len(actual), len(desired))) + print( + "Actual string too short ({} < {} characters)".format( + len(actual), len(desired) + ) + ) print("ACTUAL: {}".format(actual)) raise try: - return assert_equal(str(actual)[:len(desired)], desired) + return assert_equal(str(actual)[: len(desired)], desired) except BaseException as err: msg = "ACTUAL: {}".format(actual) if isinstance(getattr(err, "args", None), str): err.args += "\n" + msg elif isinstance(getattr(err, "args", None), tuple): if len(err.args) == 1: - err.args = (err.args[0] + "\n" + msg, ) + err.args = (err.args[0] + "\n" + msg,) else: - err.args += (msg, ) + err.args += (msg,) else: print(msg) raise @@ -85,16 +92,16 @@ def assert_starts_with(actual, desired): class BaseTestCase(unittest.TestCase): - ''' + """ Superclass for all the FISSA test cases - ''' + """ # Have the test directory as an attribute to the class as well as # a top-level variable test_directory = TEST_DIRECTORY def __init__(self, *args, **kwargs): - '''Add test for numpy type''' + """Add test for numpy type""" # super(self).__init__(*args, **kw) # Only works on Python3 super(BaseTestCase, self).__init__(*args, **kwargs) # Works on Python2 self.addTypeEqualityFunc(np.ndarray, self.assert_allclose) @@ -124,7 +131,7 @@ def tearDown(self): @contextlib.contextmanager def subTest(self, *args, **kwargs): # For backwards compatability with Python < 3.4 - if hasattr(super(BaseTestCase, self), 'subTest'): + if hasattr(super(BaseTestCase, self), "subTest"): yield super(BaseTestCase, self).subTest(*args, **kwargs) else: yield None @@ -169,7 +176,7 @@ def assert_array_equal(self, actual, desired, *args, **kwargs): def assert_allclose(self, actual, desired, *args, **kwargs): # Handle msg argument, which is passed from assertEqual, established # with addTypeEqualityFunc in __init__ - msg = kwargs.pop('msg', None) + msg = kwargs.pop("msg", None) return assert_allclose(actual, desired, *args, **kwargs) def assert_equal(self, actual, desired, *args, **kwargs): diff --git a/fissa/tests/generate_downsampled_resources.py b/fissa/tests/generate_downsampled_resources.py index a37cddbc..64becd6b 100755 --- a/fissa/tests/generate_downsampled_resources.py +++ b/fissa/tests/generate_downsampled_resources.py @@ -18,9 +18,9 @@ def maybe_make_dir(dirname): - ''' + """ If it doesn't exist, make a directory. Compatible with Python 2 and 3. - ''' + """ if sys.version_info[0] >= 3: os.makedirs(dirname, exist_ok=True) elif os.path.isdir(dirname): @@ -31,8 +31,9 @@ def maybe_make_dir(dirname): if err.errno != 17: raise + def downscale_roi(source_file, dest_file, downsamp=[1, 1], offsets=[0, 0]): - ''' + """ Downscale a ROI appearing in an ImageJ ROI file. The co-ordinates of the ROI are adjusted, and all metadata remains the same. @@ -57,8 +58,8 @@ def downscale_roi(source_file, dest_file, downsamp=[1, 1], offsets=[0, 0]): Based on `fissa.readimagejrois.read_roi`. The original version of `read_roi` was written by Luis Pedro Coelho, released under the MIT License. 
- ''' - with open(source_file, 'rb') as inhand, open(dest_file, 'wb') as outhand: + """ + with open(source_file, "rb") as inhand, open(dest_file, "wb") as outhand: sub_pixel_resolution = 128 @@ -79,7 +80,7 @@ def _get8(): pos[0] += 1 s = inhand.read(1) if not s: - raise IOError('read_imagej_roi: Unexpected EOF') + raise IOError("read_imagej_roi: Unexpected EOF") return ord(s) def _write8(s): @@ -96,9 +97,9 @@ def _get16(): def _write16(s): """Write 2 byte to a roi file object""" - b0 = (s >> 8) & 0xff + b0 = (s >> 8) & 0xFF _write8(b0) - b1 = s & 0xff + b1 = s & 0xFF _write8(b1) def _get16signed(): @@ -126,9 +127,9 @@ def _get32(): def _write32(s): """Write 4 bytes to the roi file object""" - s0 = (s >> 16) & 0xffff + s0 = (s >> 16) & 0xFFFF _write16(s0) - s1 = s & 0xffff + s1 = s & 0xFFFF _write16(s1) def _getfloat(): @@ -162,11 +163,11 @@ def _writecoords(points): if options & sub_pixel_resolution: writec = _writefloat convert = lambda x: x.astype(np.float32) - #points = np.empty((n_coordinates, 3), dtype=np.float32) + # points = np.empty((n_coordinates, 3), dtype=np.float32) else: writec = _write16signed convert = lambda x: np.round(x).astype(np.int16) - #points = np.empty((n_coordinates, 3), dtype=np.int16) + # points = np.empty((n_coordinates, 3), dtype=np.int16) x = (points[:, 0] - left) / downsamp[0] y = (points[:, 1] - top) / downsamp[1] for xi in x: @@ -175,11 +176,11 @@ def _writecoords(points): writec(convert(yi)) magic = inhand.read(4) - if magic != b'Iout': + if magic != b"Iout": raise IOError( - 'read_imagej_roi: Magic number not found.' - ' Expected: {}. Detected: {}.' - ''.format(b'Iout', magic) + "read_imagej_roi: Magic number not found." + " Expected: {}. Detected: {}." + "".format(b"Iout", magic) ) outhand.write(magic) @@ -193,8 +194,12 @@ def _writecoords(points): _write8(b) if not (0 <= roi_type < 11): - raise ValueError('read_imagej_roi: \ - ROI type {} not supported'.format(roi_type)) + raise ValueError( + "read_imagej_roi: \ + ROI type {} not supported".format( + roi_type + ) + ) top = _get16signed() _write16signed(int(np.round((top + offsets[1]) / downsamp[1]))) @@ -226,8 +231,12 @@ def _writecoords(points): subtype = _get16() _write16(subtype) if subtype != 0 and subtype != 3: - raise ValueError('read_imagej_roi: \ - ROI subtype {} not supported (!= 0)'.format(subtype)) + raise ValueError( + "read_imagej_roi: \ + ROI subtype {} not supported (!= 0)".format( + subtype + ) + ) options = _get16() _write16(options) if subtype == 3 and roi_type == 7: @@ -252,14 +261,18 @@ def _writecoords(points): # Polygon coords = _getcoords(z) _writecoords(coords) - coords = coords.astype('float') - #return {'polygons': coords} + coords = coords.astype("float") + # return {'polygons': coords} elif roi_type == 1: # Rectangle - coords = [[left, top, z], [right, top, z], [right, bottom, z], - [left, bottom, z]] - coords = np.array(coords).astype('float') - #return {'polygons': coords} + coords = [ + [left, top, z], + [right, top, z], + [right, bottom, z], + [left, bottom, z], + ] + coords = np.array(coords).astype("float") + # return {'polygons': coords} elif roi_type == 2: # Oval width = right - left @@ -270,38 +283,40 @@ def _writecoords(points): y_mid = (top + bottom) / 2.0 - 0.5 mask = np.zeros((z + 1, right, bottom), dtype=bool) for y, x in product(np.arange(top, bottom), np.arange(left, right)): - mask[z, x, y] = ((x - x_mid) ** 2 / (width / 2.0) ** 2 + - (y - y_mid) ** 2 / (height / 2.0) ** 2 <= 1) + mask[z, x, y] = (x - x_mid) ** 2 / (width / 2.0) ** 2 + ( + y - y_mid + ) ** 2 
/ (height / 2.0) ** 2 <= 1 # return {'mask': mask} elif roi_type == 7: if subtype == 3: # ellipse - mask = np.zeros((1, right+10, bottom+10), dtype=bool) - r_radius = np.sqrt((x2-x1)**2+(y2-y1)**2)/2.0 - c_radius = r_radius*aspect_ratio - r = (x1+x2)/2-0.5 - c = (y1+y2)/2-0.5 + mask = np.zeros((1, right + 10, bottom + 10), dtype=bool) + r_radius = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2) / 2.0 + c_radius = r_radius * aspect_ratio + r = (x1 + x2) / 2 - 0.5 + c = (y1 + y2) / 2 - 0.5 shpe = mask.shape - orientation = np.arctan2(y2-y1, x2-x1) + orientation = np.arctan2(y2 - y1, x2 - x1) X, Y = ellipse(r, c, r_radius, c_radius, shpe[1:], orientation) mask[0, X, Y] = True - #return {'mask': mask} + # return {'mask': mask} else: # Freehand coords = _getcoords(z) _writecoords(coords) - coords = coords.astype('float') - #return {'polygons': coords} + coords = coords.astype("float") + # return {'polygons': coords} else: try: coords = _getcoords(z) _writecoords(coords) - coords = coords.astype('float') - #return {'polygons': coords} + coords = coords.astype("float") + # return {'polygons': coords} except BaseException: raise ValueError( - 'read_imagej_roi: ROI type {} not supported'.format(roi_type)) + "read_imagej_roi: ROI type {} not supported".format(roi_type) + ) # Copy the rest of the file, line by line for line in inhand: @@ -309,9 +324,9 @@ def _writecoords(points): def main(name=None, roi_ids=None, x_down=4, y_down=None, t_down=10): - ''' + """ Convert example data into downsampled test data. - ''' + """ # Default arguments if y_down is None: y_down = x_down @@ -325,26 +340,29 @@ def main(name=None, roi_ids=None, x_down=4, y_down=None, t_down=10): this_dir = os.path.dirname(os.path.abspath(__file__)) repo_dir = os.path.dirname(os.path.dirname(this_dir)) rois_location = os.path.join( - repo_dir, 'examples', 'exampleData', '20150429.zip', + repo_dir, + "examples", + "exampleData", + "20150429.zip", ) images_location = os.path.join( - repo_dir, 'examples', 'exampleData', '20150529', + repo_dir, + "examples", + "exampleData", + "20150529", ) # Output configuration - output_folder_base = os.path.join(this_dir, 'resources') - roi_extract_dir = os.path.join(output_folder_base, '_build_rois') + output_folder_base = os.path.join(this_dir, "resources") + roi_extract_dir = os.path.join(output_folder_base, "_build_rois") output_folder = os.path.join(output_folder_base, name) datahandler = extraction.DataHandlerTifffile # Extract the rois from the zip file - print( - 'Extracting rois from {} into {}' - ''.format(rois_location, roi_extract_dir) - ) - with zipfile.ZipFile(rois_location, 'r') as zr: + print("Extracting rois from {} into {}" "".format(rois_location, roi_extract_dir)) + with zipfile.ZipFile(rois_location, "r") as zr: zr.extractall(roi_extract_dir) # Read in rois @@ -385,7 +403,8 @@ def main(name=None, roi_ids=None, x_down=4, y_down=None, t_down=10): # Downscale roi(s) for roi_id in roi_ids: roi_raw_pth = os.path.join( - roi_extract_dir, "{:02d}.roi".format(roi_id + 1), + roi_extract_dir, + "{:02d}.roi".format(roi_id + 1), ) roi_dwn_pth = os.path.join( output_folder, "rois", "{:02d}.roi".format(roi_id + 1) @@ -398,10 +417,10 @@ def main(name=None, roi_ids=None, x_down=4, y_down=None, t_down=10): downscale_roi(roi_raw_pth, roi_dwn_pth, [x_down, y_down], [-off_x, -off_y]) # Turn rois into a zip file - roi_zip_pth = os.path.join(output_folder, 'rois.zip') + roi_zip_pth = os.path.join(output_folder, "rois.zip") print("Zipping rois {} as {}".format(os.path.dirname(roi_dwn_pth), roi_zip_pth)) 
maybe_make_dir(os.path.dirname(roi_dwn_pth)) - with zipfile.ZipFile(roi_zip_pth, 'w') as zr: + with zipfile.ZipFile(roi_zip_pth, "w") as zr: for roi_id in roi_ids: roi_dwn_pth = os.path.join( output_folder, "rois", "{:02d}.roi".format(roi_id + 1) @@ -433,7 +452,10 @@ def get_parser(): ) parser.add_argument( - "-h", "--help", action="help", help="Show this help message and exit.", + "-h", + "--help", + action="help", + help="Show this help message and exit.", ) parser.add_argument( @@ -473,8 +495,8 @@ def get_parser(): return parser -if __name__ == '__main__': - __package__ = 'fissa.tests.generate_downsampled_resources' +if __name__ == "__main__": + __package__ = "fissa.tests.generate_downsampled_resources" parser = get_parser() kwargs = vars(parser.parse_args()) roi_ids = kwargs.pop("roi_id") diff --git a/fissa/tests/test_core.py b/fissa/tests/test_core.py index 9a3fafe8..01bcae77 100644 --- a/fissa/tests/test_core.py +++ b/fissa/tests/test_core.py @@ -1,4 +1,4 @@ -'''Unit tests for core.py.''' +"""Unit tests for core.py.""" from __future__ import division @@ -91,13 +91,13 @@ def compare_output(self, actual, separated=True, compare_deltaf=None): self.compare_deltaf_result(actual.deltaf_result) def compare_experiments( - self, - actual, - expected, - folder=True, - prepared=True, - separated=True, - ): + self, + actual, + expected, + folder=True, + prepared=True, + separated=True, + ): """ Compare attributes of two experiments. @@ -292,7 +292,6 @@ def test_repr_eval(self): exp2 = eval(actual) self.compare_experiments(exp, exp2) - def test_imagedir_roizip(self): exp = core.Experiment(self.images_dir, self.roi_zip_path) exp.separate() @@ -301,10 +300,7 @@ def test_imagedir_roizip(self): self.compare_str_repr_contents(repr(exp)) def test_imagelist_roizip(self): - image_paths = [ - os.path.join(self.images_dir, img) - for img in self.image_names - ] + image_paths = [os.path.join(self.images_dir, img) for img in self.image_names] exp = core.Experiment(image_paths, self.roi_zip_path) exp.separate() self.compare_output(exp) @@ -312,10 +308,7 @@ def test_imagelist_roizip(self): self.compare_str_repr_contents(repr(exp)) def test_imagelistloaded_roizip(self): - image_paths = [ - os.path.join(self.images_dir, img) - for img in self.image_names - ] + image_paths = [os.path.join(self.images_dir, img) for img in self.image_names] datahandler = extraction.DataHandlerTifffile() images = [datahandler.image2array(pth) for pth in image_paths] exp = core.Experiment(images, self.roi_zip_path) @@ -326,10 +319,7 @@ def test_imagelistloaded_roizip(self): @unittest.expectedFailure def test_imagedir_roilistpath(self): - roi_paths = [ - os.path.join(self.resources_dir, r) - for r in self.roi_paths - ] + roi_paths = [os.path.join(self.resources_dir, r) for r in self.roi_paths] exp = core.Experiment(self.images_dir, roi_paths) exp.separate() self.compare_output(exp) @@ -338,14 +328,8 @@ def test_imagedir_roilistpath(self): @unittest.expectedFailure def test_imagelist_roilistpath(self): - image_paths = [ - os.path.join(self.images_dir, img) - for img in self.image_names - ] - roi_paths = [ - os.path.join(self.resources_dir, r) - for r in self.roi_paths - ] + image_paths = [os.path.join(self.images_dir, img) for img in self.image_names] + roi_paths = [os.path.join(self.resources_dir, r) for r in self.roi_paths] exp = core.Experiment(image_paths, roi_paths) exp.separate() self.compare_output(exp) diff --git a/fissa/tests/test_extraction.py b/fissa/tests/test_extraction.py index 8f7ddb93..44ddf1aa 100755 --- 
a/fissa/tests/test_extraction.py +++ b/fissa/tests/test_extraction.py @@ -19,7 +19,7 @@ from . import base_test from .base_test import BaseTestCase -RESOURCES_DIR = os.path.join(base_test.TEST_DIRECTORY, 'resources', 'tiffs') +RESOURCES_DIR = os.path.join(base_test.TEST_DIRECTORY, "resources", "tiffs") def get_dtyped_expected(expected, dtype): @@ -59,10 +59,7 @@ def test_single_frame_3d(dtype, datahandler): """ expected = np.array([[[-11, 12], [14, 15], [17, 18]]]) expected = get_dtyped_expected(expected, dtype) - fname = os.path.join( - RESOURCES_DIR, - "imageio.imwrite_{}.tif".format(dtype) - ) + fname = os.path.join(RESOURCES_DIR, "imageio.imwrite_{}.tif".format(dtype)) actual = datahandler.image2array(fname) base_test.assert_equal(actual, expected) @@ -93,10 +90,7 @@ def test_single_frame_2d(dtype, datahandler): """ expected = np.array([[-11, 12], [14, 15], [17, 18]]) expected = get_dtyped_expected(expected, dtype) - fname = os.path.join( - RESOURCES_DIR, - "imageio.imwrite_{}.tif".format(dtype) - ) + fname = os.path.join(RESOURCES_DIR, "imageio.imwrite_{}.tif".format(dtype)) actual = datahandler.image2array(fname) base_test.assert_equal(actual, expected) @@ -127,10 +121,7 @@ def multiframe_image2array_tester(base_fname, dtype, datahandler): ] ) expected = get_dtyped_expected(expected, dtype) - fname = os.path.join( - RESOURCES_DIR, - base_fname + "_{}.tif".format(dtype) - ) + fname = os.path.join(RESOURCES_DIR, base_fname + "_{}.tif".format(dtype)) actual = datahandler.image2array(fname) base_test.assert_equal(actual, expected) @@ -245,10 +236,7 @@ def multiframe_mean_tester(base_fname, dtype, datahandler): ) expected = get_dtyped_expected(expected, dtype) expected = np.mean(expected, dtype=np.float64, axis=0) - fname = os.path.join( - RESOURCES_DIR, - base_fname + "_{}.tif".format(dtype) - ) + fname = os.path.join(RESOURCES_DIR, base_fname + "_{}.tif".format(dtype)) data = datahandler.image2array(fname) actual = datahandler.getmean(data) base_test.assert_allclose(actual, expected) @@ -280,7 +268,9 @@ def multiframe_mean_tester(base_fname, dtype, datahandler): "float64", ], ) -@pytest.mark.parametrize("datahandler", [extraction.DataHandlerTifffile, extraction.DataHandlerTifffileLazy]) +@pytest.mark.parametrize( + "datahandler", [extraction.DataHandlerTifffile, extraction.DataHandlerTifffileLazy] +) def test_multiframe_mean(base_fname, dtype, datahandler): """ Test the mean of TIFFs. 
@@ -304,7 +294,9 @@ def test_multiframe_mean(base_fname, dtype, datahandler): "base_fname", [ "tifffile.imsave", - pytest.param("tifffile.imsave.bigtiff", marks=pytest.mark.xfail(reason="not supported")), + pytest.param( + "tifffile.imsave.bigtiff", marks=pytest.mark.xfail(reason="not supported") + ), "TiffWriter.mixedA", # pytest.param("TiffWriter.mixedB", marks=pytest.mark.xfail(reason="not supported")), "TiffWriter.mixedC", @@ -339,7 +331,11 @@ def test_multiframe_mean_pillow(base_fname, dtype, datahandler): @pytest.mark.parametrize("dtype", ["uint8", "uint16", "float32"]) @pytest.mark.parametrize( "datahandler", - [extraction.DataHandlerTifffile, extraction.DataHandlerTifffileLazy, extraction.DataHandlerPillow], + [ + extraction.DataHandlerTifffile, + extraction.DataHandlerTifffileLazy, + extraction.DataHandlerPillow, + ], ) def test_multiframe_mean_imagejformat(dtype, datahandler): """ @@ -367,7 +363,9 @@ def test_multiframe_mean_imagejformat(dtype, datahandler): ) @pytest.mark.parametrize("dtype", ["uint8"]) @pytest.mark.parametrize("shp", ["3,2,3,2", "2,1,3,3,2", "2,3,1,1,3,2"]) -@pytest.mark.parametrize("datahandler", [extraction.DataHandlerTifffile, extraction.DataHandlerTifffileLazy]) +@pytest.mark.parametrize( + "datahandler", [extraction.DataHandlerTifffile, extraction.DataHandlerTifffileLazy] +) def test_multiframe_mean_higherdim(base_fname, shp, dtype, datahandler): """ Test the mean of 4d/5d TIFFs. @@ -404,7 +402,7 @@ def test_multiframe_mean_higherdim(base_fname, shp, dtype, datahandler): "3,2,3,2", pytest.param("2,1,3,3,2", marks=pytest.mark.xfail(reason="looks like RGB")), "2,3,1,1,3,2", - ] + ], ) @pytest.mark.parametrize("datahandler", [extraction.DataHandlerPillow]) def test_multiframe_mean_higherdim_pillow(base_fname, shp, dtype, datahandler): @@ -417,6 +415,7 @@ def test_multiframe_mean_higherdim_pillow(base_fname, shp, dtype, datahandler): datahandler=datahandler, ) + class TestDataHandlerRepr(BaseTestCase): """String representations of DataHandler are correct.""" @@ -439,8 +438,8 @@ class Rois2MasksTestMixin: """Tests for rois2masks.""" polys = [ - np.array([[39., 62.], [60., 45.], [48., 71.]]), - np.array([[72., 107.], [78., 130.], [100., 110.]]), + np.array([[39.0, 62.0], [60.0, 45.0], [48.0, 71.0]]), + np.array([[72.0, 107.0], [78.0, 130.0], [100.0, 110.0]]), ] def setUp(self): @@ -450,7 +449,7 @@ def setUp(self): def test_imagej_zip(self): # load zip of rois - ROI_loc = os.path.join(self.test_directory, 'resources', 'RoiSet.zip') + ROI_loc = os.path.join(self.test_directory, "resources", "RoiSet.zip") actual = self.datahandler.rois2masks(ROI_loc, self.data) # assert equality @@ -485,8 +484,20 @@ def test_rois_not_list(self): def test_polys_1d(self): # check that rois2masks fails when the polys are not 2d polys1d = [ - np.array([[39.,]]), - np.array([[72.,]]), + np.array( + [ + [ + 39.0, + ] + ] + ), + np.array( + [ + [ + 72.0, + ] + ] + ), ] with self.assertRaises(ValueError): self.datahandler.rois2masks(polys1d, self.data) @@ -494,8 +505,8 @@ def test_polys_1d(self): def test_polys_3d(self): # check that rois2masks fails when the polys are not 2d polys3d = [ - np.array([[39., 62., 0.], [60., 45., 0.], [48., 71., 0.]]), - np.array([[72., 107., 0.], [78., 130., 0.], [100., 110., 0.]]), + np.array([[39.0, 62.0, 0.0], [60.0, 45.0, 0.0], [48.0, 71.0, 0.0]]), + np.array([[72.0, 107.0, 0.0], [78.0, 130.0, 0.0], [100.0, 110.0, 0.0]]), ] with self.assertRaises(ValueError): self.datahandler.rois2masks(polys3d, self.data) @@ -540,5 +551,7 @@ class 
TestRois2MasksPillow(BaseTestCase, Rois2MasksTestMixin): def setUp(self): Rois2MasksTestMixin.setUp(self) - self.data = Image.fromarray(self.data.reshape(self.data.shape[-2:]).astype(np.uint8)) + self.data = Image.fromarray( + self.data.reshape(self.data.shape[-2:]).astype(np.uint8) + ) self.datahandler = extraction.DataHandlerPillow() diff --git a/fissa/tests/test_neuropil.py b/fissa/tests/test_neuropil.py index 367d962c..667ad25d 100644 --- a/fissa/tests/test_neuropil.py +++ b/fissa/tests/test_neuropil.py @@ -40,7 +40,7 @@ def run_method(self, method, expected_converged=None, **kwargs): self.assert_equal(S_sep.shape, self.shape_desired) # If specified, assert that the result is as expected if expected_converged is not None: - self.assert_equal(convergence['converged'], expected_converged) + self.assert_equal(convergence["converged"], expected_converged) def test_method(self): self.run_method(self.method, expected_converged=True, maxtries=1) @@ -63,28 +63,25 @@ def test_retry(self): class TestNeuropilNMF(BaseTestCase, NeuropilMixin): - def setUp(self): NeuropilMixin.setUp(self) self.method = "nmf" def test_nmf_manual_alpha(self): - self.run_method(self.method, expected_converged=True, alpha=.2) + self.run_method(self.method, expected_converged=True, alpha=0.2) def test_badmethod(self): with self.assertRaises(ValueError): - npil.separate(self.S, sep_method='bogus_method') + npil.separate(self.S, sep_method="bogus_method") class TestNeuropilICA(BaseTestCase, NeuropilMixin): - def setUp(self): NeuropilMixin.setUp(self) self.method = "ica" class TestNeuropilFA(BaseTestCase, NeuropilMixin): - def setUp(self): NeuropilMixin.setUp(self) self.method = "FactorAnalysis" diff --git a/fissa/tests/test_readimagejrois.py b/fissa/tests/test_readimagejrois.py index d331cb86..1d9a6c8d 100644 --- a/fissa/tests/test_readimagejrois.py +++ b/fissa/tests/test_readimagejrois.py @@ -54,11 +54,15 @@ def test_freehand(self): def test_freeline(self): self.check_polygon("freeline") - @unittest.skipIf(sys.version_info < (3, 0), "multipoint rois only supported on Python 3") + @unittest.skipIf( + sys.version_info < (3, 0), "multipoint rois only supported on Python 3" + ) def test_multipoint(self): self.check_polygon("multipoint") - @unittest.skipIf(sys.version_info >= (3, 0), "multipoint rois are supported on Python 3") + @unittest.skipIf( + sys.version_info >= (3, 0), "multipoint rois are supported on Python 3" + ) def test_multipoint_py2_raises(self): with self.assertRaises(ValueError): self.check_polygon("multipoint") @@ -81,16 +85,12 @@ def test_polygon_bottom(self): def test_polygon_left_offscreen(self): name = "polygon-left-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_polygon_top_offscreen(self): name = "polygon-top-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_polygon_right_offscreen(self): self.check_polygon("polygon-right-offscreen") @@ -104,11 +104,15 @@ def test_polyline(self): def test_rectangle(self): self.check_polygon("rectangle") - @unittest.skipIf(sys.version_info < (3, 0), "Rotated rectangle rois only supported on Python 3") + @unittest.skipIf( + sys.version_info < (3, 0), "Rotated rectangle rois only supported on Python 3" + ) def test_rectangle_rotated(self): 
self.check_polygon("rectangle-rotated") - @unittest.skipIf(sys.version_info >= (3, 0), "Rotated rectangle rois are supported on Python 3") + @unittest.skipIf( + sys.version_info >= (3, 0), "Rotated rectangle rois are supported on Python 3" + ) def test_rectangle_rotated_py2_raises(self): with self.assertRaises(ValueError): self.check_polygon("rectangle-rotated") @@ -139,16 +143,12 @@ def test_oval_bottom(self): def test_oval_left_offscreen(self): name = "oval-left-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_oval_top_offscreen(self): name = "oval-top-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_oval_right_offscreen(self): self.check_mask("oval-right-offscreen") @@ -174,16 +174,12 @@ def test_ellipse_bottom(self): def test_ellipse_left_offscreen(self): name = "ellipse-left-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_ellipse_top_offscreen(self): name = "ellipse-top-offscreen" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) def test_ellipse_right_offscreen(self): self.check_mask("ellipse-right-offscreen") @@ -195,6 +191,4 @@ def test_ellipse_tiny(self): # ROI which is too small to cover a single pixel name = "ellipse-tiny" with self.assertRaises(ValueError): - readimagejrois.parse_roi_file( - os.path.join(self.data_dir, name + ".roi") - ) + readimagejrois.parse_roi_file(os.path.join(self.data_dir, name + ".roi")) diff --git a/fissa/tests/test_roitools.py b/fissa/tests/test_roitools.py index accd32c5..9220eac7 100644 --- a/fissa/tests/test_roitools.py +++ b/fissa/tests/test_roitools.py @@ -1,4 +1,4 @@ -'''Unit tests for roitools.py.''' +"""Unit tests for roitools.py.""" from __future__ import division @@ -12,12 +12,10 @@ class TestGetMaskCom(BaseTestCase): - '''Tests for get_mask_com.''' + """Tests for get_mask_com.""" def test_trivial_list(self): - actual = roitools.get_mask_com( - [[True]] - ) + actual = roitools.get_mask_com([[True]]) desired = (0, 0) self.assert_equal(actual, desired) @@ -55,17 +53,19 @@ def test_main(self): self.assert_equal(actual, desired) actual = roitools.get_mask_com( - [[False, True], - [False, False], - ] + [ + [False, True], + [False, False], + ] ) desired = (0, 1) self.assert_equal(actual, desired) actual = roitools.get_mask_com( - [[False, True, True], - [False, False, True], - ] + [ + [False, True, True], + [False, False, True], + ] ) desired = (1 / 3, 1 + 2 / 3) self.assert_allclose(actual, desired) @@ -73,15 +73,15 @@ def test_main(self): def test_non2d_input(self): self.assertRaises(ValueError, roitools.get_mask_com, []) self.assertRaises(ValueError, roitools.get_mask_com, np.ones((1))) - self.assertRaises( - ValueError, roitools.get_mask_com, np.ones((1, 1, 1))) + self.assertRaises(ValueError, roitools.get_mask_com, np.ones((1, 1, 1))) @unittest.expectedFailure def test_non_boolean(self): actual = roitools.get_mask_com( - [[0, 1, 2], - [0, 0, 1], - ] + [ + [0, 1, 2], + [0, 0, 1], + ] ) desired = (3 / 4, 1 + 3 / 4) 
self.assert_allclose(actual, desired) @@ -89,7 +89,7 @@ def test_non_boolean(self): class TestShift2dArray(BaseTestCase): - '''Tests for shift_2d_array.''' + """Tests for shift_2d_array.""" def test_noop(self): x = np.array([4, 5, 1]) @@ -175,14 +175,14 @@ def test_axis3(self): class TestSplitNpil(BaseTestCase): - '''Tests for split_npil.''' + """Tests for split_npil.""" def test_2x2(self): mask = np.ones((2, 2)) npil_masks = roitools.split_npil(mask, (0.5, 0.5), 4) desired_npil_masks = [ - np.array([[False, True], [False, False]]), - np.array([[False, False], [False, True]]), + np.array([[False, True], [False, False]]), + np.array([[False, False], [False, True]]), np.array([[False, False], [True, False]]), np.array([[True, False], [False, False]]), ] @@ -192,7 +192,7 @@ def test_bottom(self): mask = [[0, 0], [1, 1]] npil_masks = roitools.split_npil(mask, (0.5, 0.5), 4) desired_npil_masks = [ - np.array([[False, False], [False, True]]), + np.array([[False, False], [False, True]]), np.array([[False, False], [False, False]]), np.array([[False, False], [False, False]]), np.array([[False, False], [True, False]]), @@ -203,17 +203,16 @@ def test_bottom2(self): mask = [[0, 0], [1, 1]] npil_masks = roitools.split_npil(mask, (1, 1), 2) desired_npil_masks = [ - np.array([[False, False], [False, True]]), + np.array([[False, False], [False, True]]), np.array([[False, False], [True, False]]), ] self.assert_equal_list_of_array_perm_inv(npil_masks, desired_npil_masks) def test_bottom_adaptive(self): mask = [[0, 0], [1, 1]] - npil_masks = roitools.split_npil(mask, (1, 1), 4, - adaptive_num=True) + npil_masks = roitools.split_npil(mask, (1, 1), 4, adaptive_num=True) desired_npil_masks = [ - np.array([[False, False], [False, True]]), + np.array([[False, False], [False, True]]), np.array([[False, False], [True, False]]), ] self.assert_equal_list_of_array_perm_inv(npil_masks, desired_npil_masks) @@ -221,7 +220,7 @@ def test_bottom_adaptive(self): class TestGetNpilMask(BaseTestCase): - '''Tests for get_npil_mask.''' + """Tests for get_npil_mask.""" def test_empty(self): mask = [ @@ -230,11 +229,13 @@ def test_empty(self): [False, False, False], ] actual = roitools.get_npil_mask(mask, 0) - desired = np.array([ - [False, False, False], - [False, False, False], - [False, False, False], - ]) + desired = np.array( + [ + [False, False, False], + [False, False, False], + [False, False, False], + ] + ) self.assert_equal(actual, desired) def test_full(self): @@ -243,11 +244,13 @@ def test_full(self): [False, False, False], [False, False, False], ] - desired = np.array([ - [False, True, True], - [True, True, True], - [True, True, True], - ]) + desired = np.array( + [ + [False, True, True], + [True, True, True], + [True, True, True], + ] + ) actual = roitools.get_npil_mask(mask, 8) self.assert_equal(actual, desired) @@ -262,29 +265,35 @@ def test_corner(self): [False, False, False], ] - desired = np.array([ - [False, True, False], - [True, False, False], - [False, False, False], - ]) + desired = np.array( + [ + [False, True, False], + [True, False, False], + [False, False, False], + ] + ) for area in [1, 2]: actual = roitools.get_npil_mask(mask, area) self.assert_equal(actual, desired) - desired = np.array([ - [False, True, False], - [True, True, True], - [False, True, False], - ]) + desired = np.array( + [ + [False, True, False], + [True, True, True], + [False, True, False], + ] + ) for area in [3, 4, 5]: actual = roitools.get_npil_mask(mask, area) self.assert_equal(actual, desired) - desired = np.array([ - [False, 
True, True], - [True, True, True], - [True, True, True], - ]) + desired = np.array( + [ + [False, True, True], + [True, True, True], + [True, True, True], + ] + ) for area in [6, 7, 8]: actual = roitools.get_npil_mask(mask, area) self.assert_equal(actual, desired) @@ -296,20 +305,24 @@ def test_middle(self): [False, False, False], ] - desired = np.array([ - [False, True, False], - [True, False, True], - [False, True, False], - ]) + desired = np.array( + [ + [False, True, False], + [True, False, True], + [False, True, False], + ] + ) for area in [1, 2, 3, 4]: actual = roitools.get_npil_mask(mask, area) self.assert_equal(actual, desired) - desired = np.array([ - [True, True, True], - [True, False, True], - [True, True, True], - ]) + desired = np.array( + [ + [True, True, True], + [True, False, True], + [True, True, True], + ] + ) for area in [5, 6, 7, 8]: actual = roitools.get_npil_mask(mask, area) self.assert_equal(actual, desired) diff --git a/setup.py b/setup.py index d2f598cb..cea9814a 100644 --- a/setup.py +++ b/setup.py @@ -10,20 +10,21 @@ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() + # Can't import __meta__.py if the requirements aren't installed # due to imports in __init__.py. This is a workaround. meta = {} -exec(read('fissa/__meta__.py'), meta) +exec(read("fissa/__meta__.py"), meta) -install_requires = read('requirements.txt').splitlines() +install_requires = read("requirements.txt").splitlines() extras_require = {} # Notebook dependencies for plotting -extras_require['plotting'] = read('requirements-plots.txt').splitlines() +extras_require["plotting"] = read("requirements-plots.txt").splitlines() # Dependencies for generating documentation -extras_require['docs'] = read('requirements-docs.txt').splitlines() +extras_require["docs"] = read("requirements-docs.txt").splitlines() # Test dependencies extras_require["test"] = read("requirements-test.txt").splitlines() @@ -32,13 +33,10 @@ def read(fname): extras_require["dev"] = read("requirements-dev.txt").splitlines() # Everything as a list. Replicated items are removed by use of set with {}. 
-extras_require['all'] = sorted(list( - {x for v in extras_require.values() for x in v} -)) +extras_require["all"] = sorted(list({x for v in extras_require.values() for x in v})) class PyTest(TestCommand): - def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] @@ -46,39 +44,40 @@ def finalize_options(self): def run_tests(self): import pytest + pytest.main(self.test_args) setup( - name = meta['name'], - python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*', - install_requires = install_requires, - extras_require = extras_require, - version = meta['version'], - author = meta['author'], - author_email = meta['author_email'], - description = meta['description'], - url = meta['url'], - package_dir = {meta['name']: os.path.join(".", meta['path'])}, - packages = [meta['name']], - license = "GNU", - long_description = read('README.rst'), + name=meta["name"], + python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", + install_requires=install_requires, + extras_require=extras_require, + version=meta["version"], + author=meta["author"], + author_email=meta["author_email"], + description=meta["description"], + url=meta["url"], + package_dir={meta["name"]: os.path.join(".", meta["path"])}, + packages=[meta["name"]], + license="GNU", + long_description=read("README.rst"), # https://pypi.org/pypi?%3Aaction=list_classifiers - classifiers = [ + classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Bio-Informatics", "Topic :: Scientific/Engineering :: Information Analysis", @@ -89,5 +88,5 @@ def run_tests(self): "Bug Tracker": "https://github.com/rochefort-lab/fissa/issues", "Citation": "https://www.doi.org/10.1038/s41598-018-21640-2", }, - cmdclass = {'test': PyTest}, + cmdclass={"test": PyTest}, ) From 3dfbfe2ef24a547128d1c7fdb18f807e16877830 Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:14:16 +0100 Subject: [PATCH 5/8] STY: Fix janky code formatting --- docs/conf.py | 26 +++++++++++++------------- fissa/ROI.py | 5 ----- fissa/tests/test_extraction.py | 16 ++-------------- 3 files changed, 15 insertions(+), 32 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 95b8abcb..8534a6be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -95,10 +95,10 @@ def setup(app): # If your documentation needs a minimal Sphinx version, state it here. # -# needs_sphinx = '1.0' +# needs_sphinx = "1.0" # Add any Sphinx extension module names here, as strings. 
They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom # ones. extensions = [ "sphinx.ext.autodoc", @@ -140,7 +140,7 @@ def setup(app): # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -# source_suffix = ['.rst', '.md'] +# source_suffix = [".rst", ".md"] source_suffix = ".rst" # The encoding of source files. @@ -199,8 +199,8 @@ def setup(app): # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. +# default: ``["localtoc.html", "relations.html", "sourcelink.html", +# "searchbox.html"]``. # # html_sidebars = {} @@ -214,18 +214,18 @@ def setup(app): # -- Options for LaTeX output ------------------------------------------------ latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). + # The paper size ("letterpaper" or "a4paper"). + # "papersize": "letterpaper", # - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). + # The font size ("10pt", "11pt" or "12pt"). + # "pointsize": "10pt", # - # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. + # "preamble": "", # - # 'preamble': '', # Latex figure (float) alignment - # # 'figure_align': 'htbp', + # # Need to manually declare what the delta symbol (Δ) corresponds to. "preamble": """ \DeclareUnicodeCharacter{394}{$\Delta$} @@ -279,11 +279,11 @@ def setup(app): # The unique identifier of the text. This can be a ISBN number # or the project homepage. # -# epub_identifier = '' +# epub_identifier = "" # A unique identification for the text. # -# epub_uid = '' +# epub_uid = "" # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] diff --git a/fissa/ROI.py b/fissa/ROI.py index 0591e676..dd375abf 100644 --- a/fissa/ROI.py +++ b/fissa/ROI.py @@ -67,11 +67,6 @@ def poly2mask(polygons, im_size): for poly in polygons: # assuming all points in the polygon share a z-coordinate z = int(np.array(poly.exterior.coords)[0][2]) - # @swkeemink: Commented out to remove a warning message for FISSA. 
- # if z > im_size[0]: - # warn('Polygon with zero-coordinate {} '.format(z) + - # 'cropped using im_size = {}'.format(im_size)) - # continue x_min, y_min, x_max, y_max = poly.bounds # Shift all points by 0.5 to move coordinates to corner of pixel diff --git a/fissa/tests/test_extraction.py b/fissa/tests/test_extraction.py index 44ddf1aa..4bd66424 100755 --- a/fissa/tests/test_extraction.py +++ b/fissa/tests/test_extraction.py @@ -484,20 +484,8 @@ def test_rois_not_list(self): def test_polys_1d(self): # check that rois2masks fails when the polys are not 2d polys1d = [ - np.array( - [ - [ - 39.0, - ] - ] - ), - np.array( - [ - [ - 72.0, - ] - ] - ), + np.array([[39.0]]), + np.array([[72.0]]), ] with self.assertRaises(ValueError): self.datahandler.rois2masks(polys1d, self.data) From dcd6ea8a15d9f257510f26fd94f90613aa1b50c5 Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Fri, 18 Jun 2021 17:40:36 +0100 Subject: [PATCH 6/8] DOC: Add black and pre-commit enabled badges --- README.rst | 8 ++++++++ docs/source/user-guide.rst | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 27acdcd5..ef045942 100644 --- a/README.rst +++ b/README.rst @@ -10,6 +10,8 @@ FISSA +------------------+----------------------------------------------------------+ | Build Status | |Documentation| |GHA tests| |AppVeyor| |Codecov| | +------------------+----------------------------------------------------------+ +| Code style | |black| |pre-commit| | ++------------------+----------------------------------------------------------+ | Interactive Demo | |Binder| | +------------------+----------------------------------------------------------+ | Support | |Gitter| | @@ -389,3 +391,9 @@ with this program. If not, see http://www.gnu.org/licenses/. .. |License| image:: https://img.shields.io/pypi/l/fissa :target: https://raw.githubusercontent.com/rochefort-lab/fissa/master/LICENSE :alt: GPLv3 License +.. |pre-commit| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white + :target: https://github.com/pre-commit/pre-commit + :alt: pre-commit +.. |black| image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: black diff --git a/docs/source/user-guide.rst b/docs/source/user-guide.rst index 84c4c0e9..7e7e6980 100644 --- a/docs/source/user-guide.rst +++ b/docs/source/user-guide.rst @@ -2,4 +2,4 @@ FISSA User Guide ================ .. 
include:: ../../README.rst - :start-line: 19 + :start-line: 21 From 2cde648373f31b5f380d7947f20bafb6fc46e369 Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Sun, 20 Jun 2021 01:20:37 +0100 Subject: [PATCH 7/8] DOC: Add pre-commit.ci badge --- README.rst | 39 +++++++++++++++++++++------------------ 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/README.rst b/README.rst index ef045942..7262d429 100644 --- a/README.rst +++ b/README.rst @@ -1,23 +1,23 @@ FISSA ===== -+------------------+----------------------------------------------------------+ -| Latest Release | |PyPI badge| |Py Versions| | -+------------------+----------------------------------------------------------+ -| License | |License| | -+------------------+----------------------------------------------------------+ -| Documentation | |readthedocs| | -+------------------+----------------------------------------------------------+ -| Build Status | |Documentation| |GHA tests| |AppVeyor| |Codecov| | -+------------------+----------------------------------------------------------+ -| Code style | |black| |pre-commit| | -+------------------+----------------------------------------------------------+ -| Interactive Demo | |Binder| | -+------------------+----------------------------------------------------------+ -| Support | |Gitter| | -+------------------+----------------------------------------------------------+ -| Citation | |DOI badge| | -+------------------+----------------------------------------------------------+ ++------------------+----------------------------------------------------------------------+ +| Latest Release | |PyPI badge| |Py Versions| | ++------------------+----------------------------------------------------------------------+ +| License | |License| | ++------------------+----------------------------------------------------------------------+ +| Documentation | |readthedocs| | ++------------------+----------------------------------------------------------------------+ +| Build Status | |Documentation| |GHA tests| |AppVeyor| |Codecov| |pre-commit-status| | ++------------------+----------------------------------------------------------------------+ +| Code style | |black| |pre-commit| | ++------------------+----------------------------------------------------------------------+ +| Interactive Demo | |Binder| | ++------------------+----------------------------------------------------------------------+ +| Support | |Gitter| | ++------------------+----------------------------------------------------------------------+ +| Citation | |DOI badge| | ++------------------+----------------------------------------------------------------------+ FISSA (Fast Image Signal Separation Analysis) is a Python package for decontaminating somatic signals from two-photon calcium imaging data. @@ -393,7 +393,10 @@ with this program. If not, see http://www.gnu.org/licenses/. :alt: GPLv3 License .. |pre-commit| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white :target: https://github.com/pre-commit/pre-commit - :alt: pre-commit + :alt: pre-commit enabled +.. |pre-commit-status| image:: https://results.pre-commit.ci/badge/github/rochefort-lab/fissa/master.svg + :target: https://results.pre-commit.ci/latest/github/rochefort-lab/fissa/master + :alt: pre-commit.ci status .. 
|black| image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/psf/black :alt: black From ec7b5317a0ea72cd999986753cbf35df2ee6343c Mon Sep 17 00:00:00 2001 From: Scott Lowe Date: Mon, 28 Jun 2021 13:15:54 +0100 Subject: [PATCH 8/8] DOC: Merge template placeholder preamble with ours --- docs/conf.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 8534a6be..2a9c177f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -220,12 +220,10 @@ def setup(app): # The font size ("10pt", "11pt" or "12pt"). # "pointsize": "10pt", # - # Additional stuff for the LaTeX preamble. - # "preamble": "", - # # Latex figure (float) alignment # 'figure_align': 'htbp', # + # Additional stuff for the LaTeX preamble. # Need to manually declare what the delta symbol (Δ) corresponds to. "preamble": """ \DeclareUnicodeCharacter{394}{$\Delta$}
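
For reference, the hunk above (PATCH 8/8) folds the Sphinx template's placeholder comments into the project's real LaTeX preamble, leaving docs/conf.py with a single documented "preamble" entry in latex_elements. The following is a minimal, illustrative sketch of what that setting amounts to — not a byte-for-byte copy of the project file; a raw string is used here so the backslashes reach LaTeX untouched, whereas the file in the diff uses a plain triple-quoted string:

    # Illustrative sketch of the merged latex_elements from docs/conf.py.
    latex_elements = {
        # Tell the LaTeX builder what the Unicode delta character
        # (U+0394, "Δ") maps to, so the PDF build can render it as \Delta.
        "preamble": r"""
    \DeclareUnicodeCharacter{394}{$\Delta$}
    """,
    }

Merging the two comment blocks keeps the template's documentation of the available keys (papersize, pointsize, figure_align) while avoiding a duplicate "Additional stuff for the LaTeX preamble" placeholder next to the real preamble value.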