diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d3a853ba44..25bfb50fd2 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -21,4 +21,4 @@ jobs:
     uses: ./.github/workflows/test_template.yml
     with:
       runs-on: '["ubuntu-latest", "macos-latest", "windows-latest"]'
-      python-version: '["3.8", "3.9", "3.10", 3.11]'
+      python-version: '["3.8", "3.9", "3.10", 3.11, 3.12]'
diff --git a/.github/workflows/test_compat.yml b/.github/workflows/test_compat.yml
index ff657193c6..830d93fedd 100644
--- a/.github/workflows/test_compat.yml
+++ b/.github/workflows/test_compat.yml
@@ -16,7 +16,7 @@ jobs:
     with:
       runs-on: '["ubuntu-latest", ]'
       python-version: '["3.8", ]'
-      depends: cython==0.29.24 numpy==1.23.0 scipy==1.8 nibabel==4.0.0 h5py==3.0.0 tqdm
+      depends: cython==0.29.24 numpy==1.19.5 scipy==1.8 nibabel==4.0.0 h5py==3.0.0 tqdm
   minimal-py39:
     uses: ./.github/workflows/test_template.yml
     with:
diff --git a/.github/workflows/test_pre.yml b/.github/workflows/test_pre.yml
index 69be51fd65..798ee1fa33 100644
--- a/.github/workflows/test_pre.yml
+++ b/.github/workflows/test_pre.yml
@@ -11,10 +11,10 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  PRE:
+  PRE-py311:
     uses: ./.github/workflows/test_template.yml
     with:
       runs-on: '["ubuntu-latest", ]'
-      python-version: '["3.11", ]'
+      python-version: '["3.11",]'
       use-pre: true
-      extra-depends: scikit_learn scipy statsmodels pandas tables
\ No newline at end of file
+      extra-depends: scikit_learn scipy statsmodels pandas tables
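The ``python-version`` values above are JSON strings, presumably expanded with ``fromJson`` inside the reusable template. GitHub's ``fromJson`` behaves like a JSON parser (illustrated here with Python's ``json``), which is why ``"3.10"`` must stay quoted while the bare ``3.11``/``3.12`` happen to survive:

.. code-block:: python

    import json

    # Bare numbers parse as floats: 3.10 would collapse to 3.1, while
    # the quoted "3.10" survives; 3.11 and 3.12 round-trip only because
    # they carry no trailing zero.
    print(json.loads('["3.8", "3.9", "3.10", 3.11, 3.12]'))
    # ['3.8', '3.9', '3.10', 3.11, 3.12]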
diff --git a/.github/workflows/test_template.yml b/.github/workflows/test_template.yml
index 49a44ab1e7..74df31ec45 100644
--- a/.github/workflows/test_template.yml
+++ b/.github/workflows/test_template.yml
@@ -93,9 +93,28 @@ jobs:
         channels: defaults, conda-forge
         use-only-tar-bz2: true
     - name: Install Dependencies
-      run: tools/ci/install_dependencies.sh
+      run: |
+        if [ "${{ inputs.use-pre }}" == "true" ]; then
+          tools/ci/install_dependencies.sh || echo "::warning::Experimental job, so 'Install Dependencies' failure ignored!"
+        else
+          tools/ci/install_dependencies.sh
+        fi
+    # No need to update mingw-w64, we use msvc
+    # mingw-w64 does not manage OpenMP threads well, so we avoid it for now
+    # Note that compilation works with mingw-w64 but 2-3 tests fail due to OpenMP
+    # - name: Install rtools (mingw-w64)
+    #   if: runner.os == 'Windows'
+    #   run: |
+    #     choco install rtools -y --no-progress --force --version=4.0.0.20220206
+    #     echo "/c/rtools40/ucrt64/bin;" >> $GITHUB_PATH
+    #     echo "PKG_CONFIG_PATH=/c/opt/64/lib/pkgconfig;" >> $GITHUB_ENV
     - name: Install DIPY
-      run: tools/ci/install.sh
+      run: |
+        if [ "${{ inputs.use-pre }}" == "true" ]; then
+          tools/ci/install.sh || echo "::warning::Experimental job, so 'Install DIPY' failure ignored!"
+        else
+          tools/ci/install.sh
+        fi
     - name: Setup Headless
       if: ${{ inputs.enable-viz-tests }}
       run: tools/ci/setup_headless.sh
@@ -117,7 +136,3 @@ jobs:
           flags: unittests
           name: codecov-umbrella
           verbose: true
-    # Enable tmate debugging of manually-triggered workflows if the input option was provided
-    # - name: Setup tmate session
-    #   if: ${{ failure() }}
-    #   uses: mxschmitt/action-tmate@v3
diff --git a/.gitignore b/.gitignore
index 74d18c18c2..6ce0e96c0e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,7 @@ dipy/.idea/
 tmp/
 venv
 build-install
+.mesonpy*
+build-install
+benchmarks/results/
+benchmarks/env/
diff --git a/.pep8speaks.yml b/.pep8speaks.yml
index 862a538639..d3b3d76ee9 100644
--- a/.pep8speaks.yml
+++ b/.pep8speaks.yml
@@ -22,5 +22,6 @@ pycodestyle:
     exclude:
         - doc/examples
        - dipy/info.py
+        - doc/conf.py
 only_mention_files_with_errors: True  # If False, a separate status comment for each file is made.
 descending_issues_order: False  # If True, PEP8 issues in message will be displayed in descending order of line numbers in the file
diff --git a/.spin/cmds.py b/.spin/cmds.py
new file mode 100644
index 0000000000..a31f77da13
--- /dev/null
+++ b/.spin/cmds.py
@@ -0,0 +1,199 @@
+"""Additional command-line interface for spin."""
+import os
+import shutil
+
+import click
+from spin import util
+from spin.cmds import meson
+
+
+# From scipy: benchmarks/benchmarks/common.py
+def _set_mem_rlimit(max_mem=None):
+    """Set address space rlimit."""
+    import resource
+    import psutil
+
+    mem = psutil.virtual_memory()
+
+    if max_mem is None:
+        max_mem = int(mem.total * 0.7)
+    cur_limit = resource.getrlimit(resource.RLIMIT_AS)
+    if cur_limit[0] > 0:
+        max_mem = min(max_mem, cur_limit[0])
+
+    try:
+        resource.setrlimit(resource.RLIMIT_AS, (max_mem, cur_limit[1]))
+    except ValueError:
+        # on macOS may raise: current limit exceeds maximum limit
+        pass
+
+
+def _commit_to_sha(commit):
+    p = util.run(['git', 'rev-parse', commit], output=False, echo=False)
+    if p.returncode != 0:
+        raise click.ClickException(
+            f'Could not find SHA matching commit `{commit}`')
+
+    return p.stdout.decode('ascii').strip()
+
+
+def _dirty_git_working_dir():
+    # Changes to the working directory
+    p0 = util.run(['git', 'diff-files', '--quiet'])
+
+    # Staged changes
+    p1 = util.run(['git', 'diff-index', '--quiet', '--cached', 'HEAD'])
+
+    return (p0.returncode != 0 or p1.returncode != 0)
+
+
+def _run_asv(cmd):
+    # Always use ccache, if installed
+    PATH = os.environ['PATH']
+    EXTRA_PATH = os.pathsep.join([
+        '/usr/lib/ccache', '/usr/lib/f90cache',
+        '/usr/local/lib/ccache', '/usr/local/lib/f90cache'
+    ])
+    env = os.environ
+    env['PATH'] = f'{EXTRA_PATH}{os.pathsep}{PATH}'
+
+    # Control BLAS/LAPACK threads
+    env['OPENBLAS_NUM_THREADS'] = '1'
+    env['MKL_NUM_THREADS'] = '1'
+
+    # Limit memory usage
+    try:
+        _set_mem_rlimit()
+    except (ImportError, RuntimeError):
+        pass
+
+    util.run(cmd, cwd='benchmarks', env=env)
+
+
+@click.command()
+@click.option(
+    '--tests', '-t',
+    default=None, metavar='TESTS', multiple=True,
+    help="Which tests to run"
+)
+@click.option(
+    '--compare', '-c',
+    is_flag=True,
+    default=False,
+    help="Compare benchmarks between the current branch and main "
+         "(unless other branches specified). "
+         "The benchmarks are each executed in a new isolated "
+         "environment."
+)
+@click.option(
+    '--verbose', '-v', is_flag=True, default=False
+)
+@click.option(
+    '--quick', '-q', is_flag=True, default=False,
+    help="Run each benchmark only once (timings won't be accurate)"
+)
+@click.argument(
+    'commits', metavar='',
+    required=False,
+    nargs=-1
+)
+@click.pass_context
+def bench(ctx, tests, compare, verbose, quick, commits):
+    """๐Ÿ‹ Run benchmarks.
+
+    \b
+    Examples:
+
+    \b
+    $ spin bench -t bench_lib
+    $ spin bench -t bench_random.Random
+    $ spin bench -t Random -t Shuffle
+
+    Two benchmark runs can be compared.
+    By default, `HEAD` is compared to `main`.
+    You can also specify the branches/commits to compare:
+
+    \b
+    $ spin bench --compare
+    $ spin bench --compare main
+    $ spin bench --compare main HEAD
+
+    You can also choose which benchmarks to run in comparison mode:
+
+    $ spin bench -t Random --compare
+    """
+    if not commits:
+        commits = ('main', 'HEAD')
+    elif len(commits) == 1:
+        commits = commits + ('HEAD',)
+    elif len(commits) > 2:
+        raise click.ClickException(
+            'Need a maximum of two revisions to compare'
+        )
+
+    bench_args = []
+    for t in tests:
+        bench_args += ['--bench', t]
+
+    if verbose:
+        bench_args = ['-v'] + bench_args
+
+    if quick:
+        bench_args = ['--quick'] + bench_args
+
+    if not compare:
+        # No comparison requested; we build and benchmark the current version
+
+        click.secho(
+            "Invoking `build` prior to running benchmarks:",
+            bold=True, fg="bright_green"
+        )
+        ctx.invoke(meson.build)
+
+        meson._set_pythonpath()
+
+        p = util.run(
+            ['python', '-c', 'import dipy; print(dipy.__version__)'],
+            cwd='benchmarks',
+            echo=False,
+            output=False
+        )
+        os.chdir('..')
+
+        dipy_ver = p.stdout.strip().decode('ascii')
+        click.secho(
+            f'Running benchmarks on DIPY {dipy_ver}',
+            bold=True, fg="bright_green"
+        )
+        cmd = [
+            'asv', 'run', '--dry-run', '--show-stderr', '--python=same'
+        ] + bench_args
+        _run_asv(cmd)
+    else:
+        # Ensure that we don't have uncommitted changes
+        commit_a, commit_b = [_commit_to_sha(c) for c in commits]
+
+        if commit_b == 'HEAD' and _dirty_git_working_dir():
+            click.secho(
+                "WARNING: you have uncommitted changes --- "
+                "these will NOT be benchmarked!",
+                fg="red"
+            )
+
+        cmd_compare = [
+            'asv', 'continuous', '--factor', '1.05',
+        ] + bench_args + [commit_a, commit_b]
+        _run_asv(cmd_compare)
+
+
+@click.command()
+def clean():
+    """๐Ÿงน Remove build and install folder."""
+    build_dir = "build"
+    install_dir = "build-install"
+    print(f"Removing `{build_dir}`")
+    if os.path.isdir(build_dir):
+        shutil.rmtree(build_dir)
+    print(f"Removing `{install_dir}`")
+    if os.path.isdir(install_dir):
+        shutil.rmtree(install_dir)
\ No newline at end of file
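``_set_mem_rlimit`` above leans on the POSIX ``resource`` API; a minimal stand-alone illustration (Unix-only, the 4 GiB figure is arbitrary):

.. code-block:: python

    import resource

    # (soft, hard) caps on the virtual address space, in bytes;
    # resource.RLIM_INFINITY means no limit.
    soft, hard = resource.getrlimit(resource.RLIMIT_AS)

    # Lower only the soft limit: allocations beyond it then raise
    # MemoryError instead of swamping the machine during a benchmark.
    resource.setrlimit(resource.RLIMIT_AS, (4 * 1024**3, hard))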
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index b17b4a3cb9..0000000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,16 +0,0 @@
-include AUTHOR LICENSE Makefile* MANIFEST.in setup* README.*
-include Changelog TODO
-recursive-include doc *
-recursive-include bin *
-recursive-include tools *
-recursive-include src *
-# setup utilities
-include setup_helpers.py
-include version_helpers.py
-include cythexts.py
-recursive-include dipy *.pxd
-recursive-include fake_pyrex *
-# put this stuff back into setup.py (package_data) once I'm enlightened
-# enough to accomplish this herculean task
-recursive-include dipy/data *
-include dipy/COMMIT_INFO.txt
diff --git a/benchmarks/README.rst b/benchmarks/README.rst
new file mode 100644
index 0000000000..94ca50031a
--- /dev/null
+++ b/benchmarks/README.rst
@@ -0,0 +1,120 @@
+=====================
+๐Ÿš€ DIPY Benchmarks ๐Ÿ“Š
+=====================
+
+Benchmarking DIPY with Airspeed Velocity (ASV). Measure the speed and performance of DIPY functions easily!
+
+Prerequisites โš™๏ธ
+---------------------
+
+Before you start, make sure you have ASV and virtualenv installed:
+
+.. code-block:: bash
+
+    pip install asv
+    pip install virtualenv
+
+Getting Started ๐Ÿƒโ€โ™‚๏ธ
+------------------
+
+DIPY benchmarking is as easy as a piece of ๐Ÿฐ with ASV. You don't need to install a development version of DIPY into your current Python environment. ASV manages virtual environments and builds DIPY automatically.
+
+Running Benchmarks ๐Ÿ“ˆ
+---------------------
+
+To run all available benchmarks, navigate to the root DIPY directory at the command line and execute:
+
+.. code-block:: bash
+
+    spin bench
+
+This command builds DIPY and runs all available benchmarks defined in the ``benchmarks/`` directory. Be patient; this could take a while, as each benchmark is run multiple times to measure its execution-time distribution.
+
+For local testing without replications, unleash the power of โšก:
+
+.. code-block:: bash
+
+    cd benchmarks/
+    export REGEXP="bench.*Ufunc"
+    asv run --dry-run --show-stderr --python=same --quick -b $REGEXP
+
+Here, ``$REGEXP`` is a regular expression used to match benchmarks, and ``--quick`` is used to avoid repetitions.
+
+To run benchmarks from a particular benchmark module, such as ``bench_segment.py``, simply append the filename without the extension:
+
+.. code-block:: bash
+
+    spin bench -t bench_segment
+
+To run a benchmark defined in a class, such as ``BenchQuickbundles`` from ``bench_segment.py``, show your benchmarking ninja skills:
+
+.. code-block:: bash
+
+    spin bench -t bench_segment.BenchQuickbundles
+
+Comparing Results ๐Ÿ“Š
+--------------------
+
+To compare benchmark results with another version/commit/branch, use the ``--compare`` option (or ``-c``):
+
+.. code-block:: bash
+
+    spin bench --compare v1.7.0 -t bench_segment
+    spin bench --compare 20d03bcfd -t bench_segment
+    spin bench -c master -t bench_segment
+
+These commands display results in the console but don't save them for future comparisons. For greater control and to save results for future comparisons, use ASV commands:
+
+.. code-block:: bash
+
+    cd benchmarks
+    asv run -n -e --python=same
+    asv publish
+    asv preview
+
+Benchmarking Versions ๐Ÿ’ป
+------------------------
+
+To benchmark or visualize releases on different machines locally, generate tags with their commits:
+
+.. code-block:: bash
+
+    cd benchmarks
+    # Get commits for tags
+    # delete tag_commits.txt before re-runs
+    for gtag in $(git tag --list --sort taggerdate | grep "^v"); do
+        git log $gtag --oneline -n1 --decorate=no | awk '{print $1;}' >> tag_commits.txt
+    done
+    # Use the last 20 versions for maximum power ๐Ÿ”ฅ
+    tail --lines=20 tag_commits.txt > 20_vers.txt
+    asv run HASHFILE:20_vers.txt
+    # Publish and view
+    asv publish
+    asv preview
+
+Contributing ๐Ÿค
+---------------
+
+TBD
+
+Writing Benchmarks โœ๏ธ
+---------------------
+
+See `ASV documentation`__ for basics on how to write benchmarks; a minimal sketch follows the list below.
+
+__ https://asv.readthedocs.io/
+
+Things to consider:
+
+- The benchmark suite should be importable with multiple DIPY versions.
+- Benchmark parameters should not depend on which DIPY version is installed.
+- Keep the runtime of the benchmark reasonable.
+- Prefer ASV's ``time_`` methods for benchmarking times.
+- Prepare arrays in the ``setup`` method rather than in the ``time_`` methods.
+- Be mindful of large arrays you create.
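+
+As referenced above, a minimal sketch of an ASV benchmark (the class and array sizes here are illustrative only, not part of the suite):
+
+.. code-block:: python
+
+    import numpy as np
+
+    class BenchSort:
+
+        def setup(self):
+            # Runs before each timing repeat; keep data creation here,
+            # out of the timed body.
+            rng = np.random.RandomState(0)
+            self.data = rng.rand(10000)
+
+        def time_sort(self):
+            # ASV reports the execution time of this body.
+            np.sort(self.data)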
โฉ +--------------------- + +Now you're all set to benchmark DIPY with ASV and watch your code reach for the stars! Happy benchmarking! ๐Ÿš€ + + + diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json new file mode 100644 index 0000000000..939e6c3d82 --- /dev/null +++ b/benchmarks/asv.conf.json @@ -0,0 +1,91 @@ +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "dipy", + + // The project's homepage + "project_url": "https://dipy.org", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "..", + + // List of branches to benchmark. If not provided, defaults to "master" + // (for git) or "tip" (for mercurial). + "branches": ["HEAD"], + + "build_command": [ + "python -m build --wheel -o {build_cache_dir} {build_dir}" + ], + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "virtualenv", + + // the base URL to show a commit for the project. + "show_commit_url": "https://github.com/dipy/dipy/commit/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + // "pythons": ["3.9"], + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list indicates to just test against the default (latest) + // version. + "matrix": { + "Cython": [], + "build": [], + "packaging": [] + }, + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "benchmarks", + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + "env_dir": "env", + + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + "results_dir": "results", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + "html_dir": "html", + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache wheels of the recent builds in each + // environment, making them faster to install next time. This is + // number of builds to keep, per environment. + "build_cache_size": 8, + + // The commits after which the regression search in `asv publish` + // should start looking for regressions. Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. 
+ // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // } +} \ No newline at end of file diff --git a/benchmarks/asv_compare.conf.json.tpl b/benchmarks/asv_compare.conf.json.tpl new file mode 100644 index 0000000000..0df4060e60 --- /dev/null +++ b/benchmarks/asv_compare.conf.json.tpl @@ -0,0 +1,99 @@ +// This config file is almost similar to 'asv.conf.json' except it contains +// custom tokens that can be substituted by 'runtests.py' and ASV, +// due to the necessity to add custom build options when `--bench-compare` +// is used. +{ + // The version of the config file format. Do not change, unless + // you know what you are doing. + "version": 1, + + // The name of the project being benchmarked + "project": "dipy", + + // The project's homepage + "project_url": "https://www.dipy.org/", + + // The URL or local path of the source code repository for the + // project being benchmarked + "repo": "..", + + // List of branches to benchmark. If not provided, defaults to "master" + // (for git) or "tip" (for mercurial). + "branches": ["HEAD"], + + // The DVCS being used. If not set, it will be automatically + // determined from "repo" by looking at the protocol in the URL + // (if remote), or by looking for special directories, such as + // ".git" (if local). + "dvcs": "git", + + // The tool to use to create environments. May be "conda", + // "virtualenv" or other value depending on the plugins in use. + // If missing or the empty string, the tool will be automatically + // determined by looking for tools on the PATH environment + // variable. + "environment_type": "virtualenv", + + // the base URL to show a commit for the project. + "show_commit_url": "https://github.com/dipy/dipy/commit/", + + // The Pythons you'd like to test against. If not provided, defaults + // to the current version of Python used to run `asv`. + // "pythons": ["3.9"], + + // The matrix of dependencies to test. Each key is the name of a + // package (in PyPI) and the values are version numbers. An empty + // list indicates to just test against the default (latest) + // version. + "matrix": { + "Cython": [], + "setuptools": ["59.2.0"], + "packaging": [] + }, + + // The directory (relative to the current directory) that benchmarks are + // stored in. If not provided, defaults to "benchmarks" + "benchmark_dir": "benchmarks", + + // The directory (relative to the current directory) to cache the Python + // environments in. If not provided, defaults to "env" + // NOTE: changes dir name will requires update `generate_asv_config()` in + // runtests.py + "env_dir": "env", + + + // The directory (relative to the current directory) that raw benchmark + // results are stored in. If not provided, defaults to "results". + "results_dir": "results", + + // The directory (relative to the current directory) that the html tree + // should be written to. If not provided, defaults to "html". + "html_dir": "html", + + // The number of characters to retain in the commit hashes. + // "hash_length": 8, + + // `asv` will cache wheels of the recent builds in each + // environment, making them faster to install next time. This is + // number of builds to keep, per environment. 
+ "build_cache_size": 8, + + "build_command" : [ + "python -m build {dipy_build_options}", + // pip ignores '--global-option' when pep517 is enabled, we also enabling pip verbose to + // be reached from asv `--verbose` so we can verify the build options. + "PIP_NO_BUILD_ISOLATION=false python {build_dir}/benchmarks/asv_pip_nopep517.py -v {dipy_global_options} --no-deps --no-index -w {build_cache_dir} {build_dir}" + ], + // The commits after which the regression search in `asv publish` + // should start looking for regressions. Dictionary whose keys are + // regexps matching to benchmark names, and values corresponding to + // the commit (exclusive) after which to start looking for + // regressions. The default is to start from the first commit + // with results. If the commit is `null`, regression detection is + // skipped for the matching benchmark. + // + // "regressions_first_commits": { + // "some_benchmark": "352cdf", // Consider regressions only after this commit + // "another_benchmark": null, // Skip regression detection altogether + // } +} \ No newline at end of file diff --git a/benchmarks/asv_pip_nopep517.py b/benchmarks/asv_pip_nopep517.py new file mode 100644 index 0000000000..c1f2d295e8 --- /dev/null +++ b/benchmarks/asv_pip_nopep517.py @@ -0,0 +1,31 @@ +""" +This file is used by asv_compare.conf.json.tpl. + +Note +---- + +This file is copied (possibly with major modifications) from the +sources of the numpy project - https://github.com/numpy/numpy. +It remains licensed as the rest of NUMPY (BSD 3-Clause as of November 2023). + +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +# +# See COPYING file distributed along with the Numpy package for the +# copyright and license terms. +# +# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## +""" +import subprocess +import sys + +# pip ignores '--global-option' when pep517 is enabled therefore we disable it. +cmd = [sys.executable, '-mpip', 'wheel', '--no-use-pep517'] +try: + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True) +except Exception as e: + output = str(e.output) +if "no such option" in output: + print("old version of pip, escape '--no-use-pep517'") + cmd.pop() + +subprocess.run(cmd + sys.argv[1:]) \ No newline at end of file diff --git a/dipy/core/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py similarity index 100% rename from dipy/core/benchmarks/__init__.py rename to benchmarks/benchmarks/__init__.py diff --git a/benchmarks/benchmarks/bench_reconst.py b/benchmarks/benchmarks/bench_reconst.py new file mode 100644 index 0000000000..1110af026a --- /dev/null +++ b/benchmarks/benchmarks/bench_reconst.py @@ -0,0 +1,89 @@ +""" Benchmarks for``dipy.reconst`` module.""" + +import numpy as np + +from dipy.core.sphere import unique_edges +from dipy.core.gradients import GradientTable +from dipy.data import default_sphere, read_stanford_labels +from dipy.io.image import load_nifti_data +from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel +from dipy.reconst.recspeed import local_maxima +from dipy.reconst.vec_val_sum import vec_val_vect + + +class BenchRecSpeed: + + def setup(self): + vertices, faces = default_sphere.vertices, default_sphere.faces + self.edges = unique_edges(faces) + self.odf = np.zeros(len(vertices)) + self.odf[1] = 1. + self.odf[143] = 143. + self.odf[305] = 305. 
+ + def time_local_maxima(self): + local_maxima(self.odf, self.edges) + + +class BenchVecValSum: + + def setup(self): + + def make_vecs_vals(shape): + return (np.random.randn(*shape), + np.random.randn(*(shape[:-2] + shape[-1:]))) + + shape = (10, 12, 3, 3) + self.evecs, self.evals = make_vecs_vals(shape) + + def time_vec_val_vect(self): + vec_val_vect(self.evecs, self.evals) + + +# class BenchCSD: + +# def setup(self): +# img, self.gtab, labels_img = read_stanford_labels() +# data = img.get_fdata() + +# labels = labels_img.get_fdata() +# shape = labels.shape +# mask = np.in1d(labels, [1, 2]) +# mask.shape = shape + +# center = (50, 40, 40) +# width = 12 +# a, b, c = center +# hw = width // 2 +# idx = (slice(a - hw, a + hw), slice(b - hw, b + hw), +# slice(c - hw, c + hw)) + +# self.data_small = data[idx].copy() +# self.mask_small = mask[idx].copy() +# voxels = self.mask_small.sum() +# self.small_gtab = GradientTable(self.gtab.gradients[:75]) + + + # def time_csdeconv_basic(self): + # # TODO: define response and remove None + # sh_order = 8 + # model = ConstrainedSphericalDeconvModel(self.gtab, None, + # sh_order=sh_order) + # model.fit(self.data_small, self.mask_small) + + # def time_csdeconv_small_dataset(self): + # # TODO: define response and remove None + # # Smaller data set + # # data_small = data_small[..., :75].copy() + # sh_order = 8 + # model = ConstrainedSphericalDeconvModel(self.small_gtab, None, + # sh_order=sh_order) + # model.fit(self.data_small, self.mask_small) + + # def time_csdeconv_super_resolution(self): + # # TODO: define response and remove None + # # Super resolution + # sh_order = 12 + # model = ConstrainedSphericalDeconvModel(self.gtab, None, + # sh_order=sh_order) + # model.fit(self.data_small, self.mask_small) diff --git a/benchmarks/benchmarks/bench_segment.py b/benchmarks/benchmarks/bench_segment.py new file mode 100644 index 0000000000..99bb6f120b --- /dev/null +++ b/benchmarks/benchmarks/bench_segment.py @@ -0,0 +1,86 @@ +""" Benchmarks for ``dipy.segment`` module.""" + +import numpy as np + +from dipy.data import get_fnames +from dipy.io.streamline import load_tractogram +from dipy.tracking.streamline import Streamlines, set_number_of_points +from dipy.segment.metricspeed import Metric +from dipy.segment.clustering import QuickBundles as QB_New +from dipy.segment.mask import bounding_box + + +class BenchMask: + + def setup(self): + self.dense_vol = np.zeros((100, 100, 100)) + self.dense_vol[:] = 10 + self.sparse_vol = np.zeros((100, 100, 100)) + self.sparse_vol[0, 0, 0] = 1 + + def time_bounding_box_sparse(self): + bounding_box(self.sparse_vol) + + def time_bounding_box_dense(self): + bounding_box(self.dense_vol) + + +class BenchQuickbundles: + + def setup(self): + dtype = "float32" + nb_points = 12 + # The expected number of clusters of the fornix using threshold=10 + # is 4. + self.basic_parameters = {"threshold": 10, + "expected_nb_clusters": 4 * 8, + } + + fname = get_fnames('fornix') + + fornix = load_tractogram(fname, 'same', + bbox_valid_check=False).streamlines + + fornix_streamlines = Streamlines(fornix) + fornix_streamlines = set_number_of_points(fornix_streamlines, + nb_points) + + # Create eight copies of the fornix to be clustered (one in + # each octant). 
+ self.streamlines = [] + self.streamlines += [s + np.array([100, 100, 100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([100, -100, 100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([100, 100, -100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([100, -100, -100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([-100, 100, 100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([-100, -100, 100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([-100, 100, -100], dtype) + for s in fornix_streamlines] + self.streamlines += [s + np.array([-100, -100, -100], dtype) + for s in fornix_streamlines] + + class MDFpy(Metric): + def are_compatible(self, shape1, shape2): + return shape1 == shape2 + + def dist(self, features1, features2): + dist = np.sqrt(np.sum((features1 - features2)**2, axis=1)) + dist = np.sum(dist / len(features1)) + return dist + + self.custom_metric = MDFpy() + + def time_quickbundles(self): + qb2 = QB_New(self.basic_parameters.get('threshold', 10)) + _ = qb2.cluster(self.streamlines) + + def time_quickbundles_metric(self): + qb = QB_New(self.basic_parameters.get('threshold', 10), + metric=self.custom_metric) + _ = qb.cluster(self.streamlines) diff --git a/benchmarks/benchmarks/bench_tracking.py b/benchmarks/benchmarks/bench_tracking.py new file mode 100644 index 0000000000..82d9a4a118 --- /dev/null +++ b/benchmarks/benchmarks/bench_tracking.py @@ -0,0 +1,60 @@ +""" Benchmarks for functions related to streamline in ``dipy.tracking``module. +""" + +import numpy as np + +from dipy.data import get_fnames +from dipy.io.streamline import load_tractogram + +from dipy.tracking.streamline import set_number_of_points, length +from dipy.tracking.streamlinespeed import compress_streamlines + +from dipy.tracking import Streamlines + + +class BenchStreamlines: + + def setup(self): + rng = np.random.RandomState(42) + nb_streamlines = 20000 + min_nb_points = 2 + max_nb_points = 100 + + def generate_streamlines(nb_streamlines, min_nb_points, + max_nb_points, rng): + streamlines = \ + [rng.rand(*(rng.randint(min_nb_points, max_nb_points), 3)) + for _ in range(nb_streamlines)] + return streamlines + + self.data = {} + self.data['rng'] = rng + self.data['nb_streamlines'] = nb_streamlines + self.data['streamlines'] = generate_streamlines( + nb_streamlines, min_nb_points, max_nb_points, rng=rng) + self.data['streamlines_arrseq'] = Streamlines(self.data['streamlines']) + + fname = get_fnames('fornix') + fornix = load_tractogram(fname, 'same', + bbox_valid_check=False).streamlines + + self.fornix_streamlines = Streamlines(fornix) + + def time_set_number_of_points(self): + streamlines = self.data['streamlines'] + set_number_of_points(streamlines, 50) + + def time_set_number_of_points_arrseq(self): + streamlines = self.data['streamlines_arrseq'] + set_number_of_points(streamlines, 50) + + def time_length(self): + streamlines = self.data['streamlines'] + length(streamlines) + + def time_length_arrseq(self): + streamlines = self.data['streamlines_arrseq'] + length(streamlines) + + def time_compress_streamlines(self): + compress_streamlines(self.fornix_streamlines) \ No newline at end of file diff --git a/bin/dipy_align_affine b/bin/dipy_align_affine index 823f4d34e1..05a73c7de2 100755 --- a/bin/dipy_align_affine +++ b/bin/dipy_align_affine @@ -1,7 +1,6 @@ #!python -from dipy.workflows.flow_runner import run_flow -from dipy.workflows.align 
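During development it can help to exercise a benchmark body directly, outside ASV. A rough sketch (assuming the repository root is on ``sys.path`` and the ``fornix`` dataset is fetchable):

.. code-block:: python

    from benchmarks.benchmarks.bench_tracking import BenchStreamlines

    bench = BenchStreamlines()
    bench.setup()        # builds the synthetic and fornix streamlines once
    bench.time_length()  # the body ASV would normally time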
diff --git a/bin/dipy_align_affine b/bin/dipy_align_affine
index 823f4d34e1..05a73c7de2 100755
--- a/bin/dipy_align_affine
+++ b/bin/dipy_align_affine
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import ImageRegistrationFlow
+from dipy.workflows.cli import dipy_align_affine

 if __name__ == "__main__":
-    run_flow(ImageRegistrationFlow())
+    dipy_align_affine()
diff --git a/bin/dipy_align_syn b/bin/dipy_align_syn
index f3a40e9eb4..2899e20211 100644
--- a/bin/dipy_align_syn
+++ b/bin/dipy_align_syn
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import SynRegistrationFlow
+from dipy.workflows.cli import dipy_align_syn

 if __name__ == "__main__":
-    run_flow(SynRegistrationFlow())
+    dipy_align_syn()
diff --git a/bin/dipy_apply_transform b/bin/dipy_apply_transform
index b6d6b4d719..9429e2e5b4 100644
--- a/bin/dipy_apply_transform
+++ b/bin/dipy_apply_transform
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import ApplyTransformFlow
+from dipy.workflows.cli import dipy_apply_transform

 if __name__ == "__main__":
-    run_flow(ApplyTransformFlow())
+    dipy_apply_transform()
diff --git a/bin/dipy_buan_lmm b/bin/dipy_buan_lmm
index c57bcb4bd4..810585f861 100755
--- a/bin/dipy_buan_lmm
+++ b/bin/dipy_buan_lmm
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.stats import LinearMixedModelsFlow
+from dipy.workflows.cli import dipy_buan_lmm

 if __name__ == "__main__":
-    run_flow(LinearMixedModelsFlow())
+    dipy_buan_lmm()
diff --git a/bin/dipy_buan_profiles b/bin/dipy_buan_profiles
index 0b68ce021f..95eadadf84 100755
--- a/bin/dipy_buan_profiles
+++ b/bin/dipy_buan_profiles
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.stats import BundleAnalysisTractometryFlow
+from dipy.workflows.cli import dipy_buan_profiles

 if __name__ == "__main__":
-    run_flow(BundleAnalysisTractometryFlow())
+    dipy_buan_profiles()
diff --git a/bin/dipy_buan_shapes b/bin/dipy_buan_shapes
index 15e945fc49..4341a3735a 100755
--- a/bin/dipy_buan_shapes
+++ b/bin/dipy_buan_shapes
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.stats import BundleShapeAnalysis
+from dipy.workflows.cli import dipy_buan_shapes

 if __name__ == "__main__":
-    run_flow(BundleShapeAnalysis())
+    dipy_buan_shapes()
diff --git a/bin/dipy_bundlewarp b/bin/dipy_bundlewarp
index b979450d33..3c443c9d5b 100755
--- a/bin/dipy_bundlewarp
+++ b/bin/dipy_bundlewarp
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import BundleWarpFlow
+from dipy.workflows.cli import dipy_bundlewarp

 if __name__ == "__main__":
-    run_flow(BundleWarpFlow())
+    dipy_bundlewarp()
diff --git a/bin/dipy_correct_motion b/bin/dipy_correct_motion
index b44b94b7b0..b8ded02af2 100644
--- a/bin/dipy_correct_motion
+++ b/bin/dipy_correct_motion
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import MotionCorrectionFlow
+from dipy.workflows.cli import dipy_correct_motion

 if __name__ == "__main__":
-    run_flow(MotionCorrectionFlow())
+    dipy_correct_motion()
diff --git a/bin/dipy_denoise_lpca b/bin/dipy_denoise_lpca
index d49eec3356..9864459bae 100644
--- a/bin/dipy_denoise_lpca
+++ b/bin/dipy_denoise_lpca
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.denoise import LPCAFlow
+from dipy.workflows.cli import dipy_denoise_lpca

 if __name__ == "__main__":
-    run_flow(LPCAFlow())
+    dipy_denoise_lpca()
diff --git a/bin/dipy_denoise_mppca b/bin/dipy_denoise_mppca
index 1a4aa50589..678a10b2c2 100644
--- a/bin/dipy_denoise_mppca
+++ b/bin/dipy_denoise_mppca
@@ -1,8 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.denoise import MPPCAFlow
-
+from dipy.workflows.cli import dipy_denoise_mppca

 if __name__ == "__main__":
-    run_flow(MPPCAFlow())
+    dipy_denoise_mppca()
diff --git a/bin/dipy_denoise_nlmeans b/bin/dipy_denoise_nlmeans
index 3c1cba6f34..34c11b6770 100644
--- a/bin/dipy_denoise_nlmeans
+++ b/bin/dipy_denoise_nlmeans
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.denoise import NLMeansFlow
+from dipy.workflows.cli import dipy_denoise_nlmeans

 if __name__ == "__main__":
-    run_flow(NLMeansFlow())
+    dipy_denoise_nlmeans()
diff --git a/bin/dipy_denoise_patch2self b/bin/dipy_denoise_patch2self
index ceb24c8440..d566cd7f06 100644
--- a/bin/dipy_denoise_patch2self
+++ b/bin/dipy_denoise_patch2self
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.denoise import Patch2SelfFlow
+from dipy.workflows.cli import dipy_denoise_patch2self

 if __name__ == "__main__":
-    run_flow(Patch2SelfFlow())
+    dipy_denoise_patch2self()
diff --git a/bin/dipy_evac_plus b/bin/dipy_evac_plus
index 3f3a86ebd6..0b2cf89055 100644
--- a/bin/dipy_evac_plus
+++ b/bin/dipy_evac_plus
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.nn import EVACPlusFlow
+from dipy.workflows.cli import dipy_evac_plus

 if __name__ == "__main__":
-    run_flow(EVACPlusFlow())
+    dipy_evac_plus()
diff --git a/bin/dipy_fetch b/bin/dipy_fetch
index 10467bc4ff..b5f77c1d9f 100644
--- a/bin/dipy_fetch
+++ b/bin/dipy_fetch
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.io import FetchFlow
+from dipy.workflows.cli import dipy_fetch

 if __name__ == "__main__":
-    run_flow(FetchFlow())
+    dipy_fetch()
diff --git a/bin/dipy_fit_csa b/bin/dipy_fit_csa
index 588bd13dc2..036ffba191 100755
--- a/bin/dipy_fit_csa
+++ b/bin/dipy_fit_csa
@@ -1,7 +1,5 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstCSAFlow
-
+from dipy.workflows.cli import dipy_fit_csa
 if __name__ == "__main__":
-    run_flow(ReconstCSAFlow())
+    dipy_fit_csa()
diff --git a/bin/dipy_fit_csd b/bin/dipy_fit_csd
index 288dc47b1d..8dbea32f96 100755
--- a/bin/dipy_fit_csd
+++ b/bin/dipy_fit_csd
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstCSDFlow
+from dipy.workflows.cli import dipy_fit_csd

 if __name__ == "__main__":
-    run_flow(ReconstCSDFlow())
+    dipy_fit_csd()
diff --git a/bin/dipy_fit_dki b/bin/dipy_fit_dki
index b9ae2c18e2..326df3bf5f 100755
--- a/bin/dipy_fit_dki
+++ b/bin/dipy_fit_dki
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstDkiFlow
+from dipy.workflows.cli import dipy_fit_dki

 if __name__ == "__main__":
-    run_flow(ReconstDkiFlow())
+    dipy_fit_dki()
diff --git a/bin/dipy_fit_dti b/bin/dipy_fit_dti
index fc39e9d2eb..611443bc9b 100755
--- a/bin/dipy_fit_dti
+++ b/bin/dipy_fit_dti
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstDtiFlow
+from dipy.workflows.cli import dipy_fit_dti

 if __name__ == "__main__":
-    run_flow(ReconstDtiFlow())
+    dipy_fit_dti()
diff --git a/bin/dipy_fit_ivim b/bin/dipy_fit_ivim
index 71dc1e9ddd..5540e7799b 100644
--- a/bin/dipy_fit_ivim
+++ b/bin/dipy_fit_ivim
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstIvimFlow
+from dipy.workflows.cli import dipy_fit_ivim

 if __name__ == "__main__":
-    run_flow(ReconstIvimFlow())
+    dipy_fit_ivim()
diff --git a/bin/dipy_fit_mapmri b/bin/dipy_fit_mapmri
index 756284bcf8..ab6a416d9d 100755
--- a/bin/dipy_fit_mapmri
+++ b/bin/dipy_fit_mapmri
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.reconst import ReconstMAPMRIFlow
+from dipy.workflows.cli import dipy_fit_mapmri

 if __name__ == "__main__":
-    run_flow(ReconstMAPMRIFlow())
\ No newline at end of file
+    dipy_fit_mapmri()
\ No newline at end of file
diff --git a/bin/dipy_gibbs_ringing b/bin/dipy_gibbs_ringing
index 5771387763..842006975a 100644
--- a/bin/dipy_gibbs_ringing
+++ b/bin/dipy_gibbs_ringing
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.denoise import GibbsRingingFlow
+from dipy.workflows.cli import dipy_gibbs_ringing

 if __name__ == "__main__":
-    run_flow(GibbsRingingFlow())
+    dipy_gibbs_ringing()
diff --git a/bin/dipy_horizon b/bin/dipy_horizon
index e831b621a6..055f2de89a 100755
--- a/bin/dipy_horizon
+++ b/bin/dipy_horizon
@@ -1,7 +1,5 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.viz import HorizonFlow
-
+from dipy.workflows.cli import dipy_horizon
 if __name__ == "__main__":
-    run_flow(HorizonFlow())
+    dipy_horizon()
diff --git a/bin/dipy_info b/bin/dipy_info
index 96d0a1b4cb..43fff8d0e5 100755
--- a/bin/dipy_info
+++ b/bin/dipy_info
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.io import IoInfoFlow
+from dipy.workflows.cli import dipy_info

 if __name__ == "__main__":
-    run_flow(IoInfoFlow())
+    dipy_info()
diff --git a/bin/dipy_labelsbundles b/bin/dipy_labelsbundles
index ac4c39d40d..e4d6f67a07 100755
--- a/bin/dipy_labelsbundles
+++ b/bin/dipy_labelsbundles
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.segment import LabelsBundlesFlow
+from dipy.workflows.cli import dipy_labelsbundles

 if __name__ == "__main__":
-    run_flow(LabelsBundlesFlow())
+    dipy_labelsbundles()
diff --git a/bin/dipy_mask b/bin/dipy_mask
index 81e4fbf544..3392f09f0d 100755
--- a/bin/dipy_mask
+++ b/bin/dipy_mask
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.mask import MaskFlow
+from dipy.workflows.cli import dipy_mask

 if __name__ == "__main__":
-    run_flow(MaskFlow())
+    dipy_mask()
diff --git a/bin/dipy_median_otsu b/bin/dipy_median_otsu
index e8c7fdbda4..93491b7bca 100755
--- a/bin/dipy_median_otsu
+++ b/bin/dipy_median_otsu
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.segment import MedianOtsuFlow
+from dipy.workflows.cli import dipy_median_otsu

 if __name__ == "__main__":
-    run_flow(MedianOtsuFlow())
+    dipy_median_otsu()
diff --git a/bin/dipy_recobundles b/bin/dipy_recobundles
index 1e23a3598d..b2dc4f508b 100755
--- a/bin/dipy_recobundles
+++ b/bin/dipy_recobundles
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.segment import RecoBundlesFlow
+from dipy.workflows.cli import dipy_recobundles

 if __name__ == "__main__":
-    run_flow(RecoBundlesFlow())
+    dipy_recobundles()
diff --git a/bin/dipy_reslice b/bin/dipy_reslice
index 3f734fdb44..5eb67d795d 100755
--- a/bin/dipy_reslice
+++ b/bin/dipy_reslice
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import ResliceFlow
+from dipy.workflows.cli import dipy_reslice

 if __name__ == "__main__":
-    run_flow(ResliceFlow())
\ No newline at end of file
+    dipy_reslice()
\ No newline at end of file
diff --git a/bin/dipy_slr b/bin/dipy_slr
index a56558ebdd..2591e45def 100755
--- a/bin/dipy_slr
+++ b/bin/dipy_slr
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.align import SlrWithQbxFlow
+from dipy.workflows.cli import dipy_slr

 if __name__ == "__main__":
-    run_flow(SlrWithQbxFlow())
\ No newline at end of file
+    dipy_slr()
\ No newline at end of file
diff --git a/bin/dipy_snr_in_cc b/bin/dipy_snr_in_cc
index 630999d7c4..d7af980055 100755
--- a/bin/dipy_snr_in_cc
+++ b/bin/dipy_snr_in_cc
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.stats import SNRinCCFlow
+from dipy.workflows.cli import dipy_snr_in_cc

 if __name__ == "__main__":
-    run_flow(SNRinCCFlow())
+    dipy_snr_in_cc()
diff --git a/bin/dipy_split b/bin/dipy_split
index 04c975b4a0..10504e59bf 100644
--- a/bin/dipy_split
+++ b/bin/dipy_split
@@ -1,8 +1,7 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.io import SplitFlow
+from dipy.workflows.cli import dipy_split

 if __name__ == "__main__":
-    run_flow(SplitFlow())
+    dipy_split()
diff --git a/bin/dipy_track b/bin/dipy_track
index 4a4e26c552..fa5a9dab0e 100755
--- a/bin/dipy_track
+++ b/bin/dipy_track
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.tracking import LocalFiberTrackingPAMFlow
+from dipy.workflows.cli import dipy_track

 if __name__ == "__main__":
-    run_flow(LocalFiberTrackingPAMFlow())
+    dipy_track()
diff --git a/bin/dipy_track_pft b/bin/dipy_track_pft
index 95204fbd90..f77af79ecc 100755
--- a/bin/dipy_track_pft
+++ b/bin/dipy_track_pft
@@ -1,7 +1,6 @@
 #!python
-from dipy.workflows.flow_runner import run_flow
-from dipy.workflows.tracking import PFTrackingPAMFlow
+from dipy.workflows.cli import dipy_track_pft

 if __name__ == "__main__":
-    run_flow(PFTrackingPAMFlow())
+    dipy_track_pft()
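Every ``bin/`` script above now defers to ``dipy.workflows.cli``, which is not shown in this diff. A plausible sketch of the wrapper pattern it centralizes (hypothetical body, inferred from the old scripts):

.. code-block:: python

    # Hypothetical sketch of dipy/workflows/cli.py -- not part of this diff.
    from dipy.workflows.align import ImageRegistrationFlow
    from dipy.workflows.flow_runner import run_flow

    def dipy_align_affine():
        # One console-script entry point per workflow, so the build backend
        # can declare them in project metadata instead of shipping bin/ files.
        run_flow(ImageRegistrationFlow())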
diff --git a/cythexts.py b/cythexts.py
deleted file mode 100644
index 0e7595b2af..0000000000
--- a/cythexts.py
+++ /dev/null
@@ -1,292 +0,0 @@
-import os
-from os.path import splitext, sep as filesep, join as pjoin, relpath
-from hashlib import sha1
-
-from setuptools.command.build_ext import build_ext
-from setuptools.command.sdist import sdist
-from packaging.version import Version
-
-
-def derror_maker(klass, msg):
-    """ Decorate setuptools class to make run method raise error """
-    class K(klass):
-        def run(self):
-            raise RuntimeError(msg)
-    return K
-
-
-def stamped_pyx_ok(exts, hash_stamp_fname):
-    """ Check for match of recorded hashes for pyx, corresponding c files
-
-    Parameters
-    ----------
-    exts : sequence of ``Extension``
-        setuptools ``Extension`` instances, in fact only need to contain a
-        ``sources`` sequence field.
-    hash_stamp_fname : str
-        filename of text file containing hash stamps
-
-    Returns
-    -------
-    tf : bool
-        True if there is a corresponding c file for each pyx or py file in
-        `exts` sources, and the hash for both the (pyx, py) file *and* the c
-        file match those recorded in the file named in `hash_stamp_fname`.
-    """
-    # Calculate hashes for pyx and c files. Check for presence of c files.
-    stamps = {}
-    for mod in exts:
-        for source in mod.sources:
-            base, ext = splitext(source)
-            if ext not in ('.pyx', '.py'):
-                continue
-            source_hash = sha1(open(source, 'rb').read()).hexdigest()
-            c_fname = base + '.c'
-            try:
-                c_file = open(c_fname, 'rb')
-            except OSError:
-                return False
-            c_hash = sha1(c_file.read()).hexdigest()
-            stamps[source_hash] = source
-            stamps[c_hash] = c_fname
-    # Read stamps from hash_stamp_fname; check in stamps dictionary
-    try:
-        stamp_file = open(hash_stamp_fname, 'rt')
-    except OSError:
-        return False
-    for line in stamp_file:
-        if line.startswith('#'):
-            continue
-        fname, hash = [e.strip() for e in line.split(',')]
-        if hash not in stamps:
-            return False
-        # Compare path made canonical for \/
-        fname = fname.replace(filesep, '/')
-        if not stamps[hash].replace(filesep, '/') == fname:
-            return False
-        stamps.pop(hash)
-    # All good if we found all hashes we need
-    return len(stamps) == 0
-
-
-def cyproc_exts(exts, cython_min_version,
-                hash_stamps_fname='pyx-stamps',
-                build_ext=build_ext):
-    """ Process sequence of `exts` to check if we need Cython. Return builder
-
-    Parameters
-    ----------
-    exts : sequence of Setuptools ``Extension``
-        If we already have good c files for any pyx or py sources, we replace
-        the pyx or py files with their compiled up c versions inplace.
-    cython_min_version : str
-        Minimum cython version needed for compile
-    hash_stamps_fname : str, optional
-        filename with hashes for pyx/py and c files known to be in sync.
-        Default is 'pyx-stamps'
-    build_ext : Setuptools command
-        default build_ext to return if not cythonizing. Default is setuptools
-        ``build_ext`` class
-
-    Returns
-    -------
-    builder : ``setuptools`` ``build_ext`` class or similar
-        Can be ``build_ext`` input (if we have good c files) or cython
-        ``build_ext`` if we have a good cython, or a class raising an
-        informative error on ``run()``
-    need_cython : bool
-        True if we need Cython to build extensions, False otherwise.
-    """
-    if stamped_pyx_ok(exts, hash_stamps_fname):
-        # Replace pyx with c files, use standard builder
-        for mod in exts:
-            sources = []
-            for source in mod.sources:
-                base, ext = splitext(source)
-                if ext in ('.pyx', '.py'):
-                    sources.append(base + '.c')
-                else:
-                    sources.append(source)
-            mod.sources = sources
-        return build_ext, False
-    # We need cython
-    try:
-        from Cython.Compiler.Version import version as cyversion
-    except ImportError:
-        return derror_maker(build_ext,
-                            'Need cython>={0} to build extensions '
-                            'but cannot import "Cython"'.format(
-                                cython_min_version)), True
-    if Version(cyversion) >= Version(cython_min_version):
-        from Cython.Distutils import build_ext as extbuilder
-        return extbuilder, True
-    return derror_maker(build_ext,
-                        'Need cython>={0} to build extensions '
-                        'but found cython version {1}'.format(
-                            cython_min_version, cyversion)), True
-
-
-def build_stamp(pyxes, include_dirs=()):
-    """ Cythonize files in `pyxes`, return pyx, C filenames, hashes
-
-    Parameters
-    ----------
-    pyxes : sequence
-        sequence of filenames of files on which to run Cython
-    include_dirs : sequence
-        Any extra include directories in which to find Cython files.
-
-    Returns
-    -------
-    pyx_defs : dict
-        dict has key, value pairs of <pyx_filename>, <pyx_info>, where
-        <pyx_info> is a dict with key, value pairs of "pyx_hash",
-        <pyx file hash>; "c_filename", <c filename>; "c_hash", <c file hash>.
-    """
-    pyx_defs = {}
-    from Cython.Compiler.Main import compile
-    from Cython.Compiler.CmdLine import parse_command_line
-    includes = sum([['--include-dir', d] for d in include_dirs], [])
-    for source in pyxes:
-        base, ext = splitext(source)
-        pyx_hash = sha1((open(source, 'rt').read().encode())).hexdigest()
-        c_filename = base + '.c'
-        options, sources = parse_command_line(['-3'] + includes + [source])
-        result = compile(sources, options)
-        if result.num_errors > 0:
-            raise RuntimeError('Cython failed to compile ' + source)
-        c_hash = sha1(open(c_filename, 'rt').read().encode()).hexdigest()
-        pyx_defs[source] = dict(pyx_hash=pyx_hash,
-                                c_filename=c_filename,
-                                c_hash=c_hash)
-    return pyx_defs
-
-
-def write_stamps(pyx_defs, stamp_fname='pyx-stamps'):
-    """ Write stamp information in `pyx_defs` to filename `stamp_fname`
-
-    Parameters
-    ----------
-    pyx_defs : dict
-        dict has key, value pairs of <pyx_filename>, <pyx_info>, where
-        <pyx_info> is a dict with key, value pairs of "pyx_hash",
-        <pyx file hash>; "c_filename", <c filename>; "c_hash", <c file hash>.
-    stamp_fname : str
-        filename to which to write stamp information
-    """
-    with open(stamp_fname, 'wt') as stamp_file:
-        stamp_file.write('# SHA1 hashes for pyx files and generated c files\n')
-        stamp_file.write('# Auto-generated file, do not edit\n')
-        for pyx_fname, pyx_info in pyx_defs.items():
-            stamp_file.write('%s, %s\n' % (pyx_fname,
-                                           pyx_info['pyx_hash']))
-            stamp_file.write('%s, %s\n' % (pyx_info['c_filename'],
-                                           pyx_info['c_hash']))
-
-
-def find_pyx(root_dir):
-    """ Recursively find files with extension '.pyx' starting at `root_dir`
-
-    Parameters
-    ----------
-    root_dir : str
-        Directory from which to search for pyx files.
-
-    Returns
-    -------
-    pyxes : list
-        list of filenames relative to `root_dir`
-    """
-    pyxes = []
-    for dirpath, dirnames, filenames in os.walk(root_dir):
-        for filename in filenames:
-            if not filename.endswith('.pyx'):
-                continue
-            base = relpath(dirpath, root_dir)
-            pyxes.append(pjoin(base, filename))
-    return pyxes
-
-
-def get_pyx_sdist(sdist_like=sdist, hash_stamps_fname='pyx-stamps',
-                  include_dirs=()):
-    """ Add pyx->c conversion, hash recording to sdist command `sdist_like`
-
-    Parameters
-    ----------
-    sdist_like : sdist command class, optional
-        command that will do work of ``setuptools.command.sdist.sdist``. By
-        default we use the setuptools version
-    hash_stamps_fname : str, optional
-        filename to which to write hashes of pyx / py and c files. Default is
-        ``pyx-stamps``
-    include_dirs : sequence
-        Any extra include directories in which to find Cython files.
-
-    Returns
-    -------
-    modified_sdist : sdist-like command class
-        decorated `sdist_like` class, for compiling pyx / py files to c,
-        putting the .c files in the source archive, and writing hashes
-        for these into the file named from `hash_stamps_fname`
-    """
-    class PyxSDist(sdist_like):
-        """Custom setuptools sdist command to generate .c files from pyx files.
-
-        Running the command object ``obj.run()`` will compile the pyx / py
-        files in any extensions into c files, and add them to the list of
-        files to put into the source archive, as well as the usual behavior
-        of distutils ``sdist``. It will also take the sha1 hashes of the
-        pyx / py and c files, and store them in a file ``pyx-stamps``, and
-        put this file in the release tree. This allows someone who has the
-        archive to know that the pyx and c files that they have are the ones
-        packed into the archive, and therefore they may not need Cython at
-        install time.
-
-        See Also
-        --------
-        ``cython_process_exts`` for the build-time command.
-        """
-        def make_distribution(self):
-            """ Compile pyx to c files, add to sources, stamp sha1s """
-            pyxes = []
-            for mod in self.distribution.ext_modules:
-                for source in mod.sources:
-                    base, ext = splitext(source)
-                    if ext in ('.pyx', '.py'):
-                        pyxes.append(source)
-            self.pyx_defs = build_stamp(pyxes, include_dirs)
-            for pyx_fname, pyx_info in self.pyx_defs.items():
-                self.filelist.append(pyx_info['c_filename'])
-            sdist_like.make_distribution(self)
-
-        def make_release_tree(self, base_dir, files):
-            """ Put pyx stamps file into release tree """
-            sdist_like.make_release_tree(self, base_dir, files)
-            stamp_fname = pjoin(base_dir, hash_stamps_fname)
-            write_stamps(self.pyx_defs, stamp_fname)
-
-    return PyxSDist
-
-
-def build_stamp_source(root_dir=None, stamp_fname='pyx-stamps',
-                       include_dirs=None):
-    """ Build cython c files, make stamp file in source tree `root_dir`
-
-    Parameters
-    ----------
-    root_dir : None or str, optional
-        Directory from which to find ``.pyx`` files. If None, use current
-        working directory.
-    stamp_fname : str, optional
-        Filename for stamp file we will write
-    include_dirs : None or sequence
-        Any extra Cython include directories
-    """
-    if root_dir is None:
-        root_dir = os.getcwd()
-    if include_dirs is None:
-        include_dirs = [pjoin(root_dir, 'src')]
-    pyxes = find_pyx(root_dir)
-    pyx_defs = build_stamp(pyxes, include_dirs=include_dirs)
-    write_stamps(pyx_defs, stamp_fname)
diff --git a/dipy/__init__.py b/dipy/__init__.py
index 52c30fd8cf..878b649565 100644
--- a/dipy/__init__.py
+++ b/dipy/__init__.py
@@ -17,6 +17,7 @@
    denoise -- Denoising algorithms
    direction -- Manage peaks and tracking
    io -- Loading/saving of dpy datasets
+   nn -- Neural network algorithms
    reconst -- Signal reconstruction modules (tensor, spherical harmonics,
               diffusion spectrum, etc.)
segment -- Tractography segmentation @@ -24,6 +25,7 @@ stats -- Tractometry tracking -- Tractography, metrics for streamlines viz -- Visualization and GUIs + workflows -- Predefined Command line for common tasks Utilities --------- @@ -35,12 +37,39 @@ """ import sys -from .info import __version__ +from dipy.version import version as __version__ from .testing import setup_test # Plumb in version etc info stuff from .pkg_info import get_pkg_info as _get_pkg_info + + def get_info(): from os.path import dirname return _get_pkg_info(dirname(__file__)) + + del sys + +submodules = [ + 'align', + 'boots', + 'core', + 'data', + 'denoise', + 'direction', + 'io', + 'nn', + 'reconst', + 'segment', + 'sims', + 'stats', + 'tracking', + 'utils', + 'viz', + 'workflows', + 'tests', + 'testing' +] + +__all__ = submodules + ['__version__', 'setup_test', 'get_info'] diff --git a/dipy/align/meson.build b/dipy/align/meson.build new file mode 100644 index 0000000000..2a4edd3af0 --- /dev/null +++ b/dipy/align/meson.build @@ -0,0 +1,54 @@ +cython_sources = ['bundlemin', +'crosscorr', +'expectmax', +'parzenhist', +'sumsqdiff', +'transforms', +'vector_fields',] + +cython_headers = [ + 'fused_types.pxd', + 'transforms.pxd', + 'vector_fields.pxd', +] + +foreach ext: cython_sources + extra_args = [] + # Todo: check why it is failing to compile with transforms.pxd + # C attributes cannot be added in implementation part of extension type + # defined in a pxd + # if fs.exists(ext + '.pxd') + # extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ] + # endif + py3.extension_module(ext, + cython_gen.process(ext + '.pyx', extra_args: extra_args), + c_args: cython_c_args, + include_directories: [incdir_numpy, inc_local], + dependencies: [omp], + install: true, + subdir: 'dipy/align' + ) +endforeach + + +python_sources = ['__init__.py', + '_public.py', + 'cpd.py', + 'imaffine.py', + 'imwarp.py', + 'metrics.py', + 'reslice.py', + 'scalespace.py', + 'streamlinear.py', + 'streamwarp.py', + ] + + +py3.install_sources( + python_sources + cython_headers, + pure: false, + subdir: 'dipy/align' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/align/tests/meson.build b/dipy/align/tests/meson.build new file mode 100644 index 0000000000..9d51a35ce7 --- /dev/null +++ b/dipy/align/tests/meson.build @@ -0,0 +1,25 @@ +python_sources = ['__init__.py', + 'test_api.py', + 'test_crosscorr.py', + 'test_expectmax.py', + 'test_imaffine.py', + 'test_imwarp.py', + 'test_metrics.py', + 'test_parzenhist.py', + 'test_reslice.py', + 'test_scalespace.py', + 'test_streamlinear.py', + 'test_streamwarp.py', + 'test_sumsqdiff.py', + 'test_transforms.py', + 'test_vector_fields.py', + 'test_whole_brain_slr.py', + ] + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/align/tests' +) + + diff --git a/dipy/boots/meson.build b/dipy/boots/meson.build new file mode 100644 index 0000000000..6d1d6b72f3 --- /dev/null +++ b/dipy/boots/meson.build @@ -0,0 +1,12 @@ +python_sources = [ + '__init__.py', + 'resampling.py', + ] + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/boots' +) + + diff --git a/dipy/conftest.py b/dipy/conftest.py index fab515edc9..ddcc34c91d 100644 --- a/dipy/conftest.py +++ b/dipy/conftest.py @@ -18,4 +18,4 @@ warnings.simplefilter(action="default", category=FutureWarning) warnings.simplefilter("always", category=UserWarning) # List of files that pytest should ignore -collect_ignore = ["testing/decorators.py", ] +collect_ignore = ["testing/decorators.py", 
"bench*.py", "**/benchmarks/*"] diff --git a/dipy/core/benchmarks/bench_sphere.py b/dipy/core/benchmarks/bench_sphere.py deleted file mode 100644 index 397847b1fa..0000000000 --- a/dipy/core/benchmarks/bench_sphere.py +++ /dev/null @@ -1,116 +0,0 @@ -""" Benchmarks for sphere - -Run all benchmarks with:: - - import dipy.core as dipycore - dipycore.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_sphere.py -""" - -import sys -import time - -import dipy.core.sphere_stats as sphere_stats -import dipy.core.sphere as sphere - -from matplotlib import pyplot as plt - - -mode = None -if len(sys.argv) > 1 and sys.argv[1] == '-s': - mode = "subdivide" - -class Timer: - def __enter__(self): - self.__start = time.time() - - def __exit__(self, exception_type, value, traceback): - # Error handling here - self.__finish = time.time() - - def duration_in_seconds(self): - return self.__finish - self.__start - - -def func_minimize_adhoc(init_hemisphere, num_iterations): - opt = sphere.disperse_charges(init_hemisphere, num_iterations)[0] - return opt.vertices - -def func_minimize_scipy(init_pointset, num_iterations): - return sphere.disperse_charges_alt(init_pointset, num_iterations) - -num_points = [20, 40, 60] -num_subdivide = [2, 3, 4] - -def bench_disperse_charges_alt(): - - dpi = 72 - figsize = (1920/dpi, 1080/dpi) - fig = plt.figure(num='Electrostatic repulsion methods benchmark', - figsize=figsize, dpi=dpi) - for (idx, subplot_index) in enumerate(['131', '132', '133']): - num_repetitions = 20 - num_trials = 3 - - execution_time_adhoc = [] - execution_time_scipy = [] - minimum_adhoc = [] - minimum_scipy = [] - - if mode == 'subdivide': - init_sphere = sphere.unit_octahedron.subdivide(num_subdivide[idx]) - init_hemisphere = sphere.HemiSphere.from_sphere(init_sphere) - init_pointset = init_hemisphere.vertices - else: - init_pointset = sphere_stats.random_uniform_on_sphere( - num_points[idx]) - init_hemisphere = sphere.HemiSphere(xyz=init_pointset) - print('num_points = {}'.format(init_pointset.shape[0])) - - for j in range(num_trials): - print(' Iteration {}/{}'.format(j + 1, num_trials)) - - for num_iterations in range(12): - # The time of an iteration of disperse charges is much - # faster than an iteration of fmin_slsqp. 
-                num_iterations_dipy = 20 * num_iterations
-
-                # Measure execution time for dipy.core.sphere.disperse_charges
-                timer = Timer()
-                with timer:
-                    for i in range(num_repetitions):
-                        opt = func_minimize_adhoc(init_hemisphere,
-                                                  num_iterations_dipy)
-                execution_time_adhoc.append(timer.duration_in_seconds() /
-                                            num_repetitions)
-                minimum_adhoc.append(sphere._get_forces_alt(opt.ravel()))
-
-                # Measure execution time for
-                # dipy.core.sphere.disperse_charges_alt
-                timer = Timer()
-                with timer:
-                    for i in range(num_repetitions):
-                        opt = func_minimize_scipy(init_pointset, num_iterations)
-                execution_time_scipy.append(timer.duration_in_seconds() /
-                                            num_repetitions)
-                minimum_scipy.append(sphere._get_forces_alt(opt.ravel()))
-
-        ax = fig.add_subplot(subplot_index)
-        ax.plot(execution_time_adhoc, minimum_adhoc, 'r+',
-                label='DIPY original')
-        ax.plot(execution_time_scipy, minimum_scipy, 'g+',
-                label='SciPy-based')
-        ax.set_yscale('log')
-
-        plt.xlabel('Average execution time (s)')
-        plt.ylabel('Objective function value')
-        if mode == 'subdivide':
-            plt.title('Num subdiv: {}'.format(num_subdivide[idx]))
-        else:
-            plt.title('Num points: {}'.format(num_points[idx]))
-        plt.legend()
-
-    plt.show()
diff --git a/dipy/core/meson.build b/dipy/core/meson.build
new file mode 100644
index 0000000000..b4e9836a45
--- /dev/null
+++ b/dipy/core/meson.build
@@ -0,0 +1,45 @@
+cython_sources = ['interpolation',]
+
+cython_headers = ['interpolation.pxd',]
+
+foreach ext: cython_sources
+  extra_args = []
+  if fs.exists(ext + '.pxd')
+    extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ]
+  endif
+  py3.extension_module(ext,
+    cython_gen.process(ext + '.pyx', extra_args: extra_args),
+    c_args: cython_c_args,
+    include_directories: [incdir_numpy, inc_local],
+    dependencies: [omp],
+    install: true,
+    subdir: 'dipy/core'
+  )
+endforeach
+
+
+python_sources = ['__init__.py',
+    'geometry.py',
+    'gradients.py',
+    'graph.py',
+    'histeq.py',
+    'ndindex.py',
+    'onetime.py',
+    'optimize.py',
+    'profile.py',
+    'rng.py',
+    'sphere.py',
+    'sphere_stats.py',
+    'subdivide_octahedron.py',
+    'wavelet.py',
+    ]
+
+
+py3.install_sources(
+  python_sources + cython_headers,
+  pure: false,
+  subdir: 'dipy/core'
+)
+
+
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/core/tests/meson.build b/dipy/core/tests/meson.build
new file mode 100644
index 0000000000..edbc8c652e
--- /dev/null
+++ b/dipy/core/tests/meson.build
@@ -0,0 +1,20 @@
+python_sources = ['__init__.py',
+    'test_geometry.py',
+    'test_gradients.py',
+    'test_graph.py',
+    'test_interpolation.py',
+    'test_ndindex.py',
+    'test_optimize.py',
+    'test_rng.py',
+    'test_sphere.py',
+    'test_subdivide_octahedron.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/core/tests'
+)
+
+
diff --git a/dipy/data/files/meson.build b/dipy/data/files/meson.build
new file mode 100644
index 0000000000..44af525844
--- /dev/null
+++ b/dipy/data/files/meson.build
@@ -0,0 +1,70 @@
+data_files = [
+    '55dir_grad.bval',
+    '55dir_grad.bvec',
+    'eg_3voxels.pkl',
+    'aniso_vox.nii.gz',
+    'ascm_out_test.nii.gz',
+    'C.npy',
+    'C1.pkl.gz',
+    'C3.pkl.gz',
+    'cb_2.npz',
+    'circle.npy',
+    'dipy_colormaps.json',
+    'dki_constraint_0.npz',
+    'dki_constraint_2.npz',
+    'dki_constraint_4.npz',
+    'dki_constraint.npz',
+    'dsi515_b_table.txt',
+    'dsi4169_b_table.txt',
+    'EuDX_small_25.trk',
+    'evenly_distributed_sphere_362.npz',
+    'evenly_distributed_sphere_642.npz',
+    'evenly_distributed_sphere_724.npz',
+    'fib0.pkl.gz',
+    'fib1.pkl.gz',
+    'fib2.pkl.gz',
+    'func_coef.nii.gz',
+
'func_discrete.nii.gz', + 'grad_514.txt', + 'gtab_3shell.txt', + 'gtab_isbi2013_2shell.txt', + 'gtab_taiwan_dsi.txt', + 'hermite_constraint_0.npz', + 'hermite_constraint_2.npz', + 'hermite_constraint_4.npz', + 'hermite_constraint_6.npz', + 'hermite_constraint_8.npz', + 'hermite_constraint_10.npz', + 'life_matlab_rmse.npy', + 'life_matlab_weights.npy', + 'minimal_bundles.zip', + 'record_horizon.log.gz', + 'repulsion100.npz', + 'repulsion200.npz', + 'repulsion724.npz', + 'S0_10slices.nii.gz', + 'ScannerVectors_GQI101.txt', + 'small_25.bval', + 'small_25.bvec', + 'small_25.nii.gz', + 'small_64D.bval', + 'small_64D.bvals.npy', + 'small_64D.bvec', + 'small_64D.gradients.npy', + 'small_64D.nii', + 'small_101D.bval', + 'small_101D.bvec', + 'small_101D.nii.gz', + 'sphere_grad.txt', + 't1_coronal_slice.npy', + 'tdesign45.txt', + 'test_piesno.nii.gz', + 'test_ui_text_block.npz', + 'tracks300.trk', +] + +py3.install_sources( + data_files, + pure: false, + subdir: 'dipy/data/files' +) \ No newline at end of file diff --git a/dipy/data/meson.build b/dipy/data/meson.build new file mode 100644 index 0000000000..8cf1c658f1 --- /dev/null +++ b/dipy/data/meson.build @@ -0,0 +1,15 @@ +python_sources = [ + '__init__.py', + 'fetcher.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/data' +) + + +subdir('files') +subdir('tests') \ No newline at end of file diff --git a/dipy/data/tests/meson.build b/dipy/data/tests/meson.build new file mode 100644 index 0000000000..ebe1e4595c --- /dev/null +++ b/dipy/data/tests/meson.build @@ -0,0 +1,14 @@ +python_sources = [ + '__init__.py', + 'test_data.py', + 'test_fetcher.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/data/tests' +) + + diff --git a/dipy/denoise/meson.build b/dipy/denoise/meson.build new file mode 100644 index 0000000000..7746c623fb --- /dev/null +++ b/dipy/denoise/meson.build @@ -0,0 +1,39 @@ +cython_sources = [ + 'denspeed', + 'enhancement_kernel', + 'nlmeans_block', + 'pca_noise_estimate', + 'shift_twist_convolution', + ] + +foreach ext: cython_sources + py3.extension_module(ext, + cython_gen.process(ext + '.pyx'), + c_args: cython_c_args, + include_directories: [incdir_numpy, inc_local], + dependencies: [omp], + install: true, + subdir: 'dipy/denoise' + ) +endforeach + + +python_sources = ['__init__.py', + 'adaptive_soft_matching.py', + 'gibbs.py', + 'localpca.py', + 'nlmeans.py', + 'noise_estimate.py', + 'non_local_means.py', + 'patch2self.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/denoise' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/denoise/tests/meson.build b/dipy/denoise/tests/meson.build new file mode 100644 index 0000000000..767512508a --- /dev/null +++ b/dipy/denoise/tests/meson.build @@ -0,0 +1,21 @@ +python_sources = [ + '__init__.py', + 'test_ascm.py', + 'test_denoise.py', + 'test_gibbs.py', + 'test_kernel.py', + 'test_lpca.py', + 'test_nlmeans.py', + 'test_noise_estimate.py', + 'test_non_local_means.py', + 'test_patch2self.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/denoise/tests' +) + + diff --git a/dipy/denoise/tests/test_nlmeans.py b/dipy/denoise/tests/test_nlmeans.py index 5fa2845ff6..e3a925bcf2 100644 --- a/dipy/denoise/tests/test_nlmeans.py +++ b/dipy/denoise/tests/test_nlmeans.py @@ -10,6 +10,7 @@ from dipy.denoise.nlmeans import nlmeans from dipy.denoise.denspeed import (add_padding_reflection, remove_padding) from dipy.utils.omp import cpu_count, have_openmp 
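+# assert_greater reports both measured durations when it fails, which is more
+# informative than asserting that a boolean comparison equals True.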
+from dipy.testing import assert_greater
 from dipy.testing.decorators import set_random_number_generator
@@ -141,4 +142,4 @@ def test_nlmeans_4d_3dsigma_and_threads():
     if cpu_count() == 2:
-        assert_equal(duration_2core < duration_1core, True)
+        assert_greater(duration_1core, duration_2core)
diff --git a/dipy/direction/meson.build b/dipy/direction/meson.build
new file mode 100644
index 0000000000..b00e510429
--- /dev/null
+++ b/dipy/direction/meson.build
@@ -0,0 +1,43 @@
+cython_sources = [
+    'bootstrap_direction_getter',
+    'closest_peak_direction_getter',
+    'pmf',
+    'probabilistic_direction_getter',
+    'ptt_direction_getter',
+    ]
+
+cython_headers = [
+    'closest_peak_direction_getter.pxd',
+    'pmf.pxd',
+    'probabilistic_direction_getter.pxd',
+    ]
+
+foreach ext: cython_sources
+  extra_args = []
+  if fs.exists(ext + '.pxd')
+    extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ]
+  endif
+  py3.extension_module(ext,
+    cython_gen.process(ext + '.pyx', extra_args: extra_args),
+    c_args: cython_c_args,
+    include_directories: [incdir_numpy, inc_local],
+    dependencies: [omp],
+    install: true,
+    subdir: 'dipy/direction'
+  )
+endforeach
+
+
+python_sources = ['__init__.py',
+    'peaks.py',
+    ]
+
+
+py3.install_sources(
+  python_sources + cython_headers,
+  pure: false,
+  subdir: 'dipy/direction'
+)
+
+
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/direction/tests/meson.build b/dipy/direction/tests/meson.build
new file mode 100644
index 0000000000..37680aded9
--- /dev/null
+++ b/dipy/direction/tests/meson.build
@@ -0,0 +1,17 @@
+python_sources = [
+    '__init__.py',
+    'test_bootstrap_direction_getter.py',
+    'test_peaks.py',
+    'test_pmf.py',
+    'test_prob_direction_getter.py',
+    'test_ptt_direction_getter.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/direction/tests'
+)
+
+
diff --git a/dipy/info.py b/dipy/info.py
deleted file mode 100644
index 542673fe1a..0000000000
--- a/dipy/info.py
+++ /dev/null
@@ -1,147 +0,0 @@
-""" This file contains defines parameters for DIPY that we use to fill
-settings in setup.py, the DIPY top-level docstring, and for building the
-docs. In setup.py in particular, we exec this file, so it cannot import dipy
-"""
-
-# DIPY version information. An empty _version_extra corresponds to a
-# full release. '.dev' as a _version_extra string means this is a development
-# version
-_version_major = 1
-_version_minor = 8
-_version_micro = 0
-_version_extra = 'dev0'
-# _version_extra = ''
-
-# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
-__version__ = f"{_version_major}.{_version_minor}.{_version_micro}{_version_extra}"
-
-CLASSIFIERS = ["Development Status :: 3 - Alpha",
-               "Environment :: Console",
-               "Intended Audience :: Science/Research",
-               "License :: OSI Approved :: BSD License",
-               "Operating System :: OS Independent",
-               "Programming Language :: Python",
-               "Topic :: Scientific/Engineering"]
-
-description = 'Diffusion MRI utilities in python'
-
-# Note: this long_description is actually a copy/paste from the top-level
-# README.rst, so that it shows up nicely on PyPI. So please remember to edit
-# it only in one place and sync it correctly.
-long_description = """
-======
- DIPY
-======
-
-DIPY is a python toolbox for analysis of MR diffusion imaging.
- -Website -======= - -Current information can always be found from the DIPY website - https://dipy.org - -Mailing Lists -============= - -Please see the developer's list at -https://mail.python.org/mailman3/lists/dipy.python.org/ - -Code -==== - -You can find our sources and single-click downloads: - -* `Main repository`_ on Github. -* Documentation_ for all releases and current development tree. -* Download as a tar/zip file the `current trunk`_. - -.. _main repository: https://github.com/dipy/dipy -.. _Documentation: https://dipy.org -.. _current trunk: https://github.com/dipy/dipy/archive/master.zip - -License -======= - -DIPY is licensed under the terms of the BSD license. -Please see the LICENSE file in the dipy distribution. - -DIPY uses other libraries also licensed under the BSD or the -MIT licenses. -""" - -# versions for dependencies -# Check these versions against .travis.yml and requirements.txt -CYTHON_MIN_VERSION = '0.29.24' -NUMPY_MIN_VERSION = '1.22.4' -SCIPY_MIN_VERSION = '1.8.1' -NIBABEL_MIN_VERSION = '4.0.0' -H5PY_MIN_VERSION = '3.7.0' -PACKAGING_MIN_VERSION = '19.0' -TQDM_MIN_VERSION = '4.30.0' -TRX_MIN_VERSION = '0.2.9' - -# Main setup parameters -NAME = 'dipy' -MAINTAINER = "Eleftherios Garyfallidis" -MAINTAINER_EMAIL = "neuroimaging@python.org" -DESCRIPTION = description -LONG_DESCRIPTION = long_description -URL = "https://dipy.org" -DOWNLOAD_URL = "https://github.com/dipy/dipy/releases" -LICENSE = "BSD license" -CLASSIFIERS = CLASSIFIERS -AUTHOR = "dipy developers" -AUTHOR_EMAIL = "neuroimaging@python.org" -PLATFORMS = "OS Independent" -MAJOR = _version_major -MINOR = _version_minor -MICRO = _version_micro -ISRELEASE = _version_extra == '' -VERSION = __version__ -PROVIDES = ["dipy"] -REQUIRES = ["numpy (>=%s)" % NUMPY_MIN_VERSION, - "scipy (>=%s)" % SCIPY_MIN_VERSION, - "nibabel (>=%s)" % NIBABEL_MIN_VERSION, - "h5py (>=%s)" % H5PY_MIN_VERSION, - "packaging (>=%s)" % PACKAGING_MIN_VERSION, - "tqdm (>=%s)" % TQDM_MIN_VERSION, - "trx (>=%s)" % TRX_MIN_VERSION] -EXTRAS_REQUIRE = { - "test": [ - "pytest", - "coverage", - "coveralls", - ], - "doc": [ - "cython", - "numpy>=1.22.4", - "scipy>=1.8.1", - "nibabel>=4.0.0", - "h5py", - "h5py<3.0.0; sys_platform == 'win32'", - "cvxpy", - "pandas", - "tables", - "matplotlib", - "fury>=0.9.0", - "scikit-learn", - "scikit-image", - "statsmodels", - "boto3", - ], - "viz": [ - "fury>=0.9.0", - "matplotlib", - ], - "ml": [ - "scikit-learn", - "pandas", - "statsmodels", - "tables", - "tensorflow", - "tensorflow-addons", - ] -} - -EXTRAS_REQUIRE["all"] = list(set([a[i] for a in list(EXTRAS_REQUIRE.values()) - for i in range(len(a))])) diff --git a/dipy/io/meson.build b/dipy/io/meson.build new file mode 100644 index 0000000000..602efbc362 --- /dev/null +++ b/dipy/io/meson.build @@ -0,0 +1,23 @@ +python_sources = [ + '__init__.py', + 'bvectxt.py', + 'dpy.py', + 'gradients.py', + 'image.py', + 'peaks.py', + 'pickles.py', + 'stateful_tractogram.py', + 'streamline.py', + 'utils.py', + 'vtk.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/io' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/io/tests/meson.build b/dipy/io/tests/meson.build new file mode 100644 index 0000000000..f21fe77dd6 --- /dev/null +++ b/dipy/io/tests/meson.build @@ -0,0 +1,19 @@ +python_sources = [ + '__init__.py', + 'test_dpy.py', + 'test_io.py', + 'test_io_gradients.py', + 'test_io_peaks.py', + 'test_stateful_tractogram.py', + 'test_streamline.py', + 'test_utils.py', + ] + + +py3.install_sources( + 
python_sources,
+  pure: false,
+  subdir: 'dipy/io/tests'
+)
+
+
diff --git a/dipy/meson.build b/dipy/meson.build
new file mode 100644
index 0000000000..f8a58febb3
--- /dev/null
+++ b/dipy/meson.build
@@ -0,0 +1,365 @@
+# Platform detection
+is_windows = host_machine.system() == 'windows'
+is_mingw = is_windows and cc.get_id() == 'gcc'
+
+
+# ------------------------------------------------------------------------
+# Preprocessor flags
+# ------------------------------------------------------------------------
+
+numpy_nodepr_api_1_9 = '-DNPY_NO_DEPRECATED_API=NPY_1_9_API_VERSION'
+numpy_nodepr_api_1_7 = '-DNPY_NO_DEPRECATED_API=NPY_1_7_API_VERSION'
+
+# ------------------------------------------------------------------------
+# Compiler flags
+# ------------------------------------------------------------------------
+
+# C warning flags
+Wno_maybe_uninitialized = cc.get_supported_arguments('-Wno-maybe-uninitialized')
+Wno_discarded_qualifiers = cc.get_supported_arguments('-Wno-discarded-qualifiers')
+Wno_empty_body = cc.get_supported_arguments('-Wno-empty-body')
+Wno_implicit_function_declaration = cc.get_supported_arguments('-Wno-implicit-function-declaration')
+Wno_parentheses = cc.get_supported_arguments('-Wno-parentheses')
+Wno_switch = cc.get_supported_arguments('-Wno-switch')
+Wno_unused_label = cc.get_supported_arguments('-Wno-unused-label')
+Wno_unused_variable = cc.get_supported_arguments('-Wno-unused-variable')
+
+# C++ warning flags
+_cpp_Wno_cpp = cpp.get_supported_arguments('-Wno-cpp')
+_cpp_Wno_deprecated_declarations = cpp.get_supported_arguments('-Wno-deprecated-declarations')
+_cpp_Wno_class_memaccess = cpp.get_supported_arguments('-Wno-class-memaccess')
+_cpp_Wno_format_truncation = cpp.get_supported_arguments('-Wno-format-truncation')
+_cpp_Wno_format_extra_args = cpp.get_supported_arguments('-Wno-format-extra-args')
+_cpp_Wno_format = cpp.get_supported_arguments('-Wno-format')
+_cpp_Wno_non_virtual_dtor = cpp.get_supported_arguments('-Wno-non-virtual-dtor')
+_cpp_Wno_sign_compare = cpp.get_supported_arguments('-Wno-sign-compare')
+_cpp_Wno_switch = cpp.get_supported_arguments('-Wno-switch')
+_cpp_Wno_terminate = cpp.get_supported_arguments('-Wno-terminate')
+_cpp_Wno_unused_but_set_variable = cpp.get_supported_arguments('-Wno-unused-but-set-variable')
+_cpp_Wno_unused_function = cpp.get_supported_arguments('-Wno-unused-function')
+_cpp_Wno_unused_local_typedefs = cpp.get_supported_arguments('-Wno-unused-local-typedefs')
+_cpp_Wno_unused_variable = cpp.get_supported_arguments('-Wno-unused-variable')
+_cpp_Wno_int_in_bool_context = cpp.get_supported_arguments('-Wno-int-in-bool-context')
+
+
+cython_c_args = []
+if is_windows
+  # For mingw-w64, link statically against the UCRT.
+  # Let LTO be auto-detected for now due to some issues ('-fno-use-linker-plugin').
+  gcc_link_args = ['-lucrt', '-static']
+  if is_mingw
+    add_project_link_arguments(gcc_link_args, language: ['c', 'cpp'])
+    # Force gcc to float64 long doubles for compatibility with MSVC
+    # builds, for C only.
+    add_project_arguments('-mlong-double-64', language: 'c')
+    # Make fprintf("%zd") work (see https://github.com/rgommers/scipy/issues/118)
+    add_project_arguments('-D__USE_MINGW_ANSI_STDIO=1', language: ['c', 'cpp'])
+    # Manually add the MS_WIN64 macro when not using MSVC.
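+    # (MSVC defines MS_WIN64 itself on 64-bit targets; mingw-w64 does not,
+    # and CPython's headers rely on it to detect a 64-bit Windows build.)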
+    # https://bugs.python.org/issue28267
+    if target_machine.cpu_family().to_lower().contains('64')
+      add_project_arguments('-DMS_WIN64', language: ['c', 'cpp'])
+    endif
+    # Silence warnings emitted by PyOS_snprintf for (%zd), see
+    # https://github.com/rgommers/scipy/issues/118.
+    # Use as c_args for extensions containing Cython code
+    cython_c_args += [_cpp_Wno_format_extra_args, _cpp_Wno_format]
+  endif
+endif
+
+
+# Deal with M_PI & friends; add `use_math_defines` to c_args
+# Cython doesn't always get this correctly itself, so
+# explicitly add the define as a compiler flag for Cython-generated code.
+if is_windows
+  use_math_defines = ['-D_USE_MATH_DEFINES']
+else
+  use_math_defines = []
+endif
+
+# Suppress warning for deprecated Numpy API.
+# (Suppress warning messages emitted by #warning directives).
+# Replace with numpy_nodepr_api after Cython 3.0 is out
+cython_c_args += [_cpp_Wno_cpp, use_math_defines]
+cython_cpp_args = cython_c_args
+
+# ------------------------------------------------------------------------
+# NumPy include directory - needed in all submodules
+# ------------------------------------------------------------------------
+
+# The chdir is needed because within numpy there's an `import signal`
+# statement, and we don't want that to pick up scipy's signal module rather
+# than the stdlib module. The try-except is needed because when things are
+# split across drives on Windows, there is no relative path and an exception
+# gets raised. There may be other such cases, so add a catch-all and switch to
+# an absolute path. Relative paths are needed when for example a virtualenv is
+# placed inside the source tree; Meson rejects absolute paths to places inside
+# the source tree.
+# For cross-compilation it is often not possible to run the Python interpreter
+# in order to retrieve numpy's include directory. It can be specified in the
+# cross file instead:
+#
+#   [properties]
+#   numpy-include-dir = /abspath/to/host-pythons/site-packages/numpy/core/include
+#
+# This uses the path as is, and avoids running the interpreter.
+incdir_numpy = meson.get_external_property('numpy-include-dir', 'not-given')
+if incdir_numpy == 'not-given'
+  incdir_numpy = run_command(py3,
+    [
+      '-c',
+      '''
+import os
+import numpy as np
+try:
+    incdir = os.path.relpath(np.get_include())
+except Exception:
+    incdir = np.get_include()
+print(incdir)
+      '''
+    ],
+    check: true
+  ).stdout().strip()
+
+  # We do need an absolute path to feed to `cc.find_library` below
+  _incdir_numpy_abs = run_command(py3,
+    ['-c', 'import os; os.chdir(".."); import numpy; print(numpy.get_include())'],
+    check: true
+  ).stdout().strip()
+else
+  _incdir_numpy_abs = incdir_numpy
+endif
+inc_np = include_directories(incdir_numpy)
+np_dep = declare_dependency(include_directories: inc_np)
+
+
+npymath_path = _incdir_numpy_abs / '..' / 'lib'
+npyrandom_path = _incdir_numpy_abs / '..' / '..' / 'random' / 'lib'
+npymath_lib = cc.find_library('npymath', dirs: npymath_path)
+npyrandom_lib = cc.find_library('npyrandom', dirs: npyrandom_path)
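+
+# Illustrative sketch (not part of the build): a hypothetical extension that
+# needs the NumPy C API and the npymath static library would consume the
+# objects declared above like so:
+#
+#   py3.extension_module('_example',
+#     '_example.c',
+#     include_directories: [incdir_numpy],
+#     dependencies: [np_dep, npymath_lib],
+#     install: true,
+#     subdir: 'dipy'
+#   )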
+
+# ------------------------------------------------------------------------
+# Define Optimisation for cython extensions
+# ------------------------------------------------------------------------
+omp = dependency('openmp', required: false)
+
+# SSE intrinsics
+sse2_cflags = []
+sse_prog = '''
+#if defined(__GNUC__)
+# if !defined(__amd64__) && !defined(__x86_64__)
+#   error "SSE2 intrinsics are only available on x86_64"
+# endif
+#elif defined (_MSC_VER) && !defined (_M_X64) && !defined (_M_AMD64)
+# error "SSE2 intrinsics not supported on x86 MSVC builds"
+#endif
+#if defined(__SSE__) || (_M_X64 > 0)
+# include <mmintrin.h>
+# include <xmmintrin.h>
+# include <emmintrin.h>
+#else
+# error "No SSE intrinsics available"
+#endif
+int main () {
+    __m128i a = _mm_set1_epi32 (0), b = _mm_set1_epi32 (0), c;
+    c = _mm_xor_si128 (a, b);
+    return 0;
+}'''
+
+if cc.get_id() != 'msvc'
+  test_sse2_cflags = ['-mfpmath=sse', '-msse', '-msse2']
+  # might need to check the processor type here
+  # arm neon flag: -mfpu=neon -mfloat-abi=softfp  # see test below
+  # freescale altivec flag: -maltivec -mabi=altivec
+else
+  test_sse2_cflags = ['/arch:SSE2']  # SSE2 support is only available in 32 bit mode.
+endif
+
+if cc.compiles(sse_prog, args: test_sse2_cflags, name: 'SSE intrinsics')
+  sse2_cflags = test_sse2_cflags
+  cython_c_args += test_sse2_cflags
+  cython_cpp_args = cython_c_args
+endif
+
+if host_cpu_family in ['x86', 'x86_64']
+  x86_intrinsics = []
+  if cc.get_id() == 'msvc'
+    x86_intrinsics = [
+      [ 'AVX',    'immintrin.h', '__m256',  '_mm256_setzero_ps()',    ['/ARCH:AVX'] ],
+      [ 'AVX2',   'immintrin.h', '__m256i', '_mm256_setzero_si256()', ['/ARCH:AVX2'] ],
+      [ 'AVX512', 'immintrin.h', '__m512',  '_mm512_setzero_si512()', ['/ARCH:AVX512'] ],
+    ]
+  else
+    x86_intrinsics = [
+      # [ 'SSE',  'xmmintrin.h', '__m128',  '_mm_setzero_ps()',    ['-msse'] ],
+      # [ 'SSE2', 'emmintrin.h', '__m128i', '_mm_setzero_si128()', ['-msse2'] ],
+      [ 'SSE4.1', 'smmintrin.h', '__m128i', '_mm_setzero_si128(); mtest = _mm_cmpeq_epi64(mtest, mtest)', ['-msse4.1'] ],
+      [ 'AVX',    'immintrin.h', '__m256',  '_mm256_setzero_ps()', ['-mavx'] ],
+    ]
+  endif
+
+  foreach intrin : x86_intrinsics
+    intrin_check = '''#include <@0@>
+      int main (int argc, char ** argv) {
+        static @1@ mtest;
+        mtest = @2@;
+        return *((unsigned char *) &mtest) != 0;
+      }'''.format(intrin[1],intrin[2],intrin[3])
+    intrin_name = intrin[0]
+    if cc.links(intrin_check, name : 'compiler supports @0@ intrinsics'.format(intrin_name))
+      cython_c_args += intrin[4]
+      cython_cpp_args = cython_c_args
+    endif
+  endforeach
+endif
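+
+# To probe one more instruction set, a single extra row in the tables above
+# suffices; e.g. AVX2 on gcc/clang would be (hypothetical, not enabled here):
+#   [ 'AVX2', 'immintrin.h', '__m256i', '_mm256_setzero_si256()', ['-mavx2'] ],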
+
+# ARM NEON intrinsics
+neon_prog = '''
+#if !defined (_MSC_VER) || defined (__clang__)
+# if !defined (_M_ARM64) && !defined (__aarch64__)
+# ifndef __ARM_EABI__
+# error "EABI is required (to be sure that calling conventions are compatible)"
+# endif
+# ifndef __ARM_NEON__
+# error "No ARM NEON instructions available"
+# endif
+# endif
+#endif
+#if defined (_MSC_VER) && (_MSC_VER < 1920) && defined (_M_ARM64)
+# include <arm64_neon.h>
+#else
+# include <arm_neon.h>
+#endif
+int main () {
+    const float32_t __v[4] = { 1, 2, 3, 4 }; \
+    const unsigned int __umask[4] = { \
+      0x80000000, \
+      0x80000000, \
+      0x80000000, \
+      0x80000000 \
+    }; \
+    const uint32x4_t __mask = vld1q_u32 (__umask); \
+    float32x4_t s = vld1q_f32 (__v); \
+    float32x4_t c = vreinterpretq_f32_u32 (veorq_u32 (vreinterpretq_u32_f32 (s), __mask)); \
+    return 0;
+}'''
+
+test_neon_cflags = []
+
+if cc.get_id() != 'msvc' and host_cpu_family != 'aarch64'
+  test_neon_cflags += ['-mfpu=neon']
+endif
+
+if host_system == 'android'  # DIPY does not target Android, but keep it just in case
+  test_neon_cflags += ['-mfloat-abi=softfp']
+endif
+
+if cc.compiles(neon_prog, args: test_neon_cflags, name: 'ARM NEON intrinsics')
+  neon_cflags = test_neon_cflags
+  cython_c_args += neon_cflags
+  cython_cpp_args = cython_c_args
+endif
+
+
+# ------------------------------------------------------------------------
+# include openmp
+# Copy the main __init__.py and pxd files to the build dir.
+# Needed to trick Cython, it won't do a relative import outside a package
+# ------------------------------------------------------------------------
+
+_cython_tree = [
+  fs.copyfile('__init__.py'),
+  fs.copyfile('../src/conditional_omp.h'),
+  fs.copyfile('../src/cythonutils.h'),
+  fs.copyfile('../src/dpy_math.h'),
+  fs.copyfile('../src/safe_openmp.pxd'),
+]
+
+# include some local folder
+# Todo: needs a more explicit name
+incdir_local = meson.current_build_dir()
+inc_local = include_directories('.')
+
+
+# ------------------------------------------------------------------------
+# Manage version file
+# ------------------------------------------------------------------------
+dipy_dir = py3.get_install_dir() / 'dipy'
+
+generate_version = custom_target(
+  'generate-version',
+  install: true,
+  build_always_stale: true,
+  build_by_default: true,
+  output: 'version.py',
+  input: '../tools/version_utils.py',
+  command: [py3, '@INPUT@', '--source-root', '@SOURCE_ROOT@'],
+  install_dir: dipy_dir
+)
+
+# ------------------------------------------------------------------------
+# Include Python Sources
+# ------------------------------------------------------------------------
+python_sources = [
+  '__init__.py',
+  'conftest.py',
+  'pkg_info.py'
+]
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy'
+)
+
+# ------------------------------------------------------------------------
+# Manage datafiles
+# ------------------------------------------------------------------------
+
+data_install_dir = join_paths(get_option('datadir'), 'doc', meson.project_name())
+ex_file_excludes = ['_valid_examples.toml', '.gitignore', 'README.md']
+install_subdir('../doc/examples',
+  install_dir: data_install_dir,
+  exclude_files: ex_file_excludes,
+)
+
+# ------------------------------------------------------------------------
+# Custom Meson Command line tools
+# ------------------------------------------------------------------------
+
+cython_args = ['-3', '--fast-fail', '@EXTRA_ARGS@', '--output-file', '@OUTPUT@',
+               '--include-dir', incdir_local,
+               '@INPUT@']
+cython_cplus_args = ['--cplus'] + cython_args
+
+cython_gen = generator(cython,
+  arguments : cython_args,
+  output : '@BASENAME@.c',
+  depends : _cython_tree)
+
+cython_gen_cpp = generator(cython,
+  arguments : cython_cplus_args,
+  output : '@BASENAME@.cpp',
+  depends : [_cython_tree])
+
+
+# ------------------------------------------------------------------------
+# Add subfolders
+# ------------------------------------------------------------------------
+
+subdir('align')
+subdir('boots')
+subdir('core')
+subdir('data')
+subdir('denoise')
+subdir('direction')
+subdir('io')
+subdir('nn')
+subdir('reconst')
+subdir('segment')
+subdir('sims')
+subdir('stats')
+subdir('testing')
+subdir('tests')
+subdir('tracking')
+subdir('utils')
+subdir('viz')
+subdir('workflows')
\ No newline at end of file
diff --git a/dipy/nn/meson.build b/dipy/nn/meson.build
new file mode 100644
index
0000000000..84ec936255 --- /dev/null +++ b/dipy/nn/meson.build @@ -0,0 +1,19 @@ +python_sources = [ + '__init__.py', + 'cnn_1d_denoising.py', + 'evac.py', + 'histo_resdnn.py', + 'model.py', + 'synb0.py', + 'utils.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/nn' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/nn/tests/meson.build b/dipy/nn/tests/meson.build new file mode 100644 index 0000000000..dde3794c17 --- /dev/null +++ b/dipy/nn/tests/meson.build @@ -0,0 +1,18 @@ +python_sources = [ + '__init__.py', + 'test_cnn_1denoiser.py', + 'test_evac.py', + 'test_histo_resdnn.py', + 'test_synb0.py', + 'test_tf.py', + 'test_utils.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/nn/tests' +) + + diff --git a/dipy/reconst/benchmarks/__init__.py b/dipy/reconst/benchmarks/__init__.py deleted file mode 100644 index a72bde35d8..0000000000 --- a/dipy/reconst/benchmarks/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Init for reconst bench diff --git a/dipy/reconst/benchmarks/bench_bounding_box.py b/dipy/reconst/benchmarks/bench_bounding_box.py deleted file mode 100644 index db5eedaa92..0000000000 --- a/dipy/reconst/benchmarks/bench_bounding_box.py +++ /dev/null @@ -1,30 +0,0 @@ -""" Benchmarks for bounding_box - -Run all benchmarks with:: - - import dipy.reconst as dire - dire.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_bounding_box.py -""" -import numpy as np -from numpy.testing import measure - - -def bench_bounding_box(): - vol = np.zeros((100, 100, 100)) - - vol[0, 0, 0] = 1 - times = 100 - time = measure("bounding_box(vol)", times) / times - print("Bounding_box on a sparse volume: {}".format(time)) - - vol[:] = 10 - times = 1 - time = measure("bounding_box(vol)", times) / times - print("Bounding_box on a dense volume: {}".format(time)) - -if __name__ == "__main__": - bench_bounding_box() diff --git a/dipy/reconst/benchmarks/bench_csd.py b/dipy/reconst/benchmarks/bench_csd.py deleted file mode 100644 index a10dadd517..0000000000 --- a/dipy/reconst/benchmarks/bench_csd.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -import numpy as np -import numpy.testing as npt - -from dipy.reconst.csdeconv import ConstrainedSphericalDeconvModel -from dipy.core.gradients import GradientTable -from dipy.data import read_stanford_labels -from dipy.io.image import load_nifti_data - - -def num_grad(gtab): - return (~gtab.b0s_mask).sum() - - -def bench_csdeconv(center=(50, 40, 40), width=12): - img, gtab, labels_img = read_stanford_labels() - data = load_nifti_data(img) - - labels = load_nifti_data(labels_img) - shape = labels.shape - mask = np.in1d(labels, [1, 2]) - mask.shape = shape - - a, b, c = center - hw = width // 2 - idx = (slice(a - hw, a + hw), slice(b - hw, b + hw), slice(c - hw, c + hw)) - - data_small = data[idx].copy() - mask_small = mask[idx].copy() - voxels = mask_small.sum() - - cmd = "model.fit(data_small, mask_small)" - print("== Benchmarking CSD fit on %d voxels ==" % voxels) - msg = "SH order - %d, gradient directions - %d :: %g sec" - - # Basic case - sh_order = 8 - ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) - time = npt.measure(cmd) - print(msg % (sh_order, num_grad(gtab), time)) - - # Smaller data set - # data_small = data_small[..., :75].copy() - gtab = GradientTable(gtab.gradients[:75]) - ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) - time = npt.measure(cmd) - print(msg % (sh_order, num_grad(gtab), time)) - - 
# Super resolution - sh_order = 12 - ConstrainedSphericalDeconvModel(gtab, None, sh_order=sh_order) - time = npt.measure(cmd) - print(msg % (sh_order, num_grad(gtab), time)) - -if __name__ == "__main__": - bench_csdeconv() diff --git a/dipy/reconst/benchmarks/bench_peaks.py b/dipy/reconst/benchmarks/bench_peaks.py deleted file mode 100644 index 6ed6538f70..0000000000 --- a/dipy/reconst/benchmarks/bench_peaks.py +++ /dev/null @@ -1,31 +0,0 @@ -""" Benchmarks for peak finding - -Run all benchmarks with:: - - import dipy.reconst as dire - dire.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_peaks.py -""" -import numpy as np - -from dipy.data import default_sphere - -from numpy.testing import measure - - -def bench_local_maxima(): - repeat = 10000 - vertices, faces = default_sphere.vertices, default_sphere.faces - print('Timing peak finding') - timed0 = measure("local_maxima(odf, edges)", repeat) - print('Actual sphere: %0.2f' % timed0) - # Create an artificial odf with a few peaks - odf = np.zeros(len(vertices)) - odf[1] = 1. - odf[143] = 143. - odf[505] = 505. - timed1 = measure("local_maxima(odf, edges)", repeat) - print('Few-peak sphere: %0.2f' % timed1) diff --git a/dipy/reconst/benchmarks/bench_squash.py b/dipy/reconst/benchmarks/bench_squash.py deleted file mode 100644 index c8dc65b61a..0000000000 --- a/dipy/reconst/benchmarks/bench_squash.py +++ /dev/null @@ -1,144 +0,0 @@ -""" Benchmarks for fast squashing - -Run all benchmarks with:: - - import dipy.reconst as dire - dire.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_squash.py -""" - -from functools import reduce - -import numpy as np - -from dipy.core.ndindex import ndindex - -from numpy.testing import measure - - -def old_squash(arr, mask=None, fill=0): - """Try and make a standard array from an object array - - This function takes an object array and attempts to convert it to a more - useful dtype. If array can be converted to a better dtype, Nones are - replaced by `fill`. To make the behaviour of this function more clear, here - are the most common cases: - - 1. `arr` is an array of scalars of type `T`. Returns an array like - `arr.astype(T)` - 2. `arr` is an array of arrays. All items in `arr` have the same shape - `S`. Returns an array with shape `arr.shape + S`. - 3. `arr` is an array of arrays of different shapes. Returns `arr`. - 4. Items in `arr` are not ndarrys or scalars. Returns `arr`. - - Parameters - ---------- - arr : array, dtype=object - The array to be converted. - mask : array, dtype=bool, optional - Where arr has Nones. - fill : number, optional - Nones are replaced by fill. 
- - Returns - ------- - result : array - - Examples - -------- - >>> arr = np.empty(3, dtype=object) - >>> arr.fill(2) - >>> old_squash(arr) - array([2, 2, 2]) - >>> arr[0] = None - >>> old_squash(arr) - array([0, 2, 2]) - >>> arr.fill(np.ones(2)) - >>> r = old_squash(arr) - >>> r.shape == (3, 2) - True - >>> r.dtype - dtype('float64') - """ - if mask is None: - mask = np.vectorize(lambda x : x is not None)(arr) - not_none = arr[mask] - # all None, just return arr - if not_none.size == 0: - return arr - first = not_none[0] - # If the first item is an ndarray - if type(first) is np.ndarray: - shape = first.shape - try: - # Check the shapes of all items - all_same_shape = all(item.shape == shape for item in not_none) - except AttributeError: - return arr - # If items have different shapes just return arr - if not all_same_shape: - return arr - # Find common dtype. np.result_type can do this more simply, but it is - # only available for numpy 1.6.0 - dtypes = set(a.dtype for a in not_none) - tiny_arrs = [np.zeros((1,), dtype=dt) for dt in dtypes] - dtype = reduce(np.add, tiny_arrs).dtype - # Create output array and fill - result = np.empty(arr.shape + shape, dtype=dtype) - result.fill(fill) - for ijk in ndindex(arr.shape): - if mask[ijk]: - result[ijk] = arr[ijk] - return result - - # If the first item is a scalar - elif np.isscalar(first): - "first is not an ndarray" - all_scalars = all(np.isscalar(item) for item in not_none) - if not all_scalars: - return arr - # See comment about np.result_type above. We sum against the smallest - # possible type, bool, and let numpy type promotion find the best - # common type. The values might all be Python scalars so we need to - # cast to numpy type at the end to be sure of having a dtype. - dtype = np.asarray(sum(not_none, False)).dtype - temp = arr.copy() - temp[~mask] = fill - return temp.astype(dtype) - else: - return arr - - -def bench_quick_squash(): - repeat = 10 - shape = (300, 200) - arrs = np.zeros(shape, dtype=object) - scalars = np.zeros(shape, dtype=object) - for ijk in ndindex(arrs.shape): - arrs[ijk] = np.ones((3, 5)) - scalars[ijk] = np.float32(0) - print('\nSquashing benchmarks') - for name, objs in ( - ('floats', np.zeros(shape, float).astype(object)), - ('ints', np.zeros(shape, int).astype(object)), - ('arrays', arrs), - ('scalars', scalars), - ): - print(name) - timed0 = measure("quick_squash(objs)", repeat) - timed1 = measure("old_squash(objs)", repeat) - print("fast %4.2f; slow %4.2f" % (timed0, timed1)) - objs[50, 50] = None - timed0 = measure("quick_squash(objs)", repeat) - timed1 = measure("old_squash(objs)", repeat) - print("With None: fast %4.2f; slow %4.2f" % (timed0, timed1)) - timed0 = measure("quick_squash(objs, msk)", repeat) - timed1 = measure("old_squash(objs, msk)", repeat) - print("With mask: fast %4.2f; slow %4.2f" % (timed0, timed1)) - objs[50, 50] = np.float32(0) - timed0 = measure("quick_squash(objs, msk)", repeat) - timed1 = measure("old_squash(objs, msk)", repeat) - print("Other dtype: fast %4.2f; slow %4.2f" % (timed0, timed1)) diff --git a/dipy/reconst/benchmarks/bench_vec_val_sum.py b/dipy/reconst/benchmarks/bench_vec_val_sum.py deleted file mode 100644 index 52219eb576..0000000000 --- a/dipy/reconst/benchmarks/bench_vec_val_sum.py +++ /dev/null @@ -1,21 +0,0 @@ -""" Benchmarks for vec / val summation routine - -Run benchmarks with:: - - import dipy.reconst as dire - dire.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_vec_val_sum.py -""" - -from numpy.testing 
import measure - - -def bench_vec_val_vect(): - repeat = 100 - etime = measure("np.einsum('...ij,...j,...kj->...ik', evecs, evals, evecs)", - repeat) - vtime = measure("vec_val_vect(evecs, evals)", repeat) - print("einsum %4.2f; vec_val_vect %4.2f" % (etime, vtime)) diff --git a/dipy/reconst/meson.build b/dipy/reconst/meson.build new file mode 100644 index 0000000000..0316671046 --- /dev/null +++ b/dipy/reconst/meson.build @@ -0,0 +1,55 @@ +cython_sources = [ + 'eudx_direction_getter', + 'quick_squash', + 'recspeed', + 'vec_val_sum', + ] + +foreach ext: cython_sources + py3.extension_module(ext, + cython_gen.process(ext + '.pyx'), + c_args: cython_c_args, + include_directories: [incdir_numpy, inc_local], + dependencies: [omp], + install: true, + subdir: 'dipy/reconst' + ) +endforeach + + +python_sources = ['__init__.py', + 'base.py', + 'cache.py', + 'cross_validation.py', + 'csdeconv.py', + 'cti.py', + 'dki_micro.py', + 'dki.py', + 'dsi.py', + 'dti.py', + 'forecast.py', + 'fwdti.py', + 'gqi.py', + 'ivim.py', + 'mapmri.py', + 'mcsd.py', + 'msdki.py', + 'multi_voxel.py', + 'odf.py', + 'qtdmri.py', + 'qti.py', + 'rumba.py', + 'sfm.py', + 'shm.py', + 'shore.py', + 'utils.py', + ] + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/reconst' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/reconst/tests/meson.build b/dipy/reconst/tests/meson.build new file mode 100644 index 0000000000..a6847e2786 --- /dev/null +++ b/dipy/reconst/tests/meson.build @@ -0,0 +1,44 @@ +python_sources = [ + '__init__.py', + 'test_cache.py', + 'test_cross_validation.py', + 'test_csdeconv.py', + 'test_cti.py', + 'test_dki.py', + 'test_dki_micro.py', + 'test_dsi.py', + 'test_dsi_deconv.py', + 'test_dsi_metrics.py', + 'test_dti.py', + 'test_eudx_dg.py', + 'test_forecast.py', + 'test_fwdti.py', + 'test_gqi.py', + 'test_ivim.py', + 'test_mapmri.py', + 'test_mcsd.py', + 'test_msdki.py', + 'test_multi_voxel.py', + 'test_odf.py', + 'test_peak_finding.py', + 'test_qtdmri.py', + 'test_qti.py', + 'test_reco_utils.py', + 'test_rumba.py', + 'test_sfm.py', + 'test_shm.py', + 'test_shore.py', + 'test_shore_metrics.py', + 'test_shore_odf.py', + 'test_utils.py', + 'test_vec_val_vect.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/reconst/tests' +) + + diff --git a/dipy/segment/benchmarks/__init__.py b/dipy/segment/benchmarks/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/dipy/segment/benchmarks/bench_quickbundles.py b/dipy/segment/benchmarks/bench_quickbundles.py deleted file mode 100644 index 911c82d493..0000000000 --- a/dipy/segment/benchmarks/bench_quickbundles.py +++ /dev/null @@ -1,94 +0,0 @@ -""" Benchmarks for QuickBundles - -Run all benchmarks with:: - - import dipy.segment as dipysegment - dipysegment.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini /path/to/bench_quickbundles.py - -""" -import numpy as np - -from dipy.data import get_fnames -from dipy.io.streamline import load_tractogram -from dipy.tracking.streamline import Streamlines, set_number_of_points -from dipy.segment.metricspeed import Metric -from dipy.segment.clustering import QuickBundles as QB_New -from numpy.testing import assert_equal - -from dipy.testing import assert_arrays_equal -from numpy.testing import assert_array_equal, measure - - -class MDFpy(Metric): - def are_compatible(self, shape1, shape2): - return shape1 == shape2 - - def dist(self, features1, features2): - dist = np.sqrt(np.sum((features1 - features2)**2, 
axis=1))
-        dist = np.sum(dist / len(features1))
-        return dist
-
-
-def bench_quickbundles():
-    dtype = "float32"
-    repeat = 10
-    nb_points = 12
-
-    fname = get_fnames('fornix')
-
-    fornix = load_tractogram(fname, 'same',
-                             bbox_valid_check=False).streamlines
-
-    fornix_streamlines = Streamlines(fornix)
-    fornix_streamlines = set_number_of_points(fornix_streamlines, nb_points)
-
-    # Create eight copies of the fornix to be clustered (one in each octant).
-    streamlines = []
-    streamlines += [s + np.array([100, 100, 100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([100, -100, 100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([100, 100, -100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([100, -100, -100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([-100, 100, 100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([-100, -100, 100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([-100, 100, -100], dtype)
-                    for s in fornix_streamlines]
-    streamlines += [s + np.array([-100, -100, -100], dtype)
-                    for s in fornix_streamlines]
-
-    # The expected number of clusters of the fornix using threshold=10 is 4.
-    threshold = 10.
-    expected_nb_clusters = 4 * 8
-
-    print("Timing QuickBundles 1.0 vs. 2.0")
-
-    qb2 = QB_New(threshold)
-    qb2_time = measure("clusters = qb2.cluster(streamlines)", repeat)
-    print("QuickBundles2 time: {0:.4}sec".format(qb2_time))
-    print("Speed up of {0}x".format(qb1_time / qb2_time))
-    clusters = qb2.cluster(streamlines)
-    sizes2 = map(len, clusters)
-    indices2 = map(lambda c: c.indices, clusters)
-    assert_equal(len(clusters), expected_nb_clusters)
-    assert_array_equal(list(sizes2), sizes1)
-    assert_arrays_equal(indices2, indices1)
-
-    qb = QB_New(threshold, metric=MDFpy())
-    qb3_time = measure("clusters = qb.cluster(streamlines)", repeat)
-    print("QuickBundles2_python time: {0:.4}sec".format(qb3_time))
-    print("Speed up of {0}x".format(qb1_time / qb3_time))
-    clusters = qb.cluster(streamlines)
-    sizes3 = map(len, clusters)
-    indices3 = map(lambda c: c.indices, clusters)
-    assert_equal(len(clusters), expected_nb_clusters)
-    assert_array_equal(list(sizes3), sizes1)
-    assert_arrays_equal(indices3, indices1)
diff --git a/dipy/segment/meson.build b/dipy/segment/meson.build
new file mode 100644
index 0000000000..b5d73dc851
--- /dev/null
+++ b/dipy/segment/meson.build
@@ -0,0 +1,50 @@
+cython_sources = [
+    'clustering_algorithms',
+    'clusteringspeed',
+    'cythonutils',
+    'featurespeed',
+    'metricspeed',
+    'mrf',
+    ]
+
+cython_headers = [
+    'clusteringspeed.pxd',
+    'cythonutils.pxd',
+    'featurespeed.pxd',
+    'metricspeed.pxd',
+    ]
+
+foreach ext: cython_sources
+  extra_args = []
+  if fs.exists(ext + '.pxd')
+    extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ]
+  endif
+  py3.extension_module(ext,
+    cython_gen.process(ext + '.pyx', extra_args: extra_args),
+    c_args: cython_c_args,
+    include_directories: [incdir_numpy, inc_local],
+    dependencies: [omp],
+    install: true,
+    subdir: 'dipy/segment'
+  )
+endforeach
+
+
+python_sources = ['__init__.py',
+    'bundles.py',
+    'clustering.py',
+    'fss.py',
+    'mask.py',
+    'metric.py',
+    'threshold.py',
+    'tissue.py',
+    ]
+
+py3.install_sources(
+  python_sources + cython_headers,
+  pure: false,
+  subdir: 'dipy/segment'
+)
+
+
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/segment/tests/meson.build b/dipy/segment/tests/meson.build
new file mode 100644
index 0000000000..173118bf28
--- /dev/null
+++ b/dipy/segment/tests/meson.build
@@ -0,0
+1,22 @@ +python_sources = [ + '__init__.py', + 'test_adjustment.py', + 'test_bundles.py', + 'test_clustering.py', + 'test_feature.py', + 'test_fss.py', + 'test_mask.py', + 'test_metric.py', + 'test_mrf.py', + 'test_qbx.py', + 'test_quickbundles.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/segment/tests' +) + + diff --git a/dipy/sims/meson.build b/dipy/sims/meson.build new file mode 100644 index 0000000000..e1a3579399 --- /dev/null +++ b/dipy/sims/meson.build @@ -0,0 +1,15 @@ +python_sources = [ + '__init__.py', + 'phantom.py', + 'voxel.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/sims' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/sims/tests/meson.build b/dipy/sims/tests/meson.build new file mode 100644 index 0000000000..04912c25a5 --- /dev/null +++ b/dipy/sims/tests/meson.build @@ -0,0 +1,14 @@ +python_sources = [ + '__init__.py', + 'test_phantom.py', + 'test_voxel.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/sims/tests' +) + + diff --git a/dipy/stats/meson.build b/dipy/stats/meson.build new file mode 100644 index 0000000000..7a64742c8d --- /dev/null +++ b/dipy/stats/meson.build @@ -0,0 +1,14 @@ +python_sources = [ + '__init__.py', + 'analysis.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/stats' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/stats/tests/meson.build b/dipy/stats/tests/meson.build new file mode 100644 index 0000000000..5d4a127263 --- /dev/null +++ b/dipy/stats/tests/meson.build @@ -0,0 +1,13 @@ +python_sources = [ + '__init__.py', + 'test_analysis.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/stats/tests' +) + + diff --git a/dipy/testing/meson.build b/dipy/testing/meson.build new file mode 100644 index 0000000000..847e2ad998 --- /dev/null +++ b/dipy/testing/meson.build @@ -0,0 +1,16 @@ +python_sources = [ + '__init__.py', + 'decorators.py', + 'memory.py', + 'spherepoints.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/testing' +) + + +subdir('tests') \ No newline at end of file diff --git a/dipy/testing/tests/meson.build b/dipy/testing/tests/meson.build new file mode 100644 index 0000000000..a989779392 --- /dev/null +++ b/dipy/testing/tests/meson.build @@ -0,0 +1,15 @@ +python_sources = [ + '__init__.py', + 'test_decorators.py', + 'test_memory.py', + 'test_testing.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/testing/tests' +) + + diff --git a/dipy/tests/meson.build b/dipy/tests/meson.build new file mode 100644 index 0000000000..f2a4c22ced --- /dev/null +++ b/dipy/tests/meson.build @@ -0,0 +1,14 @@ +python_sources = [ + '__init__.py', + 'scriptrunner.py', + 'test_scripts.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/tests' +) + + diff --git a/dipy/tracking/benchmarks/__init__.py b/dipy/tracking/benchmarks/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/dipy/tracking/benchmarks/bench_streamline.py b/dipy/tracking/benchmarks/bench_streamline.py deleted file mode 100644 index 090c4b7221..0000000000 --- a/dipy/tracking/benchmarks/bench_streamline.py +++ /dev/null @@ -1,123 +0,0 @@ -""" Benchmarks for functions related to streamline - -Run all benchmarks with:: - - import dipy.tracking as dipytracking - dipytracking.bench() - -With Pytest, Run this benchmark with: - - pytest -svv -c bench.ini 
/path/to/bench_streamline.py - -""" -import numpy as np -from numpy.testing import measure -from numpy.testing import assert_array_equal, assert_array_almost_equal - -from dipy.data import get_fnames -from dipy.io.streamline import load_tractogram - -from dipy.tracking.streamline import set_number_of_points, length -from dipy.tracking.tests.test_streamline import (set_number_of_points_python, - length_python,) - -from dipy.tracking import Streamlines - -DATA = {} - - -def setup(): - global DATA - rng = np.random.RandomState(42) - nb_streamlines = 20000 - min_nb_points = 2 - max_nb_points = 100 - - DATA['rng'] = rng - DATA['nb_streamlines'] = nb_streamlines - DATA['streamlines'] = generate_streamlines(nb_streamlines, - min_nb_points, max_nb_points, - rng=rng) - DATA['streamlines_arrseq'] = Streamlines(DATA['streamlines']) - - -def generate_streamlines(nb_streamlines, min_nb_points, max_nb_points, rng): - streamlines = [rng.rand(*(rng.randint(min_nb_points, max_nb_points), 3)) - for _ in range(nb_streamlines)] - return streamlines - - -def bench_set_number_of_points(): - repeat = 5 - nb_streamlines = DATA['nb_streamlines'] - - msg = "Timing set_number_of_points() with {0:,} streamlines." - print(msg.format(nb_streamlines * repeat)) - cython_time = measure("set_number_of_points(streamlines, nb_points)", - repeat) - print("Cython time: {0:.3f} sec".format(cython_time)) - - python_time = measure("[set_number_of_points_python(s, nb_points)" - " for s in streamlines]", repeat) - print("Python time: {0:.2f} sec".format(python_time)) - print("Speed up of {0:.2f}x".format(python_time/cython_time)) - - # Make sure it produces the same results. - assert_array_almost_equal([set_number_of_points_python(s) for s in DATA["streamlines"]], - set_number_of_points(DATA["streamlines"])) - - cython_time_arrseq = measure("set_number_of_points(streamlines, nb_points)", repeat) - print("Cython time (ArrSeq): {0:.3f} sec".format(cython_time_arrseq)) - print("Speed up of {0:.2f}x".format(python_time/cython_time_arrseq)) - - # Make sure it produces the same results. - assert_array_equal(set_number_of_points(DATA["streamlines"]), - set_number_of_points(DATA["streamlines_arrseq"])) - - -def bench_length(): - repeat = 10 - nb_streamlines = DATA['nb_streamlines'] - - msg = "Timing length() with {0:,} streamlines." - print(msg.format(nb_streamlines * repeat)) - python_time = measure("[length_python(s) for s in streamlines]", repeat) - print("Python time: {0:.2f} sec".format(python_time)) - - cython_time = measure("length(streamlines)", repeat) - print("Cython time: {0:.3f} sec".format(cython_time)) - print("Speed up of {0:.2f}x".format(python_time/cython_time)) - - # Make sure it produces the same results. - assert_array_almost_equal([length_python(s) for s in DATA["streamlines"]], - length(DATA["streamlines"])) - - cython_time_arrseq = measure("length(streamlines)", repeat) - print("Cython time (ArrSeq): {0:.3f} sec".format(cython_time_arrseq)) - print("Speed up of {0:.2f}x".format(python_time/cython_time_arrseq)) - - # Make sure it produces the same results. 
-    assert_array_equal(length(DATA["streamlines"]),
-                       length(DATA["streamlines_arrseq"]))
-
-
-def bench_compress_streamlines():
-    repeat = 10
-    fname = get_fnames('fornix')
-    fornix = load_tractogram(fname, 'same',
-                             bbox_valid_check=False).streamlines
-
-    streamlines = Streamlines(fornix)
-
-    print("Timing compress_streamlines() in Cython"
-          " ({0} streamlines)".format(len(streamlines)))
-    cython_time = measure("compress_streamlines(streamlines)", repeat)
-    print("Cython time: {0:.3}sec".format(cython_time))
-    del streamlines
-
-    streamlines = Streamlines(fornix)
-    python_time = measure("map(compress_streamlines_python, streamlines)",
-                          repeat)
-    print("Python time: {0:.2}sec".format(python_time))
-    print("Speed up of {0}x".format(python_time/cython_time))
-    del streamlines
diff --git a/dipy/tracking/meson.build b/dipy/tracking/meson.build
new file mode 100644
index 0000000000..14aa621796
--- /dev/null
+++ b/dipy/tracking/meson.build
@@ -0,0 +1,54 @@
+cython_sources = [
+    'direction_getter',
+    'distances',
+    'fbcmeasures',
+    'localtrack',
+    'propspeed',
+    'stopping_criterion',
+    'streamlinespeed',
+    'vox2track',
+    ]
+
+cython_headers = [
+    'direction_getter.pxd',
+    'fbcmeasures.pxd',
+    'propspeed.pxd',
+    'stopping_criterion.pxd',
+    'streamlinespeed.pxd',
+]
+
+foreach ext: cython_sources
+  extra_args = []
+  if fs.exists(ext + '.pxd')
+    extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ]
+  endif
+  py3.extension_module(ext,
+    cython_gen.process(ext + '.pyx', extra_args: extra_args),
+    c_args: cython_c_args,
+    include_directories: [incdir_numpy, inc_local],
+    dependencies: [omp],
+    install: true,
+    subdir: 'dipy/tracking'
+  )
+endforeach
+
+
+python_sources = ['__init__.py',
+    '_utils.py',
+    'learning.py',
+    'life.py',
+    'local_tracking.py',
+    'mesh.py',
+    'metrics.py',
+    'streamline.py',
+    'utils.py',
+    ]
+
+py3.install_sources(
+  python_sources + cython_headers,
+  pure: false,
+  subdir: 'dipy/tracking'
+)
+
+
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/tracking/tests/meson.build b/dipy/tracking/tests/meson.build
new file mode 100644
index 0000000000..c997e1a79a
--- /dev/null
+++ b/dipy/tracking/tests/meson.build
@@ -0,0 +1,24 @@
+python_sources = [
+    '__init__.py',
+    'test_distances.py',
+    'test_fbc.py',
+    'test_learning.py',
+    'test_life.py',
+    'test_mesh.py',
+    'test_metrics.py',
+    'test_propagation.py',
+    'test_stopping_criterion.py',
+    'test_streamline.py',
+    'test_track_volumes.py',
+    'test_tracking.py',
+    'test_utils.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/tracking/tests'
+)
+
+
diff --git a/dipy/utils/meson.build b/dipy/utils/meson.build
new file mode 100644
index 0000000000..1cbe964b62
--- /dev/null
+++ b/dipy/utils/meson.build
@@ -0,0 +1,47 @@
+cython_sources = [
+    'fast_numpy',
+    'omp',
+    ]
+
+cython_headers = [
+    'fast_numpy.pxd',
+    'omp.pxd',
+    ]
+
+foreach ext: cython_sources
+  extra_args = []
+  if fs.exists(ext + '.pxd')
+    extra_args += ['--depfile', meson.current_source_dir() +'/'+ ext + '.pxd', ]
+  endif
+  py3.extension_module(ext,
+    cython_gen.process(ext + '.pyx', extra_args: extra_args),
+    c_args: cython_c_args,
+    include_directories: [incdir_numpy, inc_local],
+    dependencies: [omp],
+    install: true,
+    subdir: 'dipy/utils'
+  )
+endforeach
+
+
+python_sources = [
+    '__init__.py',
+    '_importlib.py',
+    'arrfuncs.py',
+    'deprecator.py',
+    'multiproc.py',
+    'optpkg.py',
+    'parallel.py',
+    'tripwire.py',
+    'volume.py',
+    ]
+
+
+py3.install_sources(
+  python_sources + cython_headers,
+  pure: false,
+  subdir: 'dipy/utils'
+)
+
+
+subdir('tests')
\ No newline at end of file
diff --git
a/dipy/utils/tests/meson.build b/dipy/utils/tests/meson.build
new file mode 100644
index 0000000000..8e1e9d4c91
--- /dev/null
+++ b/dipy/utils/tests/meson.build
@@ -0,0 +1,20 @@
+python_sources = [
+    '__init__.py',
+    'test_arrfuncs.py',
+    'test_deprecator.py',
+    'test_fast_numpy.py',
+    'test_multiproc.py',
+    'test_omp.py',
+    'test_parallel.py',
+    'test_tripwire.py',
+    'test_volume.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/utils/tests'
+)
+
+
diff --git a/dipy/viz/horizon/meson.build b/dipy/viz/horizon/meson.build
new file mode 100644
index 0000000000..42ca8e8ee7
--- /dev/null
+++ b/dipy/viz/horizon/meson.build
@@ -0,0 +1,15 @@
+python_sources = [
+    '__init__.py',
+    'app.py',
+    'util.py',
+    ]
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz/horizon'
+)
+
+
+subdir('tab')
+subdir('visualizer')
\ No newline at end of file
diff --git a/dipy/viz/horizon/tab/meson.build b/dipy/viz/horizon/tab/meson.build
new file mode 100644
index 0000000000..72c06f76cf
--- /dev/null
+++ b/dipy/viz/horizon/tab/meson.build
@@ -0,0 +1,16 @@
+python_sources = [
+    '__init__.py',
+    'base.py',
+    'cluster.py',
+    'peak.py',
+    'roi.py',
+    'slice.py',
+    ]
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz/horizon/tab'
+)
+
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/viz/horizon/tab/tests/meson.build b/dipy/viz/horizon/tab/tests/meson.build
new file mode 100644
index 0000000000..ff3df068e0
--- /dev/null
+++ b/dipy/viz/horizon/tab/tests/meson.build
@@ -0,0 +1,10 @@
+python_sources = [
+    '__init__.py',
+    'test_base.py',
+    ]
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz/horizon/tab/tests'
+)
\ No newline at end of file
diff --git a/dipy/viz/horizon/visualizer/meson.build b/dipy/viz/horizon/visualizer/meson.build
new file mode 100644
index 0000000000..7042a375dc
--- /dev/null
+++ b/dipy/viz/horizon/visualizer/meson.build
@@ -0,0 +1,12 @@
+python_sources = [
+    '__init__.py',
+    'cluster.py',
+    'slice.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz/horizon/visualizer'
+)
diff --git a/dipy/viz/meson.build b/dipy/viz/meson.build
new file mode 100644
index 0000000000..5cb1808c11
--- /dev/null
+++ b/dipy/viz/meson.build
@@ -0,0 +1,20 @@
+python_sources = [
+    '__init__.py',
+    'gmem.py',
+    'panel.py',
+    'plotting.py',
+    'projections.py',
+    'regtools.py',
+    'streamline.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz'
+)
+
+
+subdir('horizon')
+subdir('tests')
\ No newline at end of file
diff --git a/dipy/viz/tests/meson.build b/dipy/viz/tests/meson.build
new file mode 100644
index 0000000000..195f0d494a
--- /dev/null
+++ b/dipy/viz/tests/meson.build
@@ -0,0 +1,17 @@
+python_sources = [
+    '__init__.py',
+    'test_apps.py',
+    'test_fury.py',
+    'test_regtools.py',
+    'test_streamline.py',
+    'test_viz_import.py',
+    ]
+
+
+py3.install_sources(
+  python_sources,
+  pure: false,
+  subdir: 'dipy/viz/tests'
+)
+
+
diff --git a/dipy/workflows/cli.py b/dipy/workflows/cli.py
new file mode 100644
index 0000000000..ce784dd3fc
--- /dev/null
+++ b/dipy/workflows/cli.py
@@ -0,0 +1,137 @@
+#!python
+
+from dipy.workflows.flow_runner import run_flow
+from dipy.workflows import (align, denoise, io, mask, nn, reconst, segment,
+                            stats, tracking, viz)
+
+
+def dipy_align_affine():
+    run_flow(align.ImageRegistrationFlow())
+
+
+def dipy_align_syn():
+    run_flow(align.SynRegistrationFlow())
+
+
+def dipy_apply_transform():
+    run_flow(align.ApplyTransformFlow())
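+
+# Illustrative note: each wrapper here is a zero-argument callable so that it
+# can be exposed as a console-script entry point. With a meson-python build
+# the mapping would live in pyproject.toml, roughly:
+#
+#   [project.scripts]
+#   dipy_align_affine = "dipy.workflows.cli:dipy_align_affine"
+#   dipy_apply_transform = "dipy.workflows.cli:dipy_apply_transform"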
run_flow(align.ApplyTransformFlow()) + + +def dipy_buan_lmm(): + run_flow(stats.LinearMixedModelsFlow()) + + +def dipy_buan_shapes(): + run_flow(stats.BundleShapeAnalysis()) + + +def dipy_buan_profiles(): + run_flow(stats.BundleAnalysisTractometryFlow()) + + +def dipy_bundlewarp(): + run_flow(align.BundleWarpFlow()) + + +def dipy_correct_motion(): + run_flow(align.MotionCorrectionFlow()) + + +def dipy_denoise_nlmeans(): + run_flow(denoise.NLMeansFlow()) + + +def dipy_denoise_lpca(): + run_flow(denoise.LPCAFlow()) + + +def dipy_denoise_mppca(): + run_flow(denoise.MPPCAFlow()) + + +def dipy_denoise_patch2self(): + run_flow(denoise.Patch2SelfFlow()) + + +def dipy_evac_plus(): + run_flow(nn.EVACPlusFlow()) + + +def dipy_fetch(): + run_flow(io.FetchFlow()) + + +def dipy_fit_csa(): + run_flow(reconst.ReconstCSAFlow()) + + +def dipy_fit_csd(): + run_flow(reconst.ReconstCSDFlow()) + + +def dipy_fit_dki(): + run_flow(reconst.ReconstDkiFlow()) + + +def dipy_fit_dti(): + run_flow(reconst.ReconstDtiFlow()) + + +def dipy_fit_ivim(): + run_flow(reconst.ReconstIvimFlow()) + + +def dipy_fit_mapmri(): + run_flow(reconst.ReconstMAPMRIFlow()) + + +def dipy_mask(): + run_flow(mask.MaskFlow()) + + +def dipy_gibbs_ringing(): + run_flow(denoise.GibbsRingingFlow()) + + +def dipy_horizon(): + run_flow(viz.HorizonFlow()) + + +def dipy_info(): + run_flow(io.IoInfoFlow()) + + +def dipy_labelsbundles(): + run_flow(segment.LabelsBundlesFlow()) + + +def dipy_median_otsu(): + run_flow(segment.MedianOtsuFlow()) + + +def dipy_recobundles(): + run_flow(segment.RecoBundlesFlow()) + + +def dipy_reslice(): + run_flow(align.ResliceFlow()) + + +def dipy_snr_in_cc(): + run_flow(stats.SNRinCCFlow()) + + +def dipy_split(): + run_flow(io.SplitFlow()) + + +def dipy_track(): + run_flow(tracking.LocalFiberTrackingPAMFlow()) + + +def dipy_track_pft(): + run_flow(tracking.PFTrackingPAMFlow()) + + +def dipy_slr(): + run_flow(align.StreamlineLinearRegistrationFlow()) diff --git a/dipy/workflows/meson.build b/dipy/workflows/meson.build new file mode 100644 index 0000000000..dd193129db --- /dev/null +++ b/dipy/workflows/meson.build @@ -0,0 +1,28 @@ +python_sources = [ + '__init__.py', + 'align.py', + 'base.py', + 'cli.py', + 'combined_workflow.py', + 'denoise.py', + 'docstring_parser.py', + 'flow_runner.py', + 'io.py', + 'mask.py', + 'multi_io.py', + 'nn.py', + 'reconst.py', + 'segment.py', + 'stats.py', + 'tracking.py', + 'viz.py', + 'workflow.py', +] + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/workflows' +) + +subdir('tests') \ No newline at end of file diff --git a/dipy/workflows/tests/meson.build b/dipy/workflows/tests/meson.build new file mode 100644 index 0000000000..c9d8978443 --- /dev/null +++ b/dipy/workflows/tests/meson.build @@ -0,0 +1,31 @@ +python_sources = [ + '__init__.py', + 'test_align.py', + 'test_denoise.py', + 'test_docstring_parser.py', + 'test_iap.py', + 'test_io.py', + 'test_masking.py', + 'test_nn.py', + 'test_reconst_csa_csd.py', + 'test_reconst_dki.py', + 'test_reconst_dti.py', + 'test_reconst_ivim.py', + 'test_reconst_mapmri.py', + 'test_reconst_rumba.py', + 'test_segment.py', + 'test_stats.py', + 'test_tracking.py', + 'test_viz.py', + 'test_workflow.py', + 'workflow_tests_utils.py', + ] + + +py3.install_sources( + python_sources, + pure: false, + subdir: 'dipy/workflows/tests' +) + + diff --git a/doc/conf.py b/doc/conf.py index 5be4f2094a..83690b1903 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -32,12 +32,6 @@ # -- General configuration 
----------------------------------------------------- -# We load the DIPY release info into a dict by explicit execution -rel = {} -with open(os.path.join('..', 'dipy', 'info.py')) as f: - exec(f.read(), rel) - - # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', @@ -89,7 +83,7 @@ # built documents. # # The short X.Y version. -version = rel['__version__'] +version = dipy.__version__ # The full version, including alpha/beta/rc tags. release = version diff --git a/doc/devel/benchmarking.rst b/doc/devel/benchmarking.rst new file mode 100644 index 0000000000..71045f8ce2 --- /dev/null +++ b/doc/devel/benchmarking.rst @@ -0,0 +1,3 @@ +.. _benchmarking: + +.. include:: ../../benchmarks/README.rst \ No newline at end of file diff --git a/doc/devel/index.rst b/doc/devel/index.rst index 6a1c639a58..755c8126da 100644 --- a/doc/devel/index.rst +++ b/doc/devel/index.rst @@ -11,4 +11,5 @@ DIPY Developer Guide make_release commit_codes coding_style_guideline + benchmarking diff --git a/doc/links_names.inc b/doc/links_names.inc index c921197931..8b56175051 100644 --- a/doc/links_names.inc +++ b/doc/links_names.inc @@ -8,179 +8,179 @@ __not_case_sensitive__, so only one target definition is needed for nipy, NIPY, Nipy, etc... -.. _nipy: http://nipy.org -.. _`Brain Imaging Center`: http://bic.berkeley.edu/ -.. _dipy: http://dipy.org -.. _`dipy github`: http://github.com/dipy/dipy -.. _`dipy pypi`: http://pypi.python.org/pypi/dipy -.. _nipy issues: http://github.com/nipy/nipy/issues -.. _dipy issues: http://github.com/dipy/dipy/issues -.. _dipy paper: http://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract -.. _journal paper: http://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract -.. _nibabel: http://nipy.org/nibabel -.. _nibabel pypi: http://pypi.python.org/pypi/nibabel -.. _nipy development guidelines: http://nipy.org/nipy/devel/guidelines/index.html -.. _buildbots: http://nipy.bic.berkeley.edu/builders +.. _nipy: https://nipy.org +.. _`Brain Imaging Center`: https://bic.berkeley.edu/ +.. _dipy: https://dipy.org +.. _`dipy github`: https://github.com/dipy/dipy +.. _`dipy pypi`: https://pypi.python.org/pypi/dipy +.. _nipy issues: https://github.com/nipy/nipy/issues +.. _dipy issues: https://github.com/dipy/dipy/issues +.. _dipy paper: https://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract +.. _journal paper: https://www.frontiersin.org/Neuroinformatics/10.3389/fninf.2014.00008/abstract +.. _nibabel: https://nipy.org/nibabel +.. _nibabel pypi: https://pypi.python.org/pypi/nibabel +.. _nipy development guidelines: https://nipy.org/nipy/devel/guidelines/index.html +.. _buildbots: https://nipy.bic.berkeley.edu/builders .. _`dipy gitter`: https://gitter.im/dipy/dipy .. _neurostars: https://neurostars.org/ .. _h5py: https://www.h5py.org/ -.. _cvxpy: http://www.cvxpy.org/ +.. _cvxpy: https://www.cvxpy.org/ .. Packaging -.. _neurodebian: http://neuro.debian.net -.. _neurodebian how to: http://neuro.debian.net/#how-to-use-this-repository -.. _pip: http://www.pip-installer.org/en/latest/ +.. _neurodebian: https://neuro.debian.net +.. _neurodebian how to: https://neuro.debian.net/#how-to-use-this-repository +.. _pip: https://www.pip-installer.org/en/latest/ .. _easy_install: https://pypi.python.org/pypi/setuptools -.. _homebrew: http://brew.sh/ +.. _homebrew: https://brew.sh/ .. Documentation tools -.. 
_graphviz: http://www.graphviz.org/ -.. _`Sphinx reST`: http://sphinx.pocoo.org/rest.html -.. _reST: http://docutils.sourceforge.net/rst.html -.. _docutils: http://docutils.sourceforge.net +.. _graphviz: https://www.graphviz.org/ +.. _`Sphinx reST`: https://sphinx.pocoo.org/rest.html +.. _reST: https://docutils.sourceforge.net/rst.html +.. _docutils: https://docutils.sourceforge.net .. Licenses -.. _GPL: http://www.gnu.org/licenses/gpl.html -.. _BSD: http://www.opensource.org/licenses/bsd-license.php -.. _LGPL: http://www.gnu.org/copyleft/lesser.html +.. _GPL: https://www.gnu.org/licenses/gpl.html +.. _BSD: https://www.opensource.org/licenses/bsd-license.php +.. _LGPL: https://www.gnu.org/copyleft/lesser.html .. Working process -.. _pynifti: http://niftilib.sourceforge.net/pynifti/ -.. _nifticlibs: http://nifti.nimh.nih.gov -.. _nifti: http://nifti.nimh.nih.gov +.. _pynifti: https://niftilib.sourceforge.net/pynifti/ +.. _nifticlibs: https://nifti.nimh.nih.gov +.. _nifti: https://nifti.nimh.nih.gov .. _`nipy launchpad`: https://launchpad.net/nipy .. _launchpad: https://launchpad.net/ .. _`nipy trunk`: https://code.launchpad.net/~nipy-developers/nipy/trunk .. _`nipy mailing list`: https://mail.python.org/mailman/listinfo/neuroimaging .. _`nipy bugs`: https://bugs.launchpad.net/nipy -.. _pep8: http://www.python.org/dev/peps/pep-0008/ +.. _pep8: https://www.python.org/dev/peps/pep-0008/ .. _`numpy coding style`: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt -.. _`python module path`: http://docs.python.org/tutorial/modules.html#the-module-search-path +.. _`python module path`: https://docs.python.org/tutorial/modules.html#the-module-search-path .. Code support stuff -.. _pychecker: http://pychecker.sourceforge.net/ -.. _pylint: http://www.logilab.org/project/pylint -.. _pyflakes: http://divmod.org/trac/wiki/DivmodPyflakes -.. _virtualenv: http://pypi.python.org/pypi/virtualenv -.. _git: http://git.or.cz/ -.. _github: http://github.com -.. _flymake: http://flymake.sourceforge.net/ -.. _rope: http://rope.sourceforge.net/ -.. _pymacs: http://pymacs.progiciels-bpi.ca/pymacs.html -.. _ropemacs: http://rope.sourceforge.net/ropemacs.html -.. _ECB: http://ecb.sourceforge.net/ -.. _emacs_python_mode: http://www.emacswiki.org/cgi-bin/wiki/PythonMode -.. _doctest-mode: http://www.cis.upenn.edu/~edloper/projects/doctestmode/ -.. _bazaar: http://bazaar-vcs.org/ -.. _nose: http://somethingaboutorange.com/mrl/projects/nose +.. _pychecker: https://pychecker.sourceforge.net/ +.. _pylint: https://www.logilab.org/project/pylint +.. _pyflakes: https://divmod.org/trac/wiki/DivmodPyflakes +.. _virtualenv: https://pypi.python.org/pypi/virtualenv +.. _git: https://git.or.cz/ +.. _github: https://github.com +.. _flymake: https://flymake.sourceforge.net/ +.. _rope: https://rope.sourceforge.net/ +.. _pymacs: https://pymacs.progiciels-bpi.ca/pymacs.html +.. _ropemacs: https://rope.sourceforge.net/ropemacs.html +.. _ECB: https://ecb.sourceforge.net/ +.. _emacs_python_mode: https://www.emacswiki.org/cgi-bin/wiki/PythonMode +.. _doctest-mode: https://www.cis.upenn.edu/~edloper/projects/doctestmode/ +.. _bazaar: https://bazaar-vcs.org/ +.. _nose: https://somethingaboutorange.com/mrl/projects/nose .. _pytest: https://docs.pytest.org -.. _`python coverage tester`: http://nedbatchelder.com/code/modules/coverage.html -.. _cython: http://cython.org +.. _`python coverage tester`: https://nedbatchelder.com/code/modules/coverage.html +.. _cython: https://cython.org .. _travis-ci: https://travis-ci.com/ .. 
Other python projects -.. _numpy: http://numpy.scipy.org -.. _scipy: http://www.scipy.org -.. _IPython: http://www.ipython.org/ -.. _`ipython manual`: http://ipython.scipy.org/doc/manual/html -.. _matplotlib: http://matplotlib.sourceforge.net -.. _pythonxy: http://www.pythonxy.com -.. _ETS: http://code.enthought.com/projects/tool-suite.php -.. _`Enthought Tool Suite`: http://code.enthought.com/projects/tool-suite.php -.. _canopy: https://www.enthought.com/products/canopy -.. _anaconda: http://continuum.io/downloads -.. _python: http://www.python.org -.. _mayavi: http://mayavi.sourceforge.net/ -.. _sympy: http://code.google.com/p/sympy/ -.. _networkx: http://networkx.lanl.gov/ -.. _setuptools: http://pypi.python.org/pypi/setuptools -.. _distribute: http://packages.python.org/distribute -.. _datapkg: http://okfn.org/projects/datapkg -.. _pytables: http://www.pytables.org -.. _python-vtk: http://www.vtk.org +.. _numpy: https://numpy.org +.. _scipy: https://www.scipy.org +.. _IPython: https://www.ipython.org/ +.. _`ipython manual`: https://ipython.scipy.org/doc/manual/html +.. _matplotlib: https://matplotlib.sourceforge.net +.. _pythonxy: https://www.pythonxy.com +.. _ETS: https://code.enthought.com/projects/tool-suite.php +.. _`Enthought Tool Suite`: https://code.enthought.com/projects/tool-suite.php +.. _canopy: https://assets.enthought.com/downloads/ +.. _anaconda: https://www.anaconda.com/download +.. _python: https://www.python.org +.. _mayavi: https://mayavi.sourceforge.net/ +.. _sympy: https://code.google.com/p/sympy/ +.. _networkx: https://networkx.lanl.gov/ +.. _setuptools: https://pypi.python.org/pypi/setuptools +.. _distribute: https://packages.python.org/distribute +.. _datapkg: https://okfn.org/projects/datapkg +.. _pytables: https://www.pytables.org +.. _python-vtk: https://www.vtk.org .. _pypi: https://pypi.python.org/pypi .. _FURY: https://fury.gl .. Python imaging projects -.. _PyMVPA: http://www.pymvpa.org -.. _BrainVISA: http://brainvisa.info -.. _anatomist: http://brainvisa.info -.. _pydicom: http://code.google.com/p/pydicom/ +.. _PyMVPA: https://www.pymvpa.org +.. _BrainVISA: https://brainvisa.info +.. _anatomist: https://brainvisa.info +.. _pydicom: https://code.google.com/p/pydicom/ .. Not so python imaging projects -.. _matlab: http://www.mathworks.com -.. _spm: http://www.fil.ion.ucl.ac.uk/spm -.. _spm8: http://www.fil.ion.ucl.ac.uk/spm/software/spm8 -.. _eeglab: http://sccn.ucsd.edu/eeglab -.. _AFNI: http://afni.nimh.nih.gov/afni -.. _FSL: http://www.fmrib.ox.ac.uk/fsl -.. _FreeSurfer: http://surfer.nmr.mgh.harvard.edu -.. _voxbo: http://www.voxbo.org -.. _mricron: http://www.mccauslandcenter.sc.edu/mricro/mricron/index.html -.. _slicer: http://www.slicer.org/ +.. _matlab: https://www.mathworks.com +.. _spm: https://www.fil.ion.ucl.ac.uk/spm +.. _spm8: https://www.fil.ion.ucl.ac.uk/spm/software/spm8 +.. _eeglab: https://sccn.ucsd.edu/eeglab +.. _AFNI: https://afni.nimh.nih.gov/afni +.. _FSL: https://www.fmrib.ox.ac.uk/fsl +.. _FreeSurfer: https://surfer.nmr.mgh.harvard.edu +.. _voxbo: https://www.voxbo.org +.. _mricron: https://www.mccauslandcenter.sc.edu/mricro/mricron/index.html +.. _slicer: https://www.slicer.org/ .. _fibernavigator: https://github.com/scilus/fibernavigator .. File formats -.. _DICOM: http://medical.nema.org/ -.. _`wikipedia DICOM`: http://en.wikipedia.org/wiki/Digital_Imaging_and_Communications_in_Medicine -.. _GDCM: http://sourceforge.net/apps/mediawiki/gdcm +.. _DICOM: https://medical.nema.org/ +.. 
_`wikipedia DICOM`: https://en.wikipedia.org/wiki/Digital_Imaging_and_Communications_in_Medicine +.. _GDCM: https://sourceforge.net/apps/mediawiki/gdcm .. _`DICOM specs`: ftp://medical.nema.org/medical/dicom/2009/ .. _`DICOM object definitions`: ftp://medical.nema.org/medical/dicom/2009/09_03pu3.pdf -.. _dcm2nii: http://www.cabiatl.com/mricro/mricron/dcm2nii.html -.. _`mricron install`: http://www.cabiatl.com/mricro/mricron/install.html -.. _dicom2nrrd: http://www.slicer.org/slicerWiki/index.php/Modules:DicomToNRRD-3.4 -.. _Nrrd: http://teem.sourceforge.net/nrrd/format.html +.. _dcm2nii: https://www.cabiatl.com/mricro/mricron/dcm2nii.html +.. _`mricron install`: https://www.cabiatl.com/mricro/mricron/install.html +.. _dicom2nrrd: https://www.slicer.org/slicerWiki/index.php/Modules:DicomToNRRD-3.4 +.. _Nrrd: https://teem.sourceforge.net/nrrd/format.html .. General software -.. _gcc: http://gcc.gnu.org -.. _xcode: http://developer.apple.com/TOOLS/xcode -.. _mingw: http://www.mingw.org/wiki/Getting_Started -.. _mingw distutils bug: http://bugs.python.org/issue2698 -.. _cygwin: http://cygwin.com -.. _macports: http://www.macports.org/ -.. _VTK: http://www.vtk.org/ -.. _ITK: http://www.itk.org/ -.. _swig: http://www.swig.org -.. _openmp: http://www.openmp.org/ +.. _gcc: https://gcc.gnu.org +.. _xcode: https://developer.apple.com/xcode/resources/ +.. _mingw: https://www.mingw.org/wiki/Getting_Started +.. _mingw distutils bug: https://bugs.python.org/issue2698 +.. _cygwin: https://cygwin.com +.. _macports: https://www.macports.org/ +.. _VTK: https://www.vtk.org/ +.. _ITK: https://www.itk.org/ +.. _swig: https://www.swig.org +.. _openmp: https://www.openmp.org/ .. Windows development -.. _mingw: http://www.mingw.org/wiki/Getting_Started -.. _msys: http://www.mingw.org/wiki/MSYS -.. _powershell: http://www.microsoft.com/powershell -.. _msysgit: http://code.google.com/p/msysgit -.. _putty: http://www.chiark.greenend.org.uk/~sgtatham/putty -.. _visualstudiobuildtools: http://landinghub.visualstudio.com/visual-cpp-build-tools +.. _mingw: https://www.mingw.org/wiki/Getting_Started +.. _msys: https://www.mingw.org/wiki/MSYS +.. _powershell: https://www.microsoft.com/powershell +.. _msysgit: https://code.google.com/p/msysgit +.. _putty: https://www.chiark.greenend.org.uk/~sgtatham/putty +.. _visualstudiobuildtools: https://landinghub.visualstudio.com/visual-cpp-build-tools .. Functional imaging labs -.. _`functional imaging laboratory`: http://www.fil.ion.ucl.ac.uk -.. _FMRIB: http://www.fmrib.ox.ac.uk +.. _`functional imaging laboratory`: https://www.fil.ion.ucl.ac.uk +.. _FMRIB: https://www.fmrib.ox.ac.uk .. Other organizations -.. _enthought: http://www.enthought.com -.. _kitware: http://www.kitware.com -.. _nitrc: http://www.nitrc.org +.. _enthought: https://www.enthought.com +.. _kitware: https://www.kitware.com +.. _nitrc: https://www.nitrc.org .. General information links -.. _`wikipedia FMRI`: http://en.wikipedia.org/wiki/Functional_magnetic_resonance_imaging -.. _`wikipedia PET`: http://en.wikipedia.org/wiki/Positron_emission_tomography +.. _`wikipedia FMRI`: https://en.wikipedia.org/wiki/Functional_magnetic_resonance_imaging +.. _`wikipedia PET`: https://en.wikipedia.org/wiki/Positron_emission_tomography .. Mathematical methods -.. _`wikipedia ICA`: http://en.wikipedia.org/wiki/Independent_component_analysis -.. _`wikipedia PCA`: http://en.wikipedia.org/wiki/Principal_component_analysis +.. _`wikipedia ICA`: https://en.wikipedia.org/wiki/Independent_component_analysis +.. 
_`wikipedia PCA`: https://en.wikipedia.org/wiki/Principal_component_analysis .. Mathematical ideas -.. _`wikipedia spherical coordinate system`: http://en.wikipedia.org/wiki/Spherical_coordinate_system -.. _`mathworld spherical coordinate system`: http://mathworld.wolfram.com/SphericalCoordinates.html -.. _`wikipedia affine`: http://en.wikipedia.org/wiki/Affine_transformation -.. _`wikipedia linear transform`: http://en.wikipedia.org/wiki/Linear_transformation -.. _`wikipedia rotation matrix`: http://en.wikipedia.org/wiki/Rotation_matrix -.. _`wikipedia homogeneous coordinates`: http://en.wikipedia.org/wiki/Homogeneous_coordinates -.. _`wikipedia axis angle`: http://en.wikipedia.org/wiki/Axis_angle -.. _`wikipedia Euler angles`: http://en.wikipedia.org/wiki/Euler_angles -.. _`Mathworld Euler angles`: http://mathworld.wolfram.com/EulerAngles.html -.. _`wikipedia quaternion`: http://en.wikipedia.org/wiki/Quaternion -.. _`wikipedia shear matrix`: http://en.wikipedia.org/wiki/Shear_matrix -.. _`wikipedia reflection`: http://en.wikipedia.org/wiki/Reflection_(mathematics) -.. _`wikipedia direction cosine`: http://en.wikipedia.org/wiki/Direction_cosine +.. _`wikipedia spherical coordinate system`: https://en.wikipedia.org/wiki/Spherical_coordinate_system +.. _`mathworld spherical coordinate system`: https://mathworld.wolfram.com/SphericalCoordinates.html +.. _`wikipedia affine`: https://en.wikipedia.org/wiki/Affine_transformation +.. _`wikipedia linear transform`: https://en.wikipedia.org/wiki/Linear_transformation +.. _`wikipedia rotation matrix`: https://en.wikipedia.org/wiki/Rotation_matrix +.. _`wikipedia homogeneous coordinates`: https://en.wikipedia.org/wiki/Homogeneous_coordinates +.. _`wikipedia axis angle`: https://en.wikipedia.org/wiki/Axis_angle +.. _`wikipedia Euler angles`: https://en.wikipedia.org/wiki/Euler_angles +.. _`Mathworld Euler angles`: https://mathworld.wolfram.com/EulerAngles.html +.. _`wikipedia quaternion`: https://en.wikipedia.org/wiki/Quaternion +.. _`wikipedia shear matrix`: https://en.wikipedia.org/wiki/Shear_matrix +.. _`wikipedia reflection`: https://en.wikipedia.org/wiki/Reflection_(mathematics) +.. _`wikipedia direction cosine`: https://en.wikipedia.org/wiki/Direction_cosine .. vim:syntax=rst diff --git a/doc/tools/apigen.py b/doc/tools/apigen.py index a14fb742aa..4cbfaf9b83 100644 --- a/doc/tools/apigen.py +++ b/doc/tools/apigen.py @@ -106,7 +106,10 @@ def set_package_name(self, package_name): # It's also possible to imagine caching the module parsing here self._package_name = package_name root_module = self._import(package_name) - self.root_path = root_module.__path__[-1] + if root_module.__path__: + self.root_path = root_module.__path__[-1] + else: + self.root_path = os.path.dirname(root_module.__file__) self.written_modules = None package_name = property(get_package_name, set_package_name, None, diff --git a/doc/tools/build_modref_templates.py b/doc/tools/build_modref_templates.py index 451822b5d1..3a68fd8614 100755 --- a/doc/tools/build_modref_templates.py +++ b/doc/tools/build_modref_templates.py @@ -39,27 +39,29 @@ def abort(error): except ImportError as e: abort("Can not import " + package) - module = sys.modules[package] - - # Check that the source version is equal to the installed - # version. If the versions mismatch the API documentation sources - # are not (re)generated. This avoids automatic generation of documentation - # for older or newer versions if such versions are installed on the system. 
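For context, the guard being disabled in this hunk rebuilt a version string from the ``_version_*`` fields of ``dipy/info.py`` and compared it with the installed package. A minimal standalone sketch of that comparison, using hypothetical ``info.py`` contents, looks like::

    import re
    from packaging.version import Version

    # Hypothetical stand-ins for the lines read from dipy/info.py.
    info_lines = [
        "_version_major = 1\n",
        "_version_minor = 8\n",
        "_version_micro = 0\n",
    ]
    source_version = Version(
        '.'.join(v.split('=')[1].strip(" '\n.")
                 for v in info_lines
                 if re.match(r'^_version_(major|minor|micro|extra)', v)
                 ).strip('.'))

    installed_version = Version('1.8.0')  # normally module.__version__
    if source_version != installed_version:
        raise SystemExit('Installed version does not match source version')

With the version now derived from git metadata at build time (see ``tools/version_utils.py`` later in this patch), source and installed versions can no longer drift, which is why the check is retired rather than fixed.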
- - installed_version = Version(module.__version__) - - info_file = pjoin('..', package, 'info.py') - info_lines = open(info_file).readlines() - source_version = '.'.join([v.split('=')[1].strip(" '\n.") - for v in info_lines if re.match( - '^_version_(major|minor|micro|extra)', v - )]).strip('.') - source_version = Version(source_version) - print('***', source_version) - - if source_version != installed_version: - print('***', installed_version) - abort("Installed version does not match source version") + # NOTE: with the new versioning scheme, this check is not needed anymore + # Also, this might be needed if we do not use spin to generate the docs + + # module = sys.modules[package] + + # # Check that the source version is equal to the installed + # # version. If the versions mismatch the API documentation sources + # # are not (re)generated. This avoids automatic generation of documentation + # # for older or newer versions if such versions are installed on the system. + + # installed_version = Version(module.__version__) + # info_file = pjoin('..', package, 'info.py') + # info_lines = open(info_file).readlines() + # source_version = '.'.join([v.split('=')[1].strip(" '\n.") + # for v in info_lines if re.match( + # '^_version_(major|minor|micro|extra)', v + # )]).strip('.') + # source_version = Version(source_version) + # print('***', source_version) + + # if source_version != installed_version: + # print('***', installed_version) + # abort("Installed version does not match source version") docwriter = ApiDocWriter(package, rst_extension='.rst', other_defines=other_defines) diff --git a/doc/tools/docgen_cmd.py b/doc/tools/docgen_cmd.py index 24d8087af5..d1e39252bc 100755 --- a/doc/tools/docgen_cmd.py +++ b/doc/tools/docgen_cmd.py @@ -92,27 +92,30 @@ def format_title(text): except ImportError: abort("Cannot import " + package) - module = sys.modules[package] - - # Check that the source version is equal to the installed - # version. If the versions mismatch the API documentation sources - # are not (re)generated. This avoids automatic generation of documentation - # for older or newer versions if such versions are installed on the system. - - installed_version = Version(module.__version__) - - info_file = pjoin('..', package, 'info.py') - info_lines = open(info_file).readlines() - source_version = '.'.join( - [v.split('=')[1].strip(" '\n.") - for v in info_lines - if re.match('^_version_(major|minor|micro|extra)', v)]).strip('.') - source_version = Version(source_version) - print('***', source_version) - - if source_version != installed_version: - print('***', installed_version) - abort("Installed version does not match source version") + + # NOTE: with the new versioning scheme, this check is not needed anymore + # Also, this might be needed if we do not use spin to generate the docs + # module = sys.modules[package] + + # # Check that the source version is equal to the installed + # # version. If the versions mismatch the API documentation sources + # # are not (re)generated. This avoids automatic generation of documentation + # # for older or newer versions if such versions are installed on the system. 
+ + # installed_version = Version(module.__version__) + + # info_file = pjoin('..', package, 'info.py') + # info_lines = open(info_file).readlines() + # source_version = '.'.join( + # [v.split('=')[1].strip(" '\n.") + # for v in info_lines + # if re.match('^_version_(major|minor|micro|extra)', v)]).strip('.') + # source_version = Version(source_version) + # print('***', source_version) + + # if source_version != installed_version: + # print('***', installed_version) + # abort("Installed version does not match source version") # generate docs diff --git a/doc/user_guide/installation.rst b/doc/user_guide/installation.rst index a6bab6eb9c..a8f5990acf 100644 --- a/doc/user_guide/installation.rst +++ b/doc/user_guide/installation.rst @@ -56,6 +56,7 @@ Windows When the installation has finished we can check if it is successful in the following way. From a Python console script try:: >>> import dipy + >>> print(dipy.__version__) This should work with no error. @@ -151,7 +152,7 @@ and then remove the DIPY directory that contains that file. Alternatives to Anaconda ------------------------- -If you have problems installing Anaconda_ we recommend using Canopy_ or pythonxy_. +If you have problems installing Anaconda_ we recommend using Canopy_. Memory issues ------------- @@ -182,8 +183,8 @@ latest changes. In that case, you can use:: For more information about this see :ref:`following-latest`. After you've cloned the repository, you will have a new directory, containing -the DIPY ``setup.py`` file, among others. We'll call this directory - that -contains the ``setup.py`` file - the *DIPY source root directory*. Sometimes +the DIPY ``pyproject.toml`` file, among others. We'll call this directory - that +contains the ``pyproject.toml`` file - the *DIPY source root directory*. Sometimes we'll also call it the ```` directory. Building and installing @@ -202,11 +203,11 @@ This command will delete all files not present in your github repository. Then, complete your installation by using this command:: - pip install --user -e . + pip install --no-build-isolation --user -e . This command will do the following : - remove the old dipy installation if present - - build dipy (equivalent to `python setup.py build_ext --inplace`) + - build dipy - install dipy locally on your user environment .. _install-source-nix: @@ -218,11 +219,11 @@ Change directory into the *DIPY source root directory*. To install for the system:: - python setup.py install + pip install dipy To build DIPY in the source tree (locally) so you can run the code in the source tree (recommended for following the latest source) run:: - python setup.py build_ext --inplace + pip install --no-build-isolation --user -e . add the *DIPY source root directory* into your ``PYTHONPATH`` environment variable. Search google for ``PYTHONPATH`` for details or see `python module path`_ for an introduction. @@ -364,17 +365,12 @@ If you are already using the Homebrew Python, or the standard python.org Python, you will need to use the CLANG compiler with OMP. Run:: brew install clang-omp - -And then edit the ``setup.py`` file to include the following line (e.g., on line 14, -at the top of the file, but after the initial imports):: - - os.environ['CC'] = '/usr/local/bin/clang-omp' - + export CC=/usr/local/bin/clang-omp Building and installing ~~~~~~~~~~~~~~~~~~~~~~~ Whether you are using Anaconda_ or Hombrew/python.org Python, you will need to then -run ``python setup.py install``. When you do that, it should now +run ``pip install dipy``. 
When you do that, it should now compile the code with this OpenMP-enabled compiler, and things should go faster! @@ -385,10 +381,9 @@ If you want to run the tests:: sudo pip install pytest -Then (in python or ipython_):: +Then, in the terminal from ````:: - >>> import dipy - >>> dipy.test() + pytest -svv dipy You can also run the examples in ``/doc``. @@ -403,7 +398,7 @@ Then change directory to ```` and:: cd doc make clean - make html + make -C . html Tip --- @@ -412,7 +407,7 @@ Building the entire ``DIPY`` documentation takes a few hours. You may want to skip building the documentation for the examples, which will reduce the documentation build time to a few minutes. You can do so by executing:: - make -C . html-after-examples + make -C . html-no-examples Troubleshooting --------------- diff --git a/fake_pyrex/Pyrex/Distutils/__init__.py b/fake_pyrex/Pyrex/Distutils/__init__.py deleted file mode 100644 index 971c255343..0000000000 --- a/fake_pyrex/Pyrex/Distutils/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# to work around bug in setuptools monkeypatching of distutils diff --git a/fake_pyrex/Pyrex/Distutils/build_ext.py b/fake_pyrex/Pyrex/Distutils/build_ext.py deleted file mode 100644 index 86556e8d9f..0000000000 --- a/fake_pyrex/Pyrex/Distutils/build_ext.py +++ /dev/null @@ -1 +0,0 @@ -build_ext = "placeholder" diff --git a/fake_pyrex/Pyrex/__init__.py b/fake_pyrex/Pyrex/__init__.py deleted file mode 100644 index 971c255343..0000000000 --- a/fake_pyrex/Pyrex/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# to work around bug in setuptools monkeypatching of distutils diff --git a/meson.build b/meson.build new file mode 100644 index 0000000000..3e17837e96 --- /dev/null +++ b/meson.build @@ -0,0 +1,66 @@ +project( + 'dipy', + 'c', 'cpp', 'cython', + version: '1.8.0dev0', + license: 'BSD-3', + meson_version: '>= 1.1.0', + default_options: [ + 'buildtype=debugoptimized', + 'c_std=c99', + 'cpp_std=c++14', + 'optimization=2', + ], +) + +# https://mesonbuild.com/Python-module.html +py_mod = import('python') +py3 = py_mod.find_installation(pure: false) +py3_dep = py3.dependency() + +# filesystem Module to manage files and directories +fs = import('fs') + +cc = meson.get_compiler('c') +cpp = meson.get_compiler('cpp') +cy = meson.get_compiler('cython') +host_system = host_machine.system() +host_cpu_family = host_machine.cpu_family() + +cython = find_program('cython') + +# Check compiler is recent enough (see "Toolchain Roadmap" for details) +if cc.get_id() == 'gcc' + if not cc.version().version_compare('>=8.0') + error('DIPY requires GCC >= 8.0') + endif +elif cc.get_id() == 'msvc' + if not cc.version().version_compare('>=19.20') + error('DIPY requires at least vc142 (default with Visual Studio 2019) ' + \ + 'when building with MSVC') + endif +endif +if not cy.version().version_compare('>=0.29.35') + error('DIPY requires Cython >= 0.29.35') +endif + + +# TODO: the below -Wno flags are all needed to silence warnings in +# f2py-generated code. This should be fixed in f2py itself. +#_global_c_args = cc.get_supported_arguments( +# '-Wno-unused-but-set-variable', +# '-Wno-unused-function', +# '-Wno-conversion', +# '-Wno-misleading-indentation', +# '-Wno-incompatible-pointer-types', +#) +#add_project_arguments(_global_c_args, language : 'c') + +# We need -lm for all C code (assuming it uses math functions, which is safe to +# assume for dipy). For C++ it isn't needed, because libstdc++/libc++ is +# guaranteed to depend on it. 
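To see the ``-lm`` point from Python, one can resolve the C math library with ctypes; a POSIX-oriented illustration, independent of the build itself::

    import ctypes
    import ctypes.util
    import math

    # find_library('m') returns e.g. 'libm.so.6' on glibc Linux, and None on
    # platforms where the math functions live in the C runtime itself.
    name = ctypes.util.find_library('m')
    if name is not None:
        libm = ctypes.CDLL(name)
        libm.sqrt.restype = ctypes.c_double
        libm.sqrt.argtypes = [ctypes.c_double]
        assert math.isclose(libm.sqrt(2.0), math.sqrt(2.0))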
+m_dep = cc.find_library('m', required : false) +if m_dep.found() + add_project_link_arguments('-lm', language : 'c') +endif + +subdir('dipy') \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 26844375b6..54fe4138c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,177 @@ +[project] +name = "dipy" +description = "Diffusion MRI Imaging in Python" +license = {file = "LICENSE"} +readme = "README.rst" +requires-python = ">=3.8" +authors = [{ name = "DIPY developers", email = "dipy@python.org" }] +maintainers = [ + {name = "Eleftherios Garyfallidis", email="neuroimaging@python.org"}, + {name = "Ariel Rokem", email="neuroimaging@python.org"}, + {name = "Serge Koudoro", email="neuroimaging@python.org"}, +] +keywords = ["dipy", "diffusionimaging", "dti", "tracking", "tractography", + "diffusionmri", "mri", "tractometry", "connectomics", "brain", + "dipymri", "microstructure", "deeplearning", "registration", + "segmentations", "simulation", "medical", "imaging", "brain", + "machinelearning", "signalprocessing"] +classifiers = [ + 'Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3 :: Only', + "Topic :: Software Development :: Libraries", + 'Topic :: Scientific/Engineering', + "Operating System :: OS Independent", + 'Operating System :: Microsoft :: Windows', + 'Operating System :: POSIX', + 'Operating System :: Unix', + 'Operating System :: MacOS', +] +dynamic = ['version'] +dependencies = [ + "cython>=0.29.24, !=0.29.29", + "numpy>=1.22.4", + "scipy>=1.8", + "nibabel>=3.0.0", + "h5py>=3.1.0", + "packaging>=21", + "tqdm>=4.30.0", + "trx-python>=0.2.9", +] + +[project.scripts] +dipy_align_syn = "dipy.workflows.cli:dipy_align_syn" +dipy_align_affine = "dipy.workflows.cli:dipy_align_affine" +dipy_apply_transform = "dipy.workflows.cli:dipy_apply_transform" +dipy_buan_shapes = "dipy.workflows.cli:dipy_buan_shapes" +dipy_buan_profiles = "dipy.workflows.cli:dipy_buan_profiles" +dipy_buan_lmm = "dipy.workflows.cli:dipy_buan_lmm" +dipy_bundlewarp = "dipy.workflows.cli:dipy_bundlewarp" +dipy_correct_motion = "dipy.workflows.cli:dipy_correct_motion" +dipy_denoise_nlmeans = "dipy.workflows.cli:dipy_denoise_nlmeans" +dipy_denoise_lpca = "dipy.workflows.cli:dipy_denoise_lpca" +dipy_denoise_mppca = "dipy.workflows.cli:dipy_denoise_mppca" +dipy_denoise_patch2self = "dipy.workflows.cli:dipy_denoise_patch2self" +dipy_evac_plus = "dipy.workflows.cli:dipy_evac_plus" +dipy_fetch = "dipy.workflows.cli:dipy_fetch" +dipy_fit_csa = "dipy.workflows.cli:dipy_fit_csa" +dipy_fit_csd = "dipy.workflows.cli:dipy_fit_csd" +dipy_fit_dki = "dipy.workflows.cli:dipy_fit_dki" +dipy_fit_dti = "dipy.workflows.cli:dipy_fit_dti" +dipy_fit_ivim = "dipy.workflows.cli:dipy_fit_ivim" +dipy_fit_mapmri = "dipy.workflows.cli:dipy_fit_mapmri" +dipy_mask = "dipy.workflows.cli:dipy_mask" +dipy_gibbs_ringing = "dipy.workflows.cli:dipy_gibbs_ringing" +dipy_horizon = "dipy.workflows.cli:dipy_horizon" +dipy_info = "dipy.workflows.cli:dipy_info" +dipy_labelsbundles = "dipy.workflows.cli:dipy_labelsbundles" +dipy_median_otsu = "dipy.workflows.cli:dipy_median_otsu" +dipy_recobundles = "dipy.workflows.cli:dipy_recobundles" +dipy_reslice = 
"dipy.workflows.cli:dipy_reslice" +dipy_snr_in_cc = "dipy.workflows.cli:dipy_snr_in_cc" +dipy_split = "dipy.workflows.cli:dipy_split" +dipy_track = "dipy.workflows.cli:dipy_track" +dipy_track_pft = "dipy.workflows.cli:dipy_track_pft" +dipy_slr = "dipy.workflows.cli:dipy_slr" + +[project.optional-dependencies] +all = ["dipy[dev,doc,style,test, viz, ml, extra]"] +style = ["flake8", "isort"] +viz = ["fury>=0.9.0", "matplotlib"] +test = ["pytest", "coverage", "coveralls", "codecov", "asv"] +ml = ["scikit_learn", "pandas", "statsmodels", "tables", "tensorflow", "tensorflow-addons"] +dev = [ + # Also update [build-system] -> requires + 'meson-python>=0.13', + 'wheel', + 'setuptools>=67', + 'packaging>=21', + 'ninja', + 'Cython>=0.29.32', + 'numpy>=1.22', + # Developer UI + 'spin>=0.5', + 'build', +] + +extra = ["dipy[viz, ml]", + "cvxpy", + "scikit-image", + "boto3" + ] + +doc = [ + "numpydoc", + "sphinx ~= 5.3", + "texext", + "tomli; python_version < \"3.11\"", + "sphinxcontrib-bibtex", + "sphinx_design", + "sphinx-gallery>=0.10.0", + "tomli>=2.0.1", + "grg-sphinx-theme>=0.2.0", + "Jinja2" +] + + +[project.urls] +homepage = "https://dipy.org" +documentation = "https://dipy.org/documentation/latest/documentation/" +source = "https://github.com/dipy/dipy" +download = "https://pypi.org/project/dipy/#files" +tracker = "https://github.com/dipy/dipy/issues" + [build-system] -requires = ["setuptools>=42", "wheel", "packaging", "cython", "nibabel", "numpy", "scipy"] +build-backend = "mesonpy" +requires = [ + "meson-python>=0.13.1", + "Cython>=0.29.35", + "packaging>21.0", + "wheel", + "numpy==1.19.5; python_version=='3.8' and platform_python_implementation != 'PyPy'", + "numpy==1.22.4; python_version=='3.9' and platform_python_implementation != 'PyPy'", + "numpy==1.22.4; python_version=='3.10' and platform_system=='Windows' and platform_python_implementation != 'PyPy'", + "numpy==1.22.4; python_version=='3.10' and platform_system != 'Windows' and platform_python_implementation != 'PyPy'", + "numpy==1.23.3; python_version=='3.11' and platform_python_implementation != 'PyPy'", + "numpy>=1.26.0b1; python_version>='3.12'", + "numpy; python_version>='3.8' and platform_python_implementation=='PyPy'", +] + +[tool.spin] +package = 'dipy' + +[tool.spin.commands] +Build = [ + "spin.cmds.meson.build", + "spin.cmds.meson.test", + "spin.cmds.build.sdist", + ".spin/cmds.py:clean" +] + +Environments = [ + "spin.cmds.meson.run", + "spin.shell", + "spin.ipython", + "spin.python" +] + +"Documentation" = [ + "spin.cmds.meson.docs", + # ".spin/cmds.py:docs" +] +Metrics = [ + ".spin/cmds.py:bench", + # ".spin/cmds.py:coverage +] +# TODO: Add custom commands + +[tool.pytest.ini_options] +addopts = "--ignore=dipy/testing/decorators.py --ignore-glob='bench*.py' --ignore-glob=**/benchmarks/*" \ No newline at end of file diff --git a/requirements/build.txt b/requirements/build.txt new file mode 100644 index 0000000000..57683bff45 --- /dev/null +++ b/requirements/build.txt @@ -0,0 +1,8 @@ +meson-python>=0.13 +ninja +Cython>=0.29.21 +packaging>=20.0 +wheel +build +numpy>=1.21.1 +spin==0.5 \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e65f668e12..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[metadata] -license_files = LICENSE - -[tool:pytest] -addopts = --ignore=dipy/testing/decorators.py diff --git a/setup.py b/setup.py deleted file mode 100755 index 70fc36b1ec..0000000000 --- a/setup.py +++ /dev/null @@ -1,253 +0,0 @@ -#!/usr/bin/env python3 -""" Installation script 
for dipy package """ - -import os -import platform -import sys -from copy import deepcopy -from glob import glob -from os.path import dirname, exists -from os.path import join as pjoin - -# BEFORE importing setuptools, remove MANIFEST. setuptools doesn't properly -# update it when the contents of directories change. -if exists('MANIFEST'): - os.remove('MANIFEST') - -# force_setuptools can be set from the setup_egg.py script -if 'force_setuptools' not in globals(): - # For some commands, always use setuptools - if len({'develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb', - 'bdist_mpkg', 'bdist_wheel', 'install_egg_info', 'egg_info', - 'easy_install'}.intersection(sys.argv)) > 0: - force_setuptools = True - else: - force_setuptools = False - -if force_setuptools: - import setuptools - -# Import setuptools _after_ potential setuptools import above, and after -# removing MANIFEST -from setuptools import setup -from setuptools.extension import Extension - -from cythexts import cyproc_exts, get_pyx_sdist -from setup_helpers import (SetupDependency, add_flag_checking, - install_scripts_bat, make_np_ext_builder, - read_vars_from) -from version_helpers import get_comrec_build - -# Get version and release info, which is all stored in dipy/info.py -info = read_vars_from(pjoin('dipy', 'info.py')) - -# We may just have imported setuptools, or we may have been exec'd from a -# setuptools environment like pip -using_setuptools = 'setuptools' in sys.modules -extra_setuptools_args = {} -if using_setuptools: - # Try to preempt setuptools monkeypatching of Extension handling when Pyrex - # is missing. Otherwise the monkeypatched Extension will change .pyx - # filenames to .c filenames, and we probably don't have the .c files. - sys.path.insert(0, pjoin(dirname(__file__), 'fake_pyrex')) - # Set setuptools extra arguments - extra_setuptools_args = dict( - tests_require=['pytest'], - zip_safe=False, - extras_require=info.EXTRAS_REQUIRE, - python_requires=">= 3.8", - ) - -# Define extensions -EXTS = [] - -# We use some defs from npymath, but we don't want to link against npymath lib -ext_kwargs = { - 'include_dirs': ['src'], # We add np.get_include() later - 'define_macros': [("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")] - } - -for modulename, other_sources, language in ( - ('dipy.core.interpolation', [], 'c'), - ('dipy.direction.pmf', [], 'c'), - ('dipy.direction.probabilistic_direction_getter', [], 'c'), - ('dipy.direction.ptt_direction_getter', [], 'c'), - ('dipy.direction.closest_peak_direction_getter', [], 'c'), - ('dipy.direction.bootstrap_direction_getter', [], 'c'), - ('dipy.reconst.eudx_direction_getter', [], 'c'), - ('dipy.reconst.recspeed', [], 'c'), - ('dipy.reconst.vec_val_sum', [], 'c'), - ('dipy.reconst.quick_squash', [], 'c'), - ('dipy.tracking.distances', [], 'c'), - ('dipy.tracking.streamlinespeed', [], 'c'), - ('dipy.tracking.localtrack', [], 'c'), - ('dipy.tracking.direction_getter', [], 'c'), - ('dipy.tracking.stopping_criterion', [], 'c'), - ('dipy.tracking.vox2track', [], 'c'), - ('dipy.tracking.propspeed', [], 'c'), - ('dipy.tracking.fbcmeasures', [], 'c'), - ('dipy.segment.cythonutils', [], 'c'), - ('dipy.segment.featurespeed', [], 'c'), - ('dipy.segment.metricspeed', [], 'c'), - ('dipy.segment.clusteringspeed', [], 'c'), - ('dipy.segment.clustering_algorithms', [], 'c'), - ('dipy.segment.mrf', [], 'c'), - ('dipy.denoise.denspeed', [], 'c'), - ('dipy.denoise.pca_noise_estimate', [], 'c'), - ('dipy.denoise.nlmeans_block', [], 'c'), - ('dipy.denoise.enhancement_kernel', [], 
'c'), - ('dipy.denoise.shift_twist_convolution', [], 'c'), - ('dipy.align.vector_fields', [], 'c'), - ('dipy.align.sumsqdiff', [], 'c'), - ('dipy.align.expectmax', [], 'c'), - ('dipy.align.crosscorr', [], 'c'), - ('dipy.align.bundlemin', [], 'c'), - ('dipy.align.transforms', [], 'c'), - ('dipy.align.parzenhist', [], 'c'), - ('dipy.utils.omp', [], 'c'), - ('dipy.utils.fast_numpy', [], 'c')): - pyx_src = pjoin(*modulename.split('.')) + '.pyx' - EXTS.append(Extension(modulename, [pyx_src] + other_sources, - language=language, - **deepcopy(ext_kwargs))) # deepcopy lists - -# Do our own build and install time dependency checking. setup.py gets called -# in many different ways, and may be called just to collect information -# (egg_info). We need to set up tripwires to raise errors when actually doing -# things, like building, rather than unconditionally in the setup.py import or -# exec. We may make tripwire versions of build_ext, build_py, install -need_cython = True -pybuilder = get_comrec_build('dipy') -# Cython is a dependency for building extensions, iff we don't have stamped -# up pyx and c files. -build_ext, need_cython = cyproc_exts(EXTS, - info.CYTHON_MIN_VERSION, - 'pyx-stamps') -# Add openmp flags if they work -simple_test_c = """int main(int argc, char** argv) { return(0); }""" -omp_test_c = """#include <omp.h> -int main(int argc, char** argv) { return(0); }""" - -msc_flag_defines = [[['/openmp'], [], omp_test_c, 'HAVE_VC_OPENMP'], - ] -gcc_flag_defines = [[['-msse2', '-mfpmath=sse'], [], simple_test_c, 'USING_GCC_SSE2'], - ] - -if 'clang' not in platform.python_compiler().lower(): - gcc_flag_defines += [[['-fopenmp'], ['-fopenmp'], omp_test_c, 'HAVE_OPENMP'], ] - -# Test if it is a 32 bits version -if not sys.maxsize > 2 ** 32: - # This flag is needed only on 32 bits - msc_flag_defines += [[['/arch:SSE2'], [], simple_test_c, 'USING_VC_SSE2'], ] - -flag_defines = msc_flag_defines if 'msc' in platform.python_compiler().lower() else gcc_flag_defines - -extbuilder = add_flag_checking(build_ext, flag_defines, 'dipy') - -# Use ext builder to add np.get_include() at build time, not during setup.py -# execution. 
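The probing machinery deleted above worked by compiling tiny test programs at build time to decide whether flags such as ``-fopenmp`` were usable. A self-contained sketch of that probe (helper name hypothetical, assuming a Unix-like toolchain; ``distutils`` is removed in Python 3.12, so this runs on 3.11 and earlier)::

    import os
    import tempfile
    from distutils.ccompiler import new_compiler
    from distutils.errors import CompileError
    from distutils.sysconfig import customize_compiler

    OMP_TEST_C = '#include <omp.h>\nint main(int argc, char** argv) { return 0; }\n'

    def flag_compiles(flag, code=OMP_TEST_C):
        cc = new_compiler()
        customize_compiler(cc)  # pick up the real CC and its default flags
        cwd = os.getcwd()
        with tempfile.TemporaryDirectory() as tmpdir:
            try:
                os.chdir(tmpdir)
                with open('test.c', 'w') as fobj:
                    fobj.write(code)
                try:
                    cc.compile(['test.c'], extra_postargs=[flag])
                except CompileError:
                    return False
            finally:
                os.chdir(cwd)
        return True

    print('OpenMP flag works:', flag_compiles('-fopenmp'))

Under meson this decision moves into the build files themselves (the ``dependencies: [omp]`` entries in the new ``meson.build`` files), and the ``np.get_include()`` deferral mentioned just above is covered by ``incdir_numpy``.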
-extbuilder = make_np_ext_builder(extbuilder) -if need_cython: - SetupDependency('Cython', info.CYTHON_MIN_VERSION, - req_type='install_requires', - heavy=True).check_fill(extra_setuptools_args) -SetupDependency('numpy', info.NUMPY_MIN_VERSION, - req_type='install_requires', - heavy=True).check_fill(extra_setuptools_args) -SetupDependency('scipy', info.SCIPY_MIN_VERSION, - req_type='install_requires', - heavy=True).check_fill(extra_setuptools_args) -SetupDependency('nibabel', info.NIBABEL_MIN_VERSION, - req_type='install_requires', - heavy=False).check_fill(extra_setuptools_args) -SetupDependency('h5py', info.H5PY_MIN_VERSION, - req_type='install_requires', - heavy=False).check_fill(extra_setuptools_args) -SetupDependency('tqdm', info.TQDM_MIN_VERSION, - req_type='install_requires', - heavy=False).check_fill(extra_setuptools_args) -SetupDependency('trx-python', info.TRX_MIN_VERSION, - req_type='install_requires', - heavy=False).check_fill(extra_setuptools_args) - -cmdclass = dict( - build_py=pybuilder, - build_ext=extbuilder, - install_scripts=install_scripts_bat, - sdist=get_pyx_sdist(include_dirs=['src'])) - - -def main(**extra_args): - setup(name=info.NAME, - maintainer=info.MAINTAINER, - maintainer_email=info.MAINTAINER_EMAIL, - description=info.DESCRIPTION, - long_description=info.LONG_DESCRIPTION, - url=info.URL, - download_url=info.DOWNLOAD_URL, - license=info.LICENSE, - classifiers=info.CLASSIFIERS, - author=info.AUTHOR, - author_email=info.AUTHOR_EMAIL, - platforms=info.PLATFORMS, - version=info.VERSION, - requires=info.REQUIRES, - provides=info.PROVIDES, - packages=['dipy', - 'dipy.tests', - 'dipy.align', - 'dipy.align.tests', - 'dipy.core', - 'dipy.core.tests', - 'dipy.direction', - 'dipy.direction.tests', - 'dipy.tracking', - 'dipy.tracking.tests', - 'dipy.tracking.benchmarks', - 'dipy.reconst', - 'dipy.reconst.benchmarks', - 'dipy.reconst.tests', - 'dipy.io', - 'dipy.io.tests', - 'dipy.viz', - 'dipy.viz.horizon', - 'dipy.viz.horizon.tab', - 'dipy.viz.horizon.visualizer', - 'dipy.viz.tests', - 'dipy.testing', - 'dipy.testing.tests', - 'dipy.boots', - 'dipy.data', - 'dipy.data.tests', - 'dipy.utils', - 'dipy.utils.tests', - 'dipy.segment', - 'dipy.segment.benchmarks', - 'dipy.segment.tests', - 'dipy.sims', - 'dipy.sims.tests', - 'dipy.stats', - 'dipy.stats.tests', - 'dipy.denoise', - 'dipy.denoise.tests', - 'dipy.workflows', - 'dipy.workflows.tests', - 'dipy.nn', - 'dipy.nn.tests'], - - ext_modules=EXTS, - package_data={'dipy': [pjoin('data', 'files', '*')], - }, - data_files=[('share/doc/dipy/examples', - glob(pjoin('doc', 'examples', '*.py')))], - scripts=glob(pjoin('bin', 'dipy_*')), - cmdclass=cmdclass, - **extra_args - ) - - -# simple way to test what setup will do -# python setup.py install --prefix=/tmp -if __name__ == "__main__": - main(**extra_setuptools_args) diff --git a/setup_egg.py b/setup_egg.py deleted file mode 100644 index 3176199a14..0000000000 --- a/setup_egg.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python3 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Wrapper to run setup.py using setuptools.""" - -if __name__ == '__main__': - with open('setup.py') as f: - exec(f.read(), dict(__name__='__main__', - __file__='setup.py', # needed in setup.py - force_setuptools=True)) diff --git a/setup_helpers.py b/setup_helpers.py deleted file mode 100644 index 38edff12f2..0000000000 --- a/setup_helpers.py +++ /dev/null @@ -1,367 +0,0 @@ -""" Distutils / setuptools helpers - -""" -import os 
-import sys -from os.path import join as pjoin, split as psplit, splitext, dirname, exists -import tempfile -import shutil -import logging as log - -from setuptools.command.install_scripts import install_scripts -try: - from setuptools.errors import CompileError, LinkError -except ImportError: - # can remove this except case once we require setuptools>=59.0 - from distutils.errors import CompileError, LinkError -from packaging.version import Version - - -BAT_TEMPLATE = \ - r"""@echo off -REM wrapper to use shebang first line of {FNAME} -set mypath=%~dp0 -set pyscript="%mypath%{FNAME}" -set /p line1=<%pyscript% -if "%line1:~0,2%" == "#!" (goto :goodstart) -echo First line of %pyscript% does not start with "#!" -exit /b 1 -:goodstart -set py_exe=%line1:~2% -REM quote exe in case of spaces in path name -set py_exe="%py_exe%" -call %py_exe% %pyscript% %* -""" - -# Path of file to which to write C conditional vars from build-time checks -CONFIG_H = pjoin('build', 'config.h') -# File name (no directory) to which to write Python vars from build-time checks -CONFIG_PY = '__config__.py' -# Directory to which to write libraries for building -LIB_DIR_TMP = pjoin('build', 'extra_libs') - - -class install_scripts_bat(install_scripts): - """ Make scripts executable on Windows - - Scripts are bare file names without extension on Unix, fitting (for example) - Debian rules. They identify as python scripts with the usual ``#!`` first - line. Unix recognizes and uses this first "shebang" line, but Windows does - not. So, on Windows only we add a ``.bat`` wrapper of name - ``bare_script_name.bat`` to call ``bare_script_name`` using the python - interpreter from the #! first line of the script. - - Notes - ----- - See discussion at - http://matthew-brett.github.com/pydagogue/installing_scripts.html and - example at git://github.com/matthew-brett/myscripter.git for more - background. - """ - - def run(self): - install_scripts.run(self) - if not os.name == "nt": - return - for filepath in self.get_outputs(): - # If we can find an executable name in the #! top line of the script - # file, make .bat wrapper for script. - with open(filepath, 'rt') as fobj: - first_line = fobj.readline() - if not (first_line.startswith('#!') and 'python' in first_line.lower()): - log.info("No #!python executable found, skipping .bat wrapper") - continue - pth, fname = psplit(filepath) - froot, ext = splitext(fname) - bat_file = pjoin(pth, froot + '.bat') - bat_contents = BAT_TEMPLATE.replace('{FNAME}', fname) - log.info(f"Making {bat_file} wrapper for {filepath}") - if self.dry_run: - continue - with open(bat_file, 'wt') as fobj: - fobj.write(bat_contents) - - -def add_flag_checking(build_ext_class, flag_defines, top_package_dir=''): - """ Override input `build_ext_class` to check compiler `flag_defines` - - Parameters - ---------- - build_ext_class : class - Class implementing ``setuptools.command.build_ext.build_ext`` interface, - with a ``build_extensions`` method. - flag_defines : sequence - A sequence of elements, where the elements are sequences of length 4 - consisting of (``compile_flags``, ``link_flags``, ``code``, - ``defvar``). ``compile_flags`` is a sequence of compiler flags; - ``link_flags`` is a sequence of linker flags. We - check ``compile_flags`` to see whether a C source string ``code`` will - compile, and ``link_flags`` to see whether the resulting object file - will link. 
If both compile and link works, we add ``compile_flags`` to - ``extra_compile_args`` and ``link_flags`` to ``extra_link_args`` of - each extension when we build the extensions. If ``defvar`` is not - None, it is the name of C variable to be defined in ``build/config.h`` - with 1 if the combination of (``compile_flags``, ``link_flags``, - ``code``) will compile and link, 0 otherwise. If None, do not write - variable. - top_package_dir : str - String giving name of top-level package, for writing Python file - containing configuration variables. If empty, do not write this file. - Variables written are the same as the Cython variables generated via - the `flag_defines` setting. - - Returns - ------- - checker_class : class - A class with similar interface to - ``setuptools.command.build_ext.build_ext``, that adds all working - ``compile_flags`` values to the ``extra_compile_args`` and working - ``link_flags`` to ``extra_link_args`` attributes of extensions, before - compiling. - """ - - class Checker(build_ext_class): - flag_defs = tuple(flag_defines) - - def can_compile_link(self, compile_flags, link_flags, code): - cc = self.compiler - fname = 'test.c' - cwd = os.getcwd() - tmpdir = tempfile.mkdtemp() - try: - os.chdir(tmpdir) - with open(fname, 'wt') as fobj: - fobj.write(code) - try: - objects = cc.compile([fname], - extra_postargs=compile_flags) - except CompileError: - return False - try: - # Link shared lib rather than executable to avoid - # https://github.com/python/cpython/issues/48681 - # with MSVC 10+ - cc.link_shared_lib(objects, "testlib", - extra_postargs=link_flags) - except (LinkError, TypeError): - return False - finally: - os.chdir(cwd) - shutil.rmtree(tmpdir) - return True - - def build_extensions(self): - """ Hook into extension building to check compiler flags """ - def_vars = [] - good_compile_flags = [] - good_link_flags = [] - config_dir = dirname(CONFIG_H) - for compile_flags, link_flags, code, def_var in self.flag_defs: - compile_flags = list(compile_flags) - link_flags = list(link_flags) - flags_good = self.can_compile_link(compile_flags, - link_flags, - code) - if def_var: - def_vars.append((def_var, flags_good)) - if flags_good: - good_compile_flags += compile_flags - good_link_flags += link_flags - else: - log.warn("Flags {0} omitted because of compile or link " - "error".format(compile_flags + link_flags)) - if def_vars: # write config.h file - if not exists(config_dir): - self.mkpath(config_dir) - with open(CONFIG_H, 'wt') as fobj: - fobj.write('/* Automatically generated; do not edit\n') - fobj.write(' C defines from build-time checks */\n') - for v_name, v_value in def_vars: - fobj.write('int {0} = {1};\n'.format( - v_name, 1 if v_value else 0)) - if def_vars and top_package_dir: # write __config__.py file - config_py_dir = (top_package_dir if self.inplace else - pjoin(self.build_lib, top_package_dir)) - if not exists(config_py_dir): - self.mkpath(config_py_dir) - config_py = pjoin(config_py_dir, CONFIG_PY) - with open(config_py, 'wt') as fobj: - fobj.write('# Automatically generated; do not edit\n') - fobj.write('# Variables from compile checks\n') - for v_name, v_value in def_vars: - fobj.write('{0} = {1}\n'.format(v_name, v_value)) - if def_vars or good_compile_flags or good_link_flags: - for ext in self.extensions: - ext.extra_compile_args += good_compile_flags - ext.extra_link_args += good_link_flags - if def_vars: - ext.include_dirs.append(config_dir) - self.cython_directives = { - 'language_level': '3', - } - build_ext_class.build_extensions(self) - - 
return Checker - - -def get_pkg_version(pkg_name): - """ Return package version for `pkg_name` if installed - - Returns - ------- - pkg_version : str or None - Return None if package not importable. Return 'unknown' if standard - ``__version__`` string not present. Otherwise return version string. - """ - try: - pkg = __import__(pkg_name) - except ImportError: - return None - try: - return pkg.__version__ - except AttributeError: - return 'unknown' - - -def version_error_msg(pkg_name, found_ver, min_ver): - """ Return informative error message for version or None - """ - if found_ver is None: - return 'We need package {0}, but not importable'.format(pkg_name) - if found_ver == 'unknown': - return 'We need {0} version {1}, but cannot get version'.format( - pkg_name, min_ver) - if Version(found_ver) >= Version(min_ver): - return None - return 'We need {0} version {1}, but found version {2}'.format(pkg_name, min_ver, found_ver) - - -class SetupDependency: - """ SetupDependency class - - Parameters - ---------- - import_name : str - Name with which required package should be ``import``ed. - min_ver : str - Version string giving minimum version for package. - req_type : {'install_requires', 'setup_requires'}, optional - Setuptools dependency type. - heavy : {False, True}, optional - If True, and package is already installed (importable), then do not add - to the setuptools dependency lists. This prevents setuptools - reinstalling big packages when the package was installed without using - setuptools, or this is an upgrade, and we want to avoid the pip default - behavior of upgrading all dependencies. - install_name : str, optional - Name identifying package to install from pypi etc, if different from - `import_name`. - """ - - def __init__(self, import_name, - min_ver, - req_type='install_requires', - heavy=False, - install_name=None): - self.import_name = import_name - self.min_ver = min_ver - self.req_type = req_type - self.heavy = heavy - self.install_name = (import_name if install_name is None - else install_name) - - def check_fill(self, setuptools_kwargs): - """ Process this dependency, maybe filling `setuptools_kwargs` - - Run checks on this dependency. If not using setuptools, then raise - error for unmet dependencies. If using setuptools, add missing or - not-heavy dependencies to `setuptools_kwargs`. - - A heavy dependency is one that is inconvenient to install - automatically, such as numpy or (particularly) scipy, matplotlib. - - Parameters - ---------- - setuptools_kwargs : dict - Dictionary of setuptools keyword arguments that may be modified - in-place while checking dependencies. - """ - found_ver = get_pkg_version(self.import_name) - ver_err_msg = version_error_msg(self.import_name, - found_ver, - self.min_ver) - if 'setuptools' not in sys.modules: - # Not using setuptools; raise error for any unmet dependencies - if ver_err_msg is not None: - raise RuntimeError(ver_err_msg) - return - # Using setuptools; add packages to given section of - # setup/install_requires, unless it's a heavy dependency for which we - # already have an acceptable importable version. 
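Restated outside the class, the "heavy dependency" rule applied below is: only declare a requirement when no acceptable copy is already importable. A compact sketch of the same logic (function name hypothetical)::

    import importlib
    from packaging.version import Version

    def needs_requirement(import_name, min_ver, heavy=True):
        try:
            pkg = importlib.import_module(import_name)
        except ImportError:
            return True                      # not installed: always require
        if not heavy:
            return True                      # light packages are always declared
        found = getattr(pkg, '__version__', None)
        return found is None or Version(found) < Version(min_ver)

    print(needs_requirement('numpy', '1.22.4'))

With the move to ``pyproject.toml``, dependencies become declarative metadata and this machinery is no longer needed.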
-
-
-class Bunch:
-    def __init__(self, vars):
-        for key, name in vars.items():
-            if key.startswith('__'):
-                continue
-            self.__dict__[key] = name
-
-
-def read_vars_from(ver_file):
-    """ Read variables from Python text file
-
-    Parameters
-    ----------
-    ver_file : str
-        Filename of file to read
-
-    Returns
-    -------
-    info_vars : Bunch instance
-        Bunch object where variables read from `ver_file` appear as
-        attributes
-    """
-    # Use exec for compatibility with Python 3
-    ns = {}
-    with open(ver_file, 'rt') as fobj:
-        exec(fobj.read(), ns)
-    return Bunch(ns)
-
-
-def make_np_ext_builder(build_ext_class):
-    """ Override input `build_ext_class` to add numpy includes to extension
-
-    This is useful to delay call of ``np.get_include`` until the extension is
-    being built.
-
-    Parameters
-    ----------
-    build_ext_class : class
-        Class implementing ``setuptools.command.build_ext.build_ext`` interface,
-        with a ``build_extensions`` method.
-
-    Returns
-    -------
-    np_build_ext_class : class
-        A class with similar interface to
-        ``setuptools.command.build_ext.build_ext``, that adds libraries in
-        ``np.get_include()`` to include directories of extension.
-    """
-
-    class NpExtBuilder(build_ext_class):
-        def build_extensions(self):
-            """ Hook into extension building to add np include dirs
-            """
-            # Delay numpy import until last moment
-            import numpy as np
-            for ext in self.extensions:
-                ext.include_dirs.append(np.get_include())
-            build_ext_class.build_extensions(self)
-
-    return NpExtBuilder
diff --git a/tools/ci/install.sh b/tools/ci/install.sh
index d81e394fa8..9881b54b5c 100755
--- a/tools/ci/install.sh
+++ b/tools/ci/install.sh
@@ -5,7 +5,7 @@ source tools/ci/activate_env.sh
 
 set -ex
 
-PIPI="pip install --timeout=60"
+PIPI="pip install --timeout=60 -Csetup-args=--vsenv -Ccompile-args=-v"
 
 if [ "$USE_PRE" == "1" ] || [ "$USE_PRE" == true ]; then
     PIPI="$PIPI --extra-index-url=$PRE_WHEELS --pre";
@@ -16,7 +16,7 @@ fi
 if [ "$INSTALL_TYPE" == "setup" ]; then
     python setup.py install
 elif [ "$INSTALL_TYPE" == "pip" ]; then
-    $PIPI .
+    $PIPI -vv .
 elif [ "$INSTALL_TYPE" == "sdist" ]; then
     # python -m pep517.build
     python setup_egg.py egg_info # check egg_info while we're here
@@ -30,7 +30,7 @@ elif [ "$INSTALL_TYPE" == "requirements" ]; then
     $PIPI -r requirements.txt
     python setup.py install
 elif [ "$INSTALL_TYPE" == "conda" ]; then
-    $PIPI .
+    $PIPI -vv .
 fi
 
 set +ex
\ No newline at end of file
diff --git a/tools/ci/run_tests.sh b/tools/ci/run_tests.sh
index bf7130a4f4..c398b55784 100755
--- a/tools/ci/run_tests.sh
+++ b/tools/ci/run_tests.sh
@@ -11,18 +11,18 @@ echo "Run the tests"
 mkdir for_testing
 cd for_testing
 # We need the setup.cfg for the pytest settings
-cp ../setup.cfg .
+cp ../pyproject.toml .
 # No figure windows for mpl; quote to hide : from travis-ci yaml parsing
 echo "backend : agg" > matplotlibrc
 if [ "$COVERAGE" == "1" ] || [ "$COVERAGE" == true ]; then
     cp ../.coveragerc .;
     cp ../.codecov.yml .;
     # Run the tests and check for test coverage.
-    coverage run -m pytest -svv --doctest-modules --verbose --durations=10 --pyargs dipy
+    coverage run -m pytest -c pyproject.toml -svv --doctest-modules --verbose --durations=10 --pyargs dipy
     coverage report -m # Generate test coverage report.
     coverage xml # Generate coverage report in xml format for codecov upload.
 else
-    pytest -svv --doctest-modules --verbose --durations=10 --pyargs dipy
+    pytest -c pyproject.toml -svv --doctest-modules --verbose --durations=10 --pyargs dipy
 fi
 cd ..
 set +ex
\ No newline at end of file
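``run_tests.sh`` now copies ``pyproject.toml`` instead of ``setup.cfg`` and points pytest at it with ``-c``, so pytest reads its settings from the standard ``[tool.pytest.ini_options]`` table. A quick illustrative check of that assumption (``tomllib`` needs Python 3.11+)::

    # Sanity check: confirm the pytest settings table exists in the
    # copied pyproject.toml; table path is the standard pytest location.
    import tomllib

    with open('pyproject.toml', 'rb') as fobj:
        config = tomllib.load(fobj)
    table = config.get('tool', {}).get('pytest', {}).get('ini_options', {})
    print(sorted(table))  # option names pytest will pick up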
diff --git a/tools/version_utils.py b/tools/version_utils.py
new file mode 100644
index 0000000000..f07573f8b3
--- /dev/null
+++ b/tools/version_utils.py
@@ -0,0 +1,136 @@
+"""
+Note
+----
+
+This file is copied (possibly with major modifications) from the
+sources of the scipy project - https://github.com/scipy/scipy.
+It remains licensed as the rest of scipy (BSD-3 license as of October 2023).
+
+# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+#
+# See COPYING file distributed along with the scipy package for the
+# copyright and license terms.
+#
+# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
+"""
+import os
+import subprocess
+import argparse
+
+
+MAJOR = 1
+MINOR = 8
+MICRO = 0
+ISRELEASED = False
+IS_RELEASE_BRANCH = False
+VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+
+
+def get_version_info(source_root):
+    # Adding the git rev number needs to be done inside
+    # write_version_py(), otherwise the import of dipy.version messes
+    # up the build under Python 3.
+    FULLVERSION = VERSION
+    if os.path.exists(os.path.join(source_root, '.git')):
+        GIT_REVISION, COMMIT_COUNT = git_version(source_root)
+    elif os.path.exists('dipy/version.py'):
+        # must be a source distribution, use existing version file
+        # load it as a separate module to not load dipy/__init__.py
+        import runpy
+        ns = runpy.run_path('dipy/version.py')
+        GIT_REVISION = ns['git_revision']
+        COMMIT_COUNT = ns['commit_count']
+    else:
+        GIT_REVISION = "Unknown"
+        COMMIT_COUNT = "Unknown"
+
+    if not ISRELEASED:
+        FULLVERSION += '.dev0+' + COMMIT_COUNT + '.' + GIT_REVISION
+
+    return FULLVERSION, GIT_REVISION, COMMIT_COUNT
+
+
+def write_version_py(source_root, filename='dipy/version.py'):
+    cnt = """\
+# THIS FILE IS GENERATED DURING THE DIPY BUILD
+# See tools/version_utils.py for details
+
+short_version = '%(version)s'
+version = '%(version)s'
+full_version = '%(full_version)s'
+git_revision = '%(git_revision)s'
+commit_count = '%(commit_count)s'
+release = %(isrelease)s
+
+if not release:
+    version = full_version
+"""
+    FULLVERSION, GIT_REVISION, COMMIT_COUNT = get_version_info(source_root)
+
+    a = open(filename, 'w')
+    try:
+        a.write(cnt % {'version': VERSION,
+                       'full_version': FULLVERSION,
+                       'git_revision': GIT_REVISION,
+                       'commit_count': COMMIT_COUNT,
+                       'isrelease': str(ISRELEASED)})
+    finally:
+        a.close()
+
+
+# Return the git revision as a string
+def git_version(cwd):
+    def _minimal_ext_cmd(cmd):
+        # construct minimal environment
+        env = {}
+        for k in ['SYSTEMROOT', 'PATH']:
+            v = os.environ.get(k)
+            if v is not None:
+                env[k] = v
+        # LANGUAGE is used on win32
+        env['LANGUAGE'] = 'C'
+        env['LANG'] = 'C'
+        env['LC_ALL'] = 'C'
+        out = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                               env=env, cwd=cwd).communicate()[0]
+        return out
+
+    try:
+        git_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+        git_dir = os.path.join(git_dir, ".git")
+        out = _minimal_ext_cmd(['git',
+                                '--git-dir',
+                                git_dir,
+                                'rev-parse',
+                                'HEAD'])
+        GIT_REVISION = out.strip().decode('ascii')[:7]
+
+        # We need a version number that's regularly incrementing for newer
+        # commits, so the sort order in a wheelhouse of nightly builds is
+        # correct (see https://github.com/MacPython/scipy-wheels/issues/114).
+        # It should also be a reproducible version number, so don't rely on
+        # date/time but base it on commit history. This gives the commit
+        # count since the previous branch point from the current branch
+        # (assuming a full `git clone`, it may be less if `--depth` was
+        # used - commonly the default in CI):
+        prev_version_tag = '^{}.{}.0'.format(MAJOR, MINOR - 2)
+        out = _minimal_ext_cmd(['git', '--git-dir', git_dir,
+                                'rev-list', 'HEAD', prev_version_tag,
+                                '--count'])
+        COMMIT_COUNT = out.strip().decode('ascii')
+        COMMIT_COUNT = '0' if not COMMIT_COUNT else COMMIT_COUNT
+    except OSError:
+        GIT_REVISION = "Unknown"
+        COMMIT_COUNT = "Unknown"
+
+    return GIT_REVISION, COMMIT_COUNT
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--source-root", type=str, default='.',
+                        help="Relative path to the root of the "
+                             "source directory")
+    args = parser.parse_args()
+
+    write_version_py(args.source_root)
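On a development checkout, ``get_version_info()`` appends ``.dev0+<commit-count>.<short-rev>`` to the base version. A rough standalone equivalent of what it assembles, assuming a full (non ``--depth``-limited) clone and that the ``1.6.0`` tag (``MINOR - 2``) exists as the previous branch point::

    # Illustrative reconstruction of the dev version string; run from
    # the repository root.
    import subprocess

    rev = subprocess.check_output(
        ['git', 'rev-parse', 'HEAD']).decode('ascii')[:7]
    count = subprocess.check_output(
        ['git', 'rev-list', 'HEAD', '^1.6.0',
         '--count']).decode('ascii').strip()
    print('1.8.0.dev0+{}.{}'.format(count, rev))  # e.g. 1.8.0.dev0+57.ab12cde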
diff --git a/version_helpers.py b/version_helpers.py
deleted file mode 100644
index 6d12e2a0dd..0000000000
--- a/version_helpers.py
+++ /dev/null
@@ -1,70 +0,0 @@
-""" Distutils / setuptools helpers for versioning
-
-Code transferred by Matthew Brett, who holds copyright.
-
-This version under the standard dipy BSD license.
-"""
-
-from os.path import join as pjoin
-try:
-    from ConfigParser import ConfigParser
-except ImportError:
-    from configparser import ConfigParser
-
-from setuptools.command.build_py import build_py
-
-
-def get_comrec_build(pkg_dir, build_cmd=build_py):
-    """ Return extended build command class for recording commit
-
-    The extended command tries to run git to find the current commit, getting
-    the empty string if it fails. It then writes the commit hash into a file
-    in the `pkg_dir` path, named ``COMMIT_INFO.txt``.
-
-    In due course this information can be used by the package after it is
-    installed, to tell you what commit it was installed from if known.
-
-    To make use of this system, you need a package with a COMMIT_INFO.txt file
-    - e.g. ``myproject/COMMIT_INFO.txt`` - that might well look like this::
-
-        # This is an ini file that may contain information about the code state
-        [commit hash]
-        # The line below may contain a valid hash if it has been substituted during 'git archive'
-        archive_subst_hash=$Format:%h$
-        # This line may be modified by the install process
-        install_hash=
-
-    The COMMIT_INFO file above is also designed to be used with git substitution
-    - so you probably also want a ``.gitattributes`` file in the root directory
-    of your working tree that contains something like this::
-
-        myproject/COMMIT_INFO.txt export-subst
-
-    That will cause the ``COMMIT_INFO.txt`` file to get filled in by ``git
-    archive`` - useful in case someone makes such an archive - for example with
-    via the github 'download source' button.
-
-    Although all the above will work as is, you might consider having something
-    like a ``get_info()`` function in your package to display the commit
-    information at the terminal. See the ``pkg_info.py`` module in the nipy
-    package for an example.
- """ - class MyBuildPy(build_cmd): - """ Subclass to write commit data into installation tree """ - def run(self): - build_cmd.run(self) - import subprocess - proc = subprocess.Popen('git rev-parse --short HEAD', - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - shell=True) - repo_commit, _ = proc.communicate() - # Fix for python 3 - repo_commit = str(repo_commit) - # We write the installation commit even if it's empty - cfg_parser = ConfigParser() - cfg_parser.read(pjoin(pkg_dir, 'COMMIT_INFO.txt')) - cfg_parser.set('commit hash', 'install_hash', repo_commit) - out_pth = pjoin(self.build_lib, pkg_dir, 'COMMIT_INFO.txt') - cfg_parser.write(open(out_pth, 'wt')) - return MyBuildPy