diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c4c8331..19e32772 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,8 +17,8 @@ _setup_env: &setup_kwds conda update -yq --all pip install -q --no-cache-dir -U pip - pip install -q --no-cache-dir dropbox pytest-cov reprozip - pip install -q --no-cache-dir -e ~/neurodocker + pip install -q --no-cache-dir reprozip codecov + pip install -q --no-cache-dir -e ~/neurodocker[dev] version: 2 @@ -26,6 +26,7 @@ jobs: test_docker: machine: *machine_kwds + working_directory: ~/neurodocker steps: - checkout: *checkout_kwds @@ -41,7 +42,8 @@ jobs: no_output_timeout: 360m command: | source ~/.bashrc - pytest -k 'test_docker' ~/neurodocker/neurodocker + pytest --cov -k 'test_docker' neurodocker + codecov - save_cache: key: dfs-v0-{{ .Branch }}-{{ .Revision }} when: always @@ -51,6 +53,7 @@ jobs: test_singularity: machine: *machine_kwds + working_directory: ~/neurodocker steps: - checkout: *checkout_kwds @@ -76,7 +79,8 @@ jobs: no_output_timeout: 360m command: | source ~/.bashrc - pytest -k 'test_singularity' ~/neurodocker/neurodocker + pytest --cov -k 'test_singularity' neurodocker + codecov - save_cache: key: srs-v0-{{ .Branch }}-{{ .Revision }} when: always @@ -86,6 +90,7 @@ jobs: test_others: machine: *machine_kwds + working_directory: ~/neurodocker steps: - checkout: *checkout_kwds @@ -96,7 +101,8 @@ jobs: no_output_timeout: 30m command: | source ~/.bashrc - pytest -k 'not test_docker and not test_singularity' ~/neurodocker/neurodocker + pytest --cov -k 'not test_docker and not test_singularity' neurodocker + codecov workflows: diff --git a/.dockerignore b/.dockerignore index 6e471d9a..5c1b39b5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,11 +3,17 @@ __pycache__ **/.DS_Store .DS_Store /scraper +tests +**/tests .cache +.pytest_cache .ipynb_checkpoints dockerfile_tests *.egg-info +.coverage +coverage.xml + *.ipynb *.pyc *.tar diff --git a/.gitignore b/.gitignore index f9998746..d9e6966a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,10 +2,14 @@ __pycache__ .cache .DS_Store .ipynb_checkpoints +.pytest_cache dockerfile_tests /scraper *.egg-info +.coverage +coverage.xml + *.ipynb *.pyc *.tar diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1348cafc..00000000 --- a/.travis.yml +++ /dev/null @@ -1,50 +0,0 @@ -sudo: required -dist: trusty -group: edge -services: - - docker -language: python -python: - - 3.6 -branches: - only: - - master -env: - matrix: - - INTERFACE_TO_BUILD=none - - INTERFACE_TO_BUILD=afni - - INTERFACE_TO_BUILD=ants - - INTERFACE_TO_BUILD=convert3d - - INTERFACE_TO_BUILD=dcm2niix - - INTERFACE_TO_BUILD=freesurfer - - INTERFACE_TO_BUILD=fsl - - INTERFACE_TO_BUILD=minc - - INTERFACE_TO_BUILD=miniconda - - INTERFACE_TO_BUILD=mrtrix3 - - INTERFACE_TO_BUILD=neurodebian - - INTERFACE_TO_BUILD=petpvc - - INTERFACE_TO_BUILD=spm - - global: - - secure: 
YQAc7V0h4jIVJJmY47no4OyQolMv49wqM4xbtkRB3SZE6GonoplcB0iP8olvWn0XJF9dDVXQzU+NhVTDk8sSY+tiuU0pClPMkGC1KfxD6uNDVW26nQIa6FcDy17xayOH3UxB2DSl6ZlqHwXx1Lb0hgoDluGVnz2i8mJWpZycNr/C94RIIKCEQHPV8hu30VtHJcowzv8/LoZ1wgWQeAg08azXcx9h4yfOcvSoH9fvliD3C71fvLiwDugbJ5+6dgF1Pqyq6wjwsopE9PxAanariqoMITpqHAFgaqMeyXirr/a1y4Xgnf5PB/Ci86VZgZ6KrMRez9UlpePHKA06WW5wdUeMPTFL4B38EEtV5l6RwswZHrypoL3t0WS+uLtwqYoyMzAmKf63dBZPhwzv0xHrziriOB0MNfgsUHulzayxgcHiQr0G8hPG09OIvNPNwuy4ije+GUENL8wMUZHuQ22Nw9YyPTbGWsxYRfG2GTcR7qmb+TpfPJlbVMiVg2bqN+6vbREyeU0rXSEFTNqF3WOYEUzoen8QDj4Jh8D63Ew1vc0awjxvF98O3puUjYdrooJr1nyufQdPXqAsvszHm8NnUpBGgG4eEox+VIAw1De5HhMmsRqn7LNzgTueRLRtIIN+DeaI8xV4qZMrvkdyvqgJR8elWKay/21WS69o8Wmb8KA= - - secure: NbPJOFqRvWeWps5kc/QUYN8n57w0cHut9N51uX1Y+/sU0Z5SBY/0SHTPZu2sypSNzB73RVCBonJ1nyBFtI0qzQUmPIjxa5MpGLkltsDom3x0jGGaY2/1mgWSck59PFajK7l89W1M0kB9OEEzwiq0Htrq3cLu2IKyXenLtYkhvT9A2INAh4dxuQ32tj8ksRlmH+TNd/7bj22Et+Wz/k/tTRNuttR6mrmQl74nydQhRjOzn/OgSOp6EPn+FqXkfPMMNrgLxKmGWi/+uIzllWc3uvUSMXxwcATf755odNpcuIJoXBYLiaYka4xpe/L+Yuw1PFAHFRuPVCgewFY6tejizF6++mKR31k1KTmhGyYgQB1M7J3X+karu/qgvv26K+VGX3egoLlKFCMBKqYp33fTc2F6HUKoSPO2JnvPR8yCEL6u5bmRuIY4UiDuk6xxENMLMyuoXuTDOueb67jUtwGi7gwu9Bjtk5Vi2TOKNs7I9aXQyvUyeP48VYiQuJ9jmKW4J32DwDV0FvIohv5oPK2X6w1hTDMsP57Rka8R7/DS7eiO8vqwekwzTdFGOSK1qzKkr6Dt5cVYqQpskuUKH+28Z5DJ1Q5UYuXM6UfRizMzgq4n2GwyjSVYMgxEXJwQhzx0OsUQbm3YRZ9eUxJqqMQ+G5oMD8hFpkdMxmu5JjxOW/o= - - secure: PdajcfJAm+ePQFOKcLRZDwlT0pKiTEABXs3u0R/lFfxR7Vl0TZ/R7TSfEDzDdPOODvjInqbCaIBcGXgu+JxrkkoREdmS3b997yq1HjQxFsnmWty/qJ5AHAPkIfBFHS0/cr6RU4Sgiq1jibAMvW9MnGFybyiwOGeo79bXwx55rLzJ8+XGWL1xeAfK3nvajSjK91STugeREwXlyg5CYTR+RJnCVIqQySBZNkn+LhPX0vkpMJ3Ab0ONcuhSwlv6enP0CLuc/4xaC+/6bKQ84W1iw5eTvltv4VcUnVsN1FRG+VAudlg4/qDZ6q7d9pPNxQ5HajhTdE8Gds4x2pxqQTjFCH475eMQVoSbDjjGsuixqxntY2Wx/L4UWMUAST1GsNefofyTvIYKWcN5PjQaD+bR3sRDcYe1tb8Ew01T4zu4eGJ9VhrUIrerS4L2Qatn8EDgmN6kzDzWXdDzwkuumjaC/7OaNtQ6sJ4lBgae5hbcMQlwCk1o/IuKnjzt1K8YvqDLa5DZGyJfJk+GXKOyFp1pTWD0d/blgAw3JlkFLebmL3r3heKVmDHIqhT1FeqY5jIuy0rhsOXEopwCrLHCC/0Mo0baJKmAL1a8zdQTvn9oZMfYZtK1C4RWqV6nOSjxi/RXRfz/XJcTes3v/o4mrrjkjui6kfxSF0jkth4HFGWhBhc= - -before_install: - - travis_retry sudo apt-get update -qq - - travis_retry sudo apt-get install -yq libsqlite3-dev - -install: - - travis_retry pip install -r requirements.txt - - travis_retry pip install dropbox pytest-cov reprozip -script: - - function run_tests { - if [ "$INTERFACE_TO_BUILD" == 'none' ]; then - python -m pytest -v -k "not test_build_image" --cov=./ neurodocker; - else - travis_wait 50 python -m pytest -v -k "test_build_image_$INTERFACE_TO_BUILD" --cov=./ neurodocker; - fi } - - if [ ! -z "$DOCKER_PASS" ]; then - docker login -u $DOCKER_USER -p $DOCKER_PASS; - fi - - run_tests diff --git a/Dockerfile b/Dockerfile index 021971b9..decea082 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,14 +2,15 @@ FROM alpine:3.7 LABEL maintainer="Jakub Kaczmarzyk " -COPY . /opt/neurodocker - RUN tmp_pkgs="curl gcc musl-dev python3-dev sqlite-dev" \ - && apk add --update --no-cache git python3 rsync $tmp_pkgs \ + && apk add --update --no-cache git python3 py3-yaml rsync $tmp_pkgs \ && curl -fsSL https://bootstrap.pypa.io/get-pip.py | python3 - \ && pip install --no-cache-dir reprozip \ - && pip install --no-cache-dir -e /opt/neurodocker \ - && neurodocker --help \ - && apk del $tmp_pkgs + && apk del $tmp_pkgs \ + && rm -rf /var/cache/apk/* ~/.cache/pip/* + +COPY . 
/opt/neurodocker +RUN pip install --no-cache-dir -e /opt/neurodocker \ + && neurodocker --help ENTRYPOINT ["neurodocker"] diff --git a/README.md b/README.md index e61e32bf..817b5ac9 100644 --- a/README.md +++ b/README.md @@ -1,35 +1,30 @@ # Neurodocker -[![Build Status](https://travis-ci.org/kaczmarj/neurodocker.svg?branch=master)](https://travis-ci.org/kaczmarj/neurodocker) -[![codecov](https://codecov.io/gh/kaczmarj/neurodocker/branch/master/graph/badge.svg)](https://codecov.io/gh/kaczmarj/neurodocker) +[![build status](https://img.shields.io/circleci/project/github/kaczmarj/neurodocker/master.svg)](https://circleci.com/gh/kaczmarj/neurodocker/tree/master) - -_Neurodocker_ is a Python project that generates custom Dockerfiles for neuroimaging and minifies existing Docker images (using [ReproZip](https://www.reprozip.org/)). The package can be used from the command-line or within a Python script. The command-line interface generates Dockerfiles and minifies Docker images, but interaction with the Docker Engine is left to the various `docker` commands. Within a Python script, however, _Neurodocker_ can generate Dockerfiles, build Docker images, run commands within resulting containers (using the [`docker` Python package](https://github.com/docker/docker-py)), and minify Docker images. The project is used for regression testing of [Nipype](https://github.com/nipy/nipype/) interfaces. +_Neurodocker_ is a command-line program that generates custom Dockerfiles and Singularity recipes for neuroimaging and minifies existing containers. Examples: - - [Generate Dockerfile](#generate-dockerfile) - - [Generate Dockerfile (full)](#generate-dockerfile-full) + - [Canonical examples](#canonical-examples) + - [Docker](#docker) + - [Singularity](#singularity) + - [Assorted examples](./examples) - [Minimize existing Docker image](#minimize-existing-docker-image) - [Example of minimizing Docker image for FreeSurfer recon-all](https://github.com/freesurfer/freesurfer/issues/70#issuecomment-316361886) -# Note to users - -This software is still in the early stages of development. If you come across an issue or a way to improve _Neurodocker_, please submit an issue or a pull request. - - # Installation Use the _Neurodocker_ Docker image: ``` -docker run --rm kaczmarj/neurodocker:v0.3.1 --help +docker run --rm kaczmarj/neurodocker:0.4.0 --help ``` Note: it is not yet possible to minimize Docker containers using the _Neurodocker_ Docker image. -# Supported Software +# Supported software | software | argument | description | | -------- | -------- | ----------- | @@ -79,6 +74,9 @@ Note: it is not yet possible to minimize Docker containers using the _Neurodocke | **NeuroDebian** | os_codename* | Codename of the operating system (e.g., stretch, zesty). | | | server* | Server to download NeuroDebian packages from. Choose the one closest to you. See `neurodocker generate docker --help` for the full list of servers. | | | full | If true (default), use non-free sources. If false, use libre sources. | +| **PETPVC** | version* | 1.2.2, 1.2.1, 1.2.0-b, 1.2.0-a, 1.1.0, 1.0.0 | +| | method | binaries (default) | +| | install_path | Installation path. Default `/opt/petpvc-{version}`. | | **SPM12** | version* | r7219, r6914, r6685, r6472, r6225 | | | install_path | Installation path. Default `/opt/spm12-{version}`. 
|
 | | | _Note: Matlab Compiler Runtime is installed when SPM12 is installed._ |
@@ -92,14 +90,28 @@ Note: it is not yet possible to minimize Docker containers using the _Neurodocke
 
 Please see the [examples](examples) directory.
 
-## Canonical example
+## Canonical examples
+
+The canonical examples install ANTs version 2.2.0 on Ubuntu 18.04.
 
-Generate a Dockerfile which will install ANTs on Ubuntu 17.04. The result can be piped to `docker build` to build the Docker image.
+### Docker
 
 ```shell
-docker run --rm kaczmarj/neurodocker:v0.3.2 generate -b ubuntu:17.04 -p apt --ants version=2.2.0
+$ docker run --rm kaczmarj/neurodocker:0.4.0 generate \
+    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0
 
-docker run --rm kaczmarj/neurodocker:v0.3.2 generate -b ubuntu:17.04 -p apt --ants version=2.2.0 | docker build -
+# Build image by piping Dockerfile to `docker build`
+$ docker run --rm kaczmarj/neurodocker:0.4.0 generate \
+    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0 | docker build -
+```
+
+### Singularity
+
+Install ANTs on Ubuntu 18.04.
+
+```shell
+$ docker run --rm kaczmarj/neurodocker:0.4.0 generate singularity \
+    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0
 ```
 
 
@@ -117,18 +129,17 @@ In the following example, a Docker image is built with ANTs version 2.2.0 and a
 
 ```shell
 # Create a Docker image with ANTs, and download a functional scan.
-download_cmd="RUN curl -sSL -o /home/func.nii.gz http://psydata.ovgu.de/studyforrest/phase2/sub-01/ses-movie/func/sub-01_ses-movie_task-movie_run-1_bold.nii.gz"
-neurodocker generate -b centos:7 -p yum --ants version=2.2.0 --instruction="$download_cmd" | docker build -t ants:2.2.0 -
+$ download_cmd="curl -sSL -o /home/func.nii.gz http://psydata.ovgu.de/studyforrest/phase2/sub-01/ses-movie/func/sub-01_ses-movie_task-movie_run-1_bold.nii.gz"
+$ neurodocker generate docker -b centos:7 -p yum --ants version=2.2.0 --run="$download_cmd" | docker build -t ants:2.2.0 -
 
 # Run the container.
-docker run --rm -it --name ants-reprozip-container --security-opt=seccomp:unconfined ants:2.2.0
+$ docker run --rm -itd --name ants-reprozip-container --security-opt=seccomp:unconfined ants:2.2.0
 
-# (in a new terminal window)
 # Output a ReproZip pack file in ~/neurodocker-reprozip-output with the files
 # necessary to run antsMotionCorr.
 # See https://github.com/stnava/ANTs/blob/master/Scripts/antsMotionCorrExample
-cmd="antsMotionCorr -d 3 -a /home/func.nii.gz -o /home/func_avg.nii.gz"
-neurodocker reprozip-trace ants-reprozip-container "$cmd"
-
-reprounzip docker setup neurodocker-reprozip.rpz test
+$ cmd="antsMotionCorr -d 3 -a /home/func.nii.gz -o /home/func_avg.nii.gz"
+$ neurodocker reprozip-trace ants-reprozip-container "$cmd"
+# Create a Docker container with the contents of ReproZip's trace.
+$ reprounzip docker setup neurodocker-reprozip.rpz test
 ```
diff --git a/neurodocker/generators/common.py b/neurodocker/generators/common.py
index 65d1043d..19e411a6 100644
--- a/neurodocker/generators/common.py
+++ b/neurodocker/generators/common.py
@@ -77,13 +77,13 @@ def clear_memory(cls):
 
 def _get_json_spec_str(specs):
     """Return instruction to write out specs dictionary to JSON file."""
-    json_specs = json.dumps(specs, indent=2)
-    json_specs = json_specs.replace('\\n', '__TO_REPLACE_NEWLINE__')
-    json_specs = "\n\\n".join(json_specs.split("\n"))
+    js = json.dumps(specs, indent=2)
+    js = js.replace('\\n', '__TO_REPLACE_NEWLINE__')
+    js = "\n\\n".join(js.split("\n"))
     # Escape newline characters that the user provided.
- json_specs = json_specs.replace('__TO_REPLACE_NEWLINE__', '\\\\n') + js = js.replace('__TO_REPLACE_NEWLINE__', '\\\\n') # Workaround to escape single quotes in a single-quoted string. # https://stackoverflow.com/a/1250279/5666087 - json_specs = json_specs.replace("'", """'"'"'""") - cmd = "echo '{string}' > {path}".format(string=json_specs, path=SPEC_FILE) + js = js.replace("'", """'"'"'""") + cmd = "echo '{string}' > {path}".format(string=js, path=SPEC_FILE) return cmd diff --git a/neurodocker/generators/singularity.py b/neurodocker/generators/singularity.py index 6b3efefc..8a2eeda7 100644 --- a/neurodocker/generators/singularity.py +++ b/neurodocker/generators/singularity.py @@ -4,6 +4,8 @@ import copy import inspect +from neurodocker.generators.common import _add_to_entrypoint +from neurodocker.generators.common import _get_json_spec_str from neurodocker.generators.common import _installation_implementations from neurodocker.generators.common import _install from neurodocker.generators.common import _Users @@ -16,7 +18,7 @@ def __init__(self, singularity_recipe_object): self._singobj = singularity_recipe_object def add_to_entrypoint(self, cmd): - self._singobj._runscript.insert(0, cmd) + self._singobj._post.append(_add_to_entrypoint(cmd)) def base(self, base): if base.startswith('docker://'): @@ -39,11 +41,14 @@ def install(self, pkgs, pkg_manager, opts=None): self._singobj._post.append(_install(pkgs, pkg_manager)) def entrypoint(self, entrypoint): - self._singobj._runscript.append(entrypoint) + self._singobj._runscript = entrypoint def env(self, d): self._singobj._environment.update(**d) + def run(self, s): + self._singobj._post.append(s) + def user(self, user): user_cmd = "su - {}".format(user) add_user_cmd = _Users.add(user) @@ -53,6 +58,9 @@ def user(self, user): cmd = user_cmd self._singobj._post.append(cmd) + def workdir(self, path): + self._singobj._post.append("cd {}".format(path)) + class SingularityRecipe: @@ -65,7 +73,7 @@ def __init__(self, specs): self._post = [] self._environment = OrderedDict() self._files = [] - self._runscript = ['/neurodocker/startup.sh "$@"'] + self._runscript = '/neurodocker/startup.sh "$@"' self._test = [] self._labels = [] @@ -89,6 +97,7 @@ def __init__(self, specs): self._parts_filled = False _Users.clear_memory() self._add_neurodocker_header() + self._add_json() def render(self): def _render_one(section): @@ -125,7 +134,7 @@ def _render_files(self): + "\n".join("{} {}".format(*f) for f in self._files)) def _render_runscript(self): - return "%runscript\n" + "\n".join(self._runscript) + return "%runscript\n" + self._runscript def _render_test(self): return "%test\n" + "\n".join(self._test) @@ -160,3 +169,7 @@ def _fill_parts(self): if not self._runscript: self._runscript.append(NEURODOCKER_ENTRYPOINT) self._parts_filled = True + + def _add_json(self): + jsonstr = _get_json_spec_str(self._specs) + self._specs['instructions'].append(("run", jsonstr)) diff --git a/neurodocker/interfaces/tests/test_petpvc.py b/neurodocker/interfaces/tests/test_petpvc.py index e226ab8b..028eea94 100644 --- a/neurodocker/interfaces/tests/test_petpvc.py +++ b/neurodocker/interfaces/tests/test_petpvc.py @@ -1,22 +1,19 @@ """Tests for neurodocker.interfaces.PETPVC""" # Author: Sulantha Mathotaarachchi -import pytest - from neurodocker.interfaces.tests import utils class TestPETPVC(object): """Tests for PETPVC class.""" - @pytest.mark.skip("petpvc not implemented yet") def test_docker(self): """Install PETPVC binaries on Ubuntu Xenial.""" specs = { 'pkg_manager': 'apt', 
'instructions': [ ('base', 'ubuntu:xenial'), - ('petpvc', {'version': '1.2.0-b'}), + ('petpvc', {'version': '1.2.2'}), ('user', 'neuro'), ] } @@ -25,13 +22,12 @@ def test_docker(self): utils.test_docker_container_from_specs( specs=specs, bash_test_file=bash_test_file) - @pytest.mark.skip("petpvc not implemented yet") def test_singularity(self): specs = { 'pkg_manager': 'apt', 'instructions': [ ('base', 'docker://ubuntu:xenial'), - ('petpvc', {'version': '1.2.0-b'}), + ('petpvc', {'version': '1.2.2'}), ('user', 'neuro'), ] } diff --git a/neurodocker/interfaces/tests/test_petpvc.sh b/neurodocker/interfaces/tests/test_petpvc.sh index a748d658..e36515ed 100644 --- a/neurodocker/interfaces/tests/test_petpvc.sh +++ b/neurodocker/interfaces/tests/test_petpvc.sh @@ -2,6 +2,6 @@ set -ex -ls /opt/petpvc/bin/petpvc +which petpvc printf 'passed' diff --git a/neurodocker/neurodocker.py b/neurodocker/neurodocker.py index 01287730..5f5adcf5 100644 --- a/neurodocker/neurodocker.py +++ b/neurodocker/neurodocker.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 -""" -Neurodocker command-line interface to generate Dockerfiles and minify -existing containers. +"""Neurodocker is a command-line interface to generate custom Dockerfiles and +Singularity recipes. For help generating Dockerfiles and Singularity recipes, run @@ -46,60 +45,46 @@ def _add_generate_common_arguments(parser): p.add_argument("-b", "--base", help="Base Docker image. Eg, ubuntu:17.04") p.add_argument( "-p", "--pkg-manager", choices={'apt', 'yum'}, - help="Linux package manager." - ) + help="Linux package manager.") p.add_argument( '--add-to-entrypoint', action=OrderedArgs, help=("Add a command to the file /neurodocker/startup.sh, which is the" - " container's default entrypoint.") - ) + " container's default entrypoint.")) p.add_argument( '--copy', action=OrderedArgs, nargs="+", - help="Copy files into container. Use format ... " - ) + help="Copy files into container. Use format ... ") p.add_argument( '--install', action=OrderedArgs, nargs="+", help=("Install system packages with apt-get or yum, depending on the" - " package manager specified.") - ) + " package manager specified.")) p.add_argument( '--entrypoint', action=OrderedArgs, - help=( - "Set the container's entrypoint (Docker) / append to runscript" - " (Singularity)" - ) - ) + help="Set the container's entrypoint (Docker) / append to runscript" + " (Singularity)") p.add_argument( '-e', '--env', action=OrderedArgs, nargs="+", type=_list_of_kv, - help="Set environment variable(s). Use the format KEY=VALUE" - ) + help="Set environment variable(s). Use the format KEY=VALUE") p.add_argument( '-r', '--run', action=OrderedArgs, - help="Run a command when building container" - ) + help="Run a command when building container") p.add_argument( '-u', '--user', action=OrderedArgs, - help="Switch current user (creates user if necessary)" - ) + help="Switch current user (creates user if necessary)") p.add_argument( - '-w', '--workdir', action=OrderedArgs, help="Set working directory" - ) + '-w', '--workdir', action=OrderedArgs, help="Set working directory") # To generate from file. p.add_argument( '-f', '--file', dest='file', - help="Generate file from JSON. Overrides other `generate` arguments" - ) + help="Generate file from JSON. Overrides other `generate` arguments") # Other arguments (no order). p.add_argument( '-o', '--output', dest="output", - help="If specified, save Dockerfile to file with this name." 
- ) + help="If specified, save Dockerfile to file with this name.") p.add_argument( '--no-print', dest='no_print', action="store_true", - help="Do not print the generated file" - ) + help="Do not print the generated file") _ndeb_servers = ", ".join( _installation_implementations['neurodebian']._servers.keys() @@ -107,74 +92,49 @@ def _add_generate_common_arguments(parser): # Software package options. pkgs_help = { - "all": ( - "Install software packages. Each argument takes a list of" - " key=value pairs. Where applicable, the default installation" - " behavior is to install by downloading and uncompressing" - " binaries." - ), - "afni": ( - "Install AFNI. Valid keys are version (required), install_r," - " install_python2, and install_python3. Only the latest" - " version and version 17.2.02 are supported at this time." - ), - "ants": ( - "Install ANTs. Valid keys are version (required), use_binaries" - " (default true), and git_hash. If use_binaries=true, installs" - " pre-compiled binaries; if use_binaries=false, builds ANTs from" - " source. If git_hash is specified, build from source from that" - " commit." - ), - "convert3d": ( - "Install Convert3D. The only valid key is version (required)." - ), - "dcm2niix": ( - "Install dcm2niix. The only valid key is version (required)." - ), - "freesurfer": ( - "Install FreeSurfer. Valid keys are version (required)," - " license_path (relative path to license), min (if true, install" - " binaries minimized for recon-all) and use_binaries (default true" - "). A FreeSurfer license is required to run the software and is" - " not provided by Neurodocker." - ), - "fsl": ( - "Install FSL. Valid keys are version (required), use_binaries" - " (default true) and use_installer." - ), - "matlabmcr": ( - "Install Matlab Compiler Runtime." - ), - "miniconda": ( - "Install Miniconda. Valid keys are env_name (required)," - " conda_install, pip_install, conda_opts, pip_opts, activate" - " (default false) and miniconda_version (defaults to latest). The" - " options conda_install and pip_install accept strings of" - ' packages: conda_install="python=3.6 numpy traits".' - ), - "mrtrix3": ( - "Install MRtrix3. Valid keys are use_binaries (default true) and" - " git_hash. If git_hash is specified and use_binaries is false," - " will checkout to that commit before building." - ), - "neurodebian": ( - "Add NeuroDebian repository and optionally install NeuroDebian" - " packages. Valid keys are os_codename (required; e.g., 'zesty')," - " download_server (required), full (if true, default, use non-free" - " packages), and pkgs (list of packages to install). Valid" - " download servers are {}.".format(_ndeb_servers) - ), - "spm12": ( - "Install SPM (and its dependency, Matlab Compiler Runtime). Valid" - " keys are version and matlab_version." - ), - "minc": ( - "Install MINC. Valid keys is version (required). Only version" - " 1.9.15 is supported at this time." - ), - "petpvc": ( - "Install PETPVC. Valid keys are version (required)." - ), + "all": "Install software packages. Each argument takes a list of" + " key=value pairs. Where applicable, the default installation" + " behavior is to install by downloading and uncompressing" + " binaries. Some programs can be built from source.", + "afni": "Install AFNI. Valid keys are version (required), method," + " install_path, install_r, install_r_pkgs, install_python2," + " and install_python3. Only the latest version and version" + " 17.2.02 are supported at this time.", + "ants": "Install ANTs. 
Valid keys are version (required), method,"
+                " install_path, cmake_opts, and make_opts. Version can be a"
+                " git commit hash if building from source.",
+        "convert3d": "Install Convert3D. Valid keys are version (required),"
+                     " method, and install_path.",
+        "dcm2niix": "Install dcm2niix. Valid keys are version, method,"
+                    " install_path, cmake_opts, and make_opts.",
+        "freesurfer": "Install FreeSurfer. Valid keys are version (required),"
+                      " method, install_path, exclude_paths, and license_path"
+                      " (relative path to license). A FreeSurfer license is"
+                      " required to run the software and is not provided by"
+                      " Neurodocker.",
+        "fsl": "Install FSL. Valid keys are version (required), method, and"
+               " install_path.",
+        "matlabmcr": "Install Matlab Compiler Runtime. Valid keys are version,"
+                     " method, and install_path.",
+        "miniconda": "Install Miniconda. Valid keys are install_path,"
+                     " env_name, conda_install, pip_install, conda_opts,"
+                     " pip_opts, activate (default false), and version"
+                     " (defaults to latest). The options conda_install and"
+                     " pip_install accept strings of packages: conda_install="
+                     '"python=3.6 numpy traits".',
+        "mrtrix3": "Install MRtrix3. Valid keys are version (required),"
+                   " method, and install_path.",
+        "neurodebian": "Add NeuroDebian repository. Valid keys are "
+                       "os_codename (eg zesty), server (eg usa-nh), and full"
+                       " (if true, use non-free packages). Valid download"
+                       " servers are {}.".format(_ndeb_servers),
+        "spm12": "Install SPM12 and its dependency, Matlab Compiler Runtime."
+                 " Valid keys are version and install_path.",
+        "minc": "Install MINC. Valid keys are version (required), method, and"
+                " install_path. Only version 1.9.15 is supported at this"
+                " time.",
+        "petpvc": "Install PETPVC. Valid keys are version (required), method,"
+                  " and install_path."
     }
 
     pkgs = p.add_argument_group(
@@ -200,43 +160,30 @@ def _add_generate_docker_arguments(parser):
     # Arguments that should be ordered.
     p.add_argument(
         '--add', action=OrderedArgs, nargs="+",
-        help="Dockerfile ADD instruction. Use format ... "
-    )
+        help="Dockerfile ADD instruction. Use format ... ")
     p.add_argument(
         '--arg', action=OrderedArgs, nargs="+", type=_list_of_kv,
-        help="Dockerfile ARG instruction. Use format KEY[=DEFAULT_VALUE] ...",
-    )
+        help="Dockerfile ARG instruction. Use format KEY[=DEFAULT_VALUE] ...")
    p.add_argument(
         '--cmd', action=OrderedArgs, nargs="+",
-        help="Dockerfile CMD instruction."
-    )
+        help="Dockerfile CMD instruction.")
     p.add_argument(
         '--expose', nargs="+", action=OrderedArgs,
-        help="Dockerfile EXPOSE instruction."
-    )
-    p.add_argument(
-        '--instruction', action=OrderedArgs,
-        help="Arbitrary text to write to Dockerfile."
-    )
+        help="Dockerfile EXPOSE instruction.")
     p.add_argument(
         '--label', action=OrderedArgs, nargs="+", type=_list_of_kv,
-        help="Dockerfile LABEL instruction."
-    )
+        help="Dockerfile LABEL instruction.")
     p.add_argument(
         '--run-bash', action=OrderedArgs,
-        help="Run BASH code in RUN instruction."
-    )
+        help="Run BASH code in RUN instruction.")
     p.add_argument(
         '--volume', action=OrderedArgs, nargs="+",
-        help="Dockerfile VOLUME instruction."
- ) + help="Dockerfile VOLUME instruction.") def _add_generate_singularity_arguments(parser): """Add arguments to `parser` for sub-command `generate singularity`.""" - p = parser - - # p.add_argument('--add-to-entrypoint', help=) + pass def _add_reprozip_trace_arguments(parser): diff --git a/neurodocker/templates/freesurfer.yaml b/neurodocker/templates/freesurfer.yaml index 09391f6a..174b62b2 100644 --- a/neurodocker/templates/freesurfer.yaml +++ b/neurodocker/templates/freesurfer.yaml @@ -9,9 +9,10 @@ generic: binaries: urls: - "6.0.0": foobar + "6.0.1": ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz + "6.0.0": ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.0/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.0.tar.gz # See https://github.com/freesurfer/freesurfer/issues/70 - "6.0.0-min": "https://dl.dropbox.com/s/nnzcfttc41qvt31/recon-all-freesurfer6-3.min.tgz" + "6.0.0-min": https://dl.dropbox.com/s/nnzcfttc41qvt31/recon-all-freesurfer6-3.min.tgz dependencies: apt: bc libgomp1 libxmu6 libxt6 tcsh perl yum: bc libgomp libXmu libXt tcsh perl diff --git a/neurodocker/templates/minc.yaml b/neurodocker/templates/minc.yaml index 63a9e2fe..2c436fb9 100644 --- a/neurodocker/templates/minc.yaml +++ b/neurodocker/templates/minc.yaml @@ -6,8 +6,6 @@ # Dockerfile: # https://github.com/BIC-MNI/build_packages/blob/master/build_centos_6.9_x64/Dockerfile -# TODO: Replace mni urls with string formatting keys. - generic: binaries: urls: diff --git a/neurodocker/templates/petpvc.yaml b/neurodocker/templates/petpvc.yaml new file mode 100644 index 00000000..e8aba4e1 --- /dev/null +++ b/neurodocker/templates/petpvc.yaml @@ -0,0 +1,20 @@ +# Instructions to install PETPVC. +# +# Repository: https://github.com/UCL/PETPVC + +generic: + binaries: + urls: + "1.2.2": https://github.com/UCL/PETPVC/releases/download/v1.2.2/PETPVC-1.2.2-Linux.tar.gz + "1.2.1": https://github.com/UCL/PETPVC/releases/download/v1.2.1/PETPVC-1.2.1-Linux.tar.gz + "1.2.0-b": https://github.com/UCL/PETPVC/releases/download/v1.2.0-b/PETPVC-1.2.0-b-Linux.tar.gz + "1.2.0-a": https://github.com/UCL/PETPVC/releases/download/v1.2.0-a/PETPVC-1.2.0-a-Linux.tar.gz + "1.1.0": https://github.com/UCL/PETPVC/releases/download/v1.1.0/PETPVC-1.1.0-Linux.tar.gz + "1.0.0": https://github.com/UCL/PETPVC/releases/download/v1.0.0/PETPVC-1.0.0-Linux.tar.gz + env: + PATH: "{{ petpvc.install_path }}/bin:$PATH" + instructions: | + echo "Downloading PETPVC ..." 
+ mkdir -p {{ petpvc.install_path }} + curl {{ petpvc.curl_opts }} {{ petpvc.binaries_url }} \ + | tar -xz -C {{ petpvc.install_path }} --strip-components 1 diff --git a/neurodocker/tests/test_neurodocker.py b/neurodocker/tests/test_neurodocker.py index 5f6ba65e..40f5bccd 100644 --- a/neurodocker/tests/test_neurodocker.py +++ b/neurodocker/tests/test_neurodocker.py @@ -83,13 +83,13 @@ def test_generate_opts(capsys): def test_generate_from_json(capsys, tmpdir): import json - cmd = "generate docker -b debian:stretch -p apt --c3d version=1.0.0" + cmd = "generate docker -b debian:stretch -p apt --convert3d version=1.0.0" main(cmd.split()) true, _ = capsys.readouterr() specs = {'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], - ['c3d', {'version': '1.0.0'}]], + ['convert3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs) diff --git a/neurodocker/version.py b/neurodocker/version.py index 97d813f6..b1d232e2 100644 --- a/neurodocker/version.py +++ b/neurodocker/version.py @@ -3,7 +3,7 @@ Copied from https://github.com/nipy/nipype/blob/master/nipype/info.py. """ -__version__ = '0.3.2' +__version__ = '0.4.0.dev0' def get_gitversion(): @@ -17,22 +17,18 @@ def get_gitversion(): import os import subprocess - here = os.path.dirname(os.path.realpath(__file__)) - + here = os.path.abspath(os.path.dirname(__file__)) try: - cmd = 'git describe'.split() - stdout, stderr = subprocess.Popen(cmd, cwd=here, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE).communicate() - ver = stdout.decode().strip() - except Exception: - ver = None - - return ver - - -gitversion = get_gitversion() -if gitversion: - __version__ = gitversion - if gitversion.startswith('v'): - __version__ = __version__[1:] + cmd = "git describe" + return subprocess.check_output(cmd.split(), cwd=here).decode().strip() + except subprocess.CalledProcessError: + return None + + +# Only append git hash if this is not a release. 
+if 'dev' in __version__:
+    gitversion = get_gitversion()  # v0.3.2-183-gea5425b
+    if gitversion is not None:
+        __version__ = gitversion
+        if gitversion.startswith('v'):
+            __version__ = __version__[1:]
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 2dd2efdc..7dcab461 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1 +1,3 @@
-pytest>=3.0
+docker>=3.0
+pytest-cov>=2.0
+singularity>=1.0
diff --git a/requirements.txt b/requirements.txt
index 59cc433d..ef3e6ca5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,2 @@
-docker>=3.0
 jinja2>=2.0
 PyYAML>=3.0
diff --git a/setup.py b/setup.py
index f2552af0..ff1a6b8c 100755
--- a/setup.py
+++ b/setup.py
@@ -1,33 +1,74 @@
-#!/usr/bin/env python
-
 import os
-from setuptools import find_packages, setup
+import re
+from setuptools import find_packages
+from setuptools import setup
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def read(*parts):
+    with open(os.path.join(here, *parts), 'r') as fp:
+        return fp.read()
+
+
+def find_version(*file_paths):
+    version_file = read(*file_paths)
+    version_match = re.search(
+        r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError("Unable to find version string.")
 
 
 def main():
-    here = os.path.dirname(os.path.realpath(__file__))
+    """Main setup function."""
+
+    with open(os.path.join(here, 'README.md'), encoding='utf-8') as fp:
+        long_description = fp.read()
 
     # https://github.com/nipy/nipype/blob/master/setup.py#L114-L120
-    ldict = locals()
-    version_file = os.path.join(here, 'neurodocker', 'version.py')
-    with open(version_file) as fp:
-        exec(fp.read(), globals(), ldict)
+    # ldict = locals()
+    # with open(os.path.join(here, 'neurodocker', 'version.py')) as fp:
+    #     exec(fp.read(), globals(), ldict)
 
-    reqs_file = os.path.join(here, 'requirements.txt')
-    with open(reqs_file) as fp:
+    with open(os.path.join(here, 'requirements.txt')) as fp:
         requirements = [r.strip() for r in fp.readlines()]
 
-    setup(name='neurodocker',
-          version=ldict['__version__'],
-          url='https://github.com/kaczmarj/neurodocker',
-          author='Jakub Kaczmarzyk',
-          author_email='jakubk@mit.edu',
-          license='Apache License, 2.0',
-          packages=find_packages(),
-          install_requires = requirements,
-          entry_points={'console_scripts':
-                        ['neurodocker=neurodocker.neurodocker:main']}
-          )
+    with open(os.path.join(here, 'requirements-dev.txt')) as fp:
+        requirements_dev = [r.strip() for r in fp.readlines()]
+
+    setup(
+        name="neurodocker",
+        version=find_version("neurodocker", "version.py"),
+        license="Apache License, 2.0",
+        description="Create custom containers for neuroimaging",
+        long_description=long_description,
+        long_description_content_type='text/markdown',
+        url="https://github.com/kaczmarj/neurodocker",
+        author="Jakub Kaczmarzyk",
+        author_email="jakubk@mit.edu",
+        classifiers=[
+            'Development Status :: 4 - Beta',
+            'Intended Audience :: Developers',
+            'Intended Audience :: Science/Research',
+            'Programming Language :: Python :: 3',
+            'Programming Language :: Python :: 3.5',
+            'Programming Language :: Python :: 3.6',
+        ],
+        keywords='containers, neuroimaging, reproducibility, research',
+        packages=find_packages(exclude=["tests"]),
+        install_requires=requirements,
+        entry_points={
+            "console_scripts": [
+                "neurodocker=neurodocker.neurodocker:main"
+            ],
+        },
+        python_requires='>=3.5',
+        extras_require={
+            'dev': requirements_dev,
+        },
+    )
+
 
 if __name__ == '__main__':
     main()
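
The new `dev` extra in `setup.py`, the slimmed-down `requirements.txt`, and the coverage-enabled CircleCI jobs fit together as a local workflow roughly like the sketch below. The clone location and the `codecov` upload (which needs a token when run outside CI) are illustrative, not prescribed by the patch.

```shell
# Editable install plus the new "dev" extras (docker, pytest-cov, singularity)
# pulled in from requirements-dev.txt via extras_require in setup.py.
git clone https://github.com/kaczmarj/neurodocker ~/neurodocker
pip install --no-cache-dir -e ~/neurodocker[dev]

# Same invocation as the new "test_others" CircleCI job: run everything except
# the Docker and Singularity build tests, collecting coverage for codecov.
cd ~/neurodocker
pytest --cov -k 'not test_docker and not test_singularity' neurodocker
codecov
```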
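For the PETPVC template added in `neurodocker/templates/petpvc.yaml`, a command along these lines mirrors what `test_petpvc.py` builds; the `petpvc:1.2.2` image tag is only illustrative.

```shell
# Generate a Dockerfile that installs the PETPVC 1.2.2 binaries (the default
# method) on Ubuntu Xenial, then build it.
docker run --rm kaczmarj/neurodocker:0.4.0 generate docker \
    --base ubuntu:xenial --pkg-manager apt --petpvc version=1.2.2 \
    | docker build -t petpvc:1.2.2 -

# The template prepends the install path's bin/ directory to PATH, so the
# updated test script can locate the executable with `which petpvc`.
```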