From 49aaee90e3aff1eb628912ca130f51e364be646c Mon Sep 17 00:00:00 2001 From: Jakub Kaczmarzyk Date: Thu, 26 Apr 2018 09:08:47 -0400 Subject: [PATCH] Revert "REF+REWRITE: use yaml templates + generate singularity recipes (#157)" (#158) This reverts commit 89b19df7f54b5e6d5a3bd62fe966bb7700199f4f. --- .circleci/config.yml | 114 ---- .dockerignore | 12 - .gitignore | 5 - .travis.yml | 50 ++ Dockerfile | 18 +- LICENSE | 2 +- Makefile | 5 - README.md | 137 +++-- neurodocker/__init__.py | 14 +- neurodocker/docker/__init__.py | 5 + neurodocker/docker/docker.py | 332 ++++++++++++ neurodocker/docker/tests/test_docker.py | 167 ++++++ neurodocker/generate.py | 491 ++++++++++++++++++ neurodocker/generators/__init__.py | 4 - neurodocker/generators/common.py | 89 ---- neurodocker/generators/docker.py | 319 ------------ neurodocker/generators/singularity.py | 176 ------- neurodocker/interfaces/__init__.py | 18 +- neurodocker/interfaces/_base.py | 318 ------------ neurodocker/interfaces/afni.py | 248 +++++++++ neurodocker/interfaces/ants.py | 154 ++++++ neurodocker/interfaces/convert3d.py | 73 +++ neurodocker/interfaces/dcm2niix.py | 71 +++ neurodocker/interfaces/freesurfer.py | 224 ++++++++ neurodocker/interfaces/fsl.py | 214 ++++++++ neurodocker/interfaces/interfaces.py | 286 ---------- neurodocker/interfaces/minc.py | 121 +++++ neurodocker/interfaces/miniconda.py | 221 ++++++++ neurodocker/interfaces/mrtrix.py | 109 ++++ neurodocker/interfaces/neurodebian.py | 113 ++++ neurodocker/interfaces/petpvc.py | 90 ++++ neurodocker/interfaces/spm.py | 162 ++++++ neurodocker/interfaces/tests/memory.py | 131 +++++ neurodocker/interfaces/tests/test_afni.py | 51 +- neurodocker/interfaces/tests/test_afni.sh | 5 +- neurodocker/interfaces/tests/test_ants.py | 66 ++- neurodocker/interfaces/tests/test_ants.sh | 5 +- .../interfaces/tests/test_convert3d.py | 53 +- .../interfaces/tests/test_convert3d.sh | 5 +- neurodocker/interfaces/tests/test_dcm2niix.py | 53 +- neurodocker/interfaces/tests/test_dcm2niix.sh | 5 +- .../interfaces/tests/test_freesurfer.py | 66 ++- .../interfaces/tests/test_freesurfer.sh | 7 +- neurodocker/interfaces/tests/test_fsl.py | 74 ++- neurodocker/interfaces/tests/test_fsl.sh | 5 +- .../interfaces/tests/test_matlabmcr.py | 1 - neurodocker/interfaces/tests/test_minc.py | 70 ++- neurodocker/interfaces/tests/test_minc.sh | 7 +- .../interfaces/tests/test_miniconda.py | 79 ++- .../interfaces/tests/test_miniconda.sh | 11 +- neurodocker/interfaces/tests/test_mrtrix.py | 68 ++- neurodocker/interfaces/tests/test_mrtrix.sh | 5 +- .../interfaces/tests/test_neurodebian.py | 71 +-- .../interfaces/tests/test_neurodebian.sh | 5 +- neurodocker/interfaces/tests/test_petpvc.py | 50 +- neurodocker/interfaces/tests/test_petpvc.sh | 7 +- neurodocker/interfaces/tests/test_spm.py | 30 ++ .../tests/{test_spm12.sh => test_spm.sh} | 5 +- neurodocker/interfaces/tests/test_spm12.py | 34 -- neurodocker/interfaces/tests/utils.py | 276 +++++----- neurodocker/neurodocker.py | 382 ++++++-------- neurodocker/parser.py | 28 +- neurodocker/reprozip/merge.py | 4 + neurodocker/reprozip/tests/test_merge.py | 17 +- neurodocker/reprozip/tests/test_trace.py | 21 +- neurodocker/reprozip/trace.py | 115 +--- .../reprozip/utils/reprozip_trace_runner.sh | 27 +- neurodocker/templates/README.md | 47 -- neurodocker/templates/_header.yaml | 30 -- neurodocker/templates/afni.yaml | 81 --- neurodocker/templates/ants.yaml | 48 -- neurodocker/templates/convert3d.yaml | 19 - neurodocker/templates/dcm2niix.yaml | 21 - neurodocker/templates/freesurfer.yaml | 
32 -- neurodocker/templates/fsl.yaml | 37 -- neurodocker/templates/matlabmcr.yaml | 41 -- neurodocker/templates/minc.yaml | 34 -- neurodocker/templates/miniconda.yaml | 56 -- neurodocker/templates/mrtrix3.yaml | 19 - neurodocker/templates/neurodebian.yaml | 13 - neurodocker/templates/petpvc.yaml | 20 - neurodocker/templates/spm12.yaml | 26 - neurodocker/tests/test_generate.py | 256 +++++++++ neurodocker/tests/test_neurodocker.py | 95 ++-- neurodocker/tests/test_utils.py | 45 ++ neurodocker/tests/test_version.py | 20 + neurodocker/utils.py | 175 +++++-- neurodocker/version.py | 34 +- requirements-dev.txt | 3 - requirements.txt | 4 +- setup.py | 83 +-- 91 files changed, 4428 insertions(+), 3012 deletions(-) delete mode 100644 .circleci/config.yml create mode 100644 .travis.yml delete mode 100644 Makefile create mode 100644 neurodocker/docker/__init__.py create mode 100644 neurodocker/docker/docker.py create mode 100644 neurodocker/docker/tests/test_docker.py create mode 100644 neurodocker/generate.py delete mode 100644 neurodocker/generators/__init__.py delete mode 100644 neurodocker/generators/common.py delete mode 100644 neurodocker/generators/docker.py delete mode 100644 neurodocker/generators/singularity.py delete mode 100644 neurodocker/interfaces/_base.py create mode 100644 neurodocker/interfaces/afni.py create mode 100644 neurodocker/interfaces/ants.py create mode 100644 neurodocker/interfaces/convert3d.py create mode 100644 neurodocker/interfaces/dcm2niix.py create mode 100644 neurodocker/interfaces/freesurfer.py create mode 100644 neurodocker/interfaces/fsl.py delete mode 100644 neurodocker/interfaces/interfaces.py create mode 100644 neurodocker/interfaces/minc.py create mode 100644 neurodocker/interfaces/miniconda.py create mode 100644 neurodocker/interfaces/mrtrix.py create mode 100644 neurodocker/interfaces/neurodebian.py create mode 100644 neurodocker/interfaces/petpvc.py create mode 100644 neurodocker/interfaces/spm.py create mode 100644 neurodocker/interfaces/tests/memory.py delete mode 100644 neurodocker/interfaces/tests/test_matlabmcr.py mode change 100644 => 100755 neurodocker/interfaces/tests/test_minc.sh mode change 100644 => 100755 neurodocker/interfaces/tests/test_petpvc.sh create mode 100644 neurodocker/interfaces/tests/test_spm.py rename neurodocker/interfaces/tests/{test_spm12.sh => test_spm.sh} (76%) delete mode 100644 neurodocker/interfaces/tests/test_spm12.py delete mode 100644 neurodocker/templates/README.md delete mode 100644 neurodocker/templates/_header.yaml delete mode 100644 neurodocker/templates/afni.yaml delete mode 100644 neurodocker/templates/ants.yaml delete mode 100644 neurodocker/templates/convert3d.yaml delete mode 100644 neurodocker/templates/dcm2niix.yaml delete mode 100644 neurodocker/templates/freesurfer.yaml delete mode 100644 neurodocker/templates/fsl.yaml delete mode 100644 neurodocker/templates/matlabmcr.yaml delete mode 100644 neurodocker/templates/minc.yaml delete mode 100644 neurodocker/templates/miniconda.yaml delete mode 100644 neurodocker/templates/mrtrix3.yaml delete mode 100644 neurodocker/templates/neurodebian.yaml delete mode 100644 neurodocker/templates/petpvc.yaml delete mode 100644 neurodocker/templates/spm12.yaml create mode 100644 neurodocker/tests/test_generate.py create mode 100644 neurodocker/tests/test_version.py delete mode 100644 requirements-dev.txt diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 19e32772..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,114 +0,0 @@ ---- 
-_machine_kwds: &machine_kwds - image: circleci/classic:201711-01 - -_checkout_kwds: &checkout_kwds - path: ~/neurodocker - -_setup_env: &setup_kwds - name: Setup environment - command: | - curl -fsSLO https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh - bash Miniconda3-latest-Linux-x86_64.sh -b -p ~/conda - echo "export PATH=~/conda/bin:$PATH" >> ~/.bashrc - source ~/.bashrc - conda config --system --prepend channels conda-forge - conda update -n base -yq conda - conda update -yq --all - - pip install -q --no-cache-dir -U pip - pip install -q --no-cache-dir reprozip codecov - pip install -q --no-cache-dir -e ~/neurodocker[dev] - - -version: 2 -jobs: - test_docker: - machine: - *machine_kwds - working_directory: ~/neurodocker - steps: - - checkout: - *checkout_kwds - - run: - *setup_kwds - - restore_cache: - keys: - - dfs-v0-{{ .Branch }}-{{ .Revision }} - - dfs-v0-{{ .Branch }} - - dfs-v0 - - run: - name: Test docker image builds - no_output_timeout: 360m - command: | - source ~/.bashrc - pytest --cov -k 'test_docker' neurodocker - codecov - - save_cache: - key: dfs-v0-{{ .Branch }}-{{ .Revision }} - when: always - paths: - - /tmp/cache - - test_singularity: - machine: - *machine_kwds - working_directory: ~/neurodocker - steps: - - checkout: - *checkout_kwds - - run: - *setup_kwds - - restore_cache: - keys: - - srs-v0-{{ .Branch }}-{{ .Revision }} - - srs-v0-{{ .Branch }} - - srs-v0 - - run: - name: Install singularity - command: | - source ~/.bashrc - curl -fsSL http://neuro.debian.net/lists/trusty.us-nh.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list - curl -fsSL https://dl.dropbox.com/s/zxs209o955q6vkg/neurodebian.gpg | sudo apt-key add - - (sudo apt-key adv --refresh-keys --keyserver hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9 || true) - sudo apt-get -qq update - sudo apt-get install -yq singularity-container - pip install -q --no-cache-dir singularity - - run: - name: Test singularity image builds - no_output_timeout: 360m - command: | - source ~/.bashrc - pytest --cov -k 'test_singularity' neurodocker - codecov - - save_cache: - key: srs-v0-{{ .Branch }}-{{ .Revision }} - when: always - paths: - - ~/tmp/cache - - test_others: - machine: - *machine_kwds - working_directory: ~/neurodocker - steps: - - checkout: - *checkout_kwds - - run: - *setup_kwds - - run: - name: Test others (not docker or singularity) - no_output_timeout: 30m - command: | - source ~/.bashrc - pytest --cov -k 'not test_docker and not test_singularity' neurodocker - codecov - - -workflows: - version: 2 - build_test_deploy: - jobs: - - test_docker - - test_singularity - - test_others diff --git a/.dockerignore b/.dockerignore index e2eeecfd..ac3f29c3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,24 +2,12 @@ __pycache__ **/__pycache__ **/.DS_Store .DS_Store -/scraper -tests -**/tests .cache -.pytest_cache .ipynb_checkpoints dockerfile_tests *.egg-info -.coverage -coverage.xml - -examples -.circleci - *.ipynb *.pyc *.tar *.tar.gz - -Dockerfile diff --git a/.gitignore b/.gitignore index d9e6966a..77ab7032 100644 --- a/.gitignore +++ b/.gitignore @@ -2,14 +2,9 @@ __pycache__ .cache .DS_Store .ipynb_checkpoints -.pytest_cache dockerfile_tests -/scraper *.egg-info -.coverage -coverage.xml - *.ipynb *.pyc *.tar diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..1348cafc --- /dev/null +++ b/.travis.yml @@ -0,0 +1,50 @@ +sudo: required +dist: trusty +group: edge +services: + - docker +language: python +python: + - 3.6 +branches: + only: + - master 
+env: + matrix: + - INTERFACE_TO_BUILD=none + - INTERFACE_TO_BUILD=afni + - INTERFACE_TO_BUILD=ants + - INTERFACE_TO_BUILD=convert3d + - INTERFACE_TO_BUILD=dcm2niix + - INTERFACE_TO_BUILD=freesurfer + - INTERFACE_TO_BUILD=fsl + - INTERFACE_TO_BUILD=minc + - INTERFACE_TO_BUILD=miniconda + - INTERFACE_TO_BUILD=mrtrix3 + - INTERFACE_TO_BUILD=neurodebian + - INTERFACE_TO_BUILD=petpvc + - INTERFACE_TO_BUILD=spm + + global: + - secure: YQAc7V0h4jIVJJmY47no4OyQolMv49wqM4xbtkRB3SZE6GonoplcB0iP8olvWn0XJF9dDVXQzU+NhVTDk8sSY+tiuU0pClPMkGC1KfxD6uNDVW26nQIa6FcDy17xayOH3UxB2DSl6ZlqHwXx1Lb0hgoDluGVnz2i8mJWpZycNr/C94RIIKCEQHPV8hu30VtHJcowzv8/LoZ1wgWQeAg08azXcx9h4yfOcvSoH9fvliD3C71fvLiwDugbJ5+6dgF1Pqyq6wjwsopE9PxAanariqoMITpqHAFgaqMeyXirr/a1y4Xgnf5PB/Ci86VZgZ6KrMRez9UlpePHKA06WW5wdUeMPTFL4B38EEtV5l6RwswZHrypoL3t0WS+uLtwqYoyMzAmKf63dBZPhwzv0xHrziriOB0MNfgsUHulzayxgcHiQr0G8hPG09OIvNPNwuy4ije+GUENL8wMUZHuQ22Nw9YyPTbGWsxYRfG2GTcR7qmb+TpfPJlbVMiVg2bqN+6vbREyeU0rXSEFTNqF3WOYEUzoen8QDj4Jh8D63Ew1vc0awjxvF98O3puUjYdrooJr1nyufQdPXqAsvszHm8NnUpBGgG4eEox+VIAw1De5HhMmsRqn7LNzgTueRLRtIIN+DeaI8xV4qZMrvkdyvqgJR8elWKay/21WS69o8Wmb8KA= + - secure: NbPJOFqRvWeWps5kc/QUYN8n57w0cHut9N51uX1Y+/sU0Z5SBY/0SHTPZu2sypSNzB73RVCBonJ1nyBFtI0qzQUmPIjxa5MpGLkltsDom3x0jGGaY2/1mgWSck59PFajK7l89W1M0kB9OEEzwiq0Htrq3cLu2IKyXenLtYkhvT9A2INAh4dxuQ32tj8ksRlmH+TNd/7bj22Et+Wz/k/tTRNuttR6mrmQl74nydQhRjOzn/OgSOp6EPn+FqXkfPMMNrgLxKmGWi/+uIzllWc3uvUSMXxwcATf755odNpcuIJoXBYLiaYka4xpe/L+Yuw1PFAHFRuPVCgewFY6tejizF6++mKR31k1KTmhGyYgQB1M7J3X+karu/qgvv26K+VGX3egoLlKFCMBKqYp33fTc2F6HUKoSPO2JnvPR8yCEL6u5bmRuIY4UiDuk6xxENMLMyuoXuTDOueb67jUtwGi7gwu9Bjtk5Vi2TOKNs7I9aXQyvUyeP48VYiQuJ9jmKW4J32DwDV0FvIohv5oPK2X6w1hTDMsP57Rka8R7/DS7eiO8vqwekwzTdFGOSK1qzKkr6Dt5cVYqQpskuUKH+28Z5DJ1Q5UYuXM6UfRizMzgq4n2GwyjSVYMgxEXJwQhzx0OsUQbm3YRZ9eUxJqqMQ+G5oMD8hFpkdMxmu5JjxOW/o= + - secure: PdajcfJAm+ePQFOKcLRZDwlT0pKiTEABXs3u0R/lFfxR7Vl0TZ/R7TSfEDzDdPOODvjInqbCaIBcGXgu+JxrkkoREdmS3b997yq1HjQxFsnmWty/qJ5AHAPkIfBFHS0/cr6RU4Sgiq1jibAMvW9MnGFybyiwOGeo79bXwx55rLzJ8+XGWL1xeAfK3nvajSjK91STugeREwXlyg5CYTR+RJnCVIqQySBZNkn+LhPX0vkpMJ3Ab0ONcuhSwlv6enP0CLuc/4xaC+/6bKQ84W1iw5eTvltv4VcUnVsN1FRG+VAudlg4/qDZ6q7d9pPNxQ5HajhTdE8Gds4x2pxqQTjFCH475eMQVoSbDjjGsuixqxntY2Wx/L4UWMUAST1GsNefofyTvIYKWcN5PjQaD+bR3sRDcYe1tb8Ew01T4zu4eGJ9VhrUIrerS4L2Qatn8EDgmN6kzDzWXdDzwkuumjaC/7OaNtQ6sJ4lBgae5hbcMQlwCk1o/IuKnjzt1K8YvqDLa5DZGyJfJk+GXKOyFp1pTWD0d/blgAw3JlkFLebmL3r3heKVmDHIqhT1FeqY5jIuy0rhsOXEopwCrLHCC/0Mo0baJKmAL1a8zdQTvn9oZMfYZtK1C4RWqV6nOSjxi/RXRfz/XJcTes3v/o4mrrjkjui6kfxSF0jkth4HFGWhBhc= + +before_install: + - travis_retry sudo apt-get update -qq + - travis_retry sudo apt-get install -yq libsqlite3-dev + +install: + - travis_retry pip install -r requirements.txt + - travis_retry pip install dropbox pytest-cov reprozip +script: + - function run_tests { + if [ "$INTERFACE_TO_BUILD" == 'none' ]; then + python -m pytest -v -k "not test_build_image" --cov=./ neurodocker; + else + travis_wait 50 python -m pytest -v -k "test_build_image_$INTERFACE_TO_BUILD" --cov=./ neurodocker; + fi } + - if [ ! 
-z "$DOCKER_PASS" ]; then + docker login -u $DOCKER_USER -p $DOCKER_PASS; + fi + - run_tests diff --git a/Dockerfile b/Dockerfile index decea082..53c14b95 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,14 @@ -FROM alpine:3.7 +FROM alpine:3.6 LABEL maintainer="Jakub Kaczmarzyk " -RUN tmp_pkgs="curl gcc musl-dev python3-dev sqlite-dev" \ - && apk add --update --no-cache git python3 py3-yaml rsync $tmp_pkgs \ - && curl -fsSL https://bootstrap.pypa.io/get-pip.py | python3 - \ - && pip install --no-cache-dir reprozip \ - && apk del $tmp_pkgs \ - && rm -rf /var/cache/apk/* ~/.cache/pip/* - COPY . /opt/neurodocker -RUN pip install --no-cache-dir -e /opt/neurodocker \ - && neurodocker --help + +RUN tmp_pkgs="gcc musl-dev python3-dev sqlite-dev" \ + && apk add --update --no-cache git python3 rsync $tmp_pkgs \ + && pip3 install --no-cache-dir reprozip \ + && pip3 install --no-cache-dir -e /opt/neurodocker \ + && neurodocker --help \ + && apk del $tmp_pkgs ENTRYPOINT ["neurodocker"] diff --git a/LICENSE b/LICENSE index 4ff85eb7..0f57580a 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. -Copyright 2018 Neurodocker Developers +Copyright 2017 Neurodocker Developers Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile deleted file mode 100644 index 44dafd46..00000000 --- a/Makefile +++ /dev/null @@ -1,5 +0,0 @@ -clean-pyc: - find . -name "*.pyc" -type f -exec rm -f {} + - find . -name "__pycache__" -type d -exec rm -rf {} + - -clean: clean-pyc diff --git a/README.md b/README.md index 817b5ac9..51d5fb6b 100644 --- a/README.md +++ b/README.md @@ -1,88 +1,80 @@ # Neurodocker -[![build status](https://img.shields.io/circleci/project/github/kaczmarj/neurodocker/master.svg)](https://circleci.com/gh/kaczmarj/neurodocker/tree/master) +[![Build Status](https://travis-ci.org/kaczmarj/neurodocker.svg?branch=master)](https://travis-ci.org/kaczmarj/neurodocker) +[![codecov](https://codecov.io/gh/kaczmarj/neurodocker/branch/master/graph/badge.svg)](https://codecov.io/gh/kaczmarj/neurodocker) -_Neurodocker_ is a command-line program that generates custom Dockerfiles and Singularity recipes for neuroimaging and minifies existing containers. + +_Neurodocker_ is a Python project that generates custom Dockerfiles for neuroimaging and minifies existing Docker images (using [ReproZip](https://www.reprozip.org/)). The package can be used from the command-line or within a Python script. The command-line interface generates Dockerfiles and minifies Docker images, but interaction with the Docker Engine is left to the various `docker` commands. Within a Python script, however, _Neurodocker_ can generate Dockerfiles, build Docker images, run commands within resulting containers (using the [`docker` Python package](https://github.com/docker/docker-py)), and minify Docker images. The project is used for regression testing of [Nipype](https://github.com/nipy/nipype/) interfaces. 
Examples: - - [Canonical examples](#canonical-examples) - - [Docker](#docker) - - [Singularity](#singularity) - - [Assorted examples](./examples) + - [Generate Dockerfile](#generate-dockerfile) + - [Generate Dockerfile (full)](#generate-dockerfile-full) - [Minimize existing Docker image](#minimize-existing-docker-image) - [Example of minimizing Docker image for FreeSurfer recon-all](https://github.com/freesurfer/freesurfer/issues/70#issuecomment-316361886) +# Note to users + +This software is still in the early stages of development. If you come across an issue or a way to improve _Neurodocker_, please submit an issue or a pull request. + + # Installation Use the _Neurodocker_ Docker image: ``` -docker run --rm kaczmarj/neurodocker:0.4.0 --help +docker run --rm kaczmarj/neurodocker:v0.3.1 --help ``` Note: it is not yet possible to minimize Docker containers using the _Neurodocker_ Docker image. -# Supported software +# Supported Software + +Valid options for each software package are the keyword arguments for the class that installs that package. These classes live in [`neurodocker.interfaces`](neurodocker/interfaces/). The default installation behavior for every software package (except Miniconda) is to install by downloading and un-compressing the binaries. + | software | argument | description | | -------- | -------- | ----------- | -| **AFNI** | version* | latest | -| | method | binaries (default), source. Install pre-compiled binaries or build form source. | -| | install_path | Installation path. Default `/opt/afni-{version}`. | -| | install_r | If true, install R. | -| | install_r_pkgs | If true, install R and AFNI's R packages. | +| **AFNI** | version* | Either 17.2.02 or latest. | +| | install_r | If true, install R and AFNI R packages. False by default. | | | install_python2 | If true, install Python 2. | | | install_python3 | If true, install Python 3. | -| **ANTs** | version* | 2.2.0, 2.1.0, 2.0.3, or 2.0.0. If `method=source`, version can be a git commit hash or branch. | -| | method | binaries (default), source. | -| | install_path | Installation path. Default `/opt/ants-{version}`. | -| | cmake_opts | If `method=source`, options for `cmake`. | -| | make_opts | If `method=source`, options for `make`. | -| **Convert3D** | version* | 1.0.0 or nightly. | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/convert3d-{version}`. | -| **dcm2niix** | version* | latest, git commit hash or branch. | -| | method | source (default) | -| | install_path | Installation path. Default `/opt/dcm2niix-{version}`. | -| | cmake_opts | If `method=source`, options for `cmake`. | -| | make_opts | If `method=source`, options for `make`. | -| **FreeSurfer** | version* | 6.0.0-min | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/freesurfer-{version}`. | -| | exclude_paths | Sequence of path(s) to exclude when inflating the tarball. | +| **ANTs** | version* | 2.2.0, 2.1.0, 2.0.3, or 2.0.0 | +| | use_binaries | If true (default), use pre-compiled binaries. If false, build from source. | +| | git_hash | Git hash to checkout to before building from source (only used if use_binaries is false). | +| **Convert3D** | version* | "1.0.0" or "nightly". | +| **dcm2niix** | version* | "latest", "master", git commit hash, or git tag. | +| **FreeSurfer** | version* | Any version for which binaries are provided. | | | license_path | Relative path to license file. If provided, this file will be copied into the Docker image. 
Must be within the build context. | -| **FSL**** | version* | 5.0.11, 5.0.10, 5.0.9, 5.0.8 | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/fsl-{version}`. | -| **Matlab Compiler Runtime** | version* | 2018a, 2012-17[a-b], 2010a | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/matlabmcr-{version}`. | +| | min | If true, install a version of FreeSurfer minimized for recon-all. See [freesurfer/freesurfer#70](https://github.com/freesurfer/freesurfer/issues/70). False by default. | +| **FSL**** | version* | Any version for which binaries are provided. | +| | eddy_5011 | If true, use pre-release version of FSL eddy v5.0.11 | +| | eddy_5011_cuda | 6.5, 7.0, 7.5, 8.0; only valid if using eddy pre-release | +| | use_binaries | If true (default), use pre-compiled binaries. Building from source is not available now but might be added in the future. | +| | use_installer | If true, use FSL's Python installer. Only valid on CentOS images. | | **MINC** | version* | 1.9.15 | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/minc-{version}`. | -| **Miniconda** | version | latest (default), all other hosted versions. | -| | install_path | Installation path. Default `/opt/miniconda-{version}`. | -| | env_name* | Name of this conda environment. | -| | conda_install | Packages to install with `conda`. E.g., `conda_install="python=3.6 numpy traits"` | -| | pip_install | Packages to install with `pip`. | +| **Miniconda** | env_name* | Name of this conda environment. | +| | yaml_file | Environment specification file. Can be path on host or URL. | +| | conda_install | Packages to install with conda. e.g., `conda_install="python=3.6 numpy traits"` | +| | pip_install | Packages to install with pip. | +| | conda_opts | Command-line options to pass to [`conda create`](https://conda.io/docs/commands/conda-create.html). e.g., `conda_opts="-c vida-nyu"` | +| | pip_opts | Command-line options to pass to [`pip install`](https://pip.pypa.io/en/stable/reference/pip_install/#options). | | | activate | If true (default), activate this environment in container entrypoint. | -| **MRtrix3** | version* | 3.0 | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/mrtrix3-{version}`. | +| | miniconda_version | Version of Miniconda. Latest by default. | +| **MRtrix3** | use_binaries | If true (default), use pre-compiled binaries. If false, build from source. | +| | git_hash | Git hash to checkout to before building from source (only used if use_binaries is false). | | **NeuroDebian** | os_codename* | Codename of the operating system (e.g., stretch, zesty). | -| | server* | Server to download NeuroDebian packages from. Choose the one closest to you. See `neurodocker generate docker --help` for the full list of servers. | +| | download_server* | Server to download NeuroDebian packages from. Choose the one closest to you. See `neurodocker generate --help` for the full list of servers. | +| | pkgs | Packages to download from NeuroDebian. | | | full | If true (default), use non-free sources. If false, use libre sources. | -| **PETPVC** | version* | 1.2.2, 1.2.1, 1.2.0-b, 1.2.0-a, 1.1.0, 1.0.0 | -| | method | binaries (default) | -| | install_path | Installation path. Default `/opt/petpvc-{version}`. | -| **SPM12** | version* | r7219, r6914, r6685, r6472, r6225 | -| | install_path | Installation path. Default `/opt/spm12-{version}`. 
|
-| | | _Note: Matlab Compiler Runtime is installed when SPM12 is installed._ |
+| **PETPVC** | version* | 1.2.0-b, 1.2.0-a, 1.1.0, 1.0.0 |
+| **SPM** | version* | 12 (earlier versions will be supported in the future). |
+| | matlab_version* | R2017a (other MCR versions will be supported once earlier SPM versions are supported). |
+
+\* required argument.
 
-\* required argument.
 ** FSL is non-free. If you are considering commercial use of FSL, please consult the [relevant license](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence).
@@ -90,28 +82,14 @@ Note: it is not yet possible to minimize Docker containers using the _Neurodocke
 Please see the [examples](examples) directory.
 
-## Canonical examples
+## Canonical example
 
-The canonical examples install ANTs version 2.2.0 on Ubuntu 18.04.
-
-### Docker
+Generate a Dockerfile which will install ANTs on Ubuntu 17.04. The result can be piped to `docker build` to build the Docker image.
 
 ```shell
-$ docker run --rm kaczmarj/neurodocker:0.4.0 generate \
-    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0
+docker run --rm kaczmarj/neurodocker:v0.3.2 generate -b ubuntu:17.04 -p apt --ants version=2.2.0
 
-# Build image by piping Dockerfile to `docker build`
-$ docker run --rm kaczmarj/neurodocker:0.4.0 generate \
-    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0 | docker build -
-```
-
-### Singularity
-
-Install ANTs on Ubuntu 18.04.
-
-```shell
-$ docker run --rm kaczmarj/neurodocker:v0.4.0 generate singularity \
-    --base ubuntu:18.04 --pkg-manager apt --ants version=2.2.0
+docker run --rm kaczmarj/neurodocker:v0.3.2 generate -b ubuntu:17.04 -p apt --ants version=2.2.0 | docker build -
 ```
 
 
@@ -129,17 +107,18 @@ In the following example, a Docker image is built with ANTs version 2.2.0 and a
 
 ```shell
 # Create a Docker image with ANTs, and download a functional scan.
-$ download_cmd="curl -sSL -o /home/func.nii.gz http://psydata.ovgu.de/studyforrest/phase2/sub-01/ses-movie/func/sub-01_ses-movie_task-movie_run-1_bold.nii.gz"
-$ neurodocker generate docker -b centos:7 -p yum --ants version=2.2.0 --run="$download_cmd" | docker build -t ants:2.2.0 -
+download_cmd="RUN curl -sSL -o /home/func.nii.gz http://psydata.ovgu.de/studyforrest/phase2/sub-01/ses-movie/func/sub-01_ses-movie_task-movie_run-1_bold.nii.gz"
+neurodocker generate -b centos:7 -p yum --ants version=2.2.0 --instruction="$download_cmd" | docker build -t ants:2.2.0 -
 
 # Run the container.
-$ docker run --rm -itd --name ants-reprozip-container --security-opt=seccomp:unconfined ants:2.2.0
+docker run --rm -it --name ants-reprozip-container --security-opt=seccomp:unconfined ants:2.2.0
 
+# (in a new terminal window)
 # Output a ReproZip pack file in ~/neurodocker-reprozip-output with the files
 # necessary to run antsMotionCorr.
 # See https://github.com/stnava/ANTs/blob/master/Scripts/antsMotionCorrExample
-$ cmd="antsMotionCorr -d 3 -a /home/func.nii.gz -o /home/func_avg.nii.gz"
-$ neurodocker reprozip-trace ants-reprozip-container "$cmd"
-# Create a Docker container with the contents of ReproZip's trace.
-$ reprounzip docker setup neurodocker-reprozip.rpz test +cmd="antsMotionCorr -d 3 -a /home/func.nii.gz -o /home/func_avg.nii.gz" +neurodocker reprozip-trace ants-reprozip-container "$cmd" + +reprounzip docker setup neurodocker-reprozip.rpz test ``` diff --git a/neurodocker/__init__.py b/neurodocker/__init__.py index be246741..1c90db05 100644 --- a/neurodocker/__init__.py +++ b/neurodocker/__init__.py @@ -1,13 +1,17 @@ +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, unicode_literals + import sys from neurodocker.version import __version__ -from neurodocker.generators import Dockerfile -from neurodocker.generators import SingularityRecipe + +from neurodocker.docker import DockerContainer, DockerImage +from neurodocker.generate import Dockerfile from neurodocker.utils import set_log_level if sys.version_info[0] < 3: - raise RuntimeError( - "Neurodocker requires Python 3. Use Neurodocker's Docker image or" - " install Python 3.") + raise RuntimeError("Neurodocker requires Python 3. Install Python 3 or use" + " Neurodocker's Docker image.") set_log_level('info') diff --git a/neurodocker/docker/__init__.py b/neurodocker/docker/__init__.py new file mode 100644 index 00000000..8db7412f --- /dev/null +++ b/neurodocker/docker/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import + +from neurodocker.docker.docker import (client, copy_file_to_container, + copy_file_from_container, + DockerContainer, DockerImage,) diff --git a/neurodocker/docker/docker.py b/neurodocker/docker/docker.py new file mode 100644 index 00000000..18773834 --- /dev/null +++ b/neurodocker/docker/docker.py @@ -0,0 +1,332 @@ +"""Classes to interact with the Docker Engine (using the package docker-py)""" +# Author: Jakub Kaczmarzyk + +from __future__ import (absolute_import, division, print_function, + unicode_literals) +import functools +import os +import threading + +import docker +import requests + +client = docker.from_env(timeout=30) + + +def docker_is_running(client): + """Return true if Docker server is responsive. + + Parameters + ---------- + client : docker.client.DockerClient + The Docker client. E.g., `client = docker.from_env()`. + + Returns + ------- + running : bool + True if Docker server is responsive. + """ + try: + client.ping() + return True + except (requests.exceptions.ConnectionError, docker.errors.APIError): + return False + + +def require_docker(func): + """Raise Exception if Docker server is unresponsive (Docker might not be + installed or not running). Decorate any function that requires the Docker + application with this wrapper. + + Parameters + ---------- + func : callable + Function that requires Docker to run. + + Returns + ------- + wrapper : callable + Wrapped function. + """ + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not docker_is_running(client): + raise Exception("The Docker server is unresponsive. Is Docker " + "installed and running?") + return func(*args, **kwargs) + return wrapper + + +class BuildOutputLogger(threading.Thread): + """Log raw output of `docker build` command in separate thread. This class + is used to capture the output of the build using the generator returned + by `docker.APIClient.build()`. Because instances of this class are not in + the main thread, error checking is done in the DockerImage class (so errors + are raised in the main thread). + + Parameters + ---------- + generator : generator + Generator of build logs. + console : bool + If true, log to console. + filepath : str + Log to file `filepath`. 
Default is to not log to file.
+    """
+    def __init__(self, generator, console=True, filepath=None, **kwargs):
+        self.generator = generator
+        self.logger = self.create_logger(console, filepath)
+        self.logs = []
+
+        super(BuildOutputLogger, self).__init__(**kwargs)
+        self.daemon = True
+
+    @staticmethod
+    def create_logger(console, filepath):
+        import logging
+        logger = logging.getLogger("docker_image_build_logs")
+        logger.setLevel(logging.DEBUG)
+        if console:
+            ch = logging.StreamHandler()
+            ch.setLevel(logging.DEBUG)
+            logger.addHandler(ch)
+        if filepath is not None:
+            fh = logging.FileHandler(filepath)
+            fh.setLevel(logging.DEBUG)
+            logger.addHandler(fh)
+        return logger
+
+    def run(self):
+        from docker.utils.json_stream import json_stream
+
+        for line in json_stream(self.generator):
+            self.logger.debug(line)
+            self.logs.append(line)
+
+
+class DockerImage(object):
+    """Build Docker image."""
+    def __init__(self, dockerfile_or_str):
+        from io import BytesIO
+        try:
+            try:
+                self.fileobj = BytesIO(dockerfile_or_str.cmd.encode('utf-8'))
+            except AttributeError:
+                self.fileobj = BytesIO(dockerfile_or_str.encode('utf-8'))
+        except AttributeError:
+            raise TypeError("`dockerfile_or_str` must be an instance of "
+                            "`neurodocker.docker_api.Dockerfile` or a string.")
+
+    @require_docker
+    def build(self, log_console=False, log_filepath=None, rm=True, **kwargs):
+        """Build image, and return it. If specified, log build output.
+
+        See https://docker-py.readthedocs.io/en/stable/images.html.
+
+        Parameters
+        ----------
+        log_console : bool
+            If true, log to console.
+        log_filepath : str
+            Log to file `filepath`. Default is to not log to file.
+        `kwargs` are for `docker.APIClient.build()`.
+
+        Returns
+        -------
+        image : docker.models.images.Image
+            Docker image object.
+        """
+        build_logs = client.api.build(fileobj=self.fileobj, rm=rm, **kwargs)
+
+        build_logger = BuildOutputLogger(build_logs, log_console, log_filepath,
+                                         name='DockerBuildLogger')
+        build_logger.daemon = True
+        build_logger.start()
+        while build_logger.is_alive():
+            pass
+
+        self.image = self._get_image(build_logger)
+        return self.image
+
+    @staticmethod
+    @require_docker
+    def _get_image(build_logger_obj):
+        """Helper to check for build errors and return image. This method is
+        in the DockerImage class so that errors are raised in the main thread.
+
+        This method borrows from the higher-level API of docker-py.
+        See https://github.com/docker/docker-py/pull/1581.
+        """
+        import re
+        from docker.errors import BuildError
+
+        if isinstance(build_logger_obj.generator, str):
+            return client.images.get(build_logger_obj.generator)
+        if not build_logger_obj.logs:
+            raise BuildError('Unknown')
+        for event in build_logger_obj.logs:
+            if 'stream' in event:
+                match = re.search(r'(Successfully built |sha256:)([0-9a-f]+)',
+                                  event.get('stream', ''))
+                if match:
+                    image_id = match.group(2)
+                    return client.images.get(image_id)
+
+        last_event = build_logger_obj.logs[-1]
+        raise BuildError(last_event.get('error') or last_event)
+
+
+class DockerContainer(object):
+    """Class to interact with Docker container."""
+
+    def __init__(self, image):
+        self.image = image
+        self.container = None
+
+    @require_docker
+    def run(self, command, **kwargs):
+        """Run command in the container. `kwargs` are arguments for
+        `client.containers.run()`.
+        """
+        return client.containers.run(self.image, command=command, remove=True,
+                                     **kwargs)
+
+    @require_docker
+    def start(self, **kwargs):
+        """Start the container in the background and optionally mount volumes.
+ `kwargs` are arguments for `client.containers.run()`. + + Equivalent to `docker run -td IMAGE`. + """ + self.container = client.containers.run(self.image, command=None, + detach=True, tty=True, + stdin_open=True, **kwargs) + return self + + @require_docker + def exec_run(self, cmd, **kwargs): + """Execute command inside the container. `kwargs` are arguments for + `container.exec_run()`. + + Equivalent to `docker exec CONTAINER CMD`. + + Parameters + ---------- + cmd : str or list + Command to execute. + + Returns + ------- + output : str or generator + If `stream` is false, return output of the command as one string. + If `stream` is true, return generator of command output. + """ + output = self.container.exec_run(cmd, **kwargs) + return output.decode('utf-8') + + @require_docker + def cleanup(self, remove=True, force=False): + """Stop the container, and optionally remove. + + Parameters + ---------- + remove : bool + If true, remove container after stopping. + force : bool + If true, force remove container. + """ + if remove and force: + self.container.remove(force=force) + return + + # If user only wants to stop, attempt to stop the container twice. + try: + self.container.stop() + except (docker.errors.APIError, requests.exceptions.ReadTimeout): + self.container.stop() + filters = {'status': 'running'} + if self.container in client.containers.list(filters=filters): + raise docker.errors.APIError("Container not stopped properly.") + + # If user wants to stop and remove, but not forcefully. + if remove: + self.container.remove() + + +def copy_file_to_container(container, src, dest): + """Copy `local_filepath` into `container`:`container_path`. + + Parameters + ---------- + container : str or container object + Container to which file is copied. + src : str + Filepath on the host. + dest : str + Directory inside container. Original filename is preserved. + + Returns + ------- + success : bool + True if copy was a success. False otherwise. + """ + # https://gist.github.com/zbyte64/6800eae10ce082bb78f0b7a2cca5cbc2 + + from io import BytesIO + import tarfile + + try: + container.put_archive + container = container + except AttributeError: + container = client.containers.get(container) + + + with BytesIO() as tar_stream: + with tarfile.TarFile(fileobj=tar_stream, mode='w') as tar: + filename = os.path.split(src)[-1] + tar.add(src, arcname=filename, recursive=False) + tar_stream.seek(0) + return container.put_archive(dest, tar_stream) + + +def copy_file_from_container(container, src, dest='.'): + """Copy file `filepath` from a running Docker `container`, and save it on + the host to `save_path` with the original filename. + + Parameters + ---------- + container : str or container object + Container from which file is copied. + src : str + Filepath within container. + dest : str + Directory on the host in which to save file. + + Returns + ------- + local_filepath : str + Relative path to saved file on the host. 
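+
+    Examples
+    --------
+    Illustrative usage (image name and paths are placeholders)::
+
+        container = client.containers.run('debian:stretch', detach=True,
+                                          tty=True)
+        copy_file_from_container(container, '/etc/hostname', '/tmp')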
+ """ + import tarfile + import tempfile + import traceback + + try: + container.put_archive + container = container + except AttributeError: + container = client.containers.get(container) + + tar_stream, tar_info = container.get_archive(src) + try: + with tempfile.NamedTemporaryFile() as tmp: + tmp.write(tar_stream.data) + tmp.flush() + with tarfile.TarFile(tmp.name) as tar: + tar.extractall(path=dest) + return os.path.join(dest, tar_info['name']) + except Exception as e: + raise + finally: + tar_stream.close() diff --git a/neurodocker/docker/tests/test_docker.py b/neurodocker/docker/tests/test_docker.py new file mode 100644 index 00000000..2a511047 --- /dev/null +++ b/neurodocker/docker/tests/test_docker.py @@ -0,0 +1,167 @@ +""""Tests for neurodocker.docker.docker""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function + +from io import BytesIO +import os +import tempfile +import threading + +import docker +import pytest + +from neurodocker import Dockerfile +from neurodocker.docker.docker import (BuildOutputLogger, client, + copy_file_from_container, + copy_file_to_container, + docker_is_running, DockerContainer, + DockerImage) + + +def test_docker_is_running(): + assert docker_is_running(client), "Docker is not running" + + +class TestBuildOutputLogger(object): + + @pytest.fixture(autouse=True) + def setup(self, tmpdir): + self.tmpdir = tmpdir + self.filepath = self.tmpdir.join("test.log") + self.cmd = "FROM alpine:latest" + self.fileobj = BytesIO(self.cmd.encode('utf-8')) + + def test_start(self): + logs = client.api.build(fileobj=self.fileobj, rm=True) + logger = BuildOutputLogger(logs, console=False, filepath=self.filepath.strpath) + logger.start() + living = logger.is_alive() + assert living, "BuildOutputLogger not alive" + + while logger.is_alive(): + pass + content = self.filepath.read() + assert content, "log file empty" + + def test_get_logs(self): + logs = client.api.build(fileobj=self.fileobj, rm=True) + logger = BuildOutputLogger(logs, console=True) + logger.daemon = True + logger.start() + + while logger.is_alive(): + pass + assert logger.logs + + +class TestDockerImage(object): + + def test___init__(self): + with pytest.raises(TypeError): + DockerImage(dict()) + + specs = {'pkg_manager': 'apt', + 'instructions': [ + ('base', 'debian:jessie',)], + } + df = Dockerfile(specs=specs) + # Test that fileobj is a file object. + image = DockerImage(df) + assert image.fileobj.read() + + def test_build(self): + # Correct instructions. 
+ cmd = "FROM alpine:latest" + image = DockerImage(cmd).build() + correct_type = isinstance(image, docker.models.images.Image) + assert correct_type + + # Incorrect instructions + cmd = "FROM ubuntu:fake_version_12345" + with pytest.raises(docker.errors.BuildError): + DockerImage(cmd).build() + + +class TestDockerContainer(object): + + @pytest.fixture(autouse=True) + def setup(self): + self.image = DockerImage('FROM ubuntu:17.04').build() + + def test_start_cleanup(self): + pre = client.containers.list() + container = DockerContainer(self.image).start() + post = client.containers.list() + assert len(pre) + 1 == len(post), "container not started" + + container.cleanup(remove=True, force=True) + assert len(pre) == len(client.containers.list()), "container not removed" + + def test_exec_run(self): + container = DockerContainer(self.image).start() + assert "usr" in container.exec_run("ls /") + assert "hello" in container.exec_run('echo hello') + container.cleanup(remove=True, force=True) + + def test_cleanup(self): + pre = client.containers.list(all=True) + container = DockerContainer(self.image).start(remove=False) + assert len(pre) + 1 == len(client.containers.list(all=True)) + container.cleanup(remove=False) + assert len(pre) + 1 == len(client.containers.list(all=True)) + container.cleanup(remove=True, force=True) + assert len(pre) == len(client.containers.list(all=True)) + + +def test_copy_file_from_container(): + import posixpath + + tempdir = tempfile.mkdtemp() + container = client.containers.run('debian:stretch', detach=True, tty=True) + try: + filename = "newfile.txt" + filepath = posixpath.join("", "tmp", "newfile.txt") + container.exec_run("touch {}".format(filepath)) + assert not os.path.isfile(os.path.join(tempdir, filename)) + path = copy_file_from_container(container, filepath, tempdir) + + local_path = os.path.join(tempdir, filename) + assert os.path.isfile(local_path) + os.remove(local_path) + assert not os.path.isfile(local_path) + copy_file_from_container(container.id, filepath, tempdir) + assert os.path.isfile(local_path) + except: + raise + finally: + container.stop() + container.remove() + + +def test_copy_file_to_container(): + import posixpath + + tempdir = tempfile.mkdtemp() + container = client.containers.run('debian:stretch', detach=True, tty=True) + try: + contents = "hello from outside the container\n" + fname = 'tempfile.txt' + path = os.path.abspath(os.path.join(tempdir, fname)) + with open(path, 'w') as f: + f.write(contents) + + container_dir = "/tmp" + cmd = 'ls {}'.format(container_dir) + + assert not fname.encode() in container.exec_run(cmd) + copy_file_to_container(container.id, path, dest=container_dir) + assert fname.encode() in container.exec_run(cmd) + + copy_file_to_container(container, path, dest=container_dir) + assert fname.encode() in container.exec_run(cmd) + except: + raise + finally: + container.stop() + container.remove() diff --git a/neurodocker/generate.py b/neurodocker/generate.py new file mode 100644 index 00000000..7f78b025 --- /dev/null +++ b/neurodocker/generate.py @@ -0,0 +1,491 @@ +"""Functions and classes to generate Dockerfiles.""" +# Author: Jakub Kaczmarzyk + +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import os +import posixpath + +import neurodocker +from neurodocker import interfaces +from neurodocker.utils import apt_get_install, indent, manage_pkgs, yum_install + + +ND_DIRECTORY = posixpath.join(posixpath.sep, 'neurodocker') +ENTRYPOINT_FILE = posixpath.join(ND_DIRECTORY, 
'startup.sh') +SPEC_FILE = posixpath.join(ND_DIRECTORY, 'neurodocker_specs.json') + + +def _base_add_copy(list_srcs_dest, cmd): + srcs = list_srcs_dest[:-1] + dest = list_srcs_dest[-1] + + for src in srcs: + if os.path.isabs(src): + raise ValueError("Path for {} cannot be absolute: {}" + "".format(cmd, src)) + srcs = '", "'.join(srcs) + return '{} ["{}", "{}"]'.format(cmd, srcs, dest) + + +def _add_add(list_srcs_dest, **kwargs): + """Return Dockerfile ADD instruction to add file or directory to Docker + image. + + See https://docs.docker.com/engine/reference/builder/#add. + + Parameters + ---------- + list_srcs_dest : list of str + All of the items except the last one are paths on local machine or a + URL to a file to be copied into the Docker container. Paths on the + local machine must be within the build context. The last item is the + destination in the Docker image for these file or directories. + """ + if len(list_srcs_dest) < 2: + raise ValueError("At least two paths must be provided.") + return _base_add_copy(list_srcs_dest, "ADD") + + +def _add_to_entrypoint(bash_cmd, with_run=True): + """Return command that adds the string `bash_cmd` to second-to-last line of + entrypoint file. + """ + import json + base_cmd = "sed -i '$i{}' $ND_ENTRYPOINT" + + # Escape quotes and remove quotes on either end of string. + if isinstance(bash_cmd, (list, tuple)): + escaped_cmds = [json.dumps(c)[1:-1] for c in bash_cmd] + cmds = [base_cmd.format(c) for c in escaped_cmds] + cmd = "\n&& ".join(cmds) + else: + escaped_bash_cmd = json.dumps(bash_cmd)[1:-1] + cmd = base_cmd.format(escaped_bash_cmd) + if with_run: + comment = "# Add command(s) to entrypoint" + cmd = indent("RUN", cmd) + cmd = "\n".join((comment, cmd)) + return cmd + + +def _add_arg(arg_dict, **kwargs): + """Return Dockerfile ARG instruction. + + Parameters + ---------- + arg_dict : dict + ARG variables where keys are the variable names, and values are the + values assigned to those variables. + """ + import json + cmds = [] + base = "ARG {}" + for arg, value in arg_dict.items(): + out = base.format(arg) + if value: # default value provided. + value = json.dumps(value) # Escape double quotes and other things. + out += "={}".format(value) + cmds.append(out) + return "\n".join(cmds) + + +def _add_base(base, **kwargs): + """Return Dockerfile FROM instruction to specify base image. + + Parameters + ---------- + base : str + Base image. + """ + return "FROM {}".format(base) + + +def _add_cmd(cmd, **kwargs): + """Return Dockerfile CMD instruction.""" + import json + + escaped = json.dumps(cmd) + return "CMD {}".format(escaped) + + +def _add_copy(list_srcs_dest, **kwargs): + """Return Dockerfile COPY instruction to add files or directories to Docker + image. + + See https://docs.docker.com/engine/reference/builder/#add. + + Parameters + ---------- + list_srcs_dest : list of str + All of the items except the last one are paths on local machine to be + copied into the Docker container. These paths must be within the build + context. The last item is the destination in the Docker image for these + file or directories. + """ + if len(list_srcs_dest) < 2: + raise ValueError("At least two paths must be provided.") + return _base_add_copy(list_srcs_dest, "COPY") + + +def _add_entrypoint(entrypoint, **kwargs): + """Return Dockerfile ENTRYPOINT instruction to set image entrypoint. + + Parameters + ---------- + entrypoint : str + The entrypoint. 
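+
+    Examples
+    --------
+    >>> _add_entrypoint("/neurodocker/startup.sh")  # path is illustrative
+    'ENTRYPOINT ["/neurodocker/startup.sh"]'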
+ """ + import json + + escaped = json.dumps(entrypoint) + return "ENTRYPOINT [{}]".format('", "'.join(escaped.split())) + + +def _add_env_vars(env_vars, **kwargs): + """Return Dockerfile ENV instruction to set environment variables. + + Parameters + ---------- + env_vars : dict + Environment variables where keys are the environment variables names, + and values are the values assigned to those environment variable names. + """ + import json + out = "" + for k, v in env_vars.items(): + newline = "\n" if out else "" + v = json.dumps(v) # Escape double quotes and other things. + out += '{}{}={}'.format(newline, k, v) + return indent("ENV", out) + + +def _add_exposed_ports(exposed_ports, **kwargs): + """Return Dockerfile EXPOSE instruction to expose ports. + + Parameters + ---------- + exposed_ports : str, list, tuple + Port(s) in the container to expose. + """ + if not isinstance(exposed_ports, (list, tuple)): + exposed_ports = [exposed_ports] + return "EXPOSE " + " ".join((str(p) for p in exposed_ports)) + + +def _add_install(pkgs, pkg_manager): + """Return Dockerfile RUN instruction that installs system packages. + + Parameters + ---------- + pkgs : list + List of system packages to install. + pkg_manager : {'apt', 'yum'} + Linux package manager. + """ + installers = {'apt': apt_get_install, + 'yum': yum_install} + flags = [jj for jj in pkgs if jj.startswith('flags=')] + pkgs = [kk for kk in pkgs if kk not in flags] + + if flags: + flags = flags[0].replace('flags=', '') + else: + flags = None + cmd = installers[pkg_manager](pkgs, flags) + cmd += "\n&& {clean}".format(**manage_pkgs[pkg_manager]) + return indent("RUN", cmd) + + +def _add_arbitrary_instruction(instruction, **kwargs): + """Return `instruction` with a comment.""" + comment = "# User-defined instruction\n" + return comment + instruction + + +def _add_label(labels, **kwargs): + """Return Dockerfile LABEL instruction to set image labels. + + Parameters + ---------- + labels : dict + Dictionary of label names and values. + """ + import json + out = "" + for k, v in labels.items(): + newline = "\n" if out else "" + v = json.dumps(v) # Escape double quotes and other things. + out += '{}{}={}'.format(newline, k, v) + return indent("LABEL", out) + + +def _add_run(cmd, **kwargs): + """Return Dockerfile RUN instruction.""" + comment = "# User-defined instruction\n" + return comment + indent("RUN", cmd) + + +def _add_run_bash(bash_code, **kwargs): + """Return Dockerfile RUN instruction to execute bash code.""" + comment = "# User-defined BASH instruction\n" + cmd = 'bash -c "{}"'.format(bash_code.replace('"', '\\"')) + return comment + indent("RUN", cmd) + + +def _add_volume(paths, **kwargs): + """Return Dockerfile VOLUME instruction. + + Parameters + ---------- + paths : list + List of paths in the container. + """ + import json + + escaped = json.dumps(" ".join(paths)) + return "VOLUME [{}]".format('", "'.join(escaped.split())) + + +def _add_workdir(path, **kwargs): + """Return Dockerfile WORKDIR instruction to set working directory.""" + return "WORKDIR {}".format(path) + + +class _DockerfileUsers(object): + """Class to add instructions to add Dockerfile users. Has memory of users + already added to the Dockerfile. 
+ """ + initialized_users = ['root'] + + @classmethod + def add(cls, user, **kwargs): + instruction = "USER {0}" + if user not in cls.initialized_users: + cls.initialized_users.append(user) + comment = "# Create new user: {0}" + inst_user = ("RUN useradd --no-user-group --create-home" + " --shell /bin/bash {0}") + instruction = "\n".join((comment, inst_user, instruction)) + return instruction.format(user) + + @classmethod + def clear_memory(cls): + cls.initialized_users = ['root'] + + +def _add_spec_json_file(specs): + """Return Dockerfile instruction to write out specs dictionary to JSON + file. + """ + import json + + comment = ("#--------------------------------------" + "\n# Save container specifications to JSON" + "\n#--------------------------------------") + + json_specs = json.dumps(specs, indent=2) + json_specs = json_specs.replace('\\n', '__TO_REPLACE_NEWLINE__') + json_specs = "\n\\n".join(json_specs.split("\n")) + # Escape newline characters that the user provided. + json_specs = json_specs.replace('__TO_REPLACE_NEWLINE__', '\\\\n') + # Workaround to escape single quotes in a single-quoted string. + # https://stackoverflow.com/a/1250279/5666087 + json_specs = json_specs.replace("'", """'"'"'""") + cmd = "echo '{string}' > {path}".format(string=json_specs, path=SPEC_FILE) + cmd = indent("RUN", cmd) + return "\n".join((comment, cmd)) + + +def _add_neurodocker_header(specs): + """Return Dockerfile comment that references Neurodocker.""" + return ("# Generated by Neurodocker v{}." + "\n#" + "\n# Thank you for using Neurodocker. If you discover any issues" + "\n# or ways to improve this software, please submit an issue or" + "\n# pull request on our GitHub repository:" + "\n# https://github.com/kaczmarj/neurodocker" + "\n#" + "\n# Timestamp: {}".format(specs['neurodocker_version'], + specs['generation_timestamp'])) + + +def _add_common_dependencies(pkg_manager): + """Return Dockerfile instructions to download dependencies common to + many software packages. + + Parameters + ---------- + pkg_manager : {'apt', 'yum'} + Linux package manager. + """ + deps = ['bzip2', 'ca-certificates', 'curl', 'unzip'] + if pkg_manager == "apt": + deps += ['apt-utils', 'locales'] + if pkg_manager == "yum": + deps.append('epel-release') + deps = " ".join(sorted(deps)) + deps = "\n\t" + deps + + comment = ("#----------------------------------------------------------" + "\n# Install common dependencies and create default entrypoint" + "\n#----------------------------------------------------------") + + env = ('LANG="en_US.UTF-8"' + '\nLC_ALL="C.UTF-8"' + '\nND_ENTRYPOINT="{}"'.format(ENTRYPOINT_FILE)) + env = indent("ENV", env) + + cmd = "{install}\n&& {clean}".format(**manage_pkgs[pkg_manager]) + cmd = cmd.format(pkgs=deps) + + cmd += ("\n&& localedef --force --inputfile=en_US --charmap=UTF-8 C.UTF-8" + "\n&& chmod 777 /opt && chmod a+s /opt" + "\n&& mkdir -p /neurodocker" + '\n&& if [ ! -f "$ND_ENTRYPOINT" ]; then' + "\n echo '#!/usr/bin/env bash' >> $ND_ENTRYPOINT" + "\n && echo 'set +x' >> $ND_ENTRYPOINT" + "\n && echo 'if [ -z \"$*\" ]; then /usr/bin/env bash; else $*; fi' >> $ND_ENTRYPOINT;" + "\n fi" + "\n&& chmod -R 777 /neurodocker && chmod a+s /neurodocker") + cmd = indent("RUN", cmd) + entrypoint = 'ENTRYPOINT ["{}"]'.format(ENTRYPOINT_FILE) + + return "\n".join((comment, env, cmd, entrypoint)) + + +# Dictionary of each instruction or software package can be added to the +# Dockerfile and the function that returns the Dockerfile instruction(s). 
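+#
+# For example (values illustrative), a specs dictionary such as
+#     {'pkg_manager': 'apt',
+#      'check_urls': True,
+#      'instructions': [('base', 'ubuntu:16.04'),
+#                       ('install', ['git']),
+#                       ('user', 'neuro')]}
+# is rendered by looking up each (instruction, options) pair in this
+# dictionary; see `_get_dockerfile_chunk` below.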
+dockerfile_implementations = { + 'software': { + 'afni': interfaces.AFNI, + 'ants': interfaces.ANTs, + 'c3d': interfaces.Convert3D, + 'dcm2niix': interfaces.Dcm2niix, + 'freesurfer': interfaces.FreeSurfer, + 'fsl': interfaces.FSL, + 'miniconda': interfaces.Miniconda, + 'mrtrix3': interfaces.MRtrix3, + 'neurodebian': interfaces.NeuroDebian, + 'spm': interfaces.SPM, + 'minc': interfaces.MINC, + 'petpvc': interfaces.PETPVC + }, + 'other': { + 'add': _add_add, + 'add_to_entrypoint': _add_to_entrypoint, + 'arg': _add_arg, + 'base': _add_base, + 'cmd': _add_cmd, + 'copy': _add_copy, + 'entrypoint': _add_entrypoint, + 'expose': _add_exposed_ports, + 'env': _add_env_vars, + 'install': _add_install, + 'instruction': _add_arbitrary_instruction, + 'label': _add_label, + 'run': _add_run, + 'run_bash': _add_run_bash, + 'user': _DockerfileUsers.add, + 'volume': _add_volume, + 'workdir': _add_workdir, + }, +} + + +def _get_dockerfile_chunk(instruction, options, specs): + """Return piece of Dockerfile (str) to implement `instruction` with + `options`. Include the dictionary of specifications. + """ + software_keys = dockerfile_implementations['software'].keys() + other_keys = dockerfile_implementations['other'].keys() + + if instruction in software_keys: + for ii in ['pkg_manager', 'check_urls']: + options.setdefault(ii, specs[ii]) + callable_ = dockerfile_implementations['software'][instruction] + chunk = callable_(**options).cmd + elif instruction in other_keys: + func = dockerfile_implementations['other'][instruction] + if instruction == "install": + chunk = func(options, specs['pkg_manager']) + else: + chunk = func(options) + else: + raise ValueError("Instruction not understood: {}" + "".format(instruction)) + return chunk + + +def _get_dockerfile_chunks(specs): + """Return list of Dockerfile chunks (str) given a dictionary of + specifications. + """ + import copy + dockerfile_chunks = [] + specs = copy.deepcopy(specs) + + for instruction, options in specs['instructions']: + chunk = _get_dockerfile_chunk(instruction, options, specs) + dockerfile_chunks.append(chunk) + + return dockerfile_chunks + + +class Dockerfile(object): + """Class to create Dockerfile. + + Parameters + ---------- + specs : dict + Dictionary of specifications. + """ + + def __init__(self, specs): + from neurodocker.parser import _SpecsParser + + self.specs = specs + self._add_metadata() + _SpecsParser(specs) # Raise exception on error in specs dict. + self.cmd = self._create_cmd() + + _DockerfileUsers.clear_memory() + interfaces.Miniconda.clear_memory() + + def __repr__(self): + return "{self.__class__.__name__}({self.cmd})".format(self=self) + + def __str__(self): + return self.cmd + + def _add_metadata(self): + import datetime + + timestamp = datetime.datetime.today().strftime("%Y-%m-%d %H:%M:%S") + # Overwrite already existing metadata. 
+ self.specs['generation_timestamp'] = timestamp + self.specs['neurodocker_version'] = neurodocker.__version__ + + def _create_cmd(self): + """Return string representation of Dockerfile.""" + chunks = _get_dockerfile_chunks(self.specs) + + neurodocker_header = _add_neurodocker_header(self.specs) + common_deps_chunk = _add_common_dependencies(self.specs['pkg_manager']) + + chunks.insert(1, common_deps_chunk) + chunks.insert(0, neurodocker_header) + + if self.specs['pkg_manager'] == 'apt': + noninteractive = "ARG DEBIAN_FRONTEND=noninteractive" + chunks.insert(2, noninteractive) + + chunks.append(_add_spec_json_file(self.specs)) + + return "\n\n".join(chunks) + + def save(self, filepath="Dockerfile", **kwargs): + """Save Dockerfile to `filepath`. `kwargs` are for `open()`.""" + with open(filepath, mode='w', **kwargs) as fp: + fp.write(self.cmd) diff --git a/neurodocker/generators/__init__.py b/neurodocker/generators/__init__.py deleted file mode 100644 index 0a5fb511..00000000 --- a/neurodocker/generators/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Module-level imports.""" - -from neurodocker.generators.docker import Dockerfile -from neurodocker.generators.singularity import SingularityRecipe diff --git a/neurodocker/generators/common.py b/neurodocker/generators/common.py deleted file mode 100644 index 19e411a6..00000000 --- a/neurodocker/generators/common.py +++ /dev/null @@ -1,89 +0,0 @@ -"""""" - -import json -import posixpath - -from neurodocker.interfaces._base import ( - _BaseInterface, apt_install, yum_install -) - -_installation_implementations = { - ii._name: ii for ii in _BaseInterface.__subclasses__() -} - -ND_DIRECTORY = posixpath.join(posixpath.sep, 'neurodocker') -NEURODOCKER_ENTRYPOINT = posixpath.join(ND_DIRECTORY, 'startup.sh') -SPEC_FILE = posixpath.join(ND_DIRECTORY, 'neurodocker_specs.json') - -# TODO: add common methods like `--install` here. Reference them in the -# Dockerfile and SingularityRecipe implementation classes. - - -def _add_to_entrypoint(cmd): - """Return command to add `cmd` to the container's entrypoint.""" - base_cmd = "sed -i '$i{}' $ND_ENTRYPOINT" - escaped_bash_cmd = json.dumps(cmd)[1:-1] - return base_cmd.format(escaped_bash_cmd) - - -def _apt_install(pkgs, apt_opts=None): - return apt_install.render(pkgs=pkgs, apt_opts=apt_opts) - - -def _yum_install(pkgs, yum_opts=None): - return yum_install.render(pkgs=pkgs, yum_opts=yum_opts) - - -def _install(pkgs, pkg_manager): - """Return instructions to install system packages.""" - installers = { - 'apt': _apt_install, - 'yum': _yum_install, - } - if pkg_manager not in installers.keys(): - raise ValueError( - "package manager '{}' not recognized".format(pkg_manager) - ) - opts_key = "{}_opts=".format(pkg_manager) - opts = [jj for jj in pkgs if jj.startswith(opts_key)] - pkgs = [kk for kk in pkgs if kk not in opts] - opts = opts[0].replace(opts_key, '') if opts else None - return installers[pkg_manager](pkgs, opts) - - -class _Users: - """Object to hold memory of initialized users.""" - - initialized_users = {'root'} - - @classmethod - def add(cls, user): - """If user has not been created yet, return command to create user. - Otherwise, return False. 
-        """
-        if user not in cls.initialized_users:
-            cls.initialized_users.add(user)
-            return (
-                "useradd --no-user-group --create-home --shell /bin/bash {0}"
-                .format(user)
-            )
-        else:
-            return False
-
-    @classmethod
-    def clear_memory(cls):
-        cls.initialized_users = {'root'}
-
-
-def _get_json_spec_str(specs):
-    """Return instruction to write out specs dictionary to JSON file."""
-    js = json.dumps(specs, indent=2)
-    js = js.replace('\\n', '__TO_REPLACE_NEWLINE__')
-    js = "\n\\n".join(js.split("\n"))
-    # Escape newline characters that the user provided.
-    js = js.replace('__TO_REPLACE_NEWLINE__', '\\\\n')
-    # Workaround to escape single quotes in a single-quoted string.
-    # https://stackoverflow.com/a/1250279/5666087
-    js = js.replace("'", """'"'"'""")
-    cmd = "echo '{string}' > {path}".format(string=js, path=SPEC_FILE)
-    return cmd
diff --git a/neurodocker/generators/docker.py b/neurodocker/generators/docker.py
deleted file mode 100644
index 68c5542e..00000000
--- a/neurodocker/generators/docker.py
+++ /dev/null
@@ -1,319 +0,0 @@
-""""""
-
-import copy
-import inspect
-import json
-import os
-
-from neurodocker.generators.common import _add_to_entrypoint
-from neurodocker.generators.common import _get_json_spec_str
-from neurodocker.generators.common import _installation_implementations
-from neurodocker.generators.common import _install
-from neurodocker.generators.common import _Users
-
-
-def _indent(string, indent=4, add_list_op=False):
-    out = []
-    lines = string.splitlines()
-
-    for ii, line in enumerate(lines):
-        line = line.rstrip()
-        already_cont = line.startswith(('&&', '&', '||', '|', 'fi'))
-        previous_cont = (
-            lines[ii - 1].endswith('\\') or lines[ii - 1].startswith('if'))
-        if ii:
-            if add_list_op and not already_cont and not previous_cont:
-                line = "&& " + line
-            if not already_cont and previous_cont:
-                line = " " * (indent + 3) + line
-            else:
-                line = " " * indent + line
-        if ii != len(lines) - 1:
-            if not line.endswith('\\'):
-                line += " \\"
-        out.append(line)
-    return "\n".join(out)
-
-
-def _dockerfile_base_add_copy(list_srcs_dest, cmd):
-    """Base method for `ADD` and `COPY` instructions."""
-    if len(list_srcs_dest) < 2:
-        raise ValueError("At least two paths must be provided.")
-
-    srcs = list_srcs_dest[:-1]
-    dest = list_srcs_dest[-1]
-
-    for src in srcs:
-        if os.path.isabs(src):
-            raise ValueError("Path for {} cannot be absolute: {}"
-                             "".format(cmd, src))
-    srcs = '", "'.join(srcs)
-    return '{} ["{}", "{}"]'.format(cmd, srcs, dest)
-
-
-class _DockerfileImplementations:
-
-    @staticmethod
-    def add(list_srcs_dest):
-        """Return Dockerfile ADD instruction to add file or directory to Docker
-        image.
-
-        See https://docs.docker.com/engine/reference/builder/#add.
-
-        Parameters
-        ----------
-        list_srcs_dest : list of str
-            All of the items except the last one are paths on local machine or
-            a URL to a file to be copied into the Docker container. Paths on
-            the local machine must be within the build context. The last item
-            is the destination in the Docker image for these files or
-            directories.
-        """
-        return _dockerfile_base_add_copy(list_srcs_dest, "ADD")
-
-    @staticmethod
-    def add_to_entrypoint(cmd):
-        """Add command `cmd` to container entrypoint file."""
-        return _indent("RUN " + _add_to_entrypoint(cmd))
-
-    @staticmethod
-    def arg(arg_dict):
-        """Return Dockerfile ARG instruction.
-
-        Parameters
-        ----------
-        arg_dict : dict
-            ARG variables where keys are the variable names, and values are the
-            values assigned to those variables.
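-
-        For example, {'VERSION': '1.0', 'DEBUG': ''} renders as
-        'ARG VERSION="1.0"' and 'ARG DEBUG' on separate lines.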
-        """
-        cmds = []
-        base = "ARG {}"
-        for arg, value in arg_dict.items():
-            out = base.format(arg)
-            if value:  # default value provided.
-                value = json.dumps(value)
-                out += "={}".format(value)
-            cmds.append(out)
-        return "\n".join(cmds)
-
-    @staticmethod
-    def base(base):
-        """Return Dockerfile FROM instruction to specify base image.
-
-        Parameters
-        ----------
-        base : str
-            Base image.
-        """
-        return "FROM {}".format(base)
-
-    @staticmethod
-    def cmd(cmd):
-        """Return Dockerfile CMD instruction."""
-        escaped = json.dumps(cmd)
-        return "CMD {}".format(escaped)
-
-    @staticmethod
-    def copy(list_srcs_dest):
-        """Return Dockerfile COPY instruction to add files or directories to
-        Docker image.
-
-        See https://docs.docker.com/engine/reference/builder/#copy.
-
-        Parameters
-        ----------
-        list_srcs_dest : list of str
-            All of the items except the last one are paths on local machine to
-            be copied into the Docker container. These paths must be within the
-            build context. The last item is the destination in the Docker image
-            for these files or directories.
-        """
-        return _dockerfile_base_add_copy(list_srcs_dest, "COPY")
-
-    @staticmethod
-    def entrypoint(entrypoint):
-        """Return Dockerfile ENTRYPOINT instruction to set image entrypoint.
-
-        Parameters
-        ----------
-        entrypoint : str
-            The entrypoint.
-        """
-        escaped = json.dumps(entrypoint)
-        return "ENTRYPOINT [{}]".format('", "'.join(escaped.split()))
-
-    @staticmethod
-    def env(env_vars):
-        """Return Dockerfile ENV instruction to set environment variables.
-
-        Parameters
-        ----------
-        env_vars : dict
-            Environment variables where keys are the environment variable
-            names, and values are the values assigned to those environment
-            variable names.
-        """
-        out = ""
-        for k, v in env_vars.items():
-            newline = "\n" if out else ""
-            v = json.dumps(v)  # Escape double quotes and other things.
-            out += '{}{}={}'.format(newline, k, v)
-        return _indent("ENV " + out)
-
-    @staticmethod
-    def expose(exposed_ports):
-        """Return Dockerfile EXPOSE instruction to expose ports.
-
-        Parameters
-        ----------
-        exposed_ports : str, list, tuple
-            Port(s) in the container to expose.
-        """
-        if not isinstance(exposed_ports, (list, tuple)):
-            exposed_ports = [exposed_ports]
-        return "EXPOSE " + " ".join((str(p) for p in exposed_ports))
-
-    @staticmethod
-    def install(pkgs, pkg_manager):
-        """Return Dockerfile RUN instruction to install system packages."""
-        return _indent(
-            "RUN " + _install(pkgs, pkg_manager), add_list_op=True)
-
-    @staticmethod
-    def label(labels):
-        """Return Dockerfile LABEL instruction to set image labels.
-
-        Parameters
-        ----------
-        labels : dict
-            Dictionary of label names and values.
-        """
-        out = ""
-        for k, v in labels.items():
-            newline = "\n" if out else ""
-            v = json.dumps(v)  # Escape double quotes and other things.
-            out += '{}{}={}'.format(newline, k, v)
-        return _indent("LABEL " + out, indent=6)
-
-    @staticmethod
-    def run(cmd):
-        """Return Dockerfile RUN instruction to run `cmd`."""
-        return _indent("RUN " + cmd)
-
-    @staticmethod
-    def shell(sh):
-        """Return Dockerfile SHELL instruction to set shell."""
-        return 'SHELL ["{}", "-c"]'.format(sh)
-
-    @staticmethod
-    def user(user):
-        """Return Dockerfile instruction to create `user` if it does not
-        exist and switch to that user.
-
-        Parameters
-        ----------
-        user : str
-            Name of user to create and switch to.
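-
-        For example, user('neuro') returns a RUN instruction creating the
-        user (on first use) followed by 'USER neuro'; if the user was
-        created earlier, only 'USER neuro' is returned.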
-        """
-        user_cmd = "USER {}".format(user)
-        add_user_cmd = _Users.add(user)
-        if add_user_cmd:
-            return "RUN " + add_user_cmd + "\n" + user_cmd
-        else:
-            return user_cmd
-
-    @staticmethod
-    def volume(paths):
-        """Return Dockerfile VOLUME instruction.
-
-        Parameters
-        ----------
-        paths : list
-            List of paths in the container.
-        """
-        escaped = json.dumps(" ".join(paths))
-        return "VOLUME [{}]".format('", "'.join(escaped.split()))
-
-    @staticmethod
-    def workdir(path):
-        """Return Dockerfile WORKDIR instruction to set working directory."""
-        return "WORKDIR {}".format(path)
-
-
-class _DockerfileInterfaceFormatter:
-
-    def __init__(self, interface):
-        self.run = interface.render_run()
-        self.env = interface.render_env()
-
-    def render(self):
-        if self.run and self.env is None:
-            return self._render_run()
-        elif self.env and self.run is None:
-            return self._render_env()
-        elif self.env and self.run:
-            return self._render_env() + '\n' + self._render_run()
-
-    def _render_env(self):
-        """Return string of `ENV` instruction given dictionary of environment
-        variables.
-        """
-        out = "\n".join('{}="{}"'.format(k, v) for k, v in self.env.items())
-        return "ENV " + _indent(out)
-
-    def _render_run(self):
-        """Return string of `RUN` instruction given string of instructions."""
-        return "RUN " + _indent(self.run, add_list_op=True)
-
-
-class Dockerfile:
-
-    _implementations = {
-        **_installation_implementations,
-        **dict(inspect.getmembers(_DockerfileImplementations,
-                                  predicate=inspect.isfunction))
-    }
-
-    def __init__(self, specs):
-        self._specs = copy.deepcopy(specs)
-
-        self._prep()
-        _Users.clear_memory()
-
-    def render(self):
-        return "\n\n".join(self._ispecs_to_dockerfile_str())
-
-    def _prep(self):
-        self._add_json()
-        self._add_header()
-
-    def _add_header(self):
-        self._specs['instructions'].insert(
-            1, ('arg', {'DEBIAN_FRONTEND': 'noninteractive'})
-        )
-        self._specs['instructions'].insert(
-            2, ('_header', {'version': 'generic', 'method': 'custom'}))
-        self._specs['instructions'].insert(
-            3, ('entrypoint', "/neurodocker/startup.sh"))
-
-    def _ispecs_to_dockerfile_str(self):
-        pkg_man = self._specs['pkg_manager']
-        for item in self._specs['instructions']:
-            instruction, params = item
-            if instruction in self._implementations.keys():
-                impl = self._implementations[instruction]
-                if impl in _installation_implementations.values():
-                    interface = impl(pkg_manager=pkg_man, **params)
-                    yield _DockerfileInterfaceFormatter(interface).render()
-                else:
-                    if instruction == 'install':
-                        yield impl(params, pkg_manager=pkg_man)
-                    else:
-                        yield impl(params)
-            else:
-                raise ValueError(
-                    "instruction not understood: '{}'".format(instruction))
-
-    def _add_json(self):
-        jsonstr = _get_json_spec_str(self._specs)
-        self._specs['instructions'].append(('run', jsonstr))
diff --git a/neurodocker/generators/singularity.py b/neurodocker/generators/singularity.py
deleted file mode 100644
index 64c2e53b..00000000
--- a/neurodocker/generators/singularity.py
+++ /dev/null
@@ -1,176 +0,0 @@
-""""""
-
-from collections import OrderedDict
-import copy
-import inspect
-
-from neurodocker.generators.common import _add_to_entrypoint
-from neurodocker.generators.common import _get_json_spec_str
-from neurodocker.generators.common import _installation_implementations
-from neurodocker.generators.common import _install
-from neurodocker.generators.common import _Users
-from neurodocker.generators.common import NEURODOCKER_ENTRYPOINT
-
-
-class _SingularityRecipeImplementations:
-
-    def __init__(self,
singularity_recipe_object): - self._singobj = singularity_recipe_object - - def add_to_entrypoint(self, cmd): - self._singobj._post.append(_add_to_entrypoint(cmd)) - - def base(self, base): - if base.startswith('docker://'): - bootstrap = 'docker' - from_ = base.split('docker://', 1)[1] - elif base.startswith('shub://'): - bootstrap = 'shub' - from_ = base.split('shub://', 1)[1] - # If no prefix given, assume base is a Docker image. - else: - bootstrap = 'docker' - from_ = base - - self._singobj._header['Bootstrap'] = bootstrap - self._singobj._header['From'] = from_ - - def copy(self, list_srcs_dest): - self._singobj._files.append(list_srcs_dest) - - def install(self, pkgs, pkg_manager, opts=None): - self._singobj._post.append(_install(pkgs, pkg_manager)) - - def entrypoint(self, entrypoint): - self._singobj._runscript = entrypoint - - def env(self, d): - self._singobj._environment.update(**d) - - def run(self, s): - self._singobj._post.append(s) - - def user(self, user): - user_cmd = "su - {}".format(user) - add_user_cmd = _Users.add(user) - if add_user_cmd: - cmd = add_user_cmd + "\n" + user_cmd - else: - cmd = user_cmd - self._singobj._post.append(cmd) - - def workdir(self, path): - self._singobj._post.append("cd {}".format(path)) - - -class SingularityRecipe: - - def __init__(self, specs): - self._specs = copy.deepcopy(specs) - - self._header = OrderedDict() - self._help = [] - self._setup = [] - self._post = [] - self._environment = OrderedDict() - self._files = [] - self._runscript = '/neurodocker/startup.sh "$@"' - self._test = [] - self._labels = [] - - self._implementations = { - **_installation_implementations, - **dict(inspect.getmembers(_SingularityRecipeImplementations(self), - predicate=inspect.ismethod)) - } - - self._order = ( - ('header', self._header), - ('help', self._help), - ('setup', self._setup), - ('post', self._post), - ('environment', self._environment), - ('files', self._files), - ('runscript', self._runscript), - ('test', self._test), - ('labels', self._labels) - ) - self._parts_filled = False - _Users.clear_memory() - self._add_neurodocker_header() - self._add_json() - - def render(self): - def _render_one(section): - renderer = getattr(self, "_render_{}".format(section)) - return renderer() - - if not self._parts_filled: - self._fill_parts() - return "\n\n".join( - map(_render_one, (sec for sec, con in self._order if con))) - - def _render_header(self): - return "\n".join( - "{}: {}".format(k, v) for k, v in self._header.items()) - - def _render_help(self): - return "%help\n" + "\n".join(self._help) - - def _render_setup(self): - return "%setup\n" + "\n".join(self._setup) - - def _render_post(self): - return "%post\n" + "\n\n".join(self._post) - - def _render_environment(self): - return ( - "%environment\n" - + "\n".join('export {}="{}"'.format(*kv) - for kv in self._environment.items())) - - def _render_files(self): - return ( - "%files\n" - + "\n".join("{} {}".format(*f) for f in self._files)) - - def _render_runscript(self): - return "%runscript\n" + self._runscript - - def _render_test(self): - return "%test\n" + "\n".join(self._test) - - def _render_labels(self): - return "%labels\n" + "\n".join(self._labels) - - def _add_neurodocker_header(self): - self._specs['instructions'].insert( - 1, ('_header', {'version': 'generic', 'method': 'custom'})) - - def _fill_parts(self): - pkg_man = self._specs['pkg_manager'] - for item in self._specs['instructions']: - instruction, params = item - if instruction in self._implementations.keys(): - impl = 
self._implementations[instruction]
-                if impl in _installation_implementations.values():
-                    interface = impl(pkg_manager=pkg_man, **params)
-                    if interface.env:
-                        self._environment.update(**interface.render_env())
-                    if interface.run:
-                        self._post.append(interface.render_run())
-                else:
-                    if instruction == 'install':
-                        impl(params, pkg_manager=pkg_man)
-                    else:
-                        impl(params)
-            else:
-                raise ValueError(
-                    "instruction not understood: '{}'".format(instruction))
-        if not self._runscript:
-            self._runscript = NEURODOCKER_ENTRYPOINT
-        self._parts_filled = True
-
-    def _add_json(self):
-        jsonstr = _get_json_spec_str(self._specs)
-        self._specs['instructions'].append(("run", jsonstr))
diff --git a/neurodocker/interfaces/__init__.py b/neurodocker/interfaces/__init__.py
index ff76813d..9014fef4 100644
--- a/neurodocker/interfaces/__init__.py
+++ b/neurodocker/interfaces/__init__.py
@@ -1,6 +1,14 @@
-"""Module-level interface imports"""
+from __future__ import absolute_import
 
-from neurodocker.interfaces.interfaces import (
-    AFNI, ANTs, Convert3D, Dcm2niix, FSL, FreeSurfer, MatlabMCR, MINC,
-    Miniconda, MRtrix3, NeuroDebian, PETPVC, SPM12
-)
+from neurodocker.interfaces.afni import AFNI
+from neurodocker.interfaces.ants import ANTs
+from neurodocker.interfaces.convert3d import Convert3D
+from neurodocker.interfaces.dcm2niix import Dcm2niix
+from neurodocker.interfaces.freesurfer import FreeSurfer
+from neurodocker.interfaces.fsl import FSL
+from neurodocker.interfaces.miniconda import Miniconda
+from neurodocker.interfaces.mrtrix import MRtrix3
+from neurodocker.interfaces.neurodebian import NeuroDebian
+from neurodocker.interfaces.spm import SPM
+from neurodocker.interfaces.minc import MINC
+from neurodocker.interfaces.petpvc import PETPVC
diff --git a/neurodocker/interfaces/_base.py b/neurodocker/interfaces/_base.py
deleted file mode 100644
index 8cb73199..00000000
--- a/neurodocker/interfaces/_base.py
+++ /dev/null
@@ -1,318 +0,0 @@
-""""""
-
-from copy import deepcopy
-import os
-import posixpath
-
-import jinja2
-
-from neurodocker.utils import load_yaml
-
-GENERIC_VERSION = 'generic'
-
-apt_install = """apt-get update -qq
-apt-get install -y {{ apt_opts|default('-q --no-install-recommends', true) }} \\\
-{% for pkg in pkgs %}
-    {% if not loop.last -%}
-    {{ pkg }} \\\
-    {%- else -%}
-    {{ pkg }}
-    {%- endif -%}
-{% endfor %}
-apt-get clean
-rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-"""
-apt_install = jinja2.Template(apt_install)
-
-yum_install = """yum install -y {{ yum_opts|default('-q', true) }} \\\
-{% for pkg in pkgs %}
-    {% if not loop.last -%}
-    {{ pkg }} \\\
-    {%- else -%}
-    {{ pkg }}
-    {%- endif -%}
-{% endfor %}
-yum clean packages
-rm -rf /var/cache/yum/* /tmp/* /var/tmp/*
-"""
-yum_install = jinja2.Template(yum_install)
-
-deb_install = """{% for deb_url in debs -%}
-curl -sSL --retry 5 -o /tmp/toinstall.deb {{ deb_url }}
-dpkg -i /tmp/toinstall.deb
-rm /tmp/toinstall.deb
-{% endfor -%}
-apt-get install -f
-apt-get clean
-rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-"""
-deb_install = jinja2.Template(deb_install)
-
-
-def _load_global_specs():
-
-    def load_global_specs(glob_pattern):
-        import glob
-
-        def load_interface_spec(filepath):
-            _, filename = os.path.split(filepath)
-            key, _ = os.path.splitext(filename)
-            return key, load_yaml(filepath)
-
-        interface_yamls = glob.glob(glob_pattern)
-        instructions = {}
-        for ff in interface_yamls:
-            key, data = load_interface_spec(ff)
-            instructions[key] = data
-        return instructions
-
-    base_path = 
os.path.dirname(os.path.realpath(__file__))
-    glob_pattern = os.path.join(base_path, '..', 'templates', '*.yaml')
-    return load_global_specs(glob_pattern)
-
-
-_global_specs = _load_global_specs()
-
-
-def _interface_exists_in_yaml(name):
-    return name in _global_specs.keys()
-
-
-class _Resolver:
-    """
-    Parameters
-    ----------
-    d : dict
-    """
-    def __init__(self, d):
-        self._d = d
-        self._generic_only = self._d.keys() == {GENERIC_VERSION}
-
-    @property
-    def versions(self):
-        return set(self._d.keys())
-
-    def version_exists(self, version):
-        if self._generic_only:
-            return True
-        else:
-            return version in self.versions
-
-    def check_version_exists(self, version):
-        if not self.version_exists(version):
-            raise ValueError("version '{}' not found.".format(version))
-
-    def get_version_key(self, version):
-        """Return version key to use given a specific version. For example,
-        if a dictionary only has instructions for version 'generic', will
-        return 'generic' given any version string.
-
-        Raises
-        ------
-        `ValueError` if no key could be found for requested version.
-        """
-        if self._generic_only:
-            return GENERIC_VERSION
-        else:
-            self.check_version_exists(version)
-            return version
-
-    def version_has_method(self, version, method):
-        version_key = self.get_version_key(version)
-        return method in self._d[version_key].keys()
-
-    def check_version_has_method(self, version, method):
-        if not self.version_has_method(version, method):
-            raise ValueError(
-                "version '{}' does not have method '{}'"
-                .format(version, method)
-            )
-
-    def version_method_has_instructions(self, version, method):
-        version_key = self.get_version_key(version)
-        self.check_version_has_method(version_key, method)
-        return 'instructions' in self._d[version_key][method].keys()
-
-    def check_version_method_has_instructions(self, version, method):
-        if not self.version_method_has_instructions(version, method):
-            raise ValueError(
-                "installation method '{}' for version '{}' does not have an"
-                " 'instructions' key.".format(method, version)
-            )
-
-    def binaries_has_url(self, version):
-        version_key = self.get_version_key(version)
-        if self.version_has_method(version_key, 'binaries'):
-            try:
-                urls = self._d[version_key]['binaries']['urls'].keys()
-                return version in urls
-            except KeyError:
-                raise ValueError(
-                    "no binary URLs defined for version '{}'".format(version)
-                )
-        else:
-            raise ValueError(
-                "no binary installation method defined for version '{}'"
-                .format(version)
-            )
-
-    def check_binaries_has_url(self, version):
-        if not self.binaries_has_url(version):
-            version_key = self.get_version_key(version)
-            valid_vers = self._d[version_key]['binaries']['urls']
-            raise ValueError(
-                "URL not found for version '{}'. 
Valid versions are {}" - .format(version, ', '.join(valid_vers)) - ) - - def binaries_url(self, version): - self.check_binaries_has_url(version) - version_key = self.get_version_key(version) - return self._d[version_key]['binaries']['urls'][version] - - -class _BaseInterface: - """Base class for interface objects.""" - - def __init__(self, name, version, pkg_manager, method='binaries', - install_path=None, **kwargs): - self._name = name - self._version = version - self._pkg_manager = pkg_manager - self._method = method - self._install_path = install_path - self.__dict__.update(**kwargs) - - if not _interface_exists_in_yaml(self._name): - raise ValueError( - "No YAML entry for package '{}'".format(self._name) - ) - self._resolver = _Resolver(_global_specs[self._name]) - - self._version_key = self._resolver.get_version_key(self._version) - self._resolver.check_version_exists(self._version) - self._resolver.check_version_has_method(self._version, self._method) - self._resolver.check_version_method_has_instructions( - self._version, self._method - ) - - if method == 'binaries': - self.binaries_url = self._resolver.binaries_url(self._version) - - self._instance_specs = deepcopy( - _global_specs[self._name][self._version_key][self._method] - ) - - self._run = self._instance_specs['instructions'] - self._dependencies = self._get_dependencies() - - self._env = self._instance_specs.get('env', None) - - # Set default curl options for all interfaces. - self.__dict__.setdefault("curl_opts", "-fsSL --retry 5") - - @property - def _pretty_name(self): - return self.__class__.__name__ - - @property - def name(self): - return self._name - - @property - def version(self): - return self._version - - @property - def versions(self): - return self._resolver.versions - - @property - def pkg_manager(self): - return self._pkg_manager - - @property - def method(self): - return self._method - - @property - def env(self): - return self._env - - @property - def run(self): - return self._run - - @property - def install_path(self): - if self._install_path is None: - path = posixpath.join(posixpath.sep, 'opt', '{}-{}') - return path.format(self._name, self._version) - return self._install_path - - @property - def dependencies(self): - return self._dependencies - - def _get_dependencies(self): - if 'dependencies' not in self._instance_specs.keys(): - return None - if self._instance_specs['dependencies'] is None: - return None - try: - deps = self._instance_specs['dependencies'][self._pkg_manager] - return deps.split() if deps else None - except KeyError: - return None - - def _get_debs(self): - if 'dependencies' not in self._instance_specs.keys(): - return None - if self._instance_specs['dependencies'] is None: - return None - try: - debs = self._instance_specs['dependencies']['debs'] - return debs if debs else None - except KeyError: - return None - - def install_dependencies(self, sort=True): - if not self.dependencies: - raise ValueError( - "No dependencies to install. Add dependencies or remove the" - " `install_dependencies()` call in the package template." - ) - pkgs = sorted(self.dependencies) if sort else self.dependencies - - if self.pkg_manager == 'apt': - return apt_install.render( - pkgs=pkgs, - apt_opts=self.__dict__.get('apt_opts'), - sort=True, - ) - elif self.pkg_manager == 'yum': - return yum_install.render( - pkgs=pkgs, - yum_opts=self.__dict__.get('yum_opts'), - sort=True, - ) - - def install_debs(self): - debs = self._get_debs() - if not debs: - raise ValueError( - "No .deb files to install. 
Add .deb files or remove the"
-                " `install_debs()` call in the package template."
-            )
-        return deb_install.render(debs=debs)
-
-    def render_run(self):
-        return jinja2.Template(self.run).render({self.name: self})
-
-    def render_env(self):
-        """Return dictionary with rendered keys and values."""
-        return {
-            jinja2.Template(k).render({self.name: self}):
-            jinja2.Template(v).render({self.name: self})
-            for k, v in self.env.items()
-        } if self.env else self.env
diff --git a/neurodocker/interfaces/afni.py b/neurodocker/interfaces/afni.py
new file mode 100644
index 00000000..247498ca
--- /dev/null
+++ b/neurodocker/interfaces/afni.py
@@ -0,0 +1,248 @@
+"""Add Dockerfile instructions to install AFNI.
+
+Homepage: https://afni.nimh.nih.gov/
+GitHub repo: https://github.com/afni/afni
+Documentation: https://afni.nimh.nih.gov/pub/dist/doc/htmldoc/index.html
+
+Notes
+-----
+- AFNI uses something like semantic versioning starting from 15.3.00, released
+  on GitHub on January 4, 2016. Before this, it is unclear how AFNI was
+  versioned.
+- Only the latest binaries exist on AFNI's website.
+"""
+# Author: Jakub Kaczmarzyk
+
+from __future__ import absolute_import, division, print_function
+
+from neurodocker.utils import check_url, indent, manage_pkgs
+
+
+class AFNI(object):
+    """Add Dockerfile instructions to install AFNI.
+
+    Parameters
+    ----------
+    version : str
+        AFNI version. Can be "latest" or version string.
+    pkg_manager : {'apt', 'yum'}
+        Linux package manager.
+    use_binaries : bool
+        If true, uses pre-compiled AFNI binaries. True by default.
+    install_r : bool
+        If true, install R and R packages for AFNI. This can significantly
+        increase container build time.
+    install_python2 : bool
+        If true, install Python 2.
+    install_python3 : bool
+        If true, install Python 3.
+    check_urls : bool
+        If true, raise error if a URL used by this class responds with an error
+        code.
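+
+    Examples
+    --------
+    An illustrative sketch (the defaults install the pre-compiled binaries):
+
+        AFNI('latest', 'apt').cmd  # Dockerfile snippet as a string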
+    """
+
+    VERSION_TARBALLS = {
+        "latest": "https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz",
+        "17.2.02": "https://dl.dropbox.com/s/yd4umklaijydn13/afni-Linux-openmp64-v17.2.02.tgz",
+    }
+
+    def __init__(self, version, pkg_manager, use_binaries=True, build_source=False,
+                 install_r=False, install_python2=False, install_python3=False,
+                 check_urls=True):
+        self.version = version
+        self.pkg_manager = pkg_manager
+        self.use_binaries = use_binaries
+        self.build_source = build_source
+        self.install_r = install_r
+        self.install_python2 = install_python2
+        self.install_python3 = install_python3
+        self.check_urls = check_urls
+
+        self.cmd = self._create_cmd()
+
+    def _create_cmd(self):
+        """Return full command to install AFNI."""
+        comment = ("#--------------------\n"
+                   "# Install AFNI {}\n"
+                   "#--------------------".format(self.version))
+
+        if self.use_binaries:
+            chunks = [comment, self.install_binaries()]
+        elif self.build_source:
+            chunks = [comment, self.install_from_source()]
+        else:
+            raise ValueError("Either `use_binaries` or `build_source` must"
+                             " be true.")
+
+        return "\n".join(chunks)
+
+    def _get_binaries_urls(self, version):
+        try:
+            return AFNI.VERSION_TARBALLS[version]
+        except KeyError:
+            raise ValueError("AFNI version not available: {}".format(version))
+
+    def _get_binaries_dependencies(self):
+        base_deps = {
+            'apt': 'ed gsl-bin libglu1-mesa-dev libglib2.0-0 libglw1-mesa'
+                   '\nlibgomp1 libjpeg62 libxm4 netpbm tcsh xfonts-base xvfb',
+            'yum': 'ed gsl libGLU libgomp libpng12 libXp libXpm netpbm-progs'
+                   '\nopenmotif R-devel tcsh xorg-x11-fonts-misc'
+                   ' xorg-x11-server-Xvfb',
+        }
+        if self.install_python2:
+            base_deps['apt'] += ' python'
+            base_deps['yum'] += ' python'
+        if self.install_python3:
+            base_deps['apt'] += ' python3'
+            base_deps['yum'] += ' python3'
+
+        return base_deps[self.pkg_manager]
+
+    def _get_source_dependencies(self):
+        base_deps = {
+            'apt': 'ed git curl gcc g++ make m4 zlib1g-dev libxt-dev libxext-dev'
+                   '\nlibxmu-headers libmotif-dev libxpm-dev tcsh libgsl-dev'
+                   '\nmesa-common-dev libglu1-mesa-dev libxi-dev libnetpbm10-dev'
+                   ' libglib2.0-dev r-base r-base-dev',
+            'yum': 'git gcc make m4 zlib-devel libXt-devel libXext-devel'
+                   '\nlibXmu-devel openmotif-devel expat-devel compat-gcc-34 tcsh'
+                   '\nlibXpm-devel gsl-devel mesa-libGL-devel mesa-libGLU-devel'
+                   '\nlibXi-devel glib2-devel gcc-c++ netpbm-devel gcc-gfortran'
+                   ' epel-release'
+        }
+
+        if self.install_python2:
+            base_deps['apt'] += ' python'
+            base_deps['yum'] += ' python'
+        if self.install_python3:
+            base_deps['apt'] += ' python3'
+            base_deps['yum'] += ' python3'
+
+        return base_deps[self.pkg_manager]
+
+    def install_binaries(self):
+        """Return Dockerfile instructions to download and install AFNI
+        binaries.
+        """
+        url = self._get_binaries_urls(self.version)
+        if self.check_urls:
+            check_url(url)
+
+        pkgs = self._get_binaries_dependencies()
+
+        cmd = ("{install}"
+               '\n&& libs_path=/usr/lib/x86_64-linux-gnu'
+               '\n&& if [ -f $libs_path/libgsl.so.19 ]; then'
+               '\n       ln $libs_path/libgsl.so.19 $libs_path/libgsl.so.0;'
+               '\n   fi'
+               "".format(**manage_pkgs[self.pkg_manager]).format(pkgs=pkgs))
+
+        if self.pkg_manager == "apt":
+            # libxp was removed after ubuntu trusty.
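+            # Try the distribution package first; if libxp6 is not in the
+            # enabled repositories, fall back to installing the .deb from a
+            # Debian mirror directly.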
+            deb_url = ('http://mirrors.kernel.org/debian/pool/main/libx/'
+                       'libxp/libxp6_1.0.2-2_amd64.deb')
+            cmd += ('\n&& echo "Install libxp (not in all ubuntu/debian repositories)"'
+                    "\n&& apt-get install -yq --no-install-recommends libxp6"
+                    '\n|| /bin/bash -c "'
+                    '\n   curl --retry 5 -o /tmp/libxp6.deb -sSL {}'
+                    '\n   && dpkg -i /tmp/libxp6.deb && rm -f /tmp/libxp6.deb"'
+                    ''.format(deb_url))
+
+            deb_url = ('http://mirrors.kernel.org/debian/pool/main/libp/'
+                       'libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb')
+            cmd += ('\n&& echo "Install libpng12 (not in all ubuntu/debian repositories)"'
+                    "\n&& apt-get install -yq --no-install-recommends libpng12-0"
+                    '\n|| /bin/bash -c "'
+                    '\n   curl --retry 5 -o /tmp/libpng12.deb -sSL {}'
+                    '\n   && dpkg -i /tmp/libpng12.deb && rm -f /tmp/libpng12.deb"'
+                    ''.format(deb_url))
+
+        if self.install_r:
+            sh_url = ("https://gist.githubusercontent.com/kaczmarj/"
+                      "8e3792ae1af70b03788163c44f453b43/raw/"
+                      "0577c62e4771236adf0191c826a25249eb69a130/"
+                      "R_installer_debian_ubuntu.sh")
+            cmd += ('\n&& echo "Install R"'
+                    "\n&& apt-get install -yq --no-install-recommends"
+                    "\n\tr-base-dev r-cran-rmpi libnlopt-dev"
+                    '\n   || /bin/bash -c "'
+                    '\n   curl --retry 5 -o /tmp/install_R.sh -sSL {}'
+                    '\n   && /bin/bash /tmp/install_R.sh"').format(sh_url)
+
+        cmd += ("\n&& {clean}"
+                '\n&& echo "Downloading AFNI ..."'
+                "\n&& mkdir -p /opt/afni"
+                "\n&& curl -sSL --retry 5 {}"
+                "\n| tar zx -C /opt/afni --strip-components=1"
+                "".format(url, **manage_pkgs[self.pkg_manager]))
+        if self.install_r:
+            cmd += ("\n&& /opt/afni/rPkgsInstall -pkgs ALL"
+                    "\n&& rm -rf /tmp/*")
+
+        cmd = indent("RUN", cmd)
+
+        env_cmd = "PATH=/opt/afni:$PATH"
+        env_cmd = indent("ENV", env_cmd)
+
+        return "\n".join((env_cmd, cmd))
+
+    def install_from_source(self):
+        """Return Dockerfile instructions to download and build AFNI from
+        source."""
+        pkgs = self._get_source_dependencies()
+
+        if self.version == "latest":
+            self.version = "master"
+
+        cmd = ("{install}"
+               "".format(**manage_pkgs[self.pkg_manager]).format(pkgs=pkgs))
+
+        if self.pkg_manager == "yum":
+            cmd += ("\n&& yum install -y -q R R-devel")
+
+        if self.pkg_manager == "apt":
+            # libxp was removed after ubuntu trusty.
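+            # Same libxp fallback as in install_binaries: prefer the
+            # repository package, otherwise fetch and install the .deb
+            # directly.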
+            deb_url = ('http://mirrors.kernel.org/debian/pool/main/libx/'
+                       'libxp/libxp6_1.0.2-2_amd64.deb')
+            cmd += ('\n&& echo "Install libxp (not in all ubuntu/debian repositories)"'
+                    "\n&& apt-get install -yq --no-install-recommends libxp6"
+                    '\n|| /bin/bash -c "'
+                    '\n   curl --retry 5 -o /tmp/libxp6.deb -sSL {}'
+                    '\n   && dpkg -i /tmp/libxp6.deb && rm -f /tmp/libxp6.deb"'
+                    ''.format(deb_url))
+
+            cmd += ('\n&& ln -s /usr/lib/x86_64-linux-gnu/libXp.so.6.2.0 /usr/lib/x86_64-linux-gnu/libXp.so'
+                    '\n&& ln -s /usr/lib/x86_64-linux-gnu/libXmu.so.6.2.0 /usr/lib/x86_64-linux-gnu/libXmu.so')
+
+        cmd += ("\n&& {clean}"
+                '\n&& echo "Downloading AFNI ..."'
+                "\n&& cd /opt"
+                "\n&& git clone https://github.com/afni/afni.git"
+                "\n&& cd afni"
+                "\n&& git checkout {}"
+                "\n&& cd src"
+                "\n&& cp Makefile.linux_openmp_64 Makefile"
+                "\n&& perl -p -i -e 's/^LGIFTI.*/LGIFTI = -lexpat/' Makefile"
+                "\n&& perl -p -i -e 's/^USE_LOCAL_X_TREE/#USE_LOCAL_X_TREE/' Makefile"
+                "\n&& perl -p -i -e 's/XLIBS = \$\(XROOT\)\/lib64\/libXm.a -lXt/XLIBS = \$\(XROOT\)\/lib64\/libXm.a \$\(XROOT\)\/lib\/x86_64-linux-gnu\/libXm.a -lXt/' Makefile"
+                "\n&& perl -p -i -e 's/^# XLIBS =/XLIBS =/' Makefile"
+                "\n&& perl -p -i -e 's/^CCOLD.*/CCOLD = \$\(CC\)/' Makefile"
+                "\n&& perl -p -i -e 's/(^LFLAGS.*)/$1 -L\/usr\/lib\/x86_64-linux-gnu/' Makefile"
+                "\n&& perl -p -i -e 's/(^PLFLAGS.*)/$1 -L\/usr\/lib -L\/usr\/lib\/x86_64-linux-gnu/' Makefile"
+                "\n&& perl -p -i -e 's/-lXpm -lXext/-lXpm -lfontconfig -lXext/' Makefile"
+                "\n&& perl -p -i -e 's/(^SUMA_INCLUDE_PATH.*)/$1 -I\/usr\/lib\/x86_64-linux-gnu\/glib-2.0\/include/' Makefile"
+                "\n&& make INSTALLDIR=/opt/afni vastness"
+                "".format(self.version, **manage_pkgs[self.pkg_manager]))
+
+        if self.install_r:
+            cmd += ("\n&& /opt/afni/rPkgsInstall -pkgs ALL"
+                    "\n&& rm -rf /tmp/*")
+
+        cmd = indent("RUN", cmd)
+
+        env_cmd = ("PATH=/opt/afni:$PATH"
+                   "\nAFNI_PLUGINPATH=/opt/afni")
+        env_cmd = indent("ENV", env_cmd)
+
+        return "\n".join((env_cmd, cmd))
diff --git a/neurodocker/interfaces/ants.py b/neurodocker/interfaces/ants.py
new file mode 100644
index 00000000..4a2d166a
--- /dev/null
+++ b/neurodocker/interfaces/ants.py
@@ -0,0 +1,154 @@
+"""Add Dockerfile instructions to install ANTs.
+
+Project repository: https://github.com/stnava/ANTs/
+
+ANTs recommends building from source. Jakub Kaczmarzyk built several versions
+on CentOS 5 Docker images. Those Docker images are located at
+https://hub.docker.com/r/kaczmarj/ants/ and the binaries are on Dropbox. See
+the ANTs class definition for the Dropbox URLs.
+
+Sources for ANTs versions 2.0.0 and newer are available on GitHub, and older
+versions are available on SourceForge.
+
+Instructions to build from source (takes approximately 40 minutes):
+https://github.com/stnava/ANTs/wiki/Compiling-ANTs-on-Linux-and-Mac-OS
+"""
+# Author: Jakub Kaczmarzyk
+
+from __future__ import absolute_import, division, print_function
+
+from neurodocker.utils import check_url, indent, manage_pkgs
+
+
+class ANTs(object):
+    """Add Dockerfile instructions to install ANTs. Versions 2.0.0 and newer
+    are supported. Pre-compiled binaries can be downloaded, or ANTs can be
+    built from source. The pre-compiled binaries were compiled on a CentOS 5
+    Docker image.
+
+    Adapted from the Dockerfile at https://hub.docker.com/r/nipype/workshops/
+    `docker pull nipype/workshops:latest-nofsspm`
+
+    Parameters
+    ----------
+    version : str
+        ANTs version.
+    pkg_manager : {'apt', 'yum'}
+        Linux package manager.
+    use_binaries : bool
+        If true, uses pre-compiled ANTs binaries. If false, attempts to build
+        from source.
+    git_hash : str
+        If this is specified and use_binaries is false, build from source from
+        this commit. If this is not specified and use_binaries is false, will
+        use git hash of the specified version.
+    check_urls : bool
+        If true, raise error if a URL used by this class responds with an error
+        code.
+    """
+    VERSION_HASHES = {"latest": None,
+                      "2.2.0": "0740f9111e5a9cd4768323dc5dfaa7c29481f9ef",
+                      "2.1.0": "78931aa6c4943af25e0ee0644ac611d27127a01e",
+                      "2.1.0rc3": "465cc8cdf0f8cc958edd2d298e05cc2d7f8a48d8",
+                      "2.1.0rc2": "17160f72f5e1c9d6759dd32b7f2dc0b36ded338b",
+                      "2.1.0rc1": "1593a7777d0e6c8be0b9462012328bde421510b9",
+                      "2.0.3": "c9965390c1a302dfa9e63f6ca3cb88f68aab329f",
+                      "2.0.2": "7b83036c987e481b2a04490b1554196cb2fc0dab",
+                      "2.0.1": "dd23c394df9292bae4c5a4ece3023a7571791b7d",
+                      "2.0.0": "7ae1107c102f7c6bcffa4df0355b90c323fcde92",}
+
+    VERSION_TARBALLS = {"2.2.0": "https://dl.dropbox.com/s/2f4sui1z6lcgyek/ANTs-Linux-centos5_x86_64-v2.2.0-0740f91.tar.gz",
+                        "2.1.0": "https://dl.dropbox.com/s/h8k4v6d1xrv0wbe/ANTs-Linux-centos5_x86_64-v2.1.0-78931aa.tar.gz",
+                        "2.0.3": "https://dl.dropbox.com/s/oe4v52lveyt1ry9/ANTs-Linux-centos5_x86_64-v2.0.3-c996539.tar.gz",
+                        "2.0.0": "https://dl.dropbox.com/s/kgqydc44cc2uigb/ANTs-Linux-centos5_x86_64-v2.0.0-7ae1107.tar.gz",}
+
+    def __init__(self, version, pkg_manager, use_binaries=True, git_hash=None,
+                 check_urls=True):
+        self.version = version
+        self.pkg_manager = pkg_manager
+        self.use_binaries = use_binaries
+        self.git_hash = git_hash
+        self.check_urls = check_urls
+
+        self.cmd = self._create_cmd()
+
+    def _create_cmd(self):
+        """Return full command to install ANTs."""
+        comment = ("#-------------------\n"
+                   "# Install ANTs {}\n"
+                   "#-------------------".format(self.version))
+
+        if self.use_binaries:
+            chunks = [comment, self.install_binaries()]
+        else:
+            chunks = [comment, self.build_from_source_github()]
+
+        return "\n".join(chunks)
+
+    def install_binaries(self):
+        """Return command to download and install ANTs binaries."""
+        try:
+            url = ANTs.VERSION_TARBALLS[self.version]
+        except KeyError:
+            raise ValueError("Tarball not available for version {}."
+                             "".format(self.version))
+
+        if self.check_urls:
+            check_url(url)
+
+        cmd = ('echo "Downloading ANTs ..."'
+               "\n&& curl -sSL --retry 5 {}"
+               "\n| tar zx -C /opt".format(url))
+        cmd = indent("RUN", cmd)
+
+        env_cmd = ("ANTSPATH=/opt/ants"
+                   "\nPATH=/opt/ants:$PATH")
+        env_cmd = indent("ENV", env_cmd)
+
+        return "\n".join((cmd, env_cmd))
+
+    def build_from_source_github(self):
+        """Return Dockerfile instructions to build ANTs from source. Checkout
+        to commit based on git_hash or version. If 'latest', build from master.
+        """
+        pkgs = {'apt': 'cmake g++ gcc git make zlib1g-dev',
+                'yum': 'cmake gcc-c++ git make zlib-devel'}
+
+        if self.git_hash is None:
+            try:
+                self.git_hash = ANTs.VERSION_HASHES[self.version]
+            except KeyError:
+                raise ValueError("git hash not known for version {}"
+                                 "".format(self.version))
+
+        if self.version == "latest":
+            checkout = ""
+        else:
+            checkout = ("\n&& cd ANTs"
+                        "\n&& git checkout {}"
+                        "\n&& cd .." 
+ "".format(self.git_hash)) + + workdir_cmd = "WORKDIR /tmp/ants-build" + cmd = ("deps='{pkgs}'" + "\n&& {install}" + "\n&& {clean}" + "\n&& git clone https://github.com/stnava/ANTs.git" + "{checkout}" + "\n&& mkdir build && cd build" + "\n&& cmake ../ANTs && make -j 1" + "\n&& mkdir -p /opt/ants" + "\n&& mv bin/* /opt/ants && mv ../ANTs/Scripts/* /opt/ants" + "\n&& rm -rf /tmp/*" + "\n&& {remove}" + "".format(pkgs=pkgs[self.pkg_manager], checkout=checkout, + **manage_pkgs[self.pkg_manager])) + cmd = cmd.format(pkgs='$deps') + cmd = indent("RUN", cmd) + + env_cmd = ("ANTSPATH=/opt/ants\n" + "PATH=/opt/ants:$PATH") + env_cmd = indent("ENV", env_cmd) + + return "\n".join((workdir_cmd, cmd, env_cmd)) diff --git a/neurodocker/interfaces/convert3d.py b/neurodocker/interfaces/convert3d.py new file mode 100644 index 00000000..a641f98d --- /dev/null +++ b/neurodocker/interfaces/convert3d.py @@ -0,0 +1,73 @@ +"""Add Dockerfile instructions to install Convert3D (C3D). + +Project repository: https://sourceforge.net/projects/c3d/ + + +""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function + +import posixpath + +from neurodocker.utils import check_url, indent + + +class Convert3D(object): + """Add Dockerfile instructions to install Convert3D (C3D). + + Copied from Dockerfile at + https://github.com/nipy/nipype/blob/master/docker/base.Dockerfile#L110-L116 + + Parameters + ---------- + version : str + C3D version. + pkg_manager : {'apt', 'yum'} + Linux package manager. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. + """ + VERSION_TARBALLS = {"nightly": "https://sourceforge.net/projects/c3d/files/c3d/Nightly/c3d-nightly-Linux-x86_64.tar.gz/download", + "1.0.0": "https://sourceforge.net/projects/c3d/files/c3d/1.0.0/c3d-1.0.0-Linux-x86_64.tar.gz/download",} + + def __init__(self, version, pkg_manager, check_urls=True): + self.version = version + self.pkg_manager = pkg_manager + self.check_urls = check_urls + + self.cmd = self._create_cmd() + + def _create_cmd(self): + """Return full command to install Convert3D.""" + comment = ("#------------------------" + "\n# Install Convert3D {}" + "\n#------------------------".format(self.version)) + + chunks = [comment, self.install_binaries()] + return "\n".join(chunks) + + def install_binaries(self): + """Return command to download and install C3D binaries.""" + try: + url = Convert3D.VERSION_TARBALLS[self.version.lower()] + except KeyError: + raise ValueError("Unsupported version: {}".format(self.version)) + + if self.check_urls: + check_url(url) + + cmd = ('echo "Downloading C3D ..."' + "\n&& mkdir /opt/c3d" + "\n&& curl -sSL --retry 5 {}" + "\n| tar -xzC /opt/c3d --strip-components=1".format(url)) + cmd = indent("RUN", cmd) + + c3d_path = "/opt/c3d" + c3d_bin_path = posixpath.join(c3d_path, 'bin') + env_cmd = ("C3DPATH={}" + "\nPATH={}:$PATH").format(c3d_path, c3d_bin_path) + env_cmd = indent("ENV", env_cmd) + + return "\n".join((cmd, env_cmd)) diff --git a/neurodocker/interfaces/dcm2niix.py b/neurodocker/interfaces/dcm2niix.py new file mode 100644 index 00000000..19c7bebd --- /dev/null +++ b/neurodocker/interfaces/dcm2niix.py @@ -0,0 +1,71 @@ +"""Add Dockerfile instructions to install dcm2niix. 
+
+Project repository: https://github.com/rordenlab/dcm2niix
+"""
+# Author: Jakub Kaczmarzyk
+
+from __future__ import absolute_import, division, print_function
+
+from neurodocker.utils import check_url, indent, manage_pkgs
+
+
+class Dcm2niix(object):
+    """Add Dockerfile instructions to install dcm2niix.
+
+    Parameters
+    ----------
+    version : str
+        Dcm2niix version. Use "latest" or "master" for version of current
+        master branch. Can also be git commit hash or git tag.
+    pkg_manager : {'apt', 'yum'}
+        Linux package manager.
+    check_urls : bool
+        If true, raise error if a URL used by this class responds with an error
+        code.
+    """
+
+    def __init__(self, version, pkg_manager, check_urls=True):
+        self.version = version
+        self.pkg_manager = pkg_manager
+        self.check_urls = check_urls
+
+        if self.version in ["latest", "master"]:
+            self.version = "master"
+
+        self.cmd = self._create_cmd()
+
+    def _create_cmd(self):
+        """Return full command to install dcm2niix."""
+        comment = ("#------------------------\n"
+                   "# Install dcm2niix {}\n"
+                   "#------------------------".format(self.version))
+        chunks = [comment, self.build_from_source()]
+        return "\n".join(chunks)
+
+    def build_from_source(self):
+        """Return Dockerfile instructions to build dcm2niix from source.
+        """
+        pkgs = {'apt': 'cmake g++ gcc git make pigz zlib1g-dev',
+                'yum': 'cmake gcc-c++ git libstdc++-static make pigz zlib-devel'}
+
+        url = ("https://github.com/rordenlab/dcm2niix/tarball/{}"
+               .format(self.version))
+        if self.check_urls:
+            check_url(url)
+
+        workdir_cmd = "WORKDIR /tmp"
+        cmd = ("deps='{pkgs}'"
+               "\n&& {install}"
+               "\n&& {clean}"
+               "\n&& mkdir dcm2niix"
+               "\n&& curl -sSL {url} | tar xz -C dcm2niix --strip-components 1"
+               "\n&& mkdir dcm2niix/build && cd dcm2niix/build"
+               "\n&& cmake .. && make"
+               "\n&& make install"
+               "\n&& rm -rf /tmp/*"
+               "".format(pkgs=pkgs[self.pkg_manager], url=url,
+                         **manage_pkgs[self.pkg_manager]))
+        cmd = cmd.format(pkgs='$deps')
+        cmd = indent("RUN", cmd)
+
+        return "\n".join((workdir_cmd, cmd))
diff --git a/neurodocker/interfaces/freesurfer.py b/neurodocker/interfaces/freesurfer.py
new file mode 100644
index 00000000..c584ec23
--- /dev/null
+++ b/neurodocker/interfaces/freesurfer.py
@@ -0,0 +1,224 @@
+"""Add Dockerfile instructions to install FreeSurfer.
+
+Project repository: https://github.com/freesurfer/freesurfer
+Project website: https://surfer.nmr.mgh.harvard.edu/
+Project wiki: https://surfer.nmr.mgh.harvard.edu/fswiki/FreeSurferWiki
+"""
+# Author: Jakub Kaczmarzyk
+
+from __future__ import absolute_import, division, print_function
+
+from neurodocker.utils import check_url, indent, manage_pkgs
+
+
+def _get_dirs_to_exclude(dirs):
+    import posixpath
+
+    dirs = (posixpath.join('freesurfer', dd) for dd in dirs)
+    return "\n".join("--exclude='{}'".format(dd) for dd in dirs)
+
+
+class FreeSurfer(object):
+    """Add Dockerfile instructions to install FreeSurfer. A FreeSurfer license
+    is required to run the software.
+
+    See FreeSurfer's download and install instructions:
+    https://surfer.nmr.mgh.harvard.edu/fswiki/DownloadAndInstall
+
+    Parameters
+    ----------
+    version : str
+        FreeSurfer version (e.g., '6.0.0'). To install nightly build, use
+        version='dev'.
+    pkg_manager : {'apt', 'yum'}
+        Linux package manager.
+    min : bool
+        If true, install FreeSurfer minimized for recon-all.
+    license_path : str
+        Relative path to license.txt file. If provided, adds a COPY instruction
+        to copy the file into $FREESURFER_HOME (always /opt/freesurfer/).
+    use_binaries : bool
+        If true, uses pre-compiled FreeSurfer binaries. Building from source
+        is not yet supported.
+    exclude : list
+        Directories to exclude when extracting the FreeSurfer tarball. If None,
+        excludes 'average/mult-comp-cor', 'lib/cuda', 'lib/qt',
+        'subjects/V1_average', 'subjects/bert', 'subjects/cvs_avg35',
+        'subjects/cvs_avg35_inMNI152', 'subjects/fsaverage3',
+        'subjects/fsaverage4', 'subjects/fsaverage5', 'subjects/fsaverage6',
+        'subjects/fsaverage_sym', and 'trctrain'.
+    check_urls : bool
+        If true, raise error if a URL used by this class responds with an error
+        code.
+    """
+
+    def __init__(self, version, pkg_manager, min=False, license_path=None,
+                 use_binaries=True, exclude=None, check_urls=True):
+        self.version = version
+        self.pkg_manager = pkg_manager
+        self.min = min
+        self.license_path = license_path
+        self.use_binaries = use_binaries
+        self.exclude = exclude
+        self.check_urls = check_urls
+
+        if self.exclude is None:
+            self.exclude = [
+                'average/mult-comp-cor',
+                'lib/cuda',
+                'lib/qt',
+                'subjects/V1_average',
+                'subjects/bert',
+                'subjects/cvs_avg35',
+                'subjects/cvs_avg35_inMNI152',
+                'subjects/fsaverage3',
+                'subjects/fsaverage4',
+                'subjects/fsaverage5',
+                'subjects/fsaverage6',
+                'subjects/fsaverage_sym',
+                'trctrain',
+            ]
+        elif not self.exclude:
+            pass
+        elif isinstance(self.exclude, str):
+            self.exclude = self.exclude.split(' ')
+
+        self.cmd = self._create_cmd()
+
+    def _create_cmd(self):
+        """Return full command to install FreeSurfer."""
+        comment = ("#--------------------------\n"
+                   "# Install FreeSurfer v{}\n"
+                   "#--------------------------".format(self.version))
+
+        chunks = [comment]
+
+        if self.min:
+            if self.version != "6.0.0":
+                raise ValueError("Minimized version is only available for"
+                                 " FreeSurfer version 6.0.0")
+            min_comment = ("# Install version minimized for recon-all"
+                           "\n# See https://github.com/freesurfer/freesurfer/issues/70")
+            chunks.append(min_comment)
+            chunks.append(self.add_min_recon_all())
+        elif self.use_binaries:
+            chunks.append(self.install_binaries())
+        else:
+            raise ValueError("Installation via binaries is the only available "
+                             "installation method for now.")
+
+        if self.license_path is not None:
+            chunks.append(self._copy_license())
+
+        return "\n".join(chunks)
+
+    def _get_binaries_url(self):
+        """Return URL for FreeSurfer `version`."""
+        from distutils.version import StrictVersion
+        try:
+            from urllib.parse import urljoin  # python 3
+        except ImportError:
+            from urlparse import urljoin  # python 2
+
+        if self.version == 'dev':
+            return ("ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/dev/"
+                    "freesurfer-Linux-centos6_x86_64-dev.tar.gz")
+
+        version = StrictVersion(self.version)
+        base = "https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/{ver}/"
+
+        if version >= StrictVersion('6.0.0'):
+            rel_url = "freesurfer-Linux-centos6_x86_64-stable-pub-v{ver}.tar.gz"
+        elif version >= StrictVersion('5.0.0'):
+            rel_url = "freesurfer-Linux-centos4_x86_64-stable-pub-v{ver}.tar.gz"
+        elif version >= StrictVersion('3.0.4'):
+            rel_url = ("freesurfer-Linux-centos4_x86_64-stable-pub-v{ver}-full"
+                       ".tar.gz")
+        elif version == StrictVersion('3.0.3'):
+            rel_url = "freesurfer-Linux-rh7.3-stable-pub-v{ver}-full.tar.gz"
+        elif version == StrictVersion('2.2'):
+            rel_url = "freesurfer-Linux-centos4.0_x86_64-v{ver}.tar.gz"
+        else:
+            rel_url = ""
+
+        return urljoin(base, rel_url).format(ver=self.version)
+
+    def _install_binaries_deps(self):
+        """Return command to install FreeSurfer dependencies. 
Use this for + FreeSurfer binaries, not if attempting to build FreeSurfer from source. + """ + pkgs = {'apt': "bc libgomp1 libxmu6 libxt6 tcsh perl", + 'yum': "bc libgomp libXmu libXt tcsh perl"} + + cmd = "{install}\n&& {clean}".format(**manage_pkgs[self.pkg_manager]) + return cmd.format(pkgs=pkgs[self.pkg_manager]) + + def install_binaries(self): + """Return command to download and install FreeSurfer binaries.""" + from neurodocker.generate import _add_to_entrypoint + + url = self._get_binaries_url() + + if self.check_urls and self.version == 'dev': + raise ValueError("check_urls=True and version='dev' cannot be used " + "together. Set check_urls to False.") + elif self.check_urls: + check_url(url) + + if self.exclude: + excluded_dirs = _get_dirs_to_exclude(self.exclude) + else: + excluded_dirs = '' + + cmd = self._install_binaries_deps() + ent = _add_to_entrypoint("source $FREESURFER_HOME/SetUpFreeSurfer.sh", + with_run=False) + cmd += ('\n&& echo "Downloading FreeSurfer ..."' + "\n&& curl -sSL --retry 5 {url}" + "\n| tar xz -C /opt\n{excluded}" + "\n&& {entrypoint_cmd}" + "".format(url=url, excluded=excluded_dirs, entrypoint_cmd=ent)) + cmd = indent("RUN", cmd) + + env_cmd = "ENV FREESURFER_HOME=/opt/freesurfer" + + return "\n".join((cmd, env_cmd)) + + def add_min_recon_all(self): + """Return Dockerfile instructions to install minimized version of + recon-all. + + See https://github.com/freesurfer/freesurfer/issues/70 for more + information. + """ + from neurodocker.generate import _add_to_entrypoint + + cmd = self._install_binaries_deps() + url = ("https://dl.dropbox.com/s/nnzcfttc41qvt31/" + "recon-all-freesurfer6-3.min.tgz") + ent = _add_to_entrypoint("source $FREESURFER_HOME/SetUpFreeSurfer.sh", + with_run=False) + cmd += ('\n&& echo "Downloading minimized FreeSurfer ..."' + "\n&& curl -sSL --retry 5 {} | tar xz -C /opt" + "\n&& {entrypoint_cmd}" + "".format(url, entrypoint_cmd=ent)) + cmd = indent("RUN", cmd) + + env_cmd = "ENV FREESURFER_HOME=/opt/freesurfer" + return "\n".join((cmd, env_cmd)) + + def _copy_license(self): + """Return command to copy local license file into the container. Path + must be a relative path within the build context. + """ + import os + + if os.path.isabs(self.license_path): + raise ValueError("Path to license file must be relative, but " + "absolute path was given.") + + comment = ("# Copy license file into image. " + "Must be relative path within build context.") + cmd = ('COPY ["{file}", "/opt/freesurfer/license.txt"]' + ''.format(file=self.license_path)) + return '\n'.join((comment, cmd)) diff --git a/neurodocker/interfaces/fsl.py b/neurodocker/interfaces/fsl.py new file mode 100644 index 00000000..df774090 --- /dev/null +++ b/neurodocker/interfaces/fsl.py @@ -0,0 +1,214 @@ +"""Class to add FSL installation to Dockerfile. + +FSL wiki: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/ +FSL license: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence +""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function +from distutils.version import LooseVersion +import logging + +try: + from urllib.parse import urljoin # Python 3 +except ImportError: + from urlparse import urljoin # Python 2 + +from neurodocker.utils import check_url, indent, manage_pkgs + +logger = logging.getLogger(__name__) + + +class FSL(object): + """Add Dockerfile instructions to install FSL. + + Parameters + ---------- + version : str + Version of FSL. + pkg_manager : {'apt', 'yum'} + Linux package manager. 
+    use_binaries : bool
+        If true, use binaries from FSL's website (default true).
+    use_installer : bool
+        If true, install FSL using FSL's Python installer. Only works on
+        CentOS/RHEL (default false).
+    eddy_5011 : bool
+        If true, install pre-release of FSL eddy v5.0.11.
+    eddy_5011_cuda : {'6.5', '7.0', '7.5', '8.0'}
+        Version of CUDA for FSL eddy pre-release. Only applies if eddy_5011 is
+        true.
+    check_urls : bool
+        If true, raise error if a URL used by this class responds with an error
+        code.
+
+    Notes
+    -----
+    Look into ReproNim/simple_workflow to learn how to install specific
+    versions of FSL on Debian (https://github.com/ReproNim/simple_workflow).
+    """
+    def __init__(self, version, pkg_manager, use_binaries=True,
+                 use_installer=False, eddy_5011=False, eddy_5011_cuda=None,
+                 check_urls=True):
+        self.version = LooseVersion(version)
+        self.pkg_manager = pkg_manager
+        self.use_binaries = use_binaries
+        self.use_installer = use_installer
+        self.eddy_5011 = eddy_5011
+        self.eddy_5011_cuda = eddy_5011_cuda
+        self.check_urls = check_urls
+
+        self._check_args()
+        self.cmd = self._create_cmd()
+
+    def _check_args(self):
+        """Raise `ValueError` if combinations of arguments are invalid."""
+        if not (self.use_binaries or self.use_installer):
+            raise ValueError("Please specify installation method.")
+        if self.use_binaries and self.use_installer:
+            raise ValueError("More than one installation method specified.")
+        if self.use_installer and self.pkg_manager != 'yum':
+            raise ValueError("FSL's Python installer works only on"
+                             " CentOS/RHEL-based systems.")
+        if self.version < LooseVersion('5.0.10') and self.eddy_5011:
+            raise ValueError("Pre-release of FSL eddy can only be installed"
+                             " with FSL v5.0.10 or newer.")
+        return True
+
+    def _create_cmd(self):
+        """Return full Dockerfile instructions to install FSL."""
+        comment = ("#-----------------------------------------------------------"
+                   "\n# Install FSL v{}"
+                   "\n# FSL is non-free. If you are considering commercial use"
+                   "\n# of this Docker image, please consult the relevant license:"
+                   "\n# https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence"
+                   "\n#-----------------------------------------------------------")
+        comment = comment.format(self.version)
+
+        if self.use_binaries:
+            url = self._get_binaries_url()
+            cmd = self.install_binaries(url)
+        elif self.use_installer:
+            cmd = self.install_with_pyinstaller(self.check_urls)
+        return "\n".join((comment, cmd))
+
+    @staticmethod
+    def install_with_pyinstaller(check_urls=False):
+        """Return Dockerfile instructions to install FSL using FSL's Python
+        installer. This will install the latest version and only works on
+        CentOS/RHEL.
+        """
+        workdir_cmd = "WORKDIR /opt"
+        url = "https://fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py"
+        if check_urls:
+            check_url(url)
+        cmd = ("curl -sSL --retry 5 -o fslinstaller.py {url}"
+               "\n&& python fslinstaller.py --dest=/opt --quiet"
+               "\n&& . 
/opt/fsl/etc/fslconf/fsl.sh"
+               "\n&& rm -f fslinstaller.py"
+               "".format(url=url))
+        cmd = indent("RUN", cmd)
+
+        path_cmd = ("FSLDIR=/opt/fsl"
+                    "\nPATH=/opt/fsl/bin:$PATH")
+        path_cmd = indent("ENV", path_cmd)
+
+        return "\n".join((workdir_cmd, cmd, path_cmd))
+
+    def _get_binaries_url(self):
+        """Return URL to binaries for requested version."""
+        base = "https://fsl.fmrib.ox.ac.uk/fsldownloads/"
+        if self.version >= LooseVersion('5.0.9'):
+            url = urljoin(base, "fsl-{ver}-centos6_64.tar.gz")
+        else:
+            url = urljoin(base, "oldversions/fsl-{ver}-centos5_64.tar.gz")
+        url = url.format(ver=self.version)
+        if self.check_urls:
+            check_url(url)
+        return url
+
+    def _install_binaries_deps(self):
+        """Return command to install FSL dependencies."""
+        pkgs = {'apt': ("bc dc libfontconfig1 libfreetype6 libgl1-mesa-dev"
+                        " libglu1-mesa-dev libgomp1 libice6"
+                        " libxcursor1 libxft2 libxinerama1 libxrandr2"
+                        " libxrender1 libxt6 file"),
+                'yum': ("bc libGL libGLU libgomp libICE libjpeg libmng"
+                        " libpng12 libSM libX11 libXcursor libXext libXft"
+                        " libXinerama libXrandr libXt file")}
+
+        cmd = "{install}\n&& {clean}".format(**manage_pkgs[self.pkg_manager])
+        return cmd.format(pkgs=pkgs[self.pkg_manager])
+
+    def install_binaries(self, url):
+        """Return Dockerfile instructions to install FSL using binaries hosted
+        on FSL's website.
+        """
+        from neurodocker.generate import _add_to_entrypoint
+
+        cmd = self._install_binaries_deps()
+        cmd += ('\n&& echo "Downloading FSL ..."'
+                '\n&& curl -sSL --retry 5 {}'
+                '\n| tar zx -C /opt'.format(url))
+
+        if self.version >= LooseVersion('5.0.10'):
+            fsl_python = "/opt/fsl/etc/fslconf/fslpython_install.sh"
+            cmd += "\n&& /bin/bash {} -q -f /opt/fsl".format(fsl_python)
+
+        if self.eddy_5011:
+            cmd += self._install_eddy_5011()
+
+        ent_cmds = ["echo Some packages in this Docker container are non-free",
+                    ("echo If you are considering commercial use of this"
+                     " container, please consult the relevant license:"),
+                    "echo https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence",
+                    "source $FSLDIR/etc/fslconf/fsl.sh"]
+        cmd += "\n&& {}".format(_add_to_entrypoint(ent_cmds, with_run=False))
+        cmd = indent("RUN", cmd)
+
+        env_cmd = ("FSLDIR=/opt/fsl"
+                   "\nPATH=/opt/fsl/bin:$PATH")
+        env_cmd = indent("ENV", env_cmd)
+
+        return "\n".join((cmd, env_cmd))
+
+    def _get_eddy_5011_url(self):
+        """Return URL of FSL eddy 5.0.11 pre-release."""
+        # This function should probably be removed once FSL v5.0.11 is released
+        base_url = ("https://fsl.fmrib.ox.ac.uk/fsldownloads/patches/"
+                    "eddy-patch-fsl-5.0.11/centos6/")
+        cuda_versions = {
+            '6.5': 'eddy_cuda6.5',
+            '7.0': 'eddy_cuda7.0',
+            '7.5': 'eddy_cuda7.5',
+            '8.0': 'eddy_cuda8.0',
+        }
+        if self.eddy_5011_cuda is None:
+            filename = "eddy_openmp"
+        else:
+            filename = cuda_versions.get(self.eddy_5011_cuda, None)
+        if filename is None:
+            raise ValueError("Valid CUDA versions are {}"
+                             .format(', '.join(cuda_versions.keys())))
+        return urljoin(base_url, filename)
+
+    def _install_eddy_5011(self):
+        """Return Dockerfile instructions to install FSL eddy v5.0.11
+        pre-release.
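+
+        The returned string is a fragment that install_binaries() appends
+        to its command chain, so it begins with a '&&' continuation.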
+ """ + url = self._get_eddy_5011_url() + + if self.check_urls: + check_url(url) + + cmd = ('\n&& cd /opt/fsl/bin' + '\n&& rm -f eddy_openmp eddy_cuda*' + '\n&& echo "Downloading FSL eddy v5.0.11 pre-release ..."' + '\n&& curl -sSLO --retry 5 {}' + '\n&& chmod +x eddy_*').format(url) + + filename = url.split('/')[-1] + if 'cuda' in filename: + cmd += '\n&& ln -sv {} eddy_cuda'.format(filename) + + return cmd diff --git a/neurodocker/interfaces/interfaces.py b/neurodocker/interfaces/interfaces.py deleted file mode 100644 index 42625074..00000000 --- a/neurodocker/interfaces/interfaces.py +++ /dev/null @@ -1,286 +0,0 @@ -"""""" - -import posixpath - -from neurodocker.interfaces._base import _BaseInterface - - -class _Header(_BaseInterface): - """Create instance of _Header oject.""" - - _name = "_header" - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class AFNI(_BaseInterface): - """Create instance of AFNI object.""" - - _name = 'afni' - _pretty_name = 'AFNI' - - def __init__(self, *args, install_python2=False, install_python3=False, - install_r=False, install_r_pkgs=False, **kwargs): - self.install_python2 = install_python2 - self.install_python3 = install_python3 - self.install_r = install_r - self.install_r_pkgs = install_r_pkgs - super().__init__(self._name, *args, **kwargs) - - if self.install_python2: - self._dependencies.append('python') - if self.install_python3: - self._dependencies.append('python3') - if self.install_r or self.install_r_pkgs: - r = { - 'apt': ['r-base', 'r-base-dev'], - 'yum': ['R-devel'], - } - self._dependencies.extend(r[self._pkg_manager]) - - -class ANTs(_BaseInterface): - """Create instance of ANTs object.""" - - _name = 'ants' - _pretty_name = 'ANTs' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class Convert3D(_BaseInterface): - """Create instance of Convert3D object.""" - - _name = 'convert3d' - _pretty_name = 'Convert3D' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class Dcm2niix(_BaseInterface): - """Create instance of Dcm2niix object.""" - - _name = 'dcm2niix' - _pretty_name = 'dcm2niix' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class FreeSurfer(_BaseInterface): - """Create instance of FreeSurfer object.""" - - _name = 'freesurfer' - _pretty_name = 'FreeSurfer' - - _exclude_paths = ( - 'average/mult-comp-cor', - 'lib/cuda', - 'lib/qt', - 'subjects/V1_average', - 'subjects/bert', - 'subjects/cvs_avg35', - 'subjects/cvs_avg35_inMNI152', - 'subjects/fsaverage3', - 'subjects/fsaverage4', - 'subjects/fsaverage5', - 'subjects/fsaverage6', - 'subjects/fsaverage_sym', - 'trctrain', - ) - - # TODO(kaczmarj): add option to add license file. 
- - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - if hasattr(self, 'exclude_paths'): - if isinstance(self.exclude_paths, str): - self.exclude_paths = self.exclude_paths.split() - elif 'min' in self.version: - self.exclude_paths = tuple() - else: - self.exclude_paths = FreeSurfer._exclude_paths - - self.exclude_paths = tuple( - posixpath.join('freesurfer', path) for path in self.exclude_paths - ) - - -class FSL(_BaseInterface): - """Create instance of FSL object.""" - - _name = 'fsl' - _pretty_name = 'FSL' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class MatlabMCR(_BaseInterface): - """Create instance of MatlabMCR object.""" - - _name = 'matlabmcr' - _pretty_name = "MATLAB MCR" - - _mcr_versions = { - '2018a': '94', - '2017b': '93', - '2017a': '92', - '2016b': '91', - '2016a': '901', - '2015b': '90', - '2015a': '85', - '2014b': '84', - '2014a': '83', - '2013b': '82', - '2013a': '81', - '2012b': '80', - '2012a': '717', - '2010a': '713', - } - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - @property - def mcr_version(self): - try: - return "v{}".format(self._mcr_versions[self.version]) - except KeyError: - raise ValueError( - "Matlab MCR version not known for Matlab version '{}'." - .format(self.version) - ) - - -class MINC(_BaseInterface): - """Create instance of MINC object.""" - - _name = 'minc' - _pretty_name = 'MINC' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class Miniconda(_BaseInterface): - """Create instance of Miniconda object.""" - - _name = 'miniconda' - _pretty_name = 'Miniconda' - - _installed = False - _environments = set() - - # TODO(kaczmarj): use create_env and use_env options. - # TODO(kaczmarj): add method to create environment from file. - # TODO(kaczmarj): add conda_opts and pip_opts - def __init__(self, *args, env_name, conda_install=None, pip_install=None, - preinstalled=False, **kwargs): - self.env_name = env_name - self.conda_install = conda_install - self.pip_install = pip_install - self.preinstalled = preinstalled - - if self.preinstalled: - Miniconda._installed = True - - if not self.env_name: - raise ValueError("env_name is required") - - kwargs.setdefault('version', 'latest') - super().__init__(self._name, *args, **kwargs) - - def render_run(self): - out = super().render_run() - Miniconda._installed = True - Miniconda._environments.add(self.env_name) - return out - - -class MRtrix3(_BaseInterface): - """Create instance of MRtrix3 object.""" - - _name = 'mrtrix3' - _pretty_name = 'MRtrix3' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class NeuroDebian(_BaseInterface): - """Create instance of NeuroDebian object.""" - - _name = 'neurodebian' - _pretty_name = 'NeuroDebian' - - _servers = { - 'australia': 'au', - 'china-tsinghua': 'cn-bj1', - 'china-scitech': 'cn-bj2', - 'china-zhejiang': 'cn-zj', - 'germany-munich': 'de-m', - 'germany-magdeburg': 'de-md', - 'greece': 'gr', - 'japan': 'jp', - 'usa-ca': 'us-ca', - 'usa-nh': 'us-nh', - 'usa-tn': 'us-tn', - } - - def __init__(self, os_codename, server, full=True, **kwargs): - self.os_codename = os_codename - self.server = server - - self._server = NeuroDebian._servers.get(server, None) - if self._server is None: - msg = ( - "Server '{}' not found. 
Choices are " - + ', '.join(NeuroDebian._servers.keys())) - raise ValueError(msg.format(server)) - - self._full = 'full' if full else 'libre' - - self.url = 'http://neuro.debian.net/lists/{os}.{srv}.{full}'.format( - os=self.os_codename, srv=self._server, full=self._full) - - super().__init__( - self._name, version='generic', method='custom', - os_codename=os_codename, server=server, **kwargs) - - -class PETPVC(_BaseInterface): - """Create instance of PETPVC object.""" - - _name = 'petpvc' - _pretty_name = 'PETPVC' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - -class SPM12(_BaseInterface): - """Create instance of SPM12 object.""" - - _name = 'spm12' - _pretty_name = 'SPM12' - - def __init__(self, *args, **kwargs): - super().__init__(self._name, *args, **kwargs) - - matlabmcr_version = self.binaries_url[-9:-4] - self.matlabmcr_obj = MatlabMCR(matlabmcr_version, self.pkg_manager) - self.mcr_path = posixpath.join( - self.matlabmcr_obj.install_path, self.matlabmcr_obj.mcr_version) - - def render_run(self): - return "\n".join( - (self.matlabmcr_obj.render_run(), super().render_run())) - - def render_env(self): - """Return dictionary with rendered keys and values.""" - return {**super().render_env(), **self.matlabmcr_obj.render_env()} diff --git a/neurodocker/interfaces/minc.py b/neurodocker/interfaces/minc.py new file mode 100644 index 00000000..4c4feae1 --- /dev/null +++ b/neurodocker/interfaces/minc.py @@ -0,0 +1,121 @@ +"""Add Dockerfile instructions to install MINC. + +Homepage: http://www.bic.mni.mcgill.ca/ServicesSoftware/MINC +GitHub repo: https://github.com/BIC-MNI/minc-toolkit-v2 +Documentation: https://en.wikibooks.org/wiki/MINC +Installation: https://bic-mni.github.io/ + +Notes +----- +- Latest releases are from https://bic-mni.github.io/ +""" +# Author: Sulantha Mathotaarachchi + +from __future__ import absolute_import, division, print_function + +from neurodocker.utils import check_url, indent, manage_pkgs + +class MINC(object): + """Add Dockerfile instructions to install MINC. + + Parameters + ---------- + version : str + MINC release version. Must be version string. + pkg_manager : {'apt', 'yum'} + Linux package manager. + use_binaries : bool + If true, uses pre-compiled MINC binaries. True by default. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. 
+ """ + VERSION_TARBALLS = { + "1.9.15": "https://dl.dropbox.com/s/40hjzizaqi91373/minc-toolkit-1.9.15-20170529-CentOS_6.9-x86_64.tar.gz", + } + BEAST_URL = { + "1.1": "http://packages.bic.mni.mcgill.ca/tgz/beast-library-1.1.tar.gz", + } + MODELS_URL = { + "09a": "http://www.bic.mni.mcgill.ca/~vfonov/icbm/2009/mni_icbm152_nlin_sym_09a_minc2.zip", + "09c": "http://www.bic.mni.mcgill.ca/~vfonov/icbm/2009/mni_icbm152_nlin_sym_09c_minc2.zip", + } + + def __init__(self, version, pkg_manager, use_binaries=True, check_urls=True): + self.version = version + self.pkg_manager = pkg_manager + self.use_binaries = use_binaries + self.check_urls = check_urls + + self.cmd = self._create_cmd() + + def _create_cmd(self): + """Return full command to install MINC.""" + comment = ("#--------------------\n" + "# Install MINC {}\n" + "#--------------------".format(self.version)) + + if self.use_binaries: + chunks = [comment, self.install_binaries()] + else: + raise ValueError("`use_binaries=True` is the only available " + "option at this time.") + + return "\n".join(chunks) + + def _get_binaries_urls(self, version): + try: + return MINC.VERSION_TARBALLS[version] + except KeyError: + raise ValueError("MINC version not available: {}".format(version)) + + def _get_binaries_dependencies(self): + pkgs = { + 'apt': 'libgl1-mesa-dev libice6 libsm6 libx11-6 libxext6 libxi6 libxmu6 libgomp1 libjpeg62', + 'yum': 'mesa-libGL-devel libICE libSM libX11 libXext libXi libXmu libgomp libjpeg-turbo', + } + return pkgs[self.pkg_manager] + + def _install_binaries_deps(self): + """Install the dependencies for binary installation + """ + cmd = "{install}\n&& {clean}".format(**manage_pkgs[self.pkg_manager]) + return cmd.format(pkgs=self._get_binaries_dependencies()) + + def _get_install_cmd(self, minc_url, beast_url, models_90a_url, models_90c_url, entrypoint_cmd): + cmd = ('\n&& echo " Downloading MINC, BEASTLIB, and MODELS..."' + "\n&& curl -sSL --retry 5 {minc_url}" + "\n| tar zx -C /opt" + "\n&& curl -sSL --retry 5 {beast_url}" + "\n| tar zx -C /opt/minc/share" + "\n&& curl -sSL --retry 5 -o /tmp/mni_90a.zip {models_09a_url}" + "\n&& unzip /tmp/mni_90a.zip -d /opt/minc/share/icbm152_model_09a" + "\n&& curl -sSL --retry 5 -o /tmp/mni_90c.zip {models_09c_url}" + "\n&& unzip /tmp/mni_90c.zip -d /opt/minc/share/icbm152_model_09c" + "\n&& rm -r /tmp/mni_90* " + "\n&& {entrypoint_cmd}".format(minc_url=minc_url, beast_url=beast_url, models_09a_url=models_90a_url, + models_09c_url=models_90c_url, entrypoint_cmd=entrypoint_cmd)) + return cmd + + + def install_binaries(self): + """Return Dockerfile instructions to download and install MINC + binaries. 
+ """ + from neurodocker.generate import _add_to_entrypoint + minc_url = self._get_binaries_urls(self.version) + beast_url = self.BEAST_URL['1.1'] + models_09a_url = self.MODELS_URL['09a'] + models_09c_url = self.MODELS_URL['09c'] + if self.check_urls: + check_url(minc_url) + check_url(beast_url) + check_url(models_09a_url) + check_url(models_09c_url) + + cmd = self._install_binaries_deps() + ent = _add_to_entrypoint("source /opt/minc/minc-toolkit-config.sh", + with_run=False) + cmd += self._get_install_cmd(minc_url, beast_url, models_09a_url, models_09c_url, ent) + cmd = indent("RUN", cmd) + return cmd diff --git a/neurodocker/interfaces/miniconda.py b/neurodocker/interfaces/miniconda.py new file mode 100644 index 00000000..d953c863 --- /dev/null +++ b/neurodocker/interfaces/miniconda.py @@ -0,0 +1,221 @@ +"""Class to add Miniconda and create Conda environment.""" +# Author: Jakub Kaczmarzyk + +# Refer to the jupyter base-notebook Dockerfile for good practices: +# https://github.com/jupyter/docker-stacks/blob/master/base-notebook/Dockerfile + +from __future__ import absolute_import, division, print_function +import logging +import posixpath + +from neurodocker.utils import _indent_pkgs, check_url, indent, is_url + +logger = logging.getLogger(__name__) + + +class Miniconda(object): + """Add Dockerfile instructions to install Miniconda and create a new + environment with packages installed with conda and pip. + + Parameters + ---------- + env_name : str + Name to give this environment. + pkg_manager : {'apt', 'yum'} + Linux package manager. + yaml_file : path-like or url-like + Conda environment specification file. + conda_install : str or list or tuple + Packages to install using `conda`, including Python. Follow the syntax + for `conda install`. For example, the input ['numpy=1.12', 'scipy'] is + interpreted as `conda install numpy=1.12 scipy`. The conda-forge + channel is added by default. + pip_install : str or list or tuple + Packages to install using `pip`. Follow the syntax for `pip install`. + For example, the input "https://github.com/nipy/nipype/" is interpreted + as `pip install https://github.com/nipy/nipype/`. + conda_opts : str + Command-line options to pass to `conda create`. E.g., "-c vida-nyu" + pip_opts : str + Command-line options to pass to `pip install`. + activate : bool + If true, activate the environment in the container's entrpoint. False + by default. + miniconda_version : str + Version of Miniconda to install. Defaults to 'latest'. This does not + correspond to Python version. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. + + Notes + ----- + Miniconda is installed once by the root user in /opt/conda. Separate conda + environments can be created by non-root users. 
+ """ + + created_envs = [] + INSTALLED = False + INSTALL_PATH = "/opt/conda" + + def __init__(self, env_name, pkg_manager, yaml_file=None, + conda_install=None, pip_install=None, conda_opts=None, + pip_opts=None, activate=False, miniconda_version='latest', + check_urls=True): + self.env_name = env_name + self.yaml_file = yaml_file + self.pkg_manager = pkg_manager + self.conda_install = conda_install + self.pip_install = pip_install + self.conda_opts = conda_opts + self.pip_opts = pip_opts + self.activate = activate + self.miniconda_version = miniconda_version + self.check_urls = check_urls + + self._check_args() + self.cmd = self._create_cmd() + + def _check_args(self): + if self.yaml_file and (self.conda_install is not None + or self.pip_install is not None): + raise ValueError("Packages cannot be installed while creating an" + " environment from a yaml file.") + + def _create_cmd(self): + cmds = [] + comment = ("#------------------" + "\n# Install Miniconda" + "\n#------------------") + if not Miniconda.INSTALLED: + cmds.append(comment) + cmds.append(self.install_miniconda()) + cmds.append('') + + create = self.env_name not in Miniconda.created_envs + _comment_base = "Create" if create else "Update" + comment = ("#-------------------------" + "\n# {} conda environment" + "\n#-------------------------").format(_comment_base) + cmds.append(comment) + if self.yaml_file is not None: + cmds.append(self.create_from_yaml()) + else: + cmds.append(self.conda_and_pip_install(create=create)) + + return "\n".join(cmds) + + def _get_source_activate_cmd(self): + from neurodocker.generate import _add_to_entrypoint + + cmd = "source activate {}".format(self.env_name) + return "\n&& " + _add_to_entrypoint(cmd, with_run=False) + + def install_miniconda(self): + """Return Dockerfile instructions to install Miniconda.""" + + install_url = ("https://repo.continuum.io/miniconda/" + "Miniconda3-{}-Linux-x86_64.sh" + "".format(self.miniconda_version)) + if self.check_urls: + check_url(install_url) + + env_cmd = ("CONDA_DIR={0}" + "\nPATH={0}/bin:$PATH".format(Miniconda.INSTALL_PATH)) + env_cmd = indent("ENV", env_cmd) + + cmds = ['echo "Downloading Miniconda installer ..."', + "miniconda_installer=/tmp/miniconda.sh", + "curl -sSL --retry 5 -o $miniconda_installer {url}", + "/bin/bash $miniconda_installer -b -p $CONDA_DIR", + "rm -f $miniconda_installer", + "conda config --system --prepend channels conda-forge", + "conda config --system --set auto_update_conda false", + "conda config --system --set show_channel_urls true", + "conda clean -tipsy && sync", + ] + if self.miniconda_version == 'latest': + cmds.insert(-1, 'conda update -n base conda') + cmd = indent("RUN", '\n&& '.join(cmds).format(url=install_url)) + + Miniconda.INSTALLED = True + + return "\n".join((env_cmd, cmd)) + + def create_from_yaml(self): + """Return Dockerfile instructions to create conda environment from + a YAML file. 
+ """ + tmp_yml = "/tmp/environment.yml" + cmd = ("conda env create -q --name {n} --file {tmp}" + "\n&& rm -f {tmp}") + + if self.activate: + cmd += self._get_source_activate_cmd() + + if is_url(self.yaml_file): + get_file = "curl -sSL {f} > {tmp}" + cmd = get_file + "\n&& " + cmd + if self.check_urls: + check_url(self.yaml_file) + cmd = indent("RUN", cmd) + else: + get_file = 'COPY ["{f}", "{tmp}"]' + cmd = indent("RUN", cmd) + cmd = "\n".join((get_file, cmd)) + + cmd = cmd.format(n=self.env_name, f=self.yaml_file, tmp=tmp_yml) + return cmd + + def conda_and_pip_install(self, create=True): + """Return Dockerfile instructions to create conda environment with + desired version of Python and desired conda and pip packages. + """ + conda_cmd = "conda create" if create else "conda install" + cmd = "{} -y -q --name {}".format(conda_cmd, self.env_name) + + if self.conda_opts: + cmd = "{} {}".format(cmd, self.conda_opts) + + if self.conda_install: + if isinstance(self.conda_install, str): + self.conda_install = self.conda_install.split() + pkgs = _indent_pkgs(len(cmd.split('\n')[-1]), self.conda_install) + cmd += pkgs + # cmd += "\n\t{}".format(self.conda_install) + + cmd += "\n&& sync && conda clean -tipsy && sync" + + if not self.conda_install and not create: + cmd = "" + if self.pip_install: + if self.conda_install or create: + cmd += "\n&& " + cmd += self._pip_install() + + if self.activate: + cmd += self._get_source_activate_cmd() + + cmd = indent("RUN", cmd) + self.created_envs.append(self.env_name) + return cmd + + def _pip_install(self): + """Return Dockerfile instruction to install desired pip packages.""" + if isinstance(self.pip_install, str): + self.pip_install = self.pip_install.split() + + cmd = ('/bin/bash -c "source activate {}' + '\n && pip install -q --no-cache-dir').format(self.env_name) + + if self.pip_opts: + cmd = "{} {}".format(cmd, self.pip_opts) + + pkgs = _indent_pkgs(len(cmd.split('\n')[-1]), self.pip_install) + + return '{}{}"\n&& sync'.format(cmd, pkgs) + + @classmethod + def clear_memory(cls): + cls.INSTALLED = False + cls.created_envs = [] diff --git a/neurodocker/interfaces/mrtrix.py b/neurodocker/interfaces/mrtrix.py new file mode 100644 index 00000000..933932a8 --- /dev/null +++ b/neurodocker/interfaces/mrtrix.py @@ -0,0 +1,109 @@ +"""Add Dockerfile instructions to install MRtrix. + +MRtrix GitHub repository: https://github.com/MRtrix3/mrtrix3 + +MRtrix recommends building from source. Binaries for MRtrix3 were compiled on +CentOS 6.6 and uploaded to Dropbox. This file uses those binaries if the user +wants to use pre-compiled binaries. +""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function + +from neurodocker.utils import check_url, indent, manage_pkgs + + +class MRtrix3(object): + """Add Dockerfile instructions to install MRtrix3. Pre-compiled binaries + can be downloaded, or MRtrix can be built from source. The pre-compiled + binaries were compiled on a CentOS 6.6 Docker image. + + Parameters + ---------- + pkg_manager : {'apt', 'yum'} + Linux package manager. + use_binaries : bool, str + If true, uses pre-compiled MRtrix3 binaries. If false, attempts to + build from source (with -nogui option). + git_hash : str + If this is specified and use_binaries is false, checkout to this commit + before building. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. 
+ """ + + def __init__(self, pkg_manager, use_binaries=True, git_hash=None, + check_urls=True): + self.pkg_manager = pkg_manager + self.use_binaries = use_binaries + self.git_hash = git_hash + self.check_urls = check_urls + + if not self.use_binaries and self.pkg_manager == "yum": + raise ValueError("Building MRtrix3 on CentOS/Fedora is not " + "supported yet.") + + self.cmd = self._create_cmd() + + def _create_cmd(self): + """Return full command to install MRtrix.""" + comment = ("#----------------\n" + "# Install MRtrix3\n" + "#----------------") + + if self.use_binaries: + chunks = [comment, self.install_binaries()] + else: + chunks = [comment, self.build_from_source()] + + return "\n".join(chunks) + + def install_binaries(self): + """Return command to download and install MRtrix3 binaries.""" + url = ("https://dl.dropbox.com/s/2g008aaaeht3m45/" + "mrtrix3-Linux-centos6.tar.gz") + + if self.check_urls: + check_url(url) + + cmd = ('echo "Downloading MRtrix3 ..."' + '\n&& curl -sSL --retry 5 {}' + '\n| tar zx -C /opt'.format(url)) + cmd = indent("RUN", cmd) + env_cmd = ("ENV PATH=/opt/mrtrix3/bin:$PATH") + + return "\n".join((cmd, env_cmd)) + + def build_from_source(self): + """Return Dockerfile instructions to build MRtrix from source. Checkout + to git_hash if specified. + """ + # QUESTION: how to download eigen3-devel? Have to add EPEL. + pkgs = {'apt': 'g++ git libeigen3-dev zlib1g-dev', + 'yum': 'eigen3-devel gcc-c++ git zlib-devel'} + + if self.git_hash == None: + checkout = "" + else: + checkout = ("\n&& git checkout {}".format(self.git_hash)) + + workdir_cmd = "WORKDIR /opt" + cmd = ("deps='{pkgs}'" + "\n&& {install}" + "\n&& {clean}" + "\n&& git clone https://github.com/MRtrix3/mrtrix3.git" + "\n&& cd mrtrix3" + "{checkout}" + "\n&& ./configure -nogui" + "\n&& ./build" + "\n&& rm -rf tmp/* /tmp/*" + "\n&& {remove}" + "".format(pkgs=pkgs[self.pkg_manager], checkout=checkout, + **manage_pkgs[self.pkg_manager])) + cmd = cmd.format(pkgs='$deps') + cmd = indent("RUN", cmd) + + env_cmd = ("ENV PATH=/opt/mrtrix3/bin:$PATH") + + return "\n".join((workdir_cmd, cmd, env_cmd)) diff --git a/neurodocker/interfaces/neurodebian.py b/neurodocker/interfaces/neurodebian.py new file mode 100644 index 00000000..4768b3eb --- /dev/null +++ b/neurodocker/interfaces/neurodebian.py @@ -0,0 +1,113 @@ +"""Add Dockerfile instructions to add NeuroDebian repository.""" +# Author: Jakub Kaczmarzyk + +from neurodocker.utils import check_url, indent, manage_pkgs + + +class NeuroDebian(object): + """Object to add NeuroDebian repository. + + Parameters + ---------- + os_codename : str + Operating system codename (e.g., 'zesty', 'jessie'). + download_server : {'australia', 'china-tsinghua', 'china-scitech', + 'china-zhejiang', 'germany-munich', 'germany-magdeburg', + 'greece', 'japan', 'usa-ca', 'usa-nh', 'usa-tn'} + The server to use to download NeuroDebian packages. Choose the one + closest to you. + full : bool + If true (default), use the full NeuroDebian sources. If false, use the + libre sources. + pkgs : str or list or tuple + Packages to install from NeuroDebian. + pkg_manager : {'apt'} + Linux package manager. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. 
+ """ + + SERVERS = {'australia': 'au', + 'china-tsinghua': 'cn-bj1', + 'china-scitech': 'cn-bj2', + 'china-zhejiang': 'cn-zj', + 'germany-munich': 'de-m', + 'germany-magdeburg': 'de-md', + 'greece': 'gr', + 'japan': 'jp', + 'usa-ca': 'us-ca', + 'usa-nh': 'us-nh', + 'usa-tn': 'us-tn',} + + def __init__(self, os_codename, download_server, full=True, pkgs=None, + pkg_manager='apt', check_urls=True): + self.pkgs = pkgs + self.check_urls = check_urls + + download_server = self._get_server(download_server) + suffix = "full" if full else "libre" + self.url = self._create_url(os_codename, download_server, suffix) + if self.check_urls: + check_url(self.url) + + self.cmd = self._create_cmd() + + def _create_cmd(self): + comment = ("#--------------------------------------------------" + "\n# Add NeuroDebian repository" + "\n# Please note that some packages downloaded through" + "\n# NeuroDebian may have restrictive licenses." + "\n#--------------------------------------------------") + + chunks = [comment, self._add_neurodebian()] + if self.pkgs is not None and self.pkgs: + chunks.append(self._install_pkgs()) + return "\n".join(chunks) + + @classmethod + def _get_server(cls, download_server): + try: + return cls.SERVERS[download_server] + except KeyError: + raise ValueError("Invalid download server: {}" + "".format(download_server)) + + @staticmethod + def _create_url(os_codename, download_server, suffix): + """Return neurodebian URL.""" + try: + from urllib.parse import urljoin # Python 3 + except ImportError: + from urlparse import urljoin # Python 2 + + base = "http://neuro.debian.net/lists/" + rel = "{0}.{1}.{2}".format(os_codename, download_server, suffix) + return urljoin(base, rel) + + def _add_neurodebian(self): + """Return instruction to add NeuroDebian repository.""" + pkgs = "dirmngr gnupg" + cmd = ("{install}" + "\n&& {clean}" + "\n&& curl -sSL {url}" + "\n> /etc/apt/sources.list.d/neurodebian.sources.list" + "\n&& curl -sSL https://dl.dropbox.com/s/zxs209o955q6vkg/neurodebian.gpg" + "\n| apt-key add -" + # Syntax from + # https://github.com/poldracklab/fmriprep/blob/master/Dockerfile#L21 + "\n&& (apt-key adv --refresh-keys --keyserver" + " hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9 || true)" + "\n&& apt-get update" + "".format(url=self.url, **manage_pkgs['apt']).format(pkgs=pkgs)) + return indent("RUN", cmd) + + def _install_pkgs(self): + """Return instruction to install NeuroDebian packages.""" + if isinstance(self.pkgs, (list, tuple)): + self.pkgs = " ".join(self.pkgs) + + cmd = ("{install}\n&& {clean}".format(**manage_pkgs['apt']) + .format(pkgs=self.pkgs)) + comment = "\n# Install NeuroDebian packages" + return "\n".join((comment, indent("RUN", cmd))) diff --git a/neurodocker/interfaces/petpvc.py b/neurodocker/interfaces/petpvc.py new file mode 100644 index 00000000..85a5a080 --- /dev/null +++ b/neurodocker/interfaces/petpvc.py @@ -0,0 +1,90 @@ +"""Add Dockerfile instructions to install PETPVC. + +Homepage: https://github.com/UCL/PETPVC +GitHub repo: https://github.com/UCL/PETPVC +Documentation: https://github.com/UCL/PETPVC +Installation: https://github.com/UCL/PETPVC + +Notes +----- +- Latest releases are from https://github.com/UCL/PETPVC +""" +# Author: Sulantha Mathotaarachchi + +from __future__ import absolute_import, division, print_function + +from neurodocker.utils import check_url, indent, manage_pkgs + +class PETPVC(object): + """Add Dockerfile instructions to install PETPVC. + + Parameters + ---------- + version : str + PETPVC release version. 
Must be version string. + pkg_manager : {'apt', 'yum'} + Linux package manager. + use_binaries : bool + If true, uses pre-compiled PETPVC binaries. True by default. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. + """ + + VERSION_TARBALLS = { + "1.2.0-b": "https://github.com/UCL/PETPVC/releases/download/v1.2.0-b/PETPVC-1.2.0-b-Linux.tar.gz", + "1.2.0-a": "https://github.com/UCL/PETPVC/releases/download/v1.2.0-a/PETPVC-1.2.0-a-Linux.tar.gz", + "1.1.0": "https://github.com/UCL/PETPVC/releases/download/v1.1.0/PETPVC-1.1.0-Linux.tar.gz", + "1.0.0": "https://github.com/UCL/PETPVC/releases/download/v1.0.0/PETPVC-1.0.0-Linux.tar.gz", + } + + def __init__(self, version, pkg_manager, use_binaries=True, check_urls=True): + self.version = version + self.pkg_manager = pkg_manager + self.use_binaries = use_binaries + self.check_urls = check_urls + + self.cmd = self._create_cmd() + + def _create_cmd(self): + """Return full command to install PETPVC.""" + comment = ("#--------------------\n" + "# Install PETPVC {}\n" + "#--------------------".format(self.version)) + + if self.use_binaries: + chunks = [comment, self.install_binaries()] + else: + raise ValueError("`use_binaries=True` is the only available " + "option at this time.") + + return "\n".join(chunks) + + def _get_binaries_urls(self, version): + try: + return PETPVC.VERSION_TARBALLS[version] + except KeyError: + raise ValueError("PETPVC version not available: {}".format(version)) + + def _get_install_cmd(self, petpvc_url): + cmd = ('echo "Downloading PETPVC..."' + "\n&& mkdir /opt/petpvc" + "\n&& curl --retry 5 -sSL {petpvc_url}" + "| tar zx --strip-components=1 -C /opt/petpvc" + .format(petpvc_url=petpvc_url)) + return cmd + + def install_binaries(self): + """Return Dockerfile instructions to download and install PETPVC + binaries. + """ + petpvc_url = self._get_binaries_urls(self.version) + if self.check_urls: + check_url(petpvc_url) + + cmd = self._get_install_cmd(petpvc_url) + cmd = indent("RUN", cmd) + + env_cmd = ("ENV PATH=/opt/petpvc/bin:$PATH") + + return "\n".join((cmd, env_cmd)) diff --git a/neurodocker/interfaces/spm.py b/neurodocker/interfaces/spm.py new file mode 100644 index 00000000..7492dc70 --- /dev/null +++ b/neurodocker/interfaces/spm.py @@ -0,0 +1,162 @@ +"""Add Dockerfile instructions to install SPM. + +Project website: http://www.fil.ion.ucl.ac.uk/spm/ + +This script installs the standalone SPM, which requires MATLAB Compiler Runtime +but does not require a MATLAB license. +""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function +from distutils.version import LooseVersion +import logging +import posixpath + +try: + from urllib.parse import urljoin # Python 3 +except ImportError: + from urlparse import urljoin # Python 2 + +from neurodocker.utils import check_url, indent, manage_pkgs + +logger = logging.getLogger(__name__) + + +class SPM(object): + """Add Dockerfile instructions to install SPM. For now, only SPM12 and + MATLAB R2017a are supported. + + Inspired by the Dockerfile at https://hub.docker.com/r/nipype/workshops/ + `docker pull nipype/workshops:latest-complete` + + Parameters + ---------- + version : {12} + SPM version. + matlab_version : str + MATLAB version. For example, 'R2017a'. + pkg_manager : {'apt', 'yum'} + Linux package manager. + check_urls : bool + If true, raise error if a URL used by this class responds with an error + code. 
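# A minimal sketch of the SPM interface described above (only SPM12 with
# MATLAB R2017a is accepted for now), assuming the usual re-export from
# neurodocker.interfaces; check_urls=False skips the MCR and SPM URL checks.
from neurodocker.interfaces import SPM
spm = SPM(version=12, matlab_version='R2017a', pkg_manager='apt',
          check_urls=False)
print(spm.cmd)  # MCR install block, then the standalone SPM12 install + ENV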
+ + Notes + ----- + Instructions to install MATLAB Compiler Runtime can be found at + https://www.mathworks.com/help/compiler/install-the-matlab-runtime.html. + """ + + MCR_DEST = "/opt/mcr" + MCR_VERSIONS = {'R2017a': 'v92',} + + def __init__(self, version, matlab_version, pkg_manager, check_urls=True): + self.version = str(version) + self.matlab_version = LooseVersion(matlab_version) + self.pkg_manager = pkg_manager + self.check_urls = check_urls + + if self.version != '12': + raise ValueError("Only SPM12 is supported (for now).") + if self.matlab_version != "R2017a": + raise ValueError("Only MATLAB R2017a is supported (for now).") + + self.cmd = self._create_cmd() + + def _create_cmd(self): + """Return full command to install MCR and standalone SPM.""" + comment = ("#----------------------\n" + "# Install MCR and SPM{}\n" + "#----------------------".format(self.version)) + chunks = [comment, self.install_mcr(), '', self.install_spm()] + return "\n".join(chunks) + + def _install_libs(self): + """Return Dockerfile instructions to install libxext6 and libxt6. + Without these libraries, SPM encounters segmentation fault.""" + libs = {'apt': 'libxext6 libxt6', + 'yum': 'libXext.x86_64 libXt.x86_64'} + cmd = ("{install}" + "\n&& {clean}").format(**manage_pkgs[self.pkg_manager]) + return cmd.format(pkgs=libs[self.pkg_manager]) + + def _get_mcr_url(self): + base = 'https://www.mathworks.com/supportfiles/' + if self.matlab_version > LooseVersion("R2013a"): + rel = ('downloads/{ver}/deployment_files/{ver}/installers/' + 'glnxa64/MCR_{ver}_glnxa64_installer.zip') + else: + rel = ('MCR_Runtime/{ver}/MCR_{ver}_glnxa64_installer.zip') + url = urljoin(base, rel).format(ver=self.matlab_version) + if self.check_urls: + check_url(url) + return url + + def install_mcr(self): + """Return Dockerfile instructions to install MATLAB Compiler Runtime.""" + url = self._get_mcr_url() + comment = "# Install MATLAB Compiler Runtime" + cmd = self._install_libs() + cmd += ('\n&& echo "Downloading MATLAB Compiler Runtime ..."' + "\n&& curl -sSL --retry 5 -o /tmp/mcr.zip {url}" + "\n&& unzip -q /tmp/mcr.zip -d /tmp/mcrtmp" + "\n&& /tmp/mcrtmp/install -destinationFolder {dest}" + " -mode silent -agreeToLicense yes" + "\n&& rm -rf /tmp/*".format(url=url, dest=SPM.MCR_DEST)) + cmd = indent("RUN", cmd) + return '\n'.join((comment, cmd)) + + def _get_spm_url(self): + url = ("http://www.fil.ion.ucl.ac.uk/spm/download/restricted/" + "utopia/dev/spm{spm}_latest_Linux_{matlab}.zip" + "".format(spm=self.version, matlab=self.matlab_version)) + if self.check_urls: + check_url(url) + return url + + @staticmethod + def _get_spm_env_cmd(mcr_path, spm_cmd): + matlabcmd = posixpath.join(mcr_path, 'toolbox', 'matlab') + spmmcrcmd = spm_cmd + ' script' + + ld_lib_path = '/usr/lib/x86_64-linux-gnu' + for relpath in ['runtime/glnxa64', 'bin/glnxa64', 'sys/os/glnxa64']: + ld_lib_path += ":{}".format(posixpath.join(mcr_path, relpath)) + ld_lib_path += ":$LD_LIBRARY_PATH" + + env = ("MATLABCMD={}" + # '\nSPMMCRCMD="{}"' + "\nFORCE_SPMMCR=1" + '\nLD_LIBRARY_PATH={}' + "".format(matlabcmd, ld_lib_path)) + return indent("ENV", env) + + def install_spm(self): + """Return Dockerfile instructions to install standalone SPM.""" + from neurodocker.generate import _add_to_entrypoint + + url = self._get_spm_url() + + mcr_path = posixpath.join(SPM.MCR_DEST, + SPM.MCR_VERSIONS[str(self.matlab_version)], + '') + + spm_cmd = '/opt/spm{0}/run_spm{0}.sh {1}'.format(self.version, mcr_path) + spmmcrcmd = 'export SPMMCRCMD="{} script"'.format(spm_cmd) + 
entrypoint_cmd = _add_to_entrypoint(spmmcrcmd, with_run=False) + + comment = "# Install standalone SPM" + cmd = ('echo "Downloading standalone SPM ..."' + "\n&& curl -sSL --retry 5 -o spm.zip {url}" + "\n&& unzip -q spm.zip -d /opt" + "\n&& chmod -R 777 /opt/spm*" + "\n&& rm -rf spm.zip" + "\n&& {spm_cmd} quit" + "\n&& {entrypoint_cmd}" + "".format(url=url, spm_cmd=spm_cmd, + entrypoint_cmd=entrypoint_cmd)) + cmd = indent("RUN", cmd) + + env_cmd = self._get_spm_env_cmd(mcr_path, spm_cmd) + + return '\n'.join((comment, cmd, env_cmd)) diff --git a/neurodocker/interfaces/tests/memory.py b/neurodocker/interfaces/tests/memory.py new file mode 100644 index 00000000..8db1ba64 --- /dev/null +++ b/neurodocker/interfaces/tests/memory.py @@ -0,0 +1,131 @@ +"""Utilities to compare files on Dropbox with local files. + +There should be a mapping between paths to Dockerfiles on Dropbox and Docker +images on DockerHub. + +Implementation +-------------- +- Store generated Dockerfiles on Dropbox. +- Compare hash of generated Dockerfile with hash of file on Dropbox. + - Commented lines and empty lines are removed from the Dockerfiles before + computing hash values. +- If hashes do not match: + - Replace existing file with generated file. + - Build Docker image. +- If hashes match: + - Pull Docker image. +""" +from __future__ import (absolute_import, division, print_function, + unicode_literals) + +import logging +import os + +logger = logging.getLogger(__name__) + + +class Dropbox(object): + """Object to interact with the Dropbox API.""" + def __init__(self, access_token): + import dropbox + self.client = dropbox.Dropbox(access_token) + + def download(self, path, return_metadata=False, **kwargs): + """Return metadata and bytes of file. If file does not exist, return + None. + + Copied parts from: + https://github.com/dropbox/dropbox-sdk-python/blob/master/example/updown.py#L147 + """ + import dropbox + + try: + metadata, response = self.client.files_download(path, **kwargs) + content = response.content + response.close() + if return_metadata: + return metadata, content + else: + return content + except dropbox.exceptions.ApiError as err: + # Raise the error if it is something other than file not existing. + try: + if err.error.get_path().is_not_found(): + return None + except Exception: + raise + + def upload(self, bytestring, path, overwrite=False, **kwargs): + """Save `bytestring` to `path` on Dropbox.""" + import dropbox + + if overwrite: + mode = dropbox.files.WriteMode.overwrite + else: + mode = dropbox.files.WriteMode.add + self.client.files_upload(bytestring, path, mode=mode, **kwargs) + + + +def _prune_dockerfile(string, comment_char="#"): + """Remove comments, empty lines, and last layer (serialize to JSON).""" + string = string.strip() # trim white space on both ends. + json_removed = '\n\n'.join(string.split('\n\n')[:-1]) + return '\n'.join(row for row in json_removed.split('\n') + if not row.startswith(comment_char) and row) + + +def _get_hash(bytestring): + """Get sha256 hash of `bytestring`.""" + import hashlib + return hashlib.sha256(bytestring).hexdigest() + + +def _dockerfiles_equivalent(df_a, df_b): + """Return True if unicode strings `df_a` and `df_b` are equivalent. Does + not consider comments or empty lines. 
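# A worked sketch of the comparison implemented below, with made-up
# Dockerfile strings: the final '\n\n'-separated chunk and all comment lines
# are pruned before hashing, so these two strings compare as equivalent.
df_x = 'FROM debian:stretch\n\n# note\nRUN echo hello\n\nRUN echo layer-A'
df_y = 'FROM debian:stretch\n\nRUN echo hello\n\nRUN echo layer-B'
assert _dockerfiles_equivalent(df_x, df_y)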
+ """ + df_a_clean = _prune_dockerfile(df_a) + hash_a = _get_hash(df_a_clean.encode()) + + df_b_clean = _prune_dockerfile(df_b) + hash_b = _get_hash(df_b_clean.encode()) + + print(df_a_clean) + print(df_b_clean) + + return hash_a == hash_b + + +def should_build_image(local_df, remote_path, remote_object): + """Return True if image should be built. Return False if image should be + pulled. + + Parameters + ---------- + local_df : str + Unicode string representation of locally generated Dockerfile. + remote_path : path-like + Path on remote to the Dockerfile. + remote_object : custom + Object to interact with the remote (e.g., Dropbox). This object must + (1) implement a `download` method that takes the `path` to the file on + remote, returns the bytes of the file, and returns None if the file + does not exist; and (2) implement an `upload` method that takes a + bytestring to upload, path on remote, and option to overwrite. + """ + logger.info("Attempting to download Dockerfile ...") + remote_df_bytes = remote_object.download(remote_path) + + if remote_df_bytes is None: + logger.info("File not found on remote. Uploading Dockerfile.") + remote_object.upload(local_df.encode(), remote_path) + return True + else: + if _dockerfiles_equivalent(local_df, remote_df_bytes.decode('utf-8')): + logger.info("Files are the same. Image should be pulled.") + return False + else: + logger.info("Files are different. Updating remote Dockerfile.") + remote_object.upload(local_df.encode(), remote_path, overwrite=True) + return True diff --git a/neurodocker/interfaces/tests/test_afni.py b/neurodocker/interfaces/tests/test_afni.py index 7936d8e4..cccdce7d 100644 --- a/neurodocker/interfaces/tests/test_afni.py +++ b/neurodocker/interfaces/tests/test_afni.py @@ -1,41 +1,38 @@ """Tests for neurodocker.interfaces.AFNI""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function import pytest +from neurodocker import DockerContainer, Dockerfile from neurodocker.interfaces import AFNI from neurodocker.interfaces.tests import utils class TestAFNI(object): + """Tests for AFNI class.""" - def test_docker(self): + def test_build_image_afni_latest_binaries_stretch(self): """Install latest AFNI binaries on Debian stretch.""" - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'debian:stretch'), - ('afni', {'version': 'latest', 'method': 'binaries'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_afni.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://debian:stretch'), - ('afni', {'version': 'latest', 'method': 'binaries'}), - ('user', 'neuro'), - ], - } - bash_test_file = "test_afni.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + specs = {'pkg_manager': 'apt', + 'check_urls': False, + 'instructions': [ + ('base', 'debian:stretch'), + ('afni', {'version': 'latest', 'use_binaries': True}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['afni-latest_stretch'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_afni.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) def test_invalid_binaries(self): with pytest.raises(ValueError): - AFNI(version='fakeversion', pkg_manager='apt') + 
AFNI(version='fakeversion', pkg_manager='apt', check_urls=False) diff --git a/neurodocker/interfaces/tests/test_afni.sh b/neurodocker/interfaces/tests/test_afni.sh index 399b36ea..e41adffc 100644 --- a/neurodocker/interfaces/tests/test_afni.sh +++ b/neurodocker/interfaces/tests/test_afni.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x 3dSkullStrip -help - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_ants.py b/neurodocker/interfaces/tests/test_ants.py index 649a7897..310a935e 100644 --- a/neurodocker/interfaces/tests/test_ants.py +++ b/neurodocker/interfaces/tests/test_ants.py @@ -1,40 +1,54 @@ """Tests for neurodocker.interfaces.ANTs""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function import pytest +from neurodocker import DockerContainer, Dockerfile from neurodocker.interfaces import ANTs from neurodocker.interfaces.tests import utils class TestANTs(object): + """Tests for ANTs class.""" + + def test_build_image_ants_220_binaries_centos7(self): + """Install ANTs 2.2.0 binaries on CentOS 7.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('ants', {'version': '2.2.0', 'use_binaries': True}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['ants-2.0.0_stretch'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_ants.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + - def test_docker(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'centos:7'), - ('ants', {'version': '2.2.0', 'method': 'binaries'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_ants.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'docker://centos:7'), - ('ants', {'version': '2.2.0'}), - ('user', 'neuro'), - ] - } - bash_test_file = "test_ants.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) def test_invalid_binaries(self): with pytest.raises(ValueError): ANTs(version='fakeversion', pkg_manager='apt', check_urls=False) + + def test_build_from_source_github(self): + # TODO: expand on tests for building ANTs from source. It probably + # will not be possible to build ANTs in Travis because of the 50 min + # time limit. It takes about 45 minutes to compile ANTs. 
+ + ants = ANTs(version='latest', pkg_manager='apt', use_binaries=False) + assert "git checkout" not in ants.cmd + + ants = ANTs(version='2.2.0', pkg_manager='yum', use_binaries=False) + assert ants.cmd + + ants = ANTs(version='arbitrary', pkg_manager='apt', use_binaries=False, + git_hash='12345') + assert 'git checkout 12345' in ants.cmd diff --git a/neurodocker/interfaces/tests/test_ants.sh b/neurodocker/interfaces/tests/test_ants.sh index 384573be..422a5702 100644 --- a/neurodocker/interfaces/tests/test_ants.sh +++ b/neurodocker/interfaces/tests/test_ants.sh @@ -1,8 +1,7 @@ #!/usr/bin/env bash -set -ex +set -e +set -x Atropos --help antsRegistration --version - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_convert3d.py b/neurodocker/interfaces/tests/test_convert3d.py index 7b83525e..cb49bf14 100644 --- a/neurodocker/interfaces/tests/test_convert3d.py +++ b/neurodocker/interfaces/tests/test_convert3d.py @@ -1,34 +1,33 @@ """Tests for neurodocker.interfaces.Convert3D""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import Convert3D from neurodocker.interfaces.tests import utils class TestConvert3D(object): + """Tests for Convert3D class.""" + + def test_build_image_convert3d_100_binaries_zesty(self): + """Install Convert3D binaries on Ubuntu Zesty.""" + specs = {'pkg_manager': 'apt', + 'check_urls': True, + 'instructions': [ + ('base', 'ubuntu:zesty'), + ('c3d', {'version': '1.0.0'}), + ('user', 'neuro'), + ]} + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['convert3d_zesty'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_convert3d.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) - def test_docker(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:18.04'), - ('convert3d', {'version': '1.0.0'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_convert3d.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://ubuntu:16.04'), - ('convert3d', {'version': '1.0.0'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_convert3d.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) diff --git a/neurodocker/interfaces/tests/test_convert3d.sh b/neurodocker/interfaces/tests/test_convert3d.sh index cc6caeb1..430a7ca9 100644 --- a/neurodocker/interfaces/tests/test_convert3d.sh +++ b/neurodocker/interfaces/tests/test_convert3d.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x c3d -h - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_dcm2niix.py b/neurodocker/interfaces/tests/test_dcm2niix.py index 05a0169c..b39db3f8 100644 --- a/neurodocker/interfaces/tests/test_dcm2niix.py +++ b/neurodocker/interfaces/tests/test_dcm2niix.py @@ -1,34 +1,31 @@ -"""Tests for neurodocker.interfaces.Dcm2niix""" +"""Tests for neurodocker.interfaces.dcm2niix""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces.tests import utils class TestDcm2niix(object): + """Tests for Dcm2niix class.""" + + def 
test_build_image_dcm2niix_master_source_centos7(self): + """Install dcm2niix from source on CentOS 7.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('dcm2niix', {'version': 'master'}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['dcm2niix-master_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_dcm2niix.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) - def test_docker(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'centos:7'), - ('dcm2niix', {'version': 'master', 'method': 'source'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_dcm2niix.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'docker://centos:7'), - ('dcm2niix', {'version': 'master', 'method': 'source'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_dcm2niix.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) diff --git a/neurodocker/interfaces/tests/test_dcm2niix.sh b/neurodocker/interfaces/tests/test_dcm2niix.sh index d04d3633..a4676ca5 100644 --- a/neurodocker/interfaces/tests/test_dcm2niix.sh +++ b/neurodocker/interfaces/tests/test_dcm2niix.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x dcm2niix -h - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_freesurfer.py b/neurodocker/interfaces/tests/test_freesurfer.py index 6a87c594..2701d1af 100644 --- a/neurodocker/interfaces/tests/test_freesurfer.py +++ b/neurodocker/interfaces/tests/test_freesurfer.py @@ -1,33 +1,47 @@ """Tests for neurodocker.interfaces.FreeSurfer""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import FreeSurfer from neurodocker.interfaces.tests import utils class TestFreeSurfer(object): + """Tests for FreeSurfer class.""" + + def test_build_image_freesurfer_600_min_binaries_xenial(self): + """Install minimized FreeSurfer binaries on Ubuntu Xenial.""" + specs = {'pkg_manager': 'apt', + 'check_urls': True, + 'instructions': [ + ('base', 'ubuntu:xenial'), + ('freesurfer', {'version': '6.0.0', 'use_binaries': True, + 'min': True}), + ('user', 'neuro'), + ]} + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['freesurfer-min_zesty'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_freesurfer.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) + + def test_copy_license(self): + """Test that only relative paths are accepted.""" + import os + abspath = os.path.abspath('test.txt') + with pytest.raises(ValueError): + FreeSurfer('6.0.0', 'yum', license_path=abspath, check_urls=False) - def test_docker(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:16.04'), - ('freesurfer', {'version': '6.0.0-min'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_freesurfer.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 
'instructions': [ - ('base', 'docker://ubuntu:16.04'), - ('freesurfer', {'version': '6.0.0-min'}), - ('user', 'neuro'), - ] - } - bash_test_file = "test_freesurfer.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + path = 'test.txt' + fs = FreeSurfer('6.0.0', 'yum', license_path=path, check_urls=False) + assert "COPY" in fs.cmd, "Copy instruction not found" + assert path in fs.cmd, "Path to license not found" + assert 'license.txt' in fs.cmd, "License file named improperly" diff --git a/neurodocker/interfaces/tests/test_freesurfer.sh b/neurodocker/interfaces/tests/test_freesurfer.sh index 1c327496..c465de83 100644 --- a/neurodocker/interfaces/tests/test_freesurfer.sh +++ b/neurodocker/interfaces/tests/test_freesurfer.sh @@ -1,9 +1,8 @@ #!/usr/bin/env bash -set -ex +set -e +set -x -# --help returns non-zero status code +# --help returns non-zero status code (causes error in pytest). mri_coreg --version recon-all --version - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_fsl.py b/neurodocker/interfaces/tests/test_fsl.py index 967d345d..e68ec919 100644 --- a/neurodocker/interfaces/tests/test_fsl.py +++ b/neurodocker/interfaces/tests/test_fsl.py @@ -1,33 +1,55 @@ """Tests for neurodocker.interfaces.FSL""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import FSL from neurodocker.interfaces.tests import utils class TestFSL(object): + """Tests for FSL class.""" + + @pytest.mark.skip(reason="necessary resources exceed available") + def test_build_image_fsl_latest_pyinstaller_centos7(self): + """Install latest FSL with FSL's Python installer on CentOS 7.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('fsl', {'version': '5.0.10', 'use_binaries': True}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['fsl-5.0.10_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_fsl.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) + + def test_build_image_fsl_5010_binaries_centos7(self): + """Install FSL binaries on CentOS 7.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('fsl', {'version': '5.0.10', + 'use_binaries': True, + 'eddy_5011': True}) + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['fsl-5.0.10_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_fsl.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) - def test_docker(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'centos:7'), - ('fsl', {'version': '5.0.10'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_fsl.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'docker://centos:7'), - ('fsl', {'version': '5.0.10'}), - ('user', 'neuro'), - ] - } - bash_test_file = "test_fsl.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) diff --git 
a/neurodocker/interfaces/tests/test_fsl.sh b/neurodocker/interfaces/tests/test_fsl.sh index 04abae77..83de417b 100644 --- a/neurodocker/interfaces/tests/test_fsl.sh +++ b/neurodocker/interfaces/tests/test_fsl.sh @@ -1,8 +1,7 @@ #!/usr/bin/env bash -set -ex +set -e +set -x bet2 -h flirt -version - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_matlabmcr.py b/neurodocker/interfaces/tests/test_matlabmcr.py deleted file mode 100644 index c0cba9ce..00000000 --- a/neurodocker/interfaces/tests/test_matlabmcr.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for neurodocker.interfaces.MatlabMCR""" diff --git a/neurodocker/interfaces/tests/test_minc.py b/neurodocker/interfaces/tests/test_minc.py index c0e83864..4f0a6343 100644 --- a/neurodocker/interfaces/tests/test_minc.py +++ b/neurodocker/interfaces/tests/test_minc.py @@ -1,34 +1,52 @@ """Tests for neurodocker.interfaces.MINC""" # Author: Sulantha Mathotaarachchi +from __future__ import absolute_import, division, print_function + +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import minc from neurodocker.interfaces.tests import utils class TestMINC(object): + """Tests for MINC class.""" + + def test_build_image_minc_1915_binaries_xenial(self): + """Install MINC binaries on Ubuntu Xenial.""" + specs = {'pkg_manager': 'apt', + 'check_urls': True, + 'instructions': [ + ('base', 'ubuntu:xenial'), + ('minc', {'version': '1.9.15', 'use_binaries': True}), + ('user', 'neuro'), + ]} + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['minc_xenial'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_minc.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) + + def test_build_image_minc_1915_binaries_centos(self): + """Install MINC binaries on CentOS.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:latest'), + ('minc', {'version': '1.9.15', 'use_binaries': True}), + ('user', 'neuro'), + ]} + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['minc_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_minc.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) - def test_docker(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:xenial'), - ('minc', {'version': '1.9.15'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_minc.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'docker://centos:7'), - ('minc', {'version': '1.9.15'}), - ('user', 'neuro'), - ] - } - bash_test_file = "test_minc.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) \ No newline at end of file diff --git a/neurodocker/interfaces/tests/test_minc.sh b/neurodocker/interfaces/tests/test_minc.sh old mode 100644 new mode 100755 index 7f666d58..113e7b7f --- a/neurodocker/interfaces/tests/test_minc.sh +++ b/neurodocker/interfaces/tests/test_minc.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x -mincresample -version - -printf 'passed' +mincresample -version \ No newline at end of file diff --git a/neurodocker/interfaces/tests/test_miniconda.py 
b/neurodocker/interfaces/tests/test_miniconda.py index c72d1806..80bd4c7c 100644 --- a/neurodocker/interfaces/tests/test_miniconda.py +++ b/neurodocker/interfaces/tests/test_miniconda.py @@ -1,61 +1,42 @@ """Tests for neurodocker.interfaces.Miniconda""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function -from neurodocker.interfaces.tests import utils +import pytest +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import Miniconda +from neurodocker.interfaces.tests import utils class TestMiniconda(object): + """Tests for Miniconda class.""" - def test_docker(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'centos:7'), - ('user', 'neuro'), - ( - 'miniconda', - { + def test_build_image_miniconda_latest_shellscript_centos7(self): + """Install latest version of Miniconda via ContinuumIO's installer + script on CentOS 7. + """ + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('user', 'neuro'), + ('miniconda', { 'env_name': 'default', - 'conda_install': ['python=3.6.5', 'traits'], - 'pip_install': ['nipype'], - 'activate': True, - } - ), - ( - 'miniconda', - { + 'conda_install': ['python=3.5.1', 'traits'], + 'pip_install': ['https://github.com/nipy/nipype/archive/master.tar.gz'], + }), + ('miniconda', { 'env_name': 'default', 'pip_install': ['pylsl'], - } - ), - ], - } + }) + ]} - bash_test_file = "test_miniconda.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['miniconda_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://debian:stretch-slim'), - ('user', 'neuro'), - ( - 'miniconda', - { - 'env_name': 'default', - 'conda_install': ['python=3.6.5', 'traits'], - 'pip_install': ['nipype'], - 'activate': True, - } - ), - ( - 'miniconda', - {'env_name': 'default', 'pip_install': ['pylsl']} - ), - ], - } + cmd = "bash /testscripts/test_miniconda.sh" + DockerContainer(image).run(cmd, volumes=utils.volumes) - bash_test_file = "test_miniconda.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) diff --git a/neurodocker/interfaces/tests/test_miniconda.sh b/neurodocker/interfaces/tests/test_miniconda.sh index f5e0969d..b1188362 100644 --- a/neurodocker/interfaces/tests/test_miniconda.sh +++ b/neurodocker/interfaces/tests/test_miniconda.sh @@ -1,8 +1,11 @@ #!/usr/bin/env bash -set -ex +set -e +# set -x -if [ "$(python --version)" != "Python 3.6.5" ]; then +source activate default + +if [ "$(python --version)" != "Python 3.5.1" ]; then echo "Python version incorrect." exit 1 fi @@ -10,11 +13,9 @@ fi # Check that python packages were installed. 
CONDA_LIST="$(conda list)" for pkg in nipype pylsl traits; do - PATTERN_MATCH=$(echo "$CONDA_LIST" | grep "$pkg") + PATTERN_MATCH=$(echo "$CONDA_LIST" | grep "^$pkg") if [ -z "$PATTERN_MATCH" ]; then echo "Python package not found: ${pkg}" exit 1 fi done - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_mrtrix.py b/neurodocker/interfaces/tests/test_mrtrix.py index 75d0ad5b..be3b14b6 100644 --- a/neurodocker/interfaces/tests/test_mrtrix.py +++ b/neurodocker/interfaces/tests/test_mrtrix.py @@ -1,34 +1,48 @@ """Tests for neurodocker.interfaces.ANTs""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import MRtrix3 from neurodocker.interfaces.tests import utils class TestMRtrix3(object): + """Tests for MRtrix3 class.""" + + def test_build_image_mrtrix3_binaries_centos7(self): + """Install MRtrix3 binaries on CentOS 7.""" + specs = {'pkg_manager': 'yum', + 'check_urls': True, + 'instructions': [ + ('base', 'centos:7'), + ('mrtrix3', {'use_binaries': True}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['mrtrix3_centos7'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_mrtrix.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) + + + def test_build_from_source(self): + # TODO: expand on tests for building MRtrix from source. + + mrtrix = MRtrix3(pkg_manager='apt', use_binaries=False) + assert "git checkout" not in mrtrix.cmd + + with pytest.raises(ValueError): + MRtrix3(pkg_manager='yum', use_binaries=False) - def test_docker(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'centos:7'), - ('mrtrix3', {'version': '3.0'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_mrtrix.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'yum', - 'instructions': [ - ('base', 'docker://centos:7'), - ('mrtrix3', {'version': '3.0'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_mrtrix.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + mrtrix = MRtrix3(pkg_manager='apt', use_binaries=False, + git_hash='12345') + assert 'git checkout 12345' in mrtrix.cmd diff --git a/neurodocker/interfaces/tests/test_mrtrix.sh b/neurodocker/interfaces/tests/test_mrtrix.sh index 06d7ac90..534b3189 100644 --- a/neurodocker/interfaces/tests/test_mrtrix.sh +++ b/neurodocker/interfaces/tests/test_mrtrix.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x mrthreshold --help - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_neurodebian.py b/neurodocker/interfaces/tests/test_neurodebian.py index bebf3c5b..c29ceb55 100644 --- a/neurodocker/interfaces/tests/test_neurodebian.py +++ b/neurodocker/interfaces/tests/test_neurodebian.py @@ -1,50 +1,35 @@ """Tests for neurodocker.interfaces.NeuroDebian""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import NeuroDebian from neurodocker.interfaces.tests import utils class TestNeuroDebian(object): + """Tests for NeuroDebian class.""" + + def test_build_image_neurodebian_dcm2niix_xenial(self): + """Install 
NeuroDebian on Ubuntu 16.04.""" + specs = {'pkg_manager': 'apt', + 'check_urls': False, + 'instructions': [ + ('base', 'ubuntu:16.04'), + ('neurodebian', {'os_codename': 'stretch', + 'download_server': 'usa-nh', + 'full': True, + 'pkgs': ['dcm2niix']}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['neurodebian_stretch'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_neurodebian.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) - def test_docker(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:16.04'), - ( - 'neurodebian', - { - 'os_codename': 'stretch', - 'server': 'usa-nh', - 'full': True, - } - ), - ('install', ['dcm2niix']), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_neurodebian.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://ubuntu:16.04'), - ( - 'neurodebian', - { - 'os_codename': 'stretch', - 'server': 'usa-nh', - 'full': True, - } - ), - ('install', ['dcm2niix']), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_neurodebian.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + if push: + utils.push_image(image_name) diff --git a/neurodocker/interfaces/tests/test_neurodebian.sh b/neurodocker/interfaces/tests/test_neurodebian.sh index d04d3633..a4676ca5 100644 --- a/neurodocker/interfaces/tests/test_neurodebian.sh +++ b/neurodocker/interfaces/tests/test_neurodebian.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x dcm2niix -h - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_petpvc.py b/neurodocker/interfaces/tests/test_petpvc.py index 028eea94..4d05ae81 100644 --- a/neurodocker/interfaces/tests/test_petpvc.py +++ b/neurodocker/interfaces/tests/test_petpvc.py @@ -1,37 +1,33 @@ """Tests for neurodocker.interfaces.PETPVC""" # Author: Sulantha Mathotaarachchi +from __future__ import absolute_import, division, print_function + +import pytest + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import petpvc from neurodocker.interfaces.tests import utils class TestPETPVC(object): """Tests for PETPVC class.""" - def test_docker(self): + def test_build_image_petpvc_120b_binaries_xenial(self): """Install PETPVC binaries on Ubuntu Xenial.""" - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:xenial'), - ('petpvc', {'version': '1.2.2'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_petpvc.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://ubuntu:xenial'), - ('petpvc', {'version': '1.2.2'}), - ('user', 'neuro'), - ] - } - - bash_test_file = "test_petpvc.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) + specs = {'pkg_manager': 'apt', + 'check_urls': True, + 'instructions': [ + ('base', 'ubuntu:xenial'), + ('petpvc', {'version': '1.2.0-b', 'use_binaries': True}), + ('user', 'neuro'), + ]} + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['petpvc_xenial'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_petpvc.sh" + assert 
DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) \ No newline at end of file diff --git a/neurodocker/interfaces/tests/test_petpvc.sh b/neurodocker/interfaces/tests/test_petpvc.sh old mode 100644 new mode 100755 index e36515ed..92069bbe --- a/neurodocker/interfaces/tests/test_petpvc.sh +++ b/neurodocker/interfaces/tests/test_petpvc.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -set -ex +set -e +set -x -which petpvc - -printf 'passed' +ls /opt/petpvc/bin/petpvc \ No newline at end of file diff --git a/neurodocker/interfaces/tests/test_spm.py b/neurodocker/interfaces/tests/test_spm.py new file mode 100644 index 00000000..85abb0a9 --- /dev/null +++ b/neurodocker/interfaces/tests/test_spm.py @@ -0,0 +1,30 @@ +"""Tests for neurodocker.interfaces.SPM""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, division, print_function + +from neurodocker import DockerContainer, Dockerfile +from neurodocker.interfaces import SPM +from neurodocker.interfaces.tests import utils + +class TestSPM(object): + """Tests for SPM class.""" + + def test_build_image_spm_12_standalone_zesty(self): + """Install standalone SPM12 and MATLAB MCR R2017a.""" + specs = {'pkg_manager': 'apt', + 'check_urls': True, + 'instructions': [ + ('base', 'ubuntu:zesty'), + ('spm', {'version': '12', 'matlab_version': 'R2017a'}), + ('user', 'neuro'), + ]} + + df = Dockerfile(specs).cmd + dbx_path, image_name = utils.DROPBOX_DOCKERHUB_MAPPING['spm-12_zesty'] + image, push = utils.get_image_from_memory(df, dbx_path, image_name) + + cmd = "bash /testscripts/test_spm.sh" + assert DockerContainer(image).run(cmd, volumes=utils.volumes) + + if push: + utils.push_image(image_name) diff --git a/neurodocker/interfaces/tests/test_spm12.sh b/neurodocker/interfaces/tests/test_spm.sh similarity index 76% rename from neurodocker/interfaces/tests/test_spm12.sh rename to neurodocker/interfaces/tests/test_spm.sh index 62e84d2e..2a39e1f5 100644 --- a/neurodocker/interfaces/tests/test_spm12.sh +++ b/neurodocker/interfaces/tests/test_spm.sh @@ -1,8 +1,7 @@ #!/usr/bin/env bash -set -exu +set -e +set -x echo 'fprintf("testing")' > /tmp/test.m $SPMMCRCMD /tmp/test.m - -printf 'passed' diff --git a/neurodocker/interfaces/tests/test_spm12.py b/neurodocker/interfaces/tests/test_spm12.py deleted file mode 100644 index f55c76ff..00000000 --- a/neurodocker/interfaces/tests/test_spm12.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Tests for neurodocker.interfaces.SPM""" - -from neurodocker.interfaces.tests import utils - - -class TestSPM(object): - - def test_docker(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'ubuntu:18.04'), - ('spm12', {'version': 'dev'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_spm12.sh" - utils.test_docker_container_from_specs( - specs=specs, bash_test_file=bash_test_file) - - def test_singularity(self): - specs = { - 'pkg_manager': 'apt', - 'instructions': [ - ('base', 'docker://ubuntu:16.04'), - ('spm12', {'version': 'r7219'}), - ('user', 'neuro'), - ], - } - - bash_test_file = "test_spm12.sh" - utils.test_singularity_container_from_specs( - specs=specs, bash_test_file=bash_test_file) diff --git a/neurodocker/interfaces/tests/utils.py b/neurodocker/interfaces/tests/utils.py index c0ef47d3..c63ef7f9 100644 --- a/neurodocker/interfaces/tests/utils.py +++ b/neurodocker/interfaces/tests/utils.py @@ -1,143 +1,171 @@ -"""Utilities for `neurodocker.interfaces.tests`.""" +"""Utility functions for `neurodocker.interfaces.tests`.""" +from __future__ import 
absolute_import -import io import logging import os -from pathlib import Path -import posixpath -import subprocess -from neurodocker.generators import Dockerfile -from neurodocker.generators import SingularityRecipe -from neurodocker.utils import get_docker_client -from neurodocker.utils import get_singularity_client +from neurodocker import Dockerfile +from neurodocker.docker import client, DockerContainer, DockerImage +from neurodocker.interfaces.tests import memory logger = logging.getLogger(__name__) -PUSH_IMAGES = os.environ.get('ND_PUSH_IMAGES', False) -DOCKER_CACHEDIR = os.path.join(os.path.sep, 'tmp', 'cache') -# Singularity builds clear the /tmp directory -SINGULARITY_CACHEDIR = os.path.join(Path.home(), 'tmp', 'cache') + +# DockerHub repositories cannot have capital letters in them. +DROPBOX_DOCKERHUB_MAPPING = { + 'afni-latest_stretch': ('/Dockerfile.AFNI-latest_stretch', + 'kaczmarj/afni:latest_stretch'), + + 'ants-2.0.0_stretch': ('/Dockerfile.ANTs-2.2.0_stretch', + 'kaczmarj/ants:2.2.0_stretch'), + + 'convert3d_zesty': ('/Dockerfile.Convert3D-1.0.0_zesty', + 'kaczmarj/c3d:1.0.0_zesty'), + + 'dcm2niix-master_centos7': ('/Dockerfile.dcm2niix-master_centos7', + 'kaczmarj/dcm2niix:master_centos7'), + + 'freesurfer-min_zesty': ('/Dockerfile.FreeSurfer-min_zesty', + 'kaczmarj/freesurfer:min_zesty'), + + 'fsl-5.0.9_centos7': ('/Dockerfile.FSL-5.0.9_centos7', + 'kaczmarj/fsl:5.0.9_centos7'), + + 'fsl-5.0.10_centos7': ('/Dockerfile.FSL-5.0.10_centos7', + 'kaczmarj/fsl:5.0.10_centos7'), + + 'miniconda_centos7': ('/Dockerfile.Miniconda-latest_centos7', + 'kaczmarj/miniconda:latest_centos7'), + + 'mrtrix3_centos7': ('/Dockerfile.MRtrix3_centos7', + 'kaczmarj/mrtrix3:centos7'), + + 'neurodebian_stretch': ('/Dockerfile.NeuroDebian_stretch', + 'kaczmarj/neurodebian:stretch'), + + 'spm-12_zesty': ('/Dockerfile.SPM-12_zesty', + 'kaczmarj/spm:12_zesty'), + + 'minc_xenial': ('/Dockerfile.MINC_xenial', + 'kaczmarj/minc:1.9.15_xenial'), + + 'minc_centos7': ('/Dockerfile.MINC_centos7', + 'kaczmarj/minc:1.9.15_centos7'), + + 'petpvc_xenial': ('/Dockerfile.PETPVC_xenial', + 'kaczmarj/petpvc:1.2.0b_xenial'), + +} + here = os.path.dirname(os.path.realpath(__file__)) -_volumes = {here: {'bind': '/testscripts', 'mode': 'ro'}} +volumes = {here: {'bind': '/testscripts', 'mode': 'ro'}} + + + +def pull_image(name, **kwargs): + """Pull image from DockerHub. Return None if image is not found. -_container_run_kwds = {'volumes': _volumes} + This does not stream the raw output of the pull. + + Parameters + ---------- + name : str + Name of Docker image to pull. Should include repository and tag. + Example: 'kaczmarj/neurodocker:latest'. + + """ + import docker + + try: + return client.images.pull(name, **kwargs) + except docker.errors.NotFound: + return None -def docker_is_running(client): - """Return true if Docker server is responsive. +def build_image(df, name): + """Build and return image. Parameters ---------- - client : docker.client.DockerClient - The Docker client. E.g., `client = docker.from_env()`. + df : str + String representation of Dockerfile. + name : str + Name of Docker image to build. Should include repository and tag. + Example: 'kaczmarj/neurodocker:latest'. + """ + logger.info("Building image: {} ...".format(name)) + return DockerImage(df).build(log_console=True, tag=name) + + +def push_image(name): + """Push image to DockerHub. - Returns - ------- - running : bool - True if Docker server is responsive. + Parameters + ---------- + name : str + Name of Docker image to push. 
Should include repository and tag. + Example: 'kaczmarj/neurodocker:latest'. + """ + logger.info("Pushing image to DockerHub: {} ...".format(name)) + client.images.push(name) + + +def _get_dbx_token(): + """Get access token for Dropbox API.""" + import os + import warnings + try: - client.ping() - return True - except Exception: - return False - - -def test_docker_container_from_specs(specs, bash_test_file): - """""" - client = get_docker_client() - docker_is_running(client) - - df = Dockerfile(specs).render() - - refpath = bash_test_file[5:].split('.')[0] - refpath = os.path.join(DOCKER_CACHEDIR, "Dockerfile." + refpath) - - if os.path.exists(refpath): - logger.info("loading cached reference dockerfile") - with open(refpath, 'r') as fp: - reference = fp.read() - if _dockerfiles_equivalent(df, reference): - logger.info("test equal to reference dockerfile, passing") - return # do not build and test because nothing has changed - - logger.info("building docker image") - image, build_logs = client.images.build( - fileobj=io.BytesIO(df.encode()), rm=True) - - bash_test_file = posixpath.join("/testscripts", bash_test_file) - test_cmd = "bash " + bash_test_file - - res = client.containers.run(image, test_cmd, **_container_run_kwds) - passed = res.decode().endswith('passed') - assert passed - - if passed: - os.makedirs(os.path.dirname(refpath), exist_ok=True) - with open(refpath, 'w') as fp: - fp.write(df) - - -def test_singularity_container_from_specs(specs, bash_test_file): - """""" - sr_dir = "singularity_cache" - os.makedirs(sr_dir, exist_ok=True) - - intname = bash_test_file[5:].split('.')[0] - refpath = os.path.join(SINGULARITY_CACHEDIR, "Singularity." + intname) - - sr = SingularityRecipe(specs).render() - - if os.path.exists(refpath): - logger.info("loading cached reference singularity spec") - with open(refpath, 'r') as fp: - reference = fp.read() - if _dockerfiles_equivalent(sr, reference): - logger.info("test equal to reference singularity spec, passing") - return # do not build and test because nothing has changed - - logger.info("building singularity image") - filename = os.path.join(sr_dir, "Singularity." + intname) - with open(filename, 'w') as fp: - fp.write(sr) - - client = get_singularity_client() - img = client.build(os.path.join(sr_dir, intname + ".sqsh"), filename) - - bash_test_file = posixpath.join("/testscripts", bash_test_file) - test_cmd = "bash " + bash_test_file - - # TODO(kaczmarj): replace the exec with a singularity python client - # command. - cmd = "singularity run --bind {s}:{d} {img} {args}" - cmd = cmd.format(s=here, d=_volumes[here]['bind'], img=img, args=test_cmd) - - output = subprocess.check_output(cmd.split()) - passed = output.decode().endswith('passed') - assert passed - if passed: - os.makedirs(os.path.dirname(refpath), exist_ok=True) - with open(refpath, 'w') as fp: - fp.write(sr) - os.remove(img) - - -def _prune_dockerfile(string, comment_char="#"): - """Remove comments, empty lines, and last layer (serialize to JSON).""" - string = string.strip() # trim white space on both ends. - json_removed = '\n\n'.join(string.split('\n\n')[:-1]) - json_removed = "".join(json_removed.split()) - return '\n'.join( - row for row in json_removed.split('\n') if not - row.startswith(comment_char) and row) - - -def _dockerfiles_equivalent(df_a, df_b): - """Return True if unicode strings `df_a` and `df_b` are equivalent. Does - not consider comments or empty lines. 
+ return os.environ['DROPBOX_TOKEN'] + except KeyError: + warnings.warn("Environment variable not found: DROPBOX_TOKEN." + " Cannot interact with Dropbox API. Cannot compare" + " Dockerfiles. Will pull existing Docker images ...") + return None + + +def _check_can_push(): + """Raise error if user cannot push to DockerHub.""" + pass + + +def get_image_from_memory(df, remote_path, name, force_build=False): + """Return image and boolean indicating whether or not to push resulting + image to DockerHub. """ + if force_build: + logger.info("Building image (forced) ... Result should be pushed.") + image = build_image(df, name) + push = True + return image, push + + token = _get_dbx_token() + + # Take into account other forks of the project. They cannot use the secret + # environment variables in Travis CI (e.g., the Dropbox token). + if token is None: + logger.info("Attempting to pull image...") + image = pull_image(name) + if image is None: + logger.info("Image not found. Building ...") + image = build_image(df, name) + push = False + return image, push + + dbx_client = memory.Dropbox(token) + + if memory.should_build_image(df, remote_path, remote_object=dbx_client): + logger.info("Building image... Result should be pushed.") + image = build_image(df, name) + push = True + else: + logger.info("Attempting to pull image ...") + image = pull_image(name) + push = False + if image is None: + logger.info("Image could not be pulled. Building ..." + " Result should be pushed.") + image = build_image(df, name) + push = True + return image, push diff --git a/neurodocker/neurodocker.py b/neurodocker/neurodocker.py index 5f5adcf5..e6ed7c9f 100644 --- a/neurodocker/neurodocker.py +++ b/neurodocker/neurodocker.py @@ -1,29 +1,25 @@ -#!/usr/bin/env python3 -"""Neurodocker is a command-line interface to generate custom Dockerfiles and -Singularity recipes. - -For help generating Dockerfiles and Singularity recipes, run - -$ neurodocker generate docker --help -$ neurodocker generate singularity --help +#!/usr/bin/env python """ +Neurodocker command-line interface to generate Dockerfiles and minify +existing containers. 
+""" +# Author: Jakub Kaczmarzyk +from __future__ import absolute_import, unicode_literals from argparse import Action, ArgumentParser, RawDescriptionHelpFormatter import logging import sys -from neurodocker import __version__ -from neurodocker import utils -from neurodocker.generators import Dockerfile -from neurodocker.generators import SingularityRecipe -from neurodocker.generators.common import _installation_implementations +from neurodocker import __version__, Dockerfile, utils +from neurodocker.generate import dockerfile_implementations logger = logging.getLogger(__name__) +SUPPORTED_SOFTWARE = dockerfile_implementations['software'] + # https://stackoverflow.com/a/9028031/5666087 class OrderedArgs(Action): - """Object to preserve order in which command-line arguments are given.""" def __call__(self, parser, namespace, values, option_string=None): if 'ordered_args' not in namespace: setattr(namespace, 'ordered_args', []) @@ -32,158 +28,140 @@ def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, 'ordered_args', previous) -def _list_of_kv(kv): - """Split string `kv` at first equals sign.""" - ll = kv.split("=") - ll[1:] = ["=".join(ll[1:])] - return ll +def _add_generate_arguments(parser): + """Add arguments to `parser` for sub-command `generate`.""" + p = parser + def list_of_kv(kv): + """Split string `kv` at first equals sign.""" + l = kv.split("=") + l[1:] = ["=".join(l[1:])] + return l -def _add_generate_common_arguments(parser): - p = parser + p.add_argument("-b", "--base", # required=True, + help="Base Docker image. Eg, ubuntu:17.04") + p.add_argument("-p", "--pkg-manager", # required=True, + choices=utils.manage_pkgs.keys(), + help="Linux package manager.") - p.add_argument("-b", "--base", help="Base Docker image. Eg, ubuntu:17.04") - p.add_argument( - "-p", "--pkg-manager", choices={'apt', 'yum'}, - help="Linux package manager.") - p.add_argument( - '--add-to-entrypoint', action=OrderedArgs, - help=("Add a command to the file /neurodocker/startup.sh, which is the" - " container's default entrypoint.")) - p.add_argument( - '--copy', action=OrderedArgs, nargs="+", - help="Copy files into container. Use format ... ") - p.add_argument( - '--install', action=OrderedArgs, nargs="+", - help=("Install system packages with apt-get or yum, depending on the" - " package manager specified.")) - p.add_argument( - '--entrypoint', action=OrderedArgs, - help="Set the container's entrypoint (Docker) / append to runscript" - " (Singularity)") - p.add_argument( - '-e', '--env', action=OrderedArgs, nargs="+", type=_list_of_kv, - help="Set environment variable(s). Use the format KEY=VALUE") - p.add_argument( - '-r', '--run', action=OrderedArgs, - help="Run a command when building container") - p.add_argument( - '-u', '--user', action=OrderedArgs, - help="Switch current user (creates user if necessary)") - p.add_argument( - '-w', '--workdir', action=OrderedArgs, help="Set working directory") + # Arguments that should be ordered. + p.add_argument('--add', action=OrderedArgs, nargs="+", + help="Dockerfile ADD instruction. Use format ... ") + p.add_argument('--add-to-entrypoint', action=OrderedArgs, nargs="+", + help=("Add a command to the file /neurodocker/startup.sh," + " which is the container's default entrypoint.")) + p.add_argument('--arg', action=OrderedArgs, nargs="+", + help="Dockerfile ARG instruction. 
Use format KEY[=DEFAULT_VALUE] ...", + type=list_of_kv) + p.add_argument('--cmd', action=OrderedArgs, nargs="+", + help="Dockerfile CMD instruction.") + p.add_argument('--copy', action=OrderedArgs, nargs="+", + help="Dockerfile COPY instruction. Use format ... ") + p.add_argument('--entrypoint', action=OrderedArgs, + help="Dockerfile ENTRYPOINT instruction.") + p.add_argument('-e', '--env', action=OrderedArgs, nargs="+", + help="Dockerfile ENV instruction. Use the format KEY=VALUE ...", + type=list_of_kv) + p.add_argument('--expose', nargs="+", action=OrderedArgs, + help="Dockerfile EXPOSE instruction.") + p.add_argument('--install', action=OrderedArgs, nargs="+", + help=("Install system packages with apt-get or yum," + " depending on the package manager specified.")) + p.add_argument('--instruction', action=OrderedArgs, + help="Arbitrary text to write to Dockerfile.") + p.add_argument('--label', action=OrderedArgs, nargs="+", + help="Dockerfile LABEL instruction.", type=list_of_kv) + p.add_argument('-r', '--run', action=OrderedArgs, + help="Dockerfile RUN instruction") + p.add_argument('--run-bash', action=OrderedArgs, + help="Run BASH code in RUN instruction.") + p.add_argument('-u', '--user', action=OrderedArgs, + help="Dockerfile USER instruction.") + p.add_argument('--volume', action=OrderedArgs, nargs="+", + help="Dockerfile VOLUME instruction.") + p.add_argument('--workdir', action=OrderedArgs, + help="Dockerfile WORKDIR instruction.") # To generate from file. - p.add_argument( - '-f', '--file', dest='file', - help="Generate file from JSON. Overrides other `generate` arguments") + p.add_argument('-f', '--file', dest='file', + help=("Generate Dockerfile from JSON. Overrides other" + " `generate` arguments")) # Other arguments (no order). - p.add_argument( - '-o', '--output', dest="output", - help="If specified, save Dockerfile to file with this name.") - p.add_argument( - '--no-print', dest='no_print', action="store_true", - help="Do not print the generated file") + p.add_argument('-o', '--output', dest="output", + help="If specified, save Dockerfile to file with this name.") + p.add_argument('--no-print-df', dest='no_print_df', action="store_true", + help="Do not print the Dockerfile") + p.add_argument("--no-check-urls", action="store_false", dest="check_urls", + help="Do not verify communication with URLs used in the build.") - _ndeb_servers = ", ".join( - _installation_implementations['neurodebian']._servers.keys() - ) + _ndeb_servers = ", ".join(SUPPORTED_SOFTWARE['neurodebian'].SERVERS.keys()) # Software package options. pkgs_help = { - "all": "Install software packages. Each argument takes a list of" - " key=value pairs. Where applicable, the default installation" - " behavior is to install by downloading and uncompressing" - " binaries. Some programs can be built from source.", - "afni": "Install AFNI. Valid keys are version (required), method," - " install_path, install_r, install_r_pkgs, install_python2," - " and install_python3. Only the latest version and version" - " 17.2.02 are supported at this time.", - "ants": "Install ANTs. Valid keys are version (required), method" - " install_path, cmake_opts, and make_opts. Version can be a " - " git commit hash if building from source.", - "convert3d": "Install Convert3D. Valid keys are version (required)," - " method, and install_path.", - "dcm2niix": "Install dcm2niix. Valid keys are version, method," - " install_path, cmake_opts, and make_opts", - "freesurfer": "Install FreeSurfer. 
Valid keys are version (required)," - " method, install_path, exclude_paths, and license_path" - " (relative path to license). A FreeSurfer license is" - " required to run the software and is not provided by" - " Neurodocker.", - "fsl": "Install FSL. Valid keys are version (required), method, and" - " install_path.", - "matlabmcr": "Install Matlab Compiler Runtime. Valid keys are version," - " method, and install_path", - "miniconda": "Install Miniconda. Valid keys are install_path," - " env_name, conda_install, pip_install, conda_opts," - " pip_opts, activate (default false), and version" - " (defaults to latest). The options conda_install and" - " pip_install accept strings of packages: conda_install=" - '"python=3.6 numpy traits".', - "mrtrix3": "Install MRtrix3. Valid keys are version (required)," - " method, and install_path", - "neurodebian": "Add NeuroDebian repository. Valid keys are " - "os_codename (eg zesty), server (eg usa-nh), and full" - " (if true, use non-free packages). Valid download" - " servers are {}.".format(_ndeb_servers), - "spm12": "Install SPM12 and its dependency, Matlab Compiler Runtime." - " Valid keys are version and install_path.", - "minc": "Install MINC. Valid keys is version (required), method, and" - " install_path. Only version 1.9.15 is supported at this" - " time.", - "petpvc": "Install PETPVC. Valid keys are version (required), method," - " and install_path." + "all": ( + "Install software packages. Each argument takes a list of" + " key=value pairs. Where applicable, the default installation" + " behavior is to install by downloading and uncompressing" + " binaries."), + "afni": ( + "Install AFNI. Valid keys are version (required), install_r," + " install_python2, and install_python3. Only the latest" + " version and version 17.2.02 are supported at this time."), + "ants": ( + "Install ANTs. Valid keys are version (required), use_binaries" + " (default true), and git_hash. If use_binaries=true, installs" + " pre-compiled binaries; if use_binaries=false, builds ANTs from" + " source. If git_hash is specified, build from source from that" + " commit."), + "c3d": ( + "Install Convert3D. The only valid key is version (required)."), + "dcm2niix": ( + "Install dcm2niix. The only valid key is version (required)."), + "freesurfer": ( + "Install FreeSurfer. Valid keys are version (required)," + " license_path (relative path to license), min (if true, install" + " binaries minimized for recon-all) and use_binaries (default true" + "). A FreeSurfer license is required to run the software and is not" + " provided by Neurodocker."), + "fsl": ( + "Install FSL. Valid keys are version (required), use_binaries" + " (default true) and use_installer."), + "miniconda": ( + "Install Miniconda. Valid keys are env_name (required)," + " conda_install, pip_install, conda_opts, pip_opts, activate" + " (default false) and miniconda_version (defaults to latest). The" + " options conda_install and pip_install accept strings of" + ' packages: conda_install="python=3.6 numpy traits".'), + "mrtrix3": ( + "Install MRtrix3. Valid keys are use_binaries (default true) and" + " git_hash. If git_hash is specified and use_binaries is false," + " will checkout to that commit before building."), + "neurodebian": ( + "Add NeuroDebian repository and optionally install NeuroDebian" + " packages. Valid keys are os_codename (required; e.g., 'zesty')," + " download_server (required), full (if true, default, use non-free" + " packages), and pkgs (list of packages to install). 
Valid" + " download servers are {}.".format(_ndeb_servers)), + "spm": ( + "Install SPM (and its dependency, Matlab Compiler Runtime). Valid" + " keys are version and matlab_version."), + "minc": ( + "Install MINC. Valid keys is version (required). Only version" + " 1.9.15 is supported at this time."), + "petpvc": ( + "Install PETPVC. Valid keys are version (required)."), } - pkgs = p.add_argument_group( - title="software package arguments", description=pkgs_help['all'] - ) + pkgs = p.add_argument_group(title="software package arguments", + description=pkgs_help['all']) - for pkg in _installation_implementations.keys(): - if pkg == '_header': - continue + for pkg in SUPPORTED_SOFTWARE.keys(): flag = "--{}".format(pkg) # MRtrix3 does not need any arguments by default. nargs = "*" if pkg == "mrtrix3" else "+" - pkgs.add_argument( - flag, dest=pkg, nargs=nargs, action=OrderedArgs, metavar="", - type=_list_of_kv, help=pkgs_help[pkg] - ) - - -def _add_generate_docker_arguments(parser): - """Add arguments to `parser` for sub-command `generate docker`.""" - p = parser - - # Arguments that should be ordered. - p.add_argument( - '--add', action=OrderedArgs, nargs="+", - help="Dockerfile ADD instruction. Use format ... ") - p.add_argument( - '--arg', action=OrderedArgs, nargs="+", type=_list_of_kv, - help="Dockerfile ARG instruction. Use format KEY[=DEFAULT_VALUE] ...") - p.add_argument( - '--cmd', action=OrderedArgs, nargs="+", - help="Dockerfile CMD instruction.") - p.add_argument( - '--expose', nargs="+", action=OrderedArgs, - help="Dockerfile EXPOSE instruction.") - p.add_argument( - '--label', action=OrderedArgs, nargs="+", type=_list_of_kv, - help="Dockerfile LABEL instruction.") - p.add_argument( - '--run-bash', action=OrderedArgs, - help="Run BASH code in RUN instruction.") - p.add_argument( - '--volume', action=OrderedArgs, nargs="+", - help="Dockerfile VOLUME instruction.") - - -def _add_generate_singularity_arguments(parser): - """Add arguments to `parser` for sub-command `generate singularity`.""" - pass + pkgs.add_argument(flag, dest=pkg, nargs=nargs, action=OrderedArgs, + metavar="", type=list_of_kv, help=pkgs_help[pkg]) def _add_reprozip_trace_arguments(parser): @@ -206,10 +184,8 @@ def _add_reprozip_merge_arguments(parser): def create_parser(): """Return command-line argument parser.""" - - parser = ArgumentParser( - description=__doc__, formatter_class=RawDescriptionHelpFormatter - ) + parser = ArgumentParser(description=__doc__, # add_help=False, + formatter_class=RawDescriptionHelpFormatter) verbosity_choices = ('debug', 'info', 'warning', 'error', 'critical') parser.add_argument("-v", "--verbosity", choices=verbosity_choices) @@ -218,40 +194,15 @@ def create_parser(): subparsers = parser.add_subparsers( dest="subparser_name", title="subcommands", - description="valid subcommands" - ) - - # `neurodocker gnerate` parsers. 
+ description="valid subcommands") generate_parser = subparsers.add_parser( - 'generate', help="generate recipes" - ) - generate_subparsers = generate_parser.add_subparsers( - dest="subsubparser_name", title="subcommands", - description="valid subcommands" - ) - generate_docker_parser = generate_subparsers.add_parser( - 'docker', help="generate Dockerfile" - ) - generate_singularity_parser = generate_subparsers.add_parser( - 'singularity', help="generate Singularity recipe" - ) - _add_generate_common_arguments(generate_docker_parser) - _add_generate_docker_arguments(generate_docker_parser) - _add_generate_common_arguments(generate_singularity_parser) - _add_generate_singularity_arguments(generate_singularity_parser) - - # `neurodocker reprozip` parsers. - reprozip_parser = subparsers.add_parser('reprozip', help="") - reprozip_subparsers = reprozip_parser.add_subparsers( - dest="subsubparser_name", title="subcommands", - description="valid subcommands" - ) - reprozip_trace_parser = reprozip_subparsers.add_parser( - 'trace', help="minify container for traced command(s)" - ) - reprozip_merge_parser = reprozip_subparsers.add_parser( - 'merge', help="merge reprozip pack files" - ) + 'generate', help="generate dockerfiles") + reprozip_trace_parser = subparsers.add_parser( + 'reprozip-trace', help="reprozip trace commands") + reprozip_merge_parser = subparsers.add_parser( + 'reprozip-merge', help="merge reprozip pack files") + + _add_generate_arguments(generate_parser) _add_reprozip_trace_arguments(reprozip_trace_parser) _add_reprozip_merge_arguments(reprozip_merge_parser) @@ -265,43 +216,20 @@ def create_parser(): def parse_args(args): """Return namespace of command-line arguments.""" parser = create_parser() - namespace = parser.parse_args(args) - - if namespace.subparser_name is None: - parser.print_help() - parser.exit(1) - elif (namespace.subparser_name == 'generate' - and namespace.subsubparser_name is None): - parser.print_help() - parser.exit(1) - elif (namespace.subparser_name == 'reprozip' - and namespace.subsubparser_name is None): - parser.print_help() - parser.exit(1) - elif (namespace.subparser_name == 'generate' - and namespace.subsubparser_name in {'docker', 'singularity'}): - _validate_generate_args(namespace) - - return namespace + return parser.parse_args(args) def generate(namespace): """Run `neurodocker generate`.""" - if namespace.file is None: - specs = utils._namespace_to_specs(namespace) - else: + if namespace.file is not None: specs = utils.load_json(namespace.file) - - recipe_objs = { - 'docker': Dockerfile, - 'singularity': SingularityRecipe, - } - - recipe_obj = recipe_objs[namespace.subsubparser_name](specs) - if not namespace.no_print: - print(recipe_obj.render()) + else: + specs = utils._namespace_to_specs(namespace) + df = Dockerfile(specs) + if not namespace.no_print_df: + print(df.cmd) if namespace.output: - recipe_obj.save(filepath=namespace.output) + df.save(filepath=namespace.output) def reprozip_trace(namespace): @@ -320,7 +248,7 @@ def reprozip_merge(namespace): merge_pack_files(namespace.outfile, namespace.pack_files) -def _validate_generate_args(namespace): +def _validate_args(namespace): if (namespace.file is None and (namespace.base is None or namespace.pkg_manager is None)): raise ValueError("-b/--base and -p/--pkg-manager are required if not" @@ -334,19 +262,23 @@ def main(args=None): else: namespace = parse_args(args) + if namespace.subparser_name == 'generate': + _validate_args(namespace) + if namespace.verbosity is not None: 
utils.set_log_level(namespace.verbosity) logger.debug(vars(namespace)) - subparser_functions = { - 'docker': generate, - 'singularity': generate, - 'reprozip-trace': reprozip_trace, - 'reprozip-merge': reprozip_merge, - } + subparser_functions = {'generate': generate, + 'reprozip-trace': reprozip_trace, + 'reprozip-merge': reprozip_merge} + + if namespace.subparser_name not in subparser_functions.keys(): + print(__doc__) + return - subparser_functions[namespace.subsubparser_name](namespace) + subparser_functions[namespace.subparser_name](namespace) if __name__ == "__main__": # pragma: no cover diff --git a/neurodocker/parser.py b/neurodocker/parser.py index 047c3164..af872fe6 100644 --- a/neurodocker/parser.py +++ b/neurodocker/parser.py @@ -1,12 +1,12 @@ """Class to parse specifications for Dockerfile.""" # Author: Jakub Kaczmarzyk +from __future__ import absolute_import + import inspect from neurodocker import utils -from neurodocker.generators.common import _installation_implementations -from neurodocker.generators.docker import Dockerfile -from neurodocker.generators.singularity import SingularityRecipe +from neurodocker.generate import dockerfile_implementations def _check_for_invalid_keys(keys, valid_keys, where): @@ -19,7 +19,7 @@ def _check_for_invalid_keys(keys, valid_keys, where): "".format(where, invalid, valid_keys)) -class _SpecsParser: +class _SpecsParser(object): """Class to parse specifications for Dockerfile. This class checks the dictionary of specifications for errors and raises @@ -45,9 +45,12 @@ class _SpecsParser: >>> SpecsParser(specs) """ VALID_TOP_LEVEL_KEYS = ['check_urls', 'instructions', 'pkg_manager', - 'generation_timestamp', 'neurodocker_version'] + 'generation_timestamp', 'neurodocker_version',] + VALID_INSTRUCTIONS_KEYS = list(dockerfile_implementations['other'].keys()) - VALID_INSTRUCTIONS_KEYS = Dockerfile._implementations.keys() + SUPPORTED_SOFTWARE = dockerfile_implementations['software'].keys() + VALID_INSTRUCTIONS_KEYS.extend(SUPPORTED_SOFTWARE) + VALID_INSTRUCTIONS_KEYS.sort() def __init__(self, specs): self.specs = specs @@ -92,10 +95,17 @@ def _validate_software_options(self): """Raise ValueError if a key is present that does not belong in a function's signature. """ + supported_software = dockerfile_implementations['software'] for pkg, opts in self.specs['instructions']: - if pkg in _installation_implementations.keys(): - func = _installation_implementations[pkg] - params = list(inspect.signature(func).parameters) + if pkg in supported_software.keys(): + func = supported_software[pkg] + try: + params = list(inspect.signature(func).parameters) + # Python 2.7 does not have inspect.signature + except AttributeError: + params = inspect.getargspec(func.__init__)[0] + params.remove('self') + bad_opts = [opt for opt in opts if opt not in params] if bad_opts: bad_opts = ', '.join(bad_opts) diff --git a/neurodocker/reprozip/merge.py b/neurodocker/reprozip/merge.py index fae721ef..e2f6897f 100644 --- a/neurodocker/reprozip/merge.py +++ b/neurodocker/reprozip/merge.py @@ -20,6 +20,10 @@ - If the same files exist in different traces, the contents of those files are identical. 
""" +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function + from glob import glob import logging import os diff --git a/neurodocker/reprozip/tests/test_merge.py b/neurodocker/reprozip/tests/test_merge.py index 3acbb722..fe7d9480 100644 --- a/neurodocker/reprozip/tests/test_merge.py +++ b/neurodocker/reprozip/tests/test_merge.py @@ -7,22 +7,21 @@ import tarfile import tempfile +from neurodocker.docker import client from neurodocker.reprozip.trace import ReproZipMinimizer -from neurodocker.utils import get_docker_client from neurodocker.reprozip.merge import merge_pack_files def _create_packfile(commands, dir): """Create packfile from list `commands` in debian:stretch container.""" - client = get_docker_client() image = "debian@sha256:427752aa7da803378f765f5a8efba421df5925cbde8ab011717f3642f406fb15" - container = client.containers.run( - image, detach=True, tty=True, security_opt=['seccomp:unconfined']) + container = client.containers.run(image, detach=True, tty=True, + security_opt=['seccomp:unconfined']) try: - minimizer = ReproZipMinimizer( - container.id, commands, packfile_save_dir=dir) + minimizer = ReproZipMinimizer(container.id, commands, + packfile_save_dir=dir) packfile_path = minimizer.run() - except Exception: + except: raise finally: container.stop() @@ -33,7 +32,7 @@ def _create_packfile(commands, dir): def test_merge_pack_files(): tmpdir = tempfile.mkdtemp() - cmd = ["du --help", "ls --help"] + cmd = ["du -sh /usr", "rm --help"] packpath = _create_packfile(cmd, tmpdir) new_name = "first-pack.rpz" os.rename(packpath, os.path.join(tmpdir, new_name)) @@ -61,6 +60,6 @@ def test_merge_pack_files(): assert os.path.isfile(os.path.join(usr_bin_path, 'du')) assert os.path.isfile(os.path.join(bin_path, 'grep')) assert os.path.isfile(os.path.join(bin_path, 'ls')) - assert not os.path.isfile(os.path.join(bin_path, 'rm')) + assert os.path.isfile(os.path.join(bin_path, 'rm')) assert not os.path.isfile(os.path.join(bin_path, 'sed')) assert not os.path.isfile(os.path.join(bin_path, 'tar')) diff --git a/neurodocker/reprozip/tests/test_trace.py b/neurodocker/reprozip/tests/test_trace.py index f3644914..7692c3cd 100644 --- a/neurodocker/reprozip/tests/test_trace.py +++ b/neurodocker/reprozip/tests/test_trace.py @@ -1,17 +1,18 @@ """Tests for trace.py.""" +from __future__ import absolute_import, division, print_function + import os import tempfile import pytest +from neurodocker.docker import client from neurodocker.reprozip.trace import ReproZipMinimizer -from neurodocker.utils import get_docker_client -@pytest.mark.skip(reason="seccomp not available in CI") +@pytest.mark.skip(reason="seccomp not available in ubuntu trusty (travis)") def test_ReproZipMinimizer_no_ptrace(): - client = get_docker_client() container = client.containers.run('debian:stretch', detach=True, tty=True) commands = ["du --help", "ls --help"] @@ -21,7 +22,7 @@ def test_ReproZipMinimizer_no_ptrace(): packfile_save_dir=tmpdir) with pytest.raises(RuntimeError): # ptrace should fail minimizer.run() - except Exception: + except: raise finally: container.stop() @@ -29,18 +30,16 @@ def test_ReproZipMinimizer_no_ptrace(): def test_ReproZipMinimizer(): - client = get_docker_client() - container = client.containers.run( - 'debian:stretch', detach=True, tty=True, - security_opt=['seccomp:unconfined']) + container = client.containers.run('debian:stretch', detach=True, tty=True, + security_opt=['seccomp:unconfined']) commands = ["du --help", "ls --help"] tmpdir = tempfile.mkdtemp() try: - 
minimizer = ReproZipMinimizer( - container.id, commands, packfile_save_dir=tmpdir) + minimizer = ReproZipMinimizer(container.id, commands, + packfile_save_dir=tmpdir) packfile_path = minimizer.run() - except Exception: + except: raise finally: container.stop() diff --git a/neurodocker/reprozip/trace.py b/neurodocker/reprozip/trace.py index 4d4616b6..fd7e338f 100644 --- a/neurodocker/reprozip/trace.py +++ b/neurodocker/reprozip/trace.py @@ -24,8 +24,8 @@ Notes ----- -1. To use the reprozip trace within a Docker container, the image/container - must be built/run with `--security-opt seccomp:unconfined`. +1. To use the reprozip trace within a Docker container, the image/container must + be built/run with `--security-opt seccomp:unconfined`. A. `docker build` does not allow --security-opt seccomp:unconfined on macOS. 2. Docker's use of layers means that even if a smaller container is committed @@ -38,98 +38,19 @@ B. See https://github.com/moby/moby/issues/332 C. See https://github.com/moby/moby/pull/22641 """ +# Author: Jakub Kaczmarzyk + +from __future__ import absolute_import, division, print_function import logging import os -from neurodocker.utils import get_docker_client +from neurodocker.docker import copy_file_to_container, copy_file_from_container BASE_PATH = os.path.dirname(os.path.realpath(__file__)) logger = logging.getLogger(__name__) -def copy_file_to_container(container, src, dest): - """Copy `local_filepath` into `container`:`container_path`. - - Parameters - ---------- - container : str or container object - Container to which file is copied. - src : str - Filepath on the host. - dest : str - Directory inside container. Original filename is preserved. - - Returns - ------- - success : bool - True if copy was a success. False otherwise. - """ - # https://gist.github.com/zbyte64/6800eae10ce082bb78f0b7a2cca5cbc2 - - from io import BytesIO - import tarfile - - client = get_docker_client() - - try: - container.put_archive - container = container - except AttributeError: - container = client.containers.get(container) - - with BytesIO() as tar_stream: - with tarfile.TarFile(fileobj=tar_stream, mode='w') as tar: - filename = os.path.split(src)[-1] - tar.add(src, arcname=filename, recursive=False) - tar_stream.seek(0) - return container.put_archive(dest, tar_stream) - - -def copy_file_from_container(container, src, dest='.'): - """Copy file `filepath` from a running Docker `container`, and save it on - the host to `save_path` with the original filename. - - Parameters - ---------- - container : str or container object - Container from which file is copied. - src : str - Filepath within container. - dest : str - Directory on the host in which to save file. - - Returns - ------- - local_filepath : str - Relative path to saved file on the host. - """ - import tarfile - import tempfile - - client = get_docker_client() - - try: - container.put_archive - container = container - except AttributeError: - container = client.containers.get(container) - - tar_stream, tar_info = container.get_archive(src) - try: - with tempfile.NamedTemporaryFile() as tmp: - for data in tar_stream: - tmp.write(data) - tmp.flush() - with tarfile.TarFile(tmp.name) as tar: - tar.extractall(path=dest) - return os.path.join(dest, tar_info['name']) - except Exception as e: - raise - finally: - tar_stream.close() - - class ReproZipMinimizer(object): """Minimize a container based on arbitrary number of commands. Can only be used at runtime (not while building a Docker image). 
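A short usage sketch of `ReproZipMinimizer`, mirroring `test_ReproZipMinimizer` above. It assumes a local Docker daemon and reuses the module-level `client` from `neurodocker.docker`; note the unconfined seccomp profile that the notes at the top of this module call out:

```
from neurodocker.docker import client
from neurodocker.reprozip.trace import ReproZipMinimizer

# reprozip relies on ptrace, so the container must run with seccomp unconfined.
container = client.containers.run('debian:stretch', detach=True, tty=True,
                                  security_opt=['seccomp:unconfined'])
try:
    minimizer = ReproZipMinimizer(container.id, ["du --help", "ls --help"],
                                  packfile_save_dir='/tmp')
    packfile_path = minimizer.run()  # absolute path to the .rpz pack file
finally:
    container.stop()
    container.remove()
```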
@@ -152,7 +73,7 @@ def __init__(self, container, commands, packfile_save_dir='.', **kwargs): container.put_archive self.container = container except AttributeError: - client = get_docker_client() + from neurodocker.docker import client self.container = client.containers.get(container) if isinstance(commands, str): @@ -173,7 +94,7 @@ def run(self): Raises ------ - RuntimeError : error occurs while running trace script in container. + RuntimeError : error occurs while running shell script within container. """ import docker @@ -185,24 +106,22 @@ def run(self): logger.debug("running command within container {}: {}" "".format(self.container.id, trace_cmd)) - _, log_gen = self.container.exec_run(trace_cmd, stream=True) - for log in log_gen: + for log in self.container.exec_run(trace_cmd, stream=True): log = log.decode().strip() logger.debug(log) + # TODO: improve error handling. Look into exec_inspect in docker-py. if (("REPROZIP" in log and "couldn't use ptrace" in log) - or "neurodocker (in container): error" in log.lower() - or "_pytracer.Error" in log): + or "NEURODOCKER (in container): error" in log): raise RuntimeError("Error: {}".format(log)) + self.pack_filepath = log.split()[-1].strip() - print(log) - print(self.pack_filepath) try: - rel_pack_filepath = copy_file_from_container( - self.container, self.pack_filepath, self.packfile_save_dir) + rel_pack_filepath = copy_file_from_container(self.container, + self.pack_filepath, + self.packfile_save_dir) except docker.errors.NotFound: - raise RuntimeError( - "ReproZip pack file was not found in the container. `reprozip" - " trace` might have failed.") + raise RuntimeError("ReproZip pack file was not found in the " + "container. `reprozip trace` might have failed.") return os.path.abspath(rel_pack_filepath) diff --git a/neurodocker/reprozip/utils/reprozip_trace_runner.sh b/neurodocker/reprozip/utils/reprozip_trace_runner.sh index 252c5312..a71fd44d 100644 --- a/neurodocker/reprozip/utils/reprozip_trace_runner.sh +++ b/neurodocker/reprozip/utils/reprozip_trace_runner.sh @@ -9,21 +9,24 @@ # variable with the command string and to pass that environment variable to # this script. -set -ex +set -e +set -x -REPROZIP_CONDA="/opt/reprozip-miniconda" -REPROZIP_TRACE_DIR="/neurodocker-reprozip-trace" -CONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh" +REPROZIP_CONDA=/opt/reprozip-miniconda +REPROZIP_TRACE_DIR=/neurodocker-reprozip-trace +CONDA_URL=https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh # This log prefix is used in trace.py. 
NEURODOCKER_LOG_PREFIX="NEURODOCKER (in container)" -function program_exists() { +function program_exists() +{ hash "$1" 2>/dev/null; } -function install_missing_dependencies() { +function install_missing_dependencies() +{ if program_exists "apt-get"; then echo "${NEURODOCKER_LOG_PREFIX}: installing $1 with apt-get" apt-get update -qq @@ -38,18 +41,20 @@ function install_missing_dependencies() { } -function install_conda_reprozip() { +function install_conda_reprozip() +{ TMP_CONDA_INSTALLER=/tmp/miniconda.sh ls /tmp curl -sSL -o "$TMP_CONDA_INSTALLER" "$CONDA_URL" ls /tmp bash $TMP_CONDA_INSTALLER -b -p $REPROZIP_CONDA rm -f $TMP_CONDA_INSTALLER - ${REPROZIP_CONDA}/bin/conda install -yq --channel='conda-forge' python=3.6 reprozip + ${REPROZIP_CONDA}/bin/conda install -yq --channel vida-nyu python=3.6 reprozip } -function run_reprozip_trace() { +function run_reprozip_trace() +{ # https://askubuntu.com/a/674347 cmds=("$@") reprozip_base_cmd="${REPROZIP_CONDA}/bin/reprozip trace -d ${REPROZIP_TRACE_DIR} --dont-identify-packages" @@ -66,10 +71,6 @@ function run_reprozip_trace() { reprozip_cmd="${reprozip_base_cmd} ${continue_} ${cmd}" printf "${NEURODOCKER_LOG_PREFIX}: executing command:\t${reprozip_cmd}\n" $reprozip_cmd - - if [ "$?" != 0 ]; then - printf "${NEURODOCKER_LOG_PREFIX}: ERROR : error running reprozip" - fi done } diff --git a/neurodocker/templates/README.md b/neurodocker/templates/README.md deleted file mode 100644 index 06a8396e..00000000 --- a/neurodocker/templates/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# Dockerfile instructions to install various software - -Each software's entry requires instructions for at least one installation -method for at least one version or generic. Valid installation methods are -'binaries' and 'source'. The former installs pre-compiled binaries, and the -latter builds the software from source. - -Generic installation instructions are instructions that apply to all versions. For example, the installation of all versions of ANTs consists of downloading and extracting the binaries. Because the installation method is consistent across versions, generic instructions can be used. - - -Tree structure of a program's entry. - -``` -<software>.yaml - -generic || <version> -|-- binaries || source -|-- instructions -+-- dependencies - +-- <apt> - +-- <yum> -``` - -Requirements ------------- -1. Installation instructions for at least one version or generic and at - least one installation method, "binaries" or "source". - - If instructions are supplied for a version or range of versions, generic - instructions must be removed (they are no longer truly generic). -2. If the software has system-level dependencies, the package names must be - listed for the `apt` and `yum` package managers. - -Recommendations ---------------- -1. Header with information about the software. - -Version-specific instructions are available to accommodate variations in -installations across versions. - - -String formatting ------------------ -- `install_path` -- `install_deps` -- `binaries_url` : URL to binaries -- `source_url` : URL to source code -- `optional_*` : any optional keyword diff --git a/neurodocker/templates/_header.yaml b/neurodocker/templates/_header.yaml deleted file mode 100644 index 8d0fe8ff..00000000 --- a/neurodocker/templates/_header.yaml +++ /dev/null @@ -1,30 +0,0 @@ -# Instructions to be run at the beginning of every Dockerfile generated by -# Neurodocker. 
- -generic: - custom: - dependencies: - apt: apt-utils bzip2 ca-certificates curl locales unzip - yum: bzip2 ca-certificates curl localedef unzip - env: - LANG: "en_US.UTF-8" - LC_ALL: "en_US.UTF-8" - ND_ENTRYPOINT: "/neurodocker/startup.sh" - instructions: | - export ND_ENTRYPOINT="{{ _header._env['ND_ENTRYPOINT'] }}" - {{ _header.install_dependencies() }} - {%- if _header.pkg_manager == "apt" %} - sed -i -e 's/# {{ _header._env['LC_ALL'] }} UTF-8/{{ _header._env['LC_ALL'] }} UTF-8/' /etc/locale.gen - dpkg-reconfigure --frontend=noninteractive locales - update-locale LANG="{{ _header._env['LANG'] }}" - {%- elif _header.pkg_manager == "yum" %} - localedef -i {{ _header._env['LC_ALL'].split('.')[0] }} -f {{ _header._env['LC_ALL'].split('.')[1] }} {{ _header._env['LC_ALL'] }} - {%- endif %} - chmod 777 /opt && chmod a+s /opt - mkdir -p /neurodocker - if [ ! -f "$ND_ENTRYPOINT" ]; then - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" - echo 'set -e' >> "$ND_ENTRYPOINT" - echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; - fi - chmod -R 777 /neurodocker && chmod a+s /neurodocker diff --git a/neurodocker/templates/afni.yaml b/neurodocker/templates/afni.yaml deleted file mode 100644 index 9ebb0ab8..00000000 --- a/neurodocker/templates/afni.yaml +++ /dev/null @@ -1,81 +0,0 @@ ---- - -# Instructions to install AFNI. -# -# Repository: https://github.com/afni/afni -# Website: https://afni.nimh.nih.gov/ -# Documentation: https://afni.nimh.nih.gov/Documentation - - -generic: - binaries: - urls: - latest: https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz - env: - PATH: "{{ afni.install_path }}:$PATH" - AFNI_PLUGINPATH: "{{ afni.install_path }}" - dependencies: - apt: > - ed gsl-bin libglu1-mesa-dev libglib2.0-0 libglw1-mesa libgomp1 - libjpeg62 libxm4 netpbm tcsh xfonts-base xvfb - yum: > - ed gsl libGLU libgomp libpng12 libXp libXpm netpbm-progs openmotif - tcsh xorg-x11-fonts-misc xorg-x11-server-Xvfb - debs: - - http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb - - http://mirrors.kernel.org/debian/pool/main/libp/libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb - instructions: | - {{ afni.install_dependencies() }} - {{ afni.install_debs() }} - gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" - if [ -n "$gsl2_path" ]; then - ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; - fi - ldconfig - echo "Downloading AFNI ..." 
-      mkdir -p {{ afni.install_path }}
-      curl {{ afni.curl_opts }} {{ afni.binaries_url }} \
-        | tar -xz -C {{ afni.install_path }} --strip-components 1
-      {%- if afni.install_r_pkgs %}
-      {{ afni.install_path }}/rPkgsInstall -pkgs ALL
-      {% endif -%}
-
-  source:
-    env:
-      PATH: "{{ afni.install_path }}:$PATH"
-      AFNI_PLUGINPATH: "{{ afni.install_path }}"
-    dependencies:
-      apt: >
-        curl ed g++ gcc git libglib2.0-dev libglu1-mesa-dev libgsl-dev
-        libmotif-dev libnetpbm10-dev libxext-dev libxi-dev libxpm-dev libxt-dev
-        m4 make nlibxmu-headers nmesa-common-dev r-base r-base-dev tcsh
-        zlib1g-dev
-      yum: >
-        compat-gcc-34 expat-devel gcc gcc-c++ gcc-gfortran git glib2-devel
-        gsl-devel libXext-devel libXi-devel libXmu-devel libXpm-devel
-        libXt-devel m4 make mesa-libGL-devel mesa-libGLU-devel netpbm-devel
-        openmotif-devel R R-devel tcsh zlib-devel
-      debs:
-        - http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb
-        - http://mirrors.kernel.org/debian/pool/main/libp/libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb
-    instructions: |
-      {{ afni.install_dependencies() }}
-      {{ afni.install_debs() }}
-      git clone https://github.com/afni/afni.git {{ afni.install_path }}
-      cd {{ afni.install_path }}
-      git checkout {{ afni.version }}
-      cd src
-      cp Makefile.linux_openmp_64 Makefile
-      perl -p -i -e 's/^LGIFTI.*/LGIFTI = -lexpat/' Makefile
-      perl -p -i -e 's/^USE_LOCAL_X_TREE/#USE_LOCAL_X_TREE/' Makefile
-      perl -p -i -e 's/XLIBS = \$\(XROOT\)\/lib64\/libXm.a -lXt/XLIBS = \$\(XROOT\)\/lib64\/libXm.a \$\(XROOT\)\/lib\/x86_64-linux-gnu\/libXm.a -lXt/' Makefile
-      perl -p -i -e 's/^# XLIBS =/XLIBS =/' Makefile
-      perl -p -i -e 's/^CCOLD.*/CCOLD = \$\(CC\)/' Makefile
-      perl -p -i -e 's/(^LFLAGS.*)/$1 -L\/usr\/lib\/x86_64-linux-gnu/' Makefile
-      perl -p -i -e 's/(^PLFLAGS.*)/$1 -L\/usr\/lib -L\/usr\/lib\/x86_64-linux-gnu/' Makefile
-      perl -p -i -e 's/-lXpm -lXext/-lXpm -lfontconfig -lXext/' Makefile
-      perl -p -i -e 's/(^SUMA_INCLUDE_PATH.*)/$1 -I\/usr\/lib\/x86_64-linux-gnu\/glib-2.0\/include/' Makefile
-      make INSTALLDIR="{{ afni.install_path }}" vastness
-      {%- if afni.install_r_pkgs %}
-      {{ afni.install_path }}/rPkgsInstall -pkgs ALL
-      {% endif -%}
diff --git a/neurodocker/templates/ants.yaml b/neurodocker/templates/ants.yaml
deleted file mode 100644
index 5f62da20..00000000
--- a/neurodocker/templates/ants.yaml
+++ /dev/null
@@ -1,48 +0,0 @@
-# Instructions to install ANTs.
-#
-# Repository: https://github.com/ANTsX/ANTs
-# Website: http://stnava.github.io/ANTs/
-# Documentation:
-#   - https://github.com/ANTsX/ANTs/wiki
-#   - https://github.com/stnava/ANTsTutorial
-
-generic:
-  binaries:
-    urls:
-      "2.2.0": https://dl.dropbox.com/s/2f4sui1z6lcgyek/ANTs-Linux-centos5_x86_64-v2.2.0-0740f91.tar.gz
-      "2.1.0": https://dl.dropbox.com/s/h8k4v6d1xrv0wbe/ANTs-Linux-centos5_x86_64-v2.1.0-78931aa.tar.gz
-      "2.0.3": https://dl.dropbox.com/s/oe4v52lveyt1ry9/ANTs-Linux-centos5_x86_64-v2.0.3-c996539.tar.gz
-      "2.0.0": https://dl.dropbox.com/s/kgqydc44cc2uigb/ANTs-Linux-centos5_x86_64-v2.0.0-7ae1107.tar.gz
-    env:
-      ANTSPATH: "{{ ants.install_path }}"
-      PATH: "{{ ants.install_path }}:$PATH"
-    instructions: |
-      echo "Downloading ANTs ..."
-      mkdir -p {{ ants.install_path }}
-      curl {{ ants.curl_opts }} {{ ants.binaries_url }} \
-        | tar -xz -C {{ ants.install_path }} --strip-components 1
-
-  source:
-    dependencies:
-      apt: cmake g++ gcc git make zlib1g-dev
-      yum: cmake gcc-c++ git make zlib-devel
-    env:
-      ANTSPATH: "{{ ants.install_path }}/bin"
-      PATH: "{{ ants.install_path }}/bin:$PATH"
-      LD_LIBRARY_PATH: "{{ ants.install_path }}/lib:$LD_LIBRARY_PATH"
-    instructions: |
-      {{ ants.install_dependencies() }}
-      mkdir -p /tmp/ants/build
-      git clone https://github.com/ANTsX/ANTs.git /tmp/ants/source
-      {% if ants.version != "master" and ants.version != "latest" -%}
-      cd /tmp/ants/source
-      git fetch --tags
-      git checkout {{ ants.version }}
-      {% endif -%}
-      cd /tmp/ants/build
-      cmake {{ ants.cmake_opts|default("-DBUILD_SHARED_LIBS=ON") }} /tmp/ants/source
-      make {{ ants.make_opts|default("-j1") }}
-      mkdir -p {{ ants.install_path }}
-      mv bin lib {{ ants.install_path }}/
-      mv /tmp/ants/source/Scripts/* {{ ants.install_path }}/bin
-      rm -rf /tmp/ants
diff --git a/neurodocker/templates/convert3d.yaml b/neurodocker/templates/convert3d.yaml
deleted file mode 100644
index 1d3edb65..00000000
--- a/neurodocker/templates/convert3d.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Instructions to install Convert3D.
-#
-# Repository: https://sourceforge.net/projects/c3d/
-# Website: http://www.itksnap.org/c3d/
-# Documentation: https://sourceforge.net/p/c3d/git/ci/master/tree/doc/c3d.md
-
-generic:
-  binaries:
-    urls:
-      nightly: https://sourceforge.net/projects/c3d/files/c3d/Nightly/c3d-nightly-Linux-x86_64.tar.gz/download
-      "1.0.0": https://sourceforge.net/projects/c3d/files/c3d/1.0.0/c3d-1.0.0-Linux-x86_64.tar.gz/download
-    env:
-      C3DPATH: "{{ convert3d.install_path }}"
-      PATH: "{{ convert3d.install_path }}/bin:$PATH"
-    instructions: |
-      echo "Downloading Convert3D ..."
-      mkdir -p {{ convert3d.install_path }}
-      curl {{ convert3d.curl_opts }} {{ convert3d.binaries_url }} \
-        | tar -xz -C {{ convert3d.install_path }} --strip-components 1
diff --git a/neurodocker/templates/dcm2niix.yaml b/neurodocker/templates/dcm2niix.yaml
deleted file mode 100644
index b7d30705..00000000
--- a/neurodocker/templates/dcm2niix.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-# Instructions to install dcm2niix.
-#
-# Repository: https://github.com/rordenlab/dcm2niix
-# Documentation: https://www.nitrc.org/plugins/mwiki/index.php/dcm2nii:MainPage
-
-generic:
-  source:
-    dependencies:
-      apt: cmake g++ gcc git make pigz zlib1g-dev
-      yum: cmake gcc-c++ git libstdc++-static make pigz zlib-devel
-    env:
-      PATH: "{{ dcm2niix.install_path }}/bin:$PATH"
-    instructions: |
-      {{ dcm2niix.install_dependencies() }}
-      git clone https://github.com/rordenlab/dcm2niix /tmp/dcm2niix
-      mkdir /tmp/dcm2niix/build
-      cd /tmp/dcm2niix/build
-      cmake {{ dcm2niix.cmake_opts }} -DCMAKE_INSTALL_PREFIX:PATH={{ dcm2niix.install_path }} ..
-      make {{ dcm2niix.make_opts }}
-      make install
-      rm -rf /tmp/dcm2niix
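Each `binaries` entry above maps pinned version strings to download URLs. A hedged sketch of the lookup this implies, in plain Python; the dict copies two entries from ants.yaml, and the function name is illustrative rather than part of neurodocker's API:

```
# Version -> URL map, as in the "urls" blocks of the deleted templates.
ANTS_BINARIES_URLS = {
    "2.2.0": "https://dl.dropbox.com/s/2f4sui1z6lcgyek/ANTs-Linux-centos5_x86_64-v2.2.0-0740f91.tar.gz",
    "2.1.0": "https://dl.dropbox.com/s/h8k4v6d1xrv0wbe/ANTs-Linux-centos5_x86_64-v2.1.0-78931aa.tar.gz",
}


def binaries_url(version):
    """Return the pre-compiled binaries URL for a pinned version."""
    try:
        return ANTS_BINARIES_URLS[version]
    except KeyError:
        raise ValueError(
            "no pre-compiled ANTs binaries for version {}".format(version))
```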
diff --git a/neurodocker/templates/freesurfer.yaml b/neurodocker/templates/freesurfer.yaml
deleted file mode 100644
index 174b62b2..00000000
--- a/neurodocker/templates/freesurfer.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
----
-# Instructions to install FreeSurfer.
-#
-# Repository: https://github.com/freesurfer/freesurfer
-# Website: https://surfer.nmr.mgh.harvard.edu/
-# Documentation: https://surfer.nmr.mgh.harvard.edu/fswiki
-
-
-generic:
-  binaries:
-    urls:
-      "6.0.1": ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz
-      "6.0.0": ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.0/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.0.tar.gz
-      # See https://github.com/freesurfer/freesurfer/issues/70
-      "6.0.0-min": https://dl.dropbox.com/s/nnzcfttc41qvt31/recon-all-freesurfer6-3.min.tgz
-    dependencies:
-      apt: bc libgomp1 libxmu6 libxt6 tcsh perl
-      yum: bc libgomp libXmu libXt tcsh perl
-    env:
-      FREESURFER_HOME: "{{ freesurfer.install_path }}"
-      PATH: "{{ freesurfer.install_path }}/bin:$PATH"
-    instructions: |
-      {{ freesurfer.install_dependencies() }}
-      echo "Downloading FreeSurfer ..."
-      mkdir -p {{ freesurfer.install_path }}
-      curl {{ freesurfer.curl_opts }} {{ freesurfer.binaries_url }} \
-        | tar -xz -C {{ freesurfer.install_path }} --strip-components 1 {% if freesurfer.exclude_paths -%}\
-        {%- for exclude_path in freesurfer.exclude_paths %}
-        --exclude='{{ exclude_path }}' \
-        {%- endfor %}
-        {%- endif %}
-      sed -i '$isource "{{ freesurfer.install_path }}/SetUpFreeSurfer.sh"' "$ND_ENTRYPOINT"
diff --git a/neurodocker/templates/fsl.yaml b/neurodocker/templates/fsl.yaml
deleted file mode 100644
index 0c0d8857..00000000
--- a/neurodocker/templates/fsl.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-# Instructions to install FSL.
-#
-# *****************************************************************************
-# FSL is non-free. If you are considering commercial use of FSL, please consult
-# the relevant license.
-# *****************************************************************************
-#
-# Website: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/
-# License: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence
-
-generic:
-  binaries:
-    urls:
-      "5.0.11": https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-5.0.11-centos6_64.tar.gz
-      "5.0.10": https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-5.0.10-centos6_64.tar.gz
-      "5.0.9": https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-5.0.9-centos6_64.tar.gz
-      "5.0.8": https://fsl.fmrib.ox.ac.uk/fsldownloads/oldversions/fsl-5.0.8-centos6_64.tar.gz
-    dependencies:
-      apt: bc dc file libfontconfig1 libfreetype6 libgl1-mesa-dev libglu1-mesa-dev libgomp1 libice6 libmng1 libxcursor1 libxft2 libxinerama1 libxrandr2 libxrender1 libxt6 wget
-      yum: bc file libGL libGLU libgomp libICE libjpeg libmng libpng12 libSM libX11 libXcursor libXext libXft libXinerama libXrandr libXt wget
-    env:
-      FSLDIR: "{{ fsl.install_path }}"
-      PATH: "{{ fsl.install_path }}/bin:$PATH"
-    instructions: |
-      {{ fsl.install_dependencies() }}
-      echo "Downloading FSL ..."
-      mkdir -p {{ fsl.install_path }}
-      curl {{ fsl.curl_opts }} {{ fsl.binaries_url }} \
-        | tar -xz -C {{ fsl.install_path }} --strip-components 1
-      sed -i '$iecho Some packages in this Docker container are non-free' $ND_ENTRYPOINT
-      sed -i '$iecho If you are considering commercial use of this container, please consult the relevant license:' $ND_ENTRYPOINT
-      sed -i '$iecho https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence' $ND_ENTRYPOINT
-      sed -i '$isource $FSLDIR/etc/fslconf/fsl.sh' $ND_ENTRYPOINT
-      {% if fsl.version in ("5.0.11", "5.0.10") -%}
-      echo "Installing FSL conda environment ..."
-      bash {{ fsl.install_path }}/etc/fslconf/fslpython_install.sh -f {{ fsl.install_path }}
-      {% endif -%}
diff --git a/neurodocker/templates/matlabmcr.yaml b/neurodocker/templates/matlabmcr.yaml
deleted file mode 100644
index dbc69dc1..00000000
--- a/neurodocker/templates/matlabmcr.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-# Instructions to install MATLAB Compiler Runtime.
-#
-# Website: https://www.mathworks.com/products/compiler/matlab-runtime.html
-
-generic:
-  binaries:
-    urls:
-      2018a: http://ssd.mathworks.com/supportfiles/downloads/R2018a/deployment_files/R2018a/installers/glnxa64/MCR_R2018a_glnxa64_installer.zip
-      2017b: http://ssd.mathworks.com/supportfiles/downloads/R2017b/deployment_files/R2017b/installers/glnxa64/MCR_R2017b_glnxa64_installer.zip
-      2017a: http://ssd.mathworks.com/supportfiles/downloads/R2017a/deployment_files/R2017a/installers/glnxa64/MCR_R2017a_glnxa64_installer.zip
-      2016b: http://ssd.mathworks.com/supportfiles/downloads/R2016b/deployment_files/R2016b/installers/glnxa64/MCR_R2016b_glnxa64_installer.zip
-      2016a: http://ssd.mathworks.com/supportfiles/downloads/R2016a/deployment_files/R2016a/installers/glnxa64/MCR_R2016a_glnxa64_installer.zip
-      2015b: http://ssd.mathworks.com/supportfiles/downloads/R2015b/deployment_files/R2015b/installers/glnxa64/MCR_R2015b_glnxa64_installer.zip
-      2015a: http://ssd.mathworks.com/supportfiles/downloads/R2015a/deployment_files/R2015a/installers/glnxa64/MCR_R2015a_glnxa64_installer.zip
-      2014b: http://ssd.mathworks.com/supportfiles/downloads/R2014b/deployment_files/R2014b/installers/glnxa64/MCR_R2014b_glnxa64_installer.zip
-      2014a: http://ssd.mathworks.com/supportfiles/downloads/R2014a/deployment_files/R2014a/installers/glnxa64/MCR_R2014a_glnxa64_installer.zip
-      2013b: http://ssd.mathworks.com/supportfiles/downloads/R2013b/deployment_files/R2013b/installers/glnxa64/MCR_R2013b_glnxa64_installer.zip
-      2013a: http://ssd.mathworks.com/supportfiles/MCR_Runtime/R2013a/MCR_R2013a_glnxa64_installer.zip
-      2012b: http://ssd.mathworks.com/supportfiles/MCR_Runtime/R2012b/MCR_R2012b_glnxa64_installer.zip
-      2012a: http://ssd.mathworks.com/supportfiles/MCR_Runtime/R2012a/MCR_R2012a_glnxa64_installer.zip
-      2010a: https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin
-    dependencies:
-      apt: bc libxext6 libxpm-dev libxt6
-      yum: bc libXext.x86_64 libXmu libXpm libXt.x86_64
-    env:
-      LD_LIBRARY_PATH: "$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:{{ matlabmcr.install_path }}/{{ matlabmcr.mcr_version }}/runtime/glnxa64:{{ matlabmcr.install_path }}/{{ matlabmcr.mcr_version }}/bin/glnxa64:{{ matlabmcr.install_path }}/{{ matlabmcr.mcr_version }}/sys/os/glnxa64:{{ matlabmcr.install_path }}/{{ matlabmcr.mcr_version }}/extern/bin/glnxa64"
-      MATLABCMD: "{{ matlabmcr.install_path }}/{{ matlabmcr.mcr_version }}/toolbox/matlab"
-    instructions: |
-      {{ matlabmcr.install_dependencies() }}
-      echo "Downloading MATLAB Compiler Runtime ..."
-      {% if matlabmcr.version == "2010a" -%}
-      curl {{ matlabmcr.curl_opts }} -o /tmp/MCRInstaller.bin {{ matlabmcr.binaries_url }}
-      chmod +x /tmp/MCRInstaller.bin
-      /tmp/MCRInstaller.bin -silent -P installLocation="{{ matlabmcr.install_path }}"
-      {% else -%}
-      curl {{ matlabmcr.curl_opts }} -o /tmp/mcr.zip {{ matlabmcr.binaries_url }}
-      unzip -q /tmp/mcr.zip -d /tmp/mcrtmp
-      /tmp/mcrtmp/install -destinationFolder {{ matlabmcr.install_path }} -mode silent -agreeToLicense yes
-      {% endif -%}
-      rm -rf /tmp/*
diff --git a/neurodocker/templates/minc.yaml b/neurodocker/templates/minc.yaml
deleted file mode 100644
index 2c436fb9..00000000
--- a/neurodocker/templates/minc.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
-# Instructions to install MINC toolkit v2.
-#
-# Repository: https://github.com/BIC-MNI/minc-toolkit-v2
-#
-# Binaries are compiled in a CentOS 6.9 Docker container, based on this
-# Dockerfile:
-# https://github.com/BIC-MNI/build_packages/blob/master/build_centos_6.9_x64/Dockerfile
-
-generic:
-  binaries:
-    urls:
-      1.9.15: https://dl.dropbox.com/s/40hjzizaqi91373/minc-toolkit-1.9.15-20170529-CentOS_6.9-x86_64.tar.gz
-    dependencies:
-      apt: >
-        libgl1-mesa-dev libice6 libsm6 libx11-6 libxext6 libxi6 libxmu6
-        libgomp1 libjpeg62
-      yum: >
-        libICE libSM libX11 libXext libXi libXmu libgomp libjpeg-turbo
-        mesa-libGL-devel
-    instructions: |
-      {{ minc.install_dependencies() }}
-      echo "Downloading MINC, BEASTLIB, and MODELS..."
-      mkdir -p {{ minc.install_path }}
-      curl {{ minc.curl_opts }} {{ minc.binaries_url }} \
-        | tar -xz -C {{ minc.install_path }} --strip-components 1
-      curl {{ minc.curl_opts }} http://packages.bic.mni.mcgill.ca/tgz/beast-library-1.1.tar.gz \
-        | tar -xz -C {{ minc.install_path }}/share
-      curl {{ minc.curl_opts }} -o /tmp/mni_90a.zip http://www.bic.mni.mcgill.ca/~vfonov/icbm/2009/mni_icbm152_nlin_sym_09a_minc2.zip
-      unzip /tmp/mni_90a.zip -d {{ minc.install_path }}/share/icbm152_model_09a
-      curl {{ minc.curl_opts }} -o /tmp/mni_90c.zip http://www.bic.mni.mcgill.ca/~vfonov/icbm/2009/mni_icbm152_nlin_sym_09c_minc2.zip
-      unzip /tmp/mni_90c.zip -d {{ minc.install_path }}/share/icbm152_model_09c
-      sed -i 's+MINC_TOOLKIT=/opt/minc+MINC_TOOLKIT={{ minc.install_path }}+g' {{ minc.install_path }}/minc-toolkit-config.sh
-      sed -i '$isource {{ minc.install_path }}/minc-toolkit-config.sh' $ND_ENTRYPOINT
-      rm -rf /tmp/*
diff --git a/neurodocker/templates/miniconda.yaml b/neurodocker/templates/miniconda.yaml
deleted file mode 100644
index 6647ab1b..00000000
--- a/neurodocker/templates/miniconda.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
-# Instructions to install Miniconda.
-#
-# Website: https://conda.io/miniconda.html
-
-generic:
-  binaries:
-    urls:
-      latest: https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
-    env:
-      CONDA_DIR: "{{ miniconda.install_path }}"
-      PATH: "{{ miniconda.install_path }}/bin:$PATH"
-    instructions: |
-      {% if not miniconda._installed -%}
-      export PATH="{{ miniconda.install_path }}/bin:$PATH"
-      echo "Downloading Miniconda installer ..."
-      conda_installer="/tmp/miniconda.sh"
-      curl {{ miniconda.curl_opts }} -o "$conda_installer" {{ miniconda.binaries_url }}
-      bash "$conda_installer" -b -p {{ miniconda.install_path }}
-      rm -f "$conda_installer"
-      {%- if miniconda.version == "latest" %}
-      conda update -yq -nbase conda
-      {% endif -%}
-      conda config --system --prepend channels conda-forge
-      conda config --system --set auto_update_conda false
-      conda config --system --set show_channel_urls true
-      sync && conda clean -tipsy && sync
-      {% endif -%}
-      {% if miniconda.env_name not in miniconda._environments -%}
-      conda create -y -q --name {{ miniconda.env_name }}
-      {% endif -%}
-      {% if miniconda.conda_install is not none -%}
-      conda install -y -q --name {{ miniconda.env_name }} \
-      {%- for pkg in miniconda.conda_install %}
-      {% if not loop.last -%}
-      {{ pkg }} \
-      {%- else -%}
-      {{ pkg }}
-      {%- endif -%}
-      {% endfor %}
-      sync && conda clean -tipsy && sync
-      {% endif -%}
-      {% if miniconda.pip_install is not none -%}
-      bash -c "source activate {{ miniconda.env_name }}
-      pip install -q --no-cache-dir \
-      {%- for pkg in miniconda.pip_install %}
-      {% if not loop.last -%}
-      {{ pkg }} \
-      {%- else -%}
-      {{ pkg }}
-      {%- endif -%}
-      {% endfor %}"
-      sync
-      {% endif -%}
-      {% if miniconda.activate -%}
-      sed -i '$isource activate {{ miniconda.env_name }}' $ND_ENTRYPOINT
-      {% endif -%}
diff --git a/neurodocker/templates/mrtrix3.yaml b/neurodocker/templates/mrtrix3.yaml
deleted file mode 100644
index 73c80bfd..00000000
--- a/neurodocker/templates/mrtrix3.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Instructions to install MRtrix3.
-#
-# Repository: https://github.com/MRtrix3/mrtrix3
-# Website: http://www.mrtrix.org/
-# Documentation: http://mrtrix.readthedocs.io/en/latest/
-
-generic:
-  binaries:
-    urls:
-      "3.0": https://dl.dropbox.com/s/2g008aaaeht3m45/mrtrix3-Linux-centos6.tar.gz
-    env:
-      PATH: "{{ mrtrix3.install_path }}/bin:$PATH"
-    instructions: |
-      echo "Downloading MRtrix3 ..."
-      mkdir -p {{ mrtrix3.install_path }}
-      curl {{ mrtrix3.curl_opts }} {{ mrtrix3.binaries_url }} \
-        | tar -xz -C {{ mrtrix3.install_path }} --strip-components 1
-
-#TODO(kaczmarj): add option to build from source.
diff --git a/neurodocker/templates/neurodebian.yaml b/neurodocker/templates/neurodebian.yaml
deleted file mode 100644
index f2ce7a57..00000000
--- a/neurodocker/templates/neurodebian.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-# Instructions to add NeuroDebian repositories.
-
-generic:
-  custom:
-    dependencies:
-      apt: dirmngr gnupg2
-    instructions: |
-      {{ neurodebian.install_dependencies() }}
-      curl {{ neurodebian.curl_opts }} {{ neurodebian.url }} \
-        > /etc/apt/sources.list.d/neurodebian.sources.list
-      curl -sSL https://dl.dropbox.com/s/zxs209o955q6vkg/neurodebian.gpg | apt-key add -
-      (apt-key adv --refresh-keys --keyserver hkp://pool.sks-keyservers.net:80 0xA5D32F012649A5A9 || true)
-      apt-get -qq update
diff --git a/neurodocker/templates/petpvc.yaml b/neurodocker/templates/petpvc.yaml
deleted file mode 100644
index e8aba4e1..00000000
--- a/neurodocker/templates/petpvc.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Instructions to install PETPVC.
-#
-# Repository: https://github.com/UCL/PETPVC
-
-generic:
-  binaries:
-    urls:
-      "1.2.2": https://github.com/UCL/PETPVC/releases/download/v1.2.2/PETPVC-1.2.2-Linux.tar.gz
-      "1.2.1": https://github.com/UCL/PETPVC/releases/download/v1.2.1/PETPVC-1.2.1-Linux.tar.gz
-      "1.2.0-b": https://github.com/UCL/PETPVC/releases/download/v1.2.0-b/PETPVC-1.2.0-b-Linux.tar.gz
-      "1.2.0-a": https://github.com/UCL/PETPVC/releases/download/v1.2.0-a/PETPVC-1.2.0-a-Linux.tar.gz
-      "1.1.0": https://github.com/UCL/PETPVC/releases/download/v1.1.0/PETPVC-1.1.0-Linux.tar.gz
-      "1.0.0": https://github.com/UCL/PETPVC/releases/download/v1.0.0/PETPVC-1.0.0-Linux.tar.gz
-    env:
-      PATH: "{{ petpvc.install_path }}/bin:$PATH"
-    instructions: |
-      echo "Downloading PETPVC ..."
-      mkdir -p {{ petpvc.install_path }}
-      curl {{ petpvc.curl_opts }} {{ petpvc.binaries_url }} \
-        | tar -xz -C {{ petpvc.install_path }} --strip-components 1
diff --git a/neurodocker/templates/spm12.yaml b/neurodocker/templates/spm12.yaml
deleted file mode 100644
index b197f81a..00000000
--- a/neurodocker/templates/spm12.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-# Instructions to install SPM.
-#
-# Website: http://www.fil.ion.ucl.ac.uk/spm/
-# Documentation: http://www.fil.ion.ucl.ac.uk/spm/doc/
-
-generic:
-  binaries:
-    urls:
-      dev: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/dev/spm12_latest_Linux_R2018a.zip
-      r7219: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip
-      r6914: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r6914_R2010a.zip
-      r6685: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r6685_R2010a.zip
-      r6472: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r6472_R2010a.zip
-      r6225: http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r6225_R2010a.zip
-    env:
-      FORCE_SPMMCR: "1"
-    instructions: |
-      echo "Downloading standalone SPM ..."
-      curl {{ spm12.curl_opts }} -o /tmp/spm12.zip {{ spm12.binaries_url }}
-      unzip -q /tmp/spm12.zip -d /tmp
-      mkdir -p {{ spm12.install_path }}
-      mv /tmp/spm12/* {{ spm12.install_path }}/
-      chmod -R 777 {{ spm12.install_path }}
-      rm -rf /tmp/*
-      {{ spm12.install_path }}/run_spm12.sh {{ spm12.mcr_path }} quit
-      sed -i '$iexport SPMMCRCMD=\"{{ spm12.install_path }}/run_spm12.sh {{ spm12.mcr_path }} script\"' $ND_ENTRYPOINT
diff --git a/neurodocker/tests/test_generate.py b/neurodocker/tests/test_generate.py
new file mode 100644
index 00000000..6182d3ff
--- /dev/null
+++ b/neurodocker/tests/test_generate.py
@@ -0,0 +1,256 @@
+"""Tests for neurodocker.generate"""
+# Author: Jakub Kaczmarzyk
+
+from __future__ import absolute_import
+
+import pytest
+
+from neurodocker import generate as DF
+from neurodocker.interfaces import (AFNI, ANTs, FreeSurfer, FSL, Miniconda,
+                                    MRtrix3, SPM)
+
+
+def test__add_add():
+    with pytest.raises(ValueError):
+        DF._add_add(['one/path'])
+    with pytest.raises(ValueError):
+        DF._add_add(['/absolute/path', 'not/absolute'])
+    out = DF._add_add(["path/to/here", "/tmp/here"])
+    assert 'ADD ["path/to/here", "/tmp/here"]' == out
+
+
+def test__add_to_entrypoint():
+    cmd = "export FOO=bar"
+    truth = "sed -i '$i{}' $ND_ENTRYPOINT".format(cmd)
+    out = DF._add_to_entrypoint(cmd, with_run=False)
+    assert truth in out and "RUN" not in out
+    out = DF._add_to_entrypoint(cmd, with_run=True)
+    assert truth in out and "RUN" in out
+
+
+def test__add_arg():
+    args = {'FOO': 'BAR', 'BAZ': ''}
+    truth = ('ARG FOO="BAR"'
+             '\nARG BAZ')
+    assert truth == DF._add_arg(args)
+
+
+def test__add_base():
+    base = "debian:stretch"
+    truth = "FROM {}".format(base)
+    assert truth == DF._add_base(base)
+
+
+def test__add_cmd():
+    cmd = ["--arg1", "--arg2"]
+    truth = 'CMD ["--arg1", "--arg2"]'
+    assert truth == DF._add_cmd(cmd)
+
+
+def test__add_copy():
+    with pytest.raises(ValueError):
+        DF._add_copy(['one/path'])
+    with pytest.raises(ValueError):
+        DF._add_copy(['/absolute/path', 'not/absolute'])
+    out = DF._add_copy(["path/to/here", "/tmp/here"])
+    assert 'COPY ["path/to/here", "/tmp/here"]' == out
+
+
+def test__add_entrypoint():
+    entrypoint = 'bash "path/to/file"'
+    truth = 'ENTRYPOINT ["bash", "\\"path/to/file\\""]'
+    assert truth == DF._add_entrypoint(entrypoint)
+
+
+def test__add_env_vars():
+    env = {'THIS': 'THAT'}
+    truth = 'ENV THIS="THAT"'
+    assert truth == DF._add_env_vars(env)
+
+    env['A'] = 'B'
+    truth = ('ENV THIS="THAT" \\'
+             '\n    A="B"')
+    assert truth == DF._add_env_vars(env)
+
+
+def test__add_exposed_ports():
+    ports = ["1234", "5678"]
+    out = DF._add_exposed_ports(ports)
+    assert "EXPOSE {}".format(' '.join(ports)) == out
+
+    ports = '1234'
+    out = DF._add_exposed_ports(ports)
+    assert "EXPOSE {}".format(ports) == out
+
+
+def test__add_install():
+    pkgs = ["git", "vim", "flags=-q --fake-flag"]
+    out = DF._add_install(pkgs, 'apt')
+    assert 'apt-get install -y -q --fake-flag' in out
+    assert 'git' in out
+    assert 'vim' in out
+    assert '--no-install-recommends' not in out
+
+
+def test__add_arbitrary_instruction():
+    instruction = "RUN echo hello"
+    assert instruction in DF._add_arbitrary_instruction(instruction)
+
+
+def test__add_label():
+    labels = {"FOO": "BAR", "BAZ": "CAT"}
+    truth = ('LABEL FOO="BAR" \\'
+             '\n      BAZ="CAT"')
+    assert truth == DF._add_label(labels)
+
+
+def test_add_run():
+    cmd = "apt-get update\napt-get install -y git"
+    truth = ("# User-defined instruction"
+             "\nRUN apt-get update \\"
+             "\n    apt-get install -y git")
+    assert truth == DF._add_run(cmd)
+
+
+def test__add_run_bash():
+    bash = 'echo "hello world" > myfile.txt'
+    truth = ('# User-defined BASH instruction'
+             '\nRUN bash -c "echo \\"hello world\\" > myfile.txt"')
+    assert truth == DF._add_run_bash(bash)
+
+
+def test__add_volume():
+    volumes = ["/usr/bin", "/var"]
+    truth = 'VOLUME ["/usr/bin", "/var"]'
+    assert truth == DF._add_volume(volumes)
+
+
+def test__add_workdir():
+    workdir = "/home"
+    truth = "WORKDIR {}".format(workdir)
+    assert truth == DF._add_workdir(workdir)
+
+
+def test_DockerfileUsers():
+    inst = DF._DockerfileUsers()
+    assert inst.initialized_users == ['root']
+    out = inst.add('neuro')
+    assert "useradd" in out and "neuro" in out
+    assert inst.initialized_users == ['root', 'neuro']
+    inst.clear_memory()
+    assert inst.initialized_users == ['root']
+
+
+def _get_val_in_list_of_tuple(list_of_tuple, key):
+    return [v for k, v in list_of_tuple if k == key][0]
+
+
+class TestDockerfile(object):
+
+    @pytest.fixture(autouse=True)
+    def setup(self, tmpdir):
+        self.tmpdir = tmpdir
+        self.specs = {'pkg_manager': 'apt',
+                      'check_urls': False,
+                      'instructions': [
+                          ('base', 'ubuntu:17.04'),
+                          ('afni', {'version': 'latest'}),
+                          ('mrtrix3', {}),
+                          ('miniconda', {'env_name': 'default',
+                                         'conda_install': 'python=3.5.1 numpy',
+                                         'pip_install': 'pandas'}),
+                          ('ants', {'version': '2.1.0', 'use_binaries': True}),
+                          ('freesurfer', {'version': '6.0.0', 'min': True}),
+                          ('fsl', {'version': '5.0.10', 'use_binaries': True}),
+                          ('spm', {'version': 12, 'matlab_version': 'R2017a'}),
+                          ('instruction', "RUN ls"),
+                      ]
+                      }
+
+        inst = self.specs['instructions']
+
+        self.base = "FROM {}".format(_get_val_in_list_of_tuple(inst, 'base'))
+        self.noninteractive = "ARG DEBIAN_FRONTEND=noninteractive"
+        self.miniconda = Miniconda(pkg_manager='apt', check_urls=False, **_get_val_in_list_of_tuple(inst, 'miniconda')).cmd
+        Miniconda.clear_memory()
+        self.ants = ANTs(pkg_manager='apt', check_urls=False, **_get_val_in_list_of_tuple(inst, 'ants')).cmd
+        self.fsl = FSL(pkg_manager='apt', check_urls=False, **_get_val_in_list_of_tuple(inst, 'fsl')).cmd
+        self.spm = SPM(pkg_manager='apt', check_urls=False, **_get_val_in_list_of_tuple(inst, 'spm')).cmd
+
+    def test___repr__(self):
+        DF.Dockerfile(self.specs)
+
+    def test___str__(self):
+        df = DF.Dockerfile(self.specs)
+        assert str(df) == df.cmd
+
+    def test__create_cmd(self):
+        cmd = DF.Dockerfile(self.specs).cmd
+        assert self.base in cmd
+        assert self.noninteractive in cmd
+        print(cmd)
+        print(self.miniconda)
+        assert self.miniconda in cmd
+        assert self.ants in cmd
+        assert self.fsl in cmd
+        assert self.spm in cmd
+        assert _get_val_in_list_of_tuple(self.specs['instructions'], 'instruction') in cmd
+
+    def test_save(self):
+        filepath = self.tmpdir.join('Dockerfile')
+        df = DF.Dockerfile(self.specs)
+        df.save(filepath.strpath)
+        assert len(self.tmpdir.listdir()) == 1, "file not saved"
+        assert df.cmd in filepath.read(), "file content not correct"
+
+
+def test_build_image_from_json():
+    """Test of saving JSON in Docker image and building new Docker image with
+    that JSON file.
+    """
+    from contextlib import redirect_stdout
+    import io
+    import os
+    import subprocess
+    import sys
+    import tempfile
+
+    from neurodocker.interfaces.tests.memory import _dockerfiles_equivalent
+    from neurodocker.neurodocker import main
+
+    def _get_dockerfile_from_stdout(args):
+        f = io.StringIO()
+        with redirect_stdout(f):
+            main(args)
+        return f.getvalue()
+
+    # Build Docker image.
+    args = ['generate', '--base', 'debian:stretch', '--pkg-manager', 'apt',
+            '--install', 'vim', '--run-bash', 'echo "foo\n\'bar(baz)\'"']
+    df = _get_dockerfile_from_stdout(args)
+    docker_args = "docker build -t json-original -".split()
+    subprocess.run(docker_args, input=df.encode(), check=True)
+
+    # Copy JSON file onto host.
+    tempdir = tempfile.mkdtemp(dir='/tmp')
+    json_file = "/neurodocker/neurodocker_specs.json"
+    copy_cmd = ("docker run --rm -v {}:/nd json-original mv {} /nd"
+                "".format(tempdir, json_file)).split()
+    subprocess.run(copy_cmd, check=True)
+    json_file = os.path.join(tempdir, 'neurodocker_specs.json')
+
+    # Create new Dockerfile and build.
+    args = "generate --file {}".format(json_file).split()
+    df_new = _get_dockerfile_from_stdout(args)
+    docker_args = "docker build -t json-copy -".split()
+    subprocess.run(docker_args, input=df_new.encode(), check=True)
+
+    for line_a, line_b in zip(df.split('\n'), df_new.split('\n')):
+        if not line_a == line_b:
+            print(line_a + "\t|\t" + line_b)
+
+    with open(os.path.join(tempdir, "DOCKERFILE_A"), 'w') as fp:
+        fp.write(df)
+    with open(os.path.join(tempdir, "DOCKERFILE_B"), 'w') as fp:
+        fp.write(df_new)
+
+    assert _dockerfiles_equivalent(df, df_new), "Failed building from JSON"
diff --git a/neurodocker/tests/test_neurodocker.py b/neurodocker/tests/test_neurodocker.py
index 40f5bccd..16071d9e 100644
--- a/neurodocker/tests/test_neurodocker.py
+++ b/neurodocker/tests/test_neurodocker.py
@@ -3,35 +3,37 @@
 
 from __future__ import absolute_import, unicode_literals
 
+import sys
+
 import pytest
 
-from neurodocker.neurodocker import main
+from neurodocker.neurodocker import create_parser, parse_args, main
 
 
 def test_generate():
-    args = (
-        "generate docker -b ubuntu:17.04 -p apt"
-        " --arg FOO=BAR BAZ"
-        " --afni version=latest method=source"
-        " --ants version=2.2.0 method=source"
-        " --freesurfer version=6.0.0"
-        " --fsl version=5.0.10"
-        " --user=neuro"
-        " --miniconda env_name=neuro conda_install=python=3.6.2"
-        " --user=root"
-        " --mrtrix3 version=3.0"
-        " --neurodebian os_codename=zesty server=usa-nh"
-        " --spm12 version=r7219 matlab_version=R2017a"
-        " --expose 1234 9000"
-        " --volume /var /usr/bin"
-        " --label FOO=BAR BAZ=CAT"
-        " --copy relpath/to/file.txt /tmp/file.txt"
-        " --add relpath/to/file2.txt /tmp/file2.txt"
-        " --cmd '--arg1' '--arg2'"
-        " --workdir /home"
-        " --install git"
-        " --user=neuro"
-    )
+    args = ("generate -b ubuntu:17.04 -p apt"
+            " --arg FOO=BAR BAZ"
+            " --afni version=latest"
+            " --ants version=2.2.0"
+            " --freesurfer version=6.0.0"
+            " --fsl version=5.0.10"
+            " --user=neuro"
+            " --miniconda env_name=neuro conda_install=python=3.6.2"
+            " --user=root"
+            " --mrtrix3"
+            " --neurodebian os_codename=zesty download_server=usa-nh"
+            " --spm version=12 matlab_version=R2017a"
+            " --no-check-urls"
+            " --expose 1234 9000"
+            " --volume /var /usr/bin"
+            " --label FOO=BAR BAZ=CAT"
+            " --copy relpath/to/file.txt /tmp/file.txt"
+            " --add relpath/to/file2.txt /tmp/file2.txt"
+            " --cmd '--arg1' '--arg2'"
+            " --workdir /home"
+            " --install git"
+            " --user=neuro"
+            )
     main(args.split())
 
     with pytest.raises(SystemExit):
@@ -45,9 +47,13 @@ def test_generate():
     with pytest.raises(SystemExit):
         main()
 
+    args = "generate -b ubuntu -p apt --ants option=value"
+    with pytest.raises(ValueError):
+        main(args.split())
+
 
 def test_generate_opts(capsys):
-    args = "generate docker -b ubuntu:17.04 -p apt {}"
+    args = "generate -b ubuntu:17.04 -p apt --no-check-urls {}"
     main(args.format('--user=neuro').split())
     out, _ = capsys.readouterr()
     assert "USER neuro" in out
@@ -62,9 +68,8 @@ def test_generate_opts(capsys):
 
     main(args.format('--env KEY=VAL KEY2=VAL').split())
     out, _ = capsys.readouterr()
-    assert (
-        ('ENV KEY="VAL" \\' in out and 'KEY="VAL"' in out)
-        or ('ENV KEY2="VAL" \\' in out and 'KEY="VAL"'))
+    assert 'ENV KEY="VAL" \\' in out
+    assert '    KEY2="VAL"' in out
 
     main(args.format('--expose 1230 1231').split())
     out, _ = capsys.readouterr()
@@ -78,18 +83,22 @@ def test_generate_opts(capsys):
     out, _ = capsys.readouterr()
     assert "vi" in out
 
+    main(args.format('--instruction RUNecho').split())
+    out, _ = capsys.readouterr()
+    assert "RUNecho" in out
+
 
-@pytest.mark.xfail
 def test_generate_from_json(capsys, tmpdir):
     import json
 
-    cmd = "generate docker -b debian:stretch -p apt --convert3d version=1.0.0"
+    cmd = "generate -b debian:stretch -p apt --c3d version=1.0.0"
     main(cmd.split())
     true, _ = capsys.readouterr()
 
-    specs = {'generation_timestamp': '2017-08-31 21:49:04',
+    specs = {'check_urls': True,
+             'generation_timestamp': '2017-08-31 21:49:04',
              'instructions': [['base', 'debian:stretch'],
-                              ['convert3d', {'version': '1.0.0'}]],
+                              ['c3d', {'version': '1.0.0'}]],
              'neurodocker_version': '0.2.0-18-g9227b17',
              'pkg_manager': 'apt'}
     str_specs = json.dumps(specs)
@@ -104,3 +113,25 @@ def test_generate_from_json(capsys, tmpdir):
     # saves to JSON (with timestamp).
     sl = slice(8, -19)
     assert true.split('\n')[sl] == test.split('\n')[sl]
+
+
+def test_generate_no_print(capsys):
+    args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls']
+    main(args)
+    out, _ = capsys.readouterr()
+    assert "FROM" in out and "RUN" in out
+
+    args.append('--no-print-df')
+    main(args)
+    out, _ = capsys.readouterr()
+    assert not out
+
+
+def test_generate_save(tmpdir):
+    outfile = tmpdir.join("test.txt")
+    args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3',
+            'use_binaries=false', '--no-print-df', '-o', outfile.strpath,
+            '--no-check-urls']
+    main(args)
+    assert outfile.read(), "saved Dockerfile is empty"
+    assert "git clone https://github.com/MRtrix3/mrtrix3.git" in outfile.read()
diff --git a/neurodocker/tests/test_utils.py b/neurodocker/tests/test_utils.py
index c9792deb..2ece84b8 100644
--- a/neurodocker/tests/test_utils.py
+++ b/neurodocker/tests/test_utils.py
@@ -3,10 +3,52 @@
 from __future__ import absolute_import
 
 import pytest
+from requests.exceptions import RequestException
 
 from neurodocker import utils
 
 
+def test_manage_pkgs():
+    assert 'yum' in utils.manage_pkgs.keys(), "yum not found"
+    assert 'apt' in utils.manage_pkgs.keys(), "apt not found"
+
+    # Test that each entry is a dictionary with 'install' and 'remove' keys.
+    for manager in utils.manage_pkgs:
+        assert 'install' in utils.manage_pkgs[manager].keys(), 'install not found'
+        assert 'remove' in utils.manage_pkgs[manager].keys(), 'remove not found'
+
+    assert "yum" not in utils.manage_pkgs['apt'].values()
+    assert "apt" not in utils.manage_pkgs['yum'].values()
+
+
+def test_check_url():
+    urls = {'good': 'https://www.google.com/',
+            '404': 'http://httpstat.us/404',
+            'timeout': 'http://10.255.255.255'}
+
+    assert utils.check_url(urls['good']), "Bad response from google.com"
+    with pytest.raises(RequestException):
+        utils.check_url(urls['404'])
+    with pytest.raises(RequestException):
+        utils.check_url(urls['timeout'])
+
+
+def test_indent():
+    pre = "FROM"
+    cmd = "centos:latest"
+    indented = ' '.join((pre, cmd))
+    assert utils.indent(pre, cmd) == indented, "error prepending Dockerfile instruction"
+
+    pre = "RUN"
+    cmd = ("echo 'green eggs'\n"
+           "&& echo ' and'\n"
+           "&& echo ' ham'")
+    indented = ("RUN echo 'green eggs' \\\n"
+                "    && echo ' and' \\\n"
+                "    && echo ' ham'")
+    assert utils.indent(pre, cmd) == indented, "error indenting multi-line instruction"
+
+
 def test_save_load_json(tmpdir):
     filepath = tmpdir.join('test.json').strpath
 
@@ -18,6 +60,9 @@ def test_save_load_json(tmpdir):
 
 
 def test_set_log_level(tmpdir):
+    import logging
+
+    logger = logging.getLogger(__name__)
     utils.set_log_level('info')
     with pytest.raises(ValueError):
         utils.set_log_level('fake_level')
diff --git a/neurodocker/tests/test_version.py b/neurodocker/tests/test_version.py
new file mode 100644
index 00000000..4951a404
--- /dev/null
+++ b/neurodocker/tests/test_version.py
@@ -0,0 +1,20 @@
+"""Tests for neurodocker.version"""
+# Author: Jakub Kaczmarzyk
+from __future__ import absolute_import
+
+import shutil
+
+from neurodocker import version
+
+
+def test_get_gitversion():
+    gitver = version.get_gitversion()
+
+    if shutil.which('git') is not None:  # git exists
+        assert gitver is not None
+        if gitver.startswith('v'):
+            assert gitver[1:] == version.__version__
+        else:
+            assert gitver == version.__version__
+    else:
+        assert version.__version__
diff --git a/neurodocker/utils.py b/neurodocker/utils.py
index 352b3dec..9b0d6508 100644
--- a/neurodocker/utils.py
+++ b/neurodocker/utils.py
@@ -1,27 +1,65 @@
 """Package utility functions."""
+# Author: Jakub Kaczmarzyk
+from __future__ import absolute_import, division, print_function
 
 import json
 import logging
-import re
 
-import yaml
-try:
-    from yaml import CLoader as Loader
-except ImportError:
-    from yaml import Loader
+import requests
 
+APT_GET_INSTALL_FLAGS = "-q --no-install-recommends"
+YUM_INSTALL_FLAGS = "-q"
 
-def _count_key_occurence_list_of_tuples(list_of_tuples, key):
-    """Return the number of times `key` occurs as a key in `list_of_tuples`."""
-    return sum(1 for i, _ in list_of_tuples if i == key)
+
+# Templates for installing packages and cleaning up with apt and yum.
+manage_pkgs = {'apt': {'install': ('apt-get update -qq && apt-get install -yq '
+                                   '--no-install-recommends {pkgs}'),
+                       'remove': 'apt-get purge -y --auto-remove {pkgs}',
+                       'clean': ('apt-get clean\n'
+                                 '&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*'),},
+               'yum': {'install': 'yum install -y -q {pkgs}',
+                       # Trying to uninstall ca-certificates breaks things.
+                       'remove': 'yum remove -y $(echo "{pkgs}" | sed "s/ca-certificates//g")',
+                       'clean': ('yum clean packages\n'
+                                 '&& rm -rf /var/cache/yum/* /tmp/* /var/tmp/*'),},
+               }
+
+
+def _indent_pkgs(line_len, pkgs):
+    cmd = " {first_pkg}".format(first_pkg=pkgs[0])
+    separator = "\n" + " " * (line_len + 1)
+    return separator.join((cmd, *pkgs[1:]))
+
+
+def yum_install(pkgs, flags=None):
+    """Return command to install `pkgs` with `yum`."""
+    if flags is None:
+        flags = YUM_INSTALL_FLAGS
+    cmd = "yum install -y {flags}".format(flags=flags)
+    line_len = len(cmd)
+    return cmd + _indent_pkgs(line_len, pkgs)
+
+
+def apt_get_install(pkgs, flags=None):
+    """Return command to install `pkgs` with `apt-get`."""
+    if flags is None:
+        flags = APT_GET_INSTALL_FLAGS
+    cmd = ("apt-get update -qq"
+           "\n&& apt-get install -y {flags}").format(flags=flags)
+    line_len = len(cmd.split('\n')[-1])
+    return cmd + _indent_pkgs(line_len, pkgs)
 
 
 def _string_vals_to_bool(dictionary):
     """Convert string values to bool."""
-    # TODO: remove unnecessary boolean variables.
-    bool_vars = {'min', 'activate', 'use_installer'}
+    import re
+
+    bool_vars = ['use_binaries', 'use_installer', 'use_neurodebian',
+                 'add_to_path', 'min', 'activate']
+
     if dictionary is None:
         return
+
     for key in dictionary.keys():
         if key in bool_vars:
             if re.search('false', dictionary[key], re.IGNORECASE):
@@ -38,12 +76,18 @@ def _string_vals_to_list(dictionary):
 
     for kk in list_keys:
         if kk in dictionary.keys():
-            dictionary[kk] = dictionary[kk].split()
+            dictionary[kk] = " ".join((jj.strip() for jj
+                                       in dictionary[kk].split()))
+
+
+def _count_key_occurence_list_of_tuples(list_of_tuples, key):
+    """Return the number of times `key` occurs as a key in `list_of_tuples`."""
+    return sum(1 for i, _ in list_of_tuples if i == key)
 
 
 def _namespace_to_specs(namespace):
     """Return dictionary of specifications from namespace."""
-    from neurodocker.generators.common import _installation_implementations
+    from neurodocker.generate import dockerfile_implementations
 
     instructions = [('base', namespace.base)]
     try:
@@ -62,50 +106,99 @@ def _namespace_to_specs(namespace):
 
     # Convert string options that should be booleans to booleans.
     for instruction, options in instructions:
-        if instruction in _installation_implementations.keys():
+        if instruction in dockerfile_implementations['software'].keys():
             _string_vals_to_bool(options)
             _string_vals_to_list(options)
 
-    return {
-        'pkg_manager': namespace.pkg_manager,
-        'instructions': instructions,
-    }
+    specs = {'pkg_manager': namespace.pkg_manager,
+             'check_urls': namespace.check_urls,
+             'instructions': instructions, }
+
+    return specs
 
 
 def is_url(string):
-    from urllib.parse import urlparse
+    try:
+        from urllib.parse import urlparse  # Python 3
+    except ImportError:
+        from urlparse import urlparse  # Python 2
     result = urlparse(string)
     return (result.scheme and result.netloc)
 
 
+def check_url(url, timeout=5, **kwargs):
+    """Return True if `url` returns a status code < 400. Otherwise, raise an
+    error. `kwargs` are arguments for `requests.head()`.
+
+    Parameters
+    ----------
+    url : str
+        The URL to check.
+    timeout : numeric
+        Number of seconds to wait for response from server.
+    """
+    request = requests.head(url, timeout=timeout, **kwargs)
+    request.raise_for_status()
+    return True
+
+
+def indent(instruction, cmd, line_suffix=' \\'):
+    """Add Docker instruction and indent command.
+
+    Parameters
+    ----------
+    instruction : str
+        Docker instruction for `cmd` (e.g., "RUN").
+    cmd : str
+        The command (lines separated by newline character).
+    line_suffix : str
+        The suffix to append to each line except the last one.
+
+    Returns
+    -------
+    dockerfile_chunk : str
+        Instruction compatible with Dockerfile syntax.
+    """
+    instruction = instruction.upper()
+    amount = len(instruction) + 1
+    indent = ' ' * amount
+    split_cmd = cmd.splitlines()
+
+    if len(split_cmd) == 1:
+        return "{} {}".format(instruction, cmd)
+
+    dockerfile_chunk = ''
+    for i, line in enumerate(split_cmd):
+        if i == 0:  # First line.
+            dockerfile_chunk += "{} {}{}".format(instruction, line, line_suffix)
+        # Change the following to use str.join() method.
+        elif i == len(split_cmd) - 1:  # Last line.
+            dockerfile_chunk += "\n{}{}".format(indent, line)
+        else:
+            dockerfile_chunk += "\n{}{}{}".format(indent, line, line_suffix)
+    return dockerfile_chunk
+
+
 def load_json(filepath, **kwargs):
-    """Load JSON file `filepath` as dictionary. `kwargs` are keyword arguments
-    for `json.load()`.
+    """Load JSON file `filepath` as dictionary. `kwargs` can be keyword
+    arguments for `json.load()`.
     """
     with open(filepath, 'r') as fp:
         return json.load(fp, **kwargs)
 
 
 def save_json(obj, filepath, indent=4, **kwargs):
-    """Save `obj` to JSON file `filepath`. `kwargs` are keyword arguments for
-    `json.dump()`.
+    """Save `obj` to JSON file `filepath`. `kwargs` can be keyword arguments
+    for `json.dump()`.
     """
     with open(filepath, 'w') as fp:
         json.dump(obj, fp, indent=indent, **kwargs)
         fp.write('\n')
 
 
-def load_yaml(filepath, **kwargs):
-    """Return dictionary from YAML file."""
-    with open(filepath) as fp:
-        return yaml.load(fp, Loader=Loader, **kwargs)
-
-
 def create_logger():
     """Return Neurodocker logger."""
-    import logging
-
     logger = logging.getLogger('neurodocker')
     ch = logging.StreamHandler()
     format_ = '[NEURODOCKER %(asctime)s %(levelname)s]: %(message)s'
@@ -126,6 +219,8 @@ def set_log_level(level):
     level: {'debug', 'info', 'warning', 'error', 'critical}
         The level at which to print messages. Case-insensitive.
     """
+    import logging
+
     logging_levels = {'DEBUG': logging.DEBUG,
                       'INFO': logging.INFO,
                       'WARNING': logging.WARNING,
@@ -136,21 +231,3 @@ def set_log_level(level):
         logger.setLevel(level)
     except KeyError:
         raise ValueError("invalid level '{}'".format(level))
-
-
-def get_docker_client(version='auto', **kwargs):
-    try:
-        import docker
-    except ImportError:
-        raise ImportError("the docker python package is required for this")
-    return docker.from_env(version='auto', **kwargs)
-
-
-def get_singularity_client(**kwargs):
-    try:
-        import singularity
-        from singularity.cli import Singularity
-    except ImportError:
-        raise ImportError(
-            "the singularity python package is required for this")
-    return Singularity(**kwargs)
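The `manage_pkgs` templates and the `indent()` helper reinstated above combine as follows. A hypothetical interactive session; the expected outputs are taken from the assertions in `neurodocker/tests/test_utils.py`:

```
from neurodocker import utils

# manage_pkgs holds shell templates with a "{pkgs}" placeholder.
cmd = utils.manage_pkgs["apt"]["install"].format(pkgs="git vim")
# -> "apt-get update -qq && apt-get install -yq --no-install-recommends git vim"

# indent() prepends a Dockerfile instruction and aligns continuation lines.
print(utils.indent("RUN", "echo 'green eggs'\n&& echo ' and'\n&& echo ' ham'"))
# RUN echo 'green eggs' \
#     && echo ' and' \
#     && echo ' ham'
```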
diff --git a/neurodocker/version.py b/neurodocker/version.py
index 92beed29..97d813f6 100644
--- a/neurodocker/version.py
+++ b/neurodocker/version.py
@@ -3,7 +3,7 @@
 Copied from https://github.com/nipy/nipype/blob/master/nipype/info.py.
 """
 
-__version__ = '0.4.0rc0'
+__version__ = '0.3.2'
 
 
 def get_gitversion():
@@ -17,18 +17,22 @@ def get_gitversion():
     import os
     import subprocess
 
-    here = os.path.abspath(os.path.dirname(__file__))
+    here = os.path.dirname(os.path.realpath(__file__))
+
     try:
-        cmd = "git describe"
-        return subprocess.check_output(cmd.split(), cwd=here).decode().strip()
-    except subprocess.CalledProcessError:
-        return None
-
-
-# Only append git hash if this is not a release.
-if 'dev' in __version__:
-    gitversion = get_gitversion()  # v0.3.2-183-gea5425b
-    if gitversion is not None:
-        __version__ = gitversion
-        if gitversion.startswith('v'):
-            __version__ = __version__[1:]
+        cmd = 'git describe'.split()
+        stdout, stderr = subprocess.Popen(cmd, cwd=here,
+                                          stdout=subprocess.PIPE,
+                                          stderr=subprocess.PIPE).communicate()
+        ver = stdout.decode().strip()
+    except Exception:
+        ver = None
+
+    return ver
+
+
+gitversion = get_gitversion()
+if gitversion:
+    __version__ = gitversion
+    if gitversion.startswith('v'):
+        __version__ = __version__[1:]
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 7dcab461..00000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-docker>=3.0
-pytest-cov>=2.0
-singularity>=1.0
diff --git a/requirements.txt b/requirements.txt
index ef3e6ca5..b03b4474 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,2 @@
-jinja2>=2.0
-PyYAML>=3.0
+docker>=2.3,<3.0
+requests>=2.0
diff --git a/setup.py b/setup.py
index 8e82c8ca..f2552af0 100755
--- a/setup.py
+++ b/setup.py
@@ -1,74 +1,33 @@
-import os
-import re
-from setuptools import find_packages
-from setuptools import setup
-
-here = os.path.abspath(os.path.dirname(__file__))
-
-
-def read(*parts):
-    with open(os.path.join(here, *parts), 'r') as fp:
-        return fp.read()
-
+#!/usr/bin/env python
 
-def find_version(*file_paths):
-    version_file = read(*file_paths)
-    version_match = re.search(
-        r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
-    if version_match:
-        return version_match.group(1)
-    raise RuntimeError("Unable to find version string.")
+import os
+from setuptools import find_packages, setup
 
 
 def main():
-    """Main setup function."""
-
-    with open(os.path.join(here, 'README.md'), encoding='utf-8') as fp:
-        long_description = fp.read()
+    here = os.path.dirname(os.path.realpath(__file__))
 
     # https://github.com/nipy/nipype/blob/master/setup.py#L114-L120
-    # ldict = locals()
-    # with open(os.path.join(here, 'neurodocker', 'version.py')) as fp:
-    #     exec(fp.read(), globals(), ldict)
+    ldict = locals()
+    version_file = os.path.join(here, 'neurodocker', 'version.py')
+    with open(version_file) as fp:
+        exec(fp.read(), globals(), ldict)
 
-    with open(os.path.join(here, 'requirements.txt')) as fp:
+    reqs_file = os.path.join(here, 'requirements.txt')
+    with open(reqs_file) as fp:
         requirements = [r.strip() for r in fp.readlines()]
 
-    with open(os.path.join(here, 'requirements-dev.txt')) as fp:
-        requirements_dev = [r.strip() for r in fp.readlines()]
-
-    setup(
-        name="neurodocker",
-        version=find_version("neurodocker", "version.py"),
-        license="Apache License, 2.0",
-        description="Create custom containers for neuroimaging",
-        long_description=long_description,
-        long_description_content_type='text/markdown',
-        url="https://github.com/kaczmarj/neurodocker",
-        author="Jakub Kaczmarzyk",
-        author_email="jakubk@mit.edu",
-        classifiers=[
-            'Development Status :: 4 - Beta',
-            'Intended Audience :: Developers',
-            'Intended Audience :: Science/Research',
-            'Programming Language :: Python :: 3',
-            'Programming Language :: Python :: 3.5',
-            'Programming Language :: Python :: 3.6',
-        ],
-        keywords='containers, neuroimaging reproducibility research',
-        packages=find_packages(exclude=["tests"]),
-        install_requires=requirements,
-        entry_points={
-            "console_scripts": [
-                "neurodocker=neurodocker.neurodocker:main"
-            ],
-        },
-        python_requires='>=3.5',
-        extras_require={
-            'dev': requirements_dev,
-        },
-    )
-
+    setup(name='neurodocker',
+          version=ldict['__version__'],
+          url='https://github.com/kaczmarj/neurodocker',
+          author='Jakub Kaczmarzyk',
+          author_email='jakubk@mit.edu',
+          license='Apache License, 2.0',
+          packages=find_packages(),
+          install_requires=requirements,
+          entry_points={'console_scripts':
+                        ['neurodocker=neurodocker.neurodocker:main']}
+          )
 
 
 if __name__ == '__main__':
     main()
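As a quick smoke test of the reinstated command-line interface, the `main()` entry point can be driven directly, mirroring the calls made in `neurodocker/tests/test_neurodocker.py`; the option values here are only examples:

```
from neurodocker.neurodocker import main

# Prints the generated Dockerfile to stdout.
args = ("generate -b ubuntu:17.04 -p apt --no-check-urls"
        " --install git vim"
        " --user=neuro")
main(args.split())
```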