From 2d148d0db5965a53faeee8a4458d780bdfb6445b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Tue, 4 Aug 2020 23:55:00 -0400 Subject: [PATCH 1/8] chaniging imports from nipype.workflows to niflow.nipype1.workflows in the notebooks --- notebooks/basic_graph_visualization.ipynb | 6 +++--- notebooks/basic_import_workflows.ipynb | 6 +++--- notebooks/basic_workflow.ipynb | 6 +++--- notebooks/handson_preprocessing.ipynb | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/notebooks/basic_graph_visualization.ipynb b/notebooks/basic_graph_visualization.ipynb index 7b0c89a..6522844 100644 --- a/notebooks/basic_graph_visualization.ipynb +++ b/notebooks/basic_graph_visualization.ipynb @@ -41,7 +41,7 @@ "outputs": [], "source": [ "# Import the function to create an spm fmri preprocessing workflow\n", - "from nipype.workflows.fmri.spm import create_spm_preproc\n", + "from niflow.nipype1.workflows.fmri.spm import create_spm_preproc\n", "\n", "# Create the workflow object\n", "spmflow = create_spm_preproc()" @@ -274,7 +274,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -288,7 +288,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_import_workflows.ipynb b/notebooks/basic_import_workflows.ipynb index 6b163a5..4151ffc 100644 --- a/notebooks/basic_import_workflows.ipynb +++ b/notebooks/basic_import_workflows.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth\n", + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth\n", "smoothwf = create_susan_smooth()" ] }, @@ -323,7 +323,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -337,7 +337,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/basic_workflow.ipynb b/notebooks/basic_workflow.ipynb index 60f3b93..09bde6f 100644 --- a/notebooks/basic_workflow.ipynb +++ b/notebooks/basic_workflow.ipynb @@ -484,7 +484,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl import create_susan_smooth" ] }, { @@ -946,7 +946,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -960,7 +960,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, diff --git a/notebooks/handson_preprocessing.ipynb b/notebooks/handson_preprocessing.ipynb index 5f37b8e..444c171 100644 --- a/notebooks/handson_preprocessing.ipynb +++ b/notebooks/handson_preprocessing.ipynb @@ -765,7 +765,7 @@ "metadata": {}, "outputs": [], "source": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth" + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth" ] }, { @@ -1753,7 +1753,7 @@ "metadata": { "anaconda-cloud": {}, "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ 
-1767,7 +1767,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.11" } }, "nbformat": 4, From 3ce8c10af318e4eac1a5c58694dda1976ec877ad Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Wed, 5 Aug 2020 00:03:02 -0400 Subject: [PATCH 2/8] updating python and adding niflow.nipype1 to the requirements --- Dockerfile | 63 ++++++++++++++++++++++++++------------------------ Singularity | 66 ++++++++++++++++++++++++++++++----------------------- generate.sh | 12 +++++----- 3 files changed, 76 insertions(+), 65 deletions(-) diff --git a/Dockerfile b/Dockerfile index bd39263..920d774 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,6 @@ -# Generated by Neurodocker version 0.5.0 -# Timestamp: 2019-07-14 08:54:07 UTC +# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.7.0 +# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty +# Timestamp: 2020-08-05 04:01:42 UTC # # Thank you for using Neurodocker. If you discover any issues # or ways to improve this software, please submit an issue or @@ -9,6 +10,8 @@ FROM neurodebian:stretch-non-free +USER root + ARG DEBIAN_FRONTEND="noninteractive" ENV LANG="en_US.UTF-8" \ @@ -91,12 +94,12 @@ RUN export TMPDIR="$(mktemp -d)" \ && rm -rf "$TMPDIR" \ && unset TMPDIR \ && echo "Downloading standalone SPM ..." \ - && curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip \ + && curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip \ && unzip -q /tmp/spm12.zip -d /tmp \ && mkdir -p /opt/spm12-r7219 \ && mv /tmp/spm12/* /opt/spm12-r7219/ \ && chmod -R 777 /opt/spm12-r7219 \ - && rm -rf /tmp/* \ + && rm -rf /tmp/spm* \ && /opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \ && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT @@ -120,35 +123,35 @@ RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ && sync && conda clean --all && sync \ && conda create -y -q --name neuro \ && conda install -y -q --name neuro \ - 'python=3.6' \ - 'pytest' \ - 'jupyter' \ - 'jupyterlab' \ - 'jupyter_contrib_nbextensions' \ - 'traits' \ - 'pandas' \ - 'matplotlib' \ - 'scikit-learn' \ - 'scikit-image' \ - 'seaborn' \ - 'nbformat' \ - 'nb_conda' \ + "python=3.7" \ + "pytest" \ + "jupyter" \ + "jupyterlab" \ + "jupyter_contrib_nbextensions" \ + "traits" \ + "pandas" \ + "matplotlib" \ + "scikit-learn" \ + "scikit-image" \ + "seaborn" \ + "nbformat" \ + "nb_conda" \ && sync && conda clean --all && sync \ && bash -c "source activate neuro \ && pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.7.1 \ - niflow-nipype1-workflows \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" \ + "https://github.com/nipy/nipype/tarball/master" \ + "https://github.com/INCF/pybids/tarball/0.7.1" \ + "nilearn" \ + "datalad[full]" \ + "nipy" \ + "duecredit" \ + "nbval" \ + "niflow-nipype1-workflows"" \ && rm -rf ~/.cache/pip/* \ && sync \ && sed -i '$isource activate neuro' $ND_ENTRYPOINT -ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" +ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:" RUN bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' @@ -232,9 +235,8 @@ RUN echo '{ \ \n [ \ \n "miniconda", \ \n { \ - \n 
"miniconda_version": "4.3.31", \ \n "conda_install": [ \ - \n "python=3.6", \ + \n "python=3.7", \ \n "pytest", \ \n "jupyter", \ \n "jupyterlab", \ @@ -255,7 +257,8 @@ RUN echo '{ \ \n "datalad[full]", \ \n "nipy", \ \n "duecredit", \ - \n "nbval" \ + \n "nbval", \ + \n "niflow-nipype1-workflows" \ \n ], \ \n "create_env": "neuro", \ \n "activate": true \ @@ -264,7 +267,7 @@ RUN echo '{ \ \n [ \ \n "env", \ \n { \ - \n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" \ + \n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" \ \n } \ \n ], \ \n [ \ diff --git a/Singularity b/Singularity index bce0d3c..fcde735 100644 --- a/Singularity +++ b/Singularity @@ -1,5 +1,6 @@ -# Generated by Neurodocker version 0.5.0 -# Timestamp: 2019-07-14 08:54:09 UTC +# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.7.0 +# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty +# Timestamp: 2020-08-05 04:01:43 UTC # # Thank you for using Neurodocker. If you discover any issues # or ways to improve this software, please submit an issue or @@ -11,6 +12,8 @@ Bootstrap: docker From: neurodebian:stretch-non-free %post +su - root + export ND_ENTRYPOINT="/neurodocker/startup.sh" apt-get update -qq apt-get install -y -q --no-install-recommends \ @@ -83,12 +86,12 @@ chmod +x "$TMPDIR/MCRInstaller.bin" rm -rf "$TMPDIR" unset TMPDIR echo "Downloading standalone SPM ..." -curl -fsSL --retry 5 -o /tmp/spm12.zip http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip +curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip unzip -q /tmp/spm12.zip -d /tmp mkdir -p /opt/spm12-r7219 mv /tmp/spm12/* /opt/spm12-r7219/ chmod -R 777 /opt/spm12-r7219 -rm -rf /tmp/* +rm -rf /tmp/spm* /opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT @@ -110,29 +113,30 @@ conda config --system --set show_channel_urls true sync && conda clean --all && sync conda create -y -q --name neuro conda install -y -q --name neuro \ - 'python=3.6' \ - 'pytest' \ - 'jupyter' \ - 'jupyterlab' \ - 'jupyter_contrib_nbextensions' \ - 'traits' \ - 'pandas' \ - 'matplotlib' \ - 'scikit-learn' \ - 'scikit-image' \ - 'seaborn' \ - 'nbformat' \ - 'nb_conda' + "python=3.7" \ + "pytest" \ + "jupyter" \ + "jupyterlab" \ + "jupyter_contrib_nbextensions" \ + "traits" \ + "pandas" \ + "matplotlib" \ + "scikit-learn" \ + "scikit-image" \ + "seaborn" \ + "nbformat" \ + "nb_conda" sync && conda clean --all && sync bash -c "source activate neuro pip install --no-cache-dir \ - https://github.com/nipy/nipype/tarball/master \ - https://github.com/INCF/pybids/tarball/0.7.1 \ - nilearn \ - datalad[full] \ - nipy \ - duecredit \ - nbval" + "https://github.com/nipy/nipype/tarball/master" \ + "https://github.com/INCF/pybids/tarball/0.7.1" \ + "nilearn" \ + "datalad[full]" \ + "nipy" \ + "duecredit" \ + "nbval" \ + "niflow-nipype1-workflows"" rm -rf ~/.cache/pip/* sync sed -i '$isource activate neuro' $ND_ENTRYPOINT @@ -174,6 +178,10 @@ echo '{ \n "neurodebian:stretch-non-free" \n ], \n [ +\n "user", +\n "root" +\n ], +\n [ \n "_header", \n { \n "version": "generic", @@ -223,9 +231,8 @@ echo '{ \n [ \n "miniconda", \n { -\n "miniconda_version": "4.3.31", \n "conda_install": [ -\n "python=3.6", +\n "python=3.7", \n "pytest", \n "jupyter", \n "jupyterlab", @@ -246,7 +253,8 @@ echo '{ \n "datalad[full]", \n "nipy", \n "duecredit", 
-\n "nbval" +\n "nbval", +\n "niflow-nipype1-workflows" \n ], \n "create_env": "neuro", \n "activate": true @@ -255,7 +263,7 @@ echo '{ \n [ \n "env", \n { -\n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" +\n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" \n } \n ], \n [ @@ -333,7 +341,7 @@ export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmc export MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" export CONDA_DIR="/opt/miniconda-latest" export PATH="/opt/miniconda-latest/bin:$PATH" -export LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:/usr/lib/fsl/5.0" +export LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:" %files . /home/neuro/nipype_tutorial diff --git a/generate.sh b/generate.sh index a8a3e94..a22ace7 100644 --- a/generate.sh +++ b/generate.sh @@ -14,12 +14,12 @@ generate_docker() { --spm12 version=r7219 \ --user=neuro \ --workdir /home/neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --miniconda \ + conda_install="python=3.7 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ pip_install="https://github.com/nipy/nipype/tarball/master https://github.com/INCF/pybids/tarball/0.7.1 - nilearn datalad[full] nipy duecredit nbval" \ + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ @@ -53,12 +53,12 @@ generate_singularity() { --spm12 version=r7219 \ --user=neuro \ --workdir /home/neuro \ - --miniconda miniconda_version="4.3.31" \ - conda_install="python=3.6 pytest jupyter jupyterlab jupyter_contrib_nbextensions + --miniconda \ + conda_install="python=3.7 pytest jupyter jupyterlab jupyter_contrib_nbextensions traits pandas matplotlib scikit-learn scikit-image seaborn nbformat nb_conda" \ pip_install="https://github.com/nipy/nipype/tarball/master https://github.com/INCF/pybids/tarball/0.7.1 - nilearn datalad[full] nipy duecredit nbval" \ + nilearn datalad[full] nipy duecredit nbval niflow-nipype1-workflows" \ create_env="neuro" \ activate=True \ --env LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:$LD_LIBRARY_PATH" \ From 95944ed48b7b4d6587c29e5c256fda8e9b79aa5f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 02:14:07 -0400 Subject: [PATCH 3/8] adding testing using github actions --- .github/workflows/testing.yml | 50 +++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 .github/workflows/testing.yml diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml new file mode 100644 index 0000000..d44161e --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,50 @@ +name: CI + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + # In this step, this action saves a list of existing images, + # the cache is created without them in the post run. + # It also restores the cache if it exists. + - uses: satackey/action-docker-layer-caching@v0.0.5 + - name: build the image + run: docker build . 
--file Dockerfile -t nipype_tutorial:latest + + test_1: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: satackey/action-docker-layer-caching@v0.0.5 + - name: run test 1 + run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 + + test_2: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: satackey/action-docker-layer-caching@v0.0.5 + - name: run test 2 + run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 + + test_3: + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: satackey/action-docker-layer-caching@v0.0.5 + - name: run test 3 + run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 + From 0df59672a674fc17008a1a0812b88a20c545c735 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 11:26:31 -0400 Subject: [PATCH 4/8] Update .github/workflows/testing.yml Co-authored-by: Mathias Goncalves --- .github/workflows/testing.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index d44161e..3ec3a56 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -28,7 +28,7 @@ jobs: - uses: actions/checkout@v2 - uses: satackey/action-docker-layer-caching@v0.0.5 - name: run test 1 - run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 1 test_2: needs: build @@ -47,4 +47,3 @@ jobs: - uses: satackey/action-docker-layer-caching@v0.0.5 - name: run test 3 run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 - From c1f0e284bdf10e5770fee1021ca6cd05b3f30d5f Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 11:26:39 -0400 Subject: [PATCH 5/8] Update .github/workflows/testing.yml Co-authored-by: Mathias Goncalves --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 3ec3a56..91a272d 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -46,4 +46,4 @@ jobs: - uses: actions/checkout@v2 - uses: satackey/action-docker-layer-caching@v0.0.5 - name: run test 3 - run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 3 From dff13040ea9cc3ab9da13753f7c83e3b26eb4c89 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 11:27:01 -0400 Subject: [PATCH 6/8] Update .github/workflows/testing.yml Co-authored-by: Mathias Goncalves --- .github/workflows/testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 91a272d..b6fe46b 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -37,7 +37,7 @@ jobs: - uses: actions/checkout@v2 - uses: satackey/action-docker-layer-caching@v0.0.5 - name: run test 2 - run: docker run -it --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 + run: docker run --rm nipype_tutorial:latest python /home/neuro/nipype_tutorial/test_notebooks.py 2 test_3: needs: build From 
197aa73c22f7e6c29e385184e065a91c2759a107 Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 11:54:00 -0400 Subject: [PATCH 7/8] adding dockerfile generation to the ga --- .github/workflows/testing.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index b6fe46b..4a04daf 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -13,7 +13,8 @@ jobs: steps: - uses: actions/checkout@v2 - + - name: generate the Dockerfile from generate.sh + run: bash generate.sh # In this step, this action saves a list of existing images, # the cache is created without them in the post run. # It also restores the cache if it exists. From 90a7de7e869e05d87b83908759b1635cd4ded94b Mon Sep 17 00:00:00 2001 From: Dorota Jarecka Date: Fri, 7 Aug 2020 12:01:33 -0400 Subject: [PATCH 8/8] removing Dockerfile/Singularity - should be generated by generate.sh in the GA testing workflow --- Dockerfile | 343 -------------------------------------------------- Singularity | 350 ---------------------------------------------------- 2 files changed, 693 deletions(-) delete mode 100644 Dockerfile delete mode 100644 Singularity diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 920d774..0000000 --- a/Dockerfile +++ /dev/null @@ -1,343 +0,0 @@ -# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.7.0 -# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty -# Timestamp: 2020-08-05 04:01:42 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -FROM neurodebian:stretch-non-free - -USER root - -ARG DEBIAN_FRONTEND="noninteractive" - -ENV LANG="en_US.UTF-8" \ - LC_ALL="en_US.UTF-8" \ - ND_ENTRYPOINT="/neurodocker/startup.sh" -RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \ - && apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \ - && dpkg-reconfigure --frontend=noninteractive locales \ - && update-locale LANG="en_US.UTF-8" \ - && chmod 777 /opt && chmod a+s /opt \ - && mkdir -p /neurodocker \ - && if [ ! 
-f "$ND_ENTRYPOINT" ]; then \ - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \ - && echo 'set -e' >> "$ND_ENTRYPOINT" \ - && echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" \ - && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \ - fi \ - && chmod -R 777 /neurodocker && chmod a+s /neurodocker - -ENTRYPOINT ["/neurodocker/startup.sh"] - -RUN apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -RUN sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -ENV FORCE_SPMMCR="1" \ - LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" \ - MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -RUN export TMPDIR="$(mktemp -d)" \ - && apt-get update -qq \ - && apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && echo "Downloading MATLAB Compiler Runtime ..." \ - && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \ - && dpkg -i /tmp/toinstall.deb \ - && rm /tmp/toinstall.deb \ - && apt-get install -f \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* \ - && curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin \ - && chmod +x "$TMPDIR/MCRInstaller.bin" \ - && "$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" \ - && rm -rf "$TMPDIR" \ - && unset TMPDIR \ - && echo "Downloading standalone SPM ..." \ - && curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip \ - && unzip -q /tmp/spm12.zip -d /tmp \ - && mkdir -p /opt/spm12-r7219 \ - && mv /tmp/spm12/* /opt/spm12-r7219/ \ - && chmod -R 777 /opt/spm12-r7219 \ - && rm -rf /tmp/spm* \ - && /opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \ - && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -RUN test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro -USER neuro - -WORKDIR /home/neuro - -ENV CONDA_DIR="/opt/miniconda-latest" \ - PATH="/opt/miniconda-latest/bin:$PATH" -RUN export PATH="/opt/miniconda-latest/bin:$PATH" \ - && echo "Downloading Miniconda installer ..." 
\ - && conda_installer="/tmp/miniconda.sh" \ - && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \ - && bash "$conda_installer" -b -p /opt/miniconda-latest \ - && rm -f "$conda_installer" \ - && conda update -yq -nbase conda \ - && conda config --system --prepend channels conda-forge \ - && conda config --system --set auto_update_conda false \ - && conda config --system --set show_channel_urls true \ - && sync && conda clean --all && sync \ - && conda create -y -q --name neuro \ - && conda install -y -q --name neuro \ - "python=3.7" \ - "pytest" \ - "jupyter" \ - "jupyterlab" \ - "jupyter_contrib_nbextensions" \ - "traits" \ - "pandas" \ - "matplotlib" \ - "scikit-learn" \ - "scikit-image" \ - "seaborn" \ - "nbformat" \ - "nb_conda" \ - && sync && conda clean --all && sync \ - && bash -c "source activate neuro \ - && pip install --no-cache-dir \ - "https://github.com/nipy/nipype/tarball/master" \ - "https://github.com/INCF/pybids/tarball/0.7.1" \ - "nilearn" \ - "datalad[full]" \ - "nipy" \ - "duecredit" \ - "nbval" \ - "niflow-nipype1-workflows"" \ - && rm -rf ~/.cache/pip/* \ - && sync \ - && sed -i '$isource activate neuro' $ND_ENTRYPOINT - -ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:" - -RUN bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -USER root - -RUN mkdir /data && chmod 777 /data && chmod a+s /data - -RUN mkdir /output && chmod 777 /output && chmod a+s /output - -USER neuro - -RUN printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -RUN bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -RUN curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -COPY [".", "/home/neuro/nipype_tutorial"] - -USER root - -RUN chown -R neuro /home/neuro/nipype_tutorial - -RUN rm -rf /opt/conda/pkgs/* - -USER neuro - -RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -WORKDIR /home/neuro/nipype_tutorial - -CMD ["jupyter-notebook"] - -RUN echo '{ \ - \n "pkg_manager": "apt", \ - \n "instructions": [ \ - \n [ \ - \n "base", \ - \n "neurodebian:stretch-non-free" \ - \n ], \ - \n [ \ - \n "install", \ - \n [ \ - \n "convert3d", \ - \n "ants", \ - \n "fsl", \ - \n "gcc", \ - \n "g++", \ - \n "graphviz", \ - \n "tree", \ - \n "git-annex-standalone", \ - \n "vim", \ - \n "emacs-nox", \ - \n "nano", \ - \n "less", \ - \n "ncdu", \ - \n "tig", \ - \n "git-annex-remote-rclone", \ - \n "octave", \ - \n "netbase" \ - \n ] \ - \n ], \ - \n [ \ - \n "add_to_entrypoint", \ - \n "source /etc/fsl/fsl.sh" \ - \n ], \ - \n [ \ - \n "spm12", \ - \n { \ - \n "version": "r7219" \ - \n } \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/home/neuro" \ - \n ], \ - \n [ \ - \n "miniconda", \ - \n { \ - \n "conda_install": [ \ - \n "python=3.7", \ - \n "pytest", \ - \n "jupyter", \ - \n "jupyterlab", \ - \n "jupyter_contrib_nbextensions", \ - \n "traits", \ - \n "pandas", \ - \n "matplotlib", \ - \n "scikit-learn", \ - \n "scikit-image", \ - \n "seaborn", \ - \n "nbformat", \ - \n "nb_conda" \ - \n ], \ - \n "pip_install": [ \ - \n "https://github.com/nipy/nipype/tarball/master", \ - \n "https://github.com/INCF/pybids/tarball/0.7.1", \ - \n "nilearn", \ - \n "datalad[full]", \ - \n "nipy", \ - \n "duecredit", \ - \n "nbval", \ - \n "niflow-nipype1-workflows" \ - \n ], \ - \n "create_env": "neuro", \ - \n "activate": true \ - \n } \ - \n ], \ - \n [ \ - \n "env", \ - \n { \ - \n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" \ - \n } \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /data && chmod 777 /data && chmod a+s /data" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir /output && chmod 777 /output && chmod a+s /output" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" \ - \n ], \ - \n [ \ - \n "run_bash", \ - \n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" \ - \n ], \ - \n [ \ - \n "run", \ - \n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" \ - \n ], \ - \n [ \ - \n "copy", \ - \n [ \ - \n ".", \ - \n "/home/neuro/nipype_tutorial" \ - \n ] \ - \n ], \ - \n [ \ - \n "user", \ - \n "root" \ - \n ], \ - \n [ \ - \n "run", \ - \n "chown -R neuro /home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "run", \ - \n "rm -rf /opt/conda/pkgs/*" \ - \n ], \ - \n [ \ - \n "user", \ - \n "neuro" \ - \n ], \ - \n [ \ - \n "run", \ - \n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \ - \n ], \ - \n [ \ - \n "workdir", \ - \n "/home/neuro/nipype_tutorial" \ - \n ], \ - \n [ \ - \n "cmd", \ - \n [ \ - \n "jupyter-notebook" \ - \n ] \ - \n ] \ - \n ] \ - \n}' > /neurodocker/neurodocker_specs.json diff --git a/Singularity b/Singularity deleted file mode 100644 index fcde735..0000000 --- a/Singularity +++ /dev/null @@ -1,350 +0,0 @@ -# Your version: 0.6.0+5.g74cb187.dirty Latest version: 0.7.0 -# Generated by Neurodocker version 0.6.0+5.g74cb187.dirty -# Timestamp: 2020-08-05 04:01:43 UTC -# -# Thank you for using Neurodocker. If you discover any issues -# or ways to improve this software, please submit an issue or -# pull request on our GitHub repository: -# -# https://github.com/kaczmarj/neurodocker - -Bootstrap: docker -From: neurodebian:stretch-non-free - -%post -su - root - -export ND_ENTRYPOINT="/neurodocker/startup.sh" -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - apt-utils \ - bzip2 \ - ca-certificates \ - curl \ - locales \ - unzip -apt-get clean -rm -rf /var/lib/apt/lists/* -sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen -dpkg-reconfigure --frontend=noninteractive locales -update-locale LANG="en_US.UTF-8" -chmod 777 /opt && chmod a+s /opt -mkdir -p /neurodocker -if [ ! -f "$ND_ENTRYPOINT" ]; then - echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" - echo 'set -e' >> "$ND_ENTRYPOINT" - echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" - echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; -fi -chmod -R 777 /neurodocker && chmod a+s /neurodocker - -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - convert3d \ - ants \ - fsl \ - gcc \ - g++ \ - graphviz \ - tree \ - git-annex-standalone \ - vim \ - emacs-nox \ - nano \ - less \ - ncdu \ - tig \ - git-annex-remote-rclone \ - octave \ - netbase -apt-get clean -rm -rf /var/lib/apt/lists/* - -sed -i '$isource /etc/fsl/fsl.sh' $ND_ENTRYPOINT - -export TMPDIR="$(mktemp -d)" -apt-get update -qq -apt-get install -y -q --no-install-recommends \ - bc \ - libncurses5 \ - libxext6 \ - libxmu6 \ - libxpm-dev \ - libxt6 -apt-get clean -rm -rf /var/lib/apt/lists/* -echo "Downloading MATLAB Compiler Runtime ..." -curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb -dpkg -i /tmp/toinstall.deb -rm /tmp/toinstall.deb -apt-get install -f -apt-get clean -rm -rf /var/lib/apt/lists/* -curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin -chmod +x "$TMPDIR/MCRInstaller.bin" -"$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" -rm -rf "$TMPDIR" -unset TMPDIR -echo "Downloading standalone SPM ..." 
-curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7219_R2010a.zip -unzip -q /tmp/spm12.zip -d /tmp -mkdir -p /opt/spm12-r7219 -mv /tmp/spm12/* /opt/spm12-r7219/ -chmod -R 777 /opt/spm12-r7219 -rm -rf /tmp/spm* -/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 quit -sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT - -test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro -su - neuro - -cd /home/neuro - -export PATH="/opt/miniconda-latest/bin:$PATH" -echo "Downloading Miniconda installer ..." -conda_installer="/tmp/miniconda.sh" -curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -bash "$conda_installer" -b -p /opt/miniconda-latest -rm -f "$conda_installer" -conda update -yq -nbase conda -conda config --system --prepend channels conda-forge -conda config --system --set auto_update_conda false -conda config --system --set show_channel_urls true -sync && conda clean --all && sync -conda create -y -q --name neuro -conda install -y -q --name neuro \ - "python=3.7" \ - "pytest" \ - "jupyter" \ - "jupyterlab" \ - "jupyter_contrib_nbextensions" \ - "traits" \ - "pandas" \ - "matplotlib" \ - "scikit-learn" \ - "scikit-image" \ - "seaborn" \ - "nbformat" \ - "nb_conda" -sync && conda clean --all && sync -bash -c "source activate neuro - pip install --no-cache-dir \ - "https://github.com/nipy/nipype/tarball/master" \ - "https://github.com/INCF/pybids/tarball/0.7.1" \ - "nilearn" \ - "datalad[full]" \ - "nipy" \ - "duecredit" \ - "nbval" \ - "niflow-nipype1-workflows"" -rm -rf ~/.cache/pip/* -sync -sed -i '$isource activate neuro' $ND_ENTRYPOINT - - -bash -c 'source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main' - -su - root - -mkdir /data && chmod 777 /data && chmod a+s /data - -mkdir /output && chmod 777 /output && chmod a+s /output - -su - neuro - -printf "[user]\n\tname = miykael\n\temail = michaelnotter@hotmail.com\n" > ~/.gitconfig - -bash -c 'source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*' - -curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete - -su - root - -chown -R neuro /home/neuro/nipype_tutorial - -rm -rf /opt/conda/pkgs/* - -su - neuro - -mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py - -cd /home/neuro/nipype_tutorial - -echo '{ -\n "pkg_manager": "apt", -\n "instructions": [ -\n [ -\n "base", -\n "neurodebian:stretch-non-free" -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "_header", -\n { -\n "version": "generic", -\n "method": "custom" -\n } -\n ], -\n [ -\n "install", -\n [ -\n "convert3d", -\n "ants", -\n "fsl", -\n "gcc", -\n "g++", -\n "graphviz", -\n "tree", -\n "git-annex-standalone", -\n "vim", -\n "emacs-nox", -\n "nano", -\n "less", -\n "ncdu", -\n "tig", -\n "git-annex-remote-rclone", -\n "octave", -\n "netbase" -\n ] -\n ], -\n [ -\n "add_to_entrypoint", -\n "source /etc/fsl/fsl.sh" -\n ], -\n [ -\n "spm12", -\n { -\n "version": "r7219" -\n } -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "workdir", -\n "/home/neuro" -\n ], -\n [ -\n "miniconda", -\n { -\n "conda_install": [ -\n "python=3.7", -\n "pytest", -\n "jupyter", -\n "jupyterlab", -\n "jupyter_contrib_nbextensions", -\n "traits", -\n "pandas", -\n "matplotlib", -\n "scikit-learn", -\n "scikit-image", -\n "seaborn", -\n "nbformat", -\n "nb_conda" -\n ], -\n "pip_install": [ -\n "https://github.com/nipy/nipype/tarball/master", -\n "https://github.com/INCF/pybids/tarball/0.7.1", -\n "nilearn", -\n "datalad[full]", -\n "nipy", -\n "duecredit", -\n "nbval", -\n "niflow-nipype1-workflows" -\n ], -\n "create_env": "neuro", -\n "activate": true -\n } -\n ], -\n [ -\n "env", -\n { -\n "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" -\n } -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && jupyter nbextension enable exercise2/main && jupyter nbextension enable spellchecker/main" -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "mkdir /data && chmod 777 /data && chmod a+s /data" -\n ], -\n [ -\n "run", -\n "mkdir /output && chmod 777 /output && chmod a+s /output" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "printf \"[user]\\\n\\tname = miykael\\\n\\temail = michaelnotter@hotmail.com\\\n\" > ~/.gitconfig" -\n ], -\n [ -\n "run_bash", -\n "source activate neuro && cd /data && datalad install -r ///workshops/nih-2017/ds000114 && cd ds000114 && datalad update -r && datalad get -r sub-01/ses-test/anat sub-01/ses-test/func/*fingerfootlips*" -\n ], -\n [ -\n "run", -\n "curl -L https://files.osf.io/v1/resources/fvuh8/providers/osfstorage/580705089ad5a101f17944a9 -o /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && tar xf /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz -C /data/ds000114/derivatives/fmriprep/. 
&& rm /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c.tar.gz && find /data/ds000114/derivatives/fmriprep/mni_icbm152_nlin_asym_09c -type f -not -name ?mm_T1.nii.gz -not -name ?mm_brainmask.nii.gz -not -name ?mm_tpm*.nii.gz -delete" -\n ], -\n [ -\n "copy", -\n [ -\n ".", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ], -\n [ -\n "user", -\n "root" -\n ], -\n [ -\n "run", -\n "chown -R neuro /home/neuro/nipype_tutorial" -\n ], -\n [ -\n "run", -\n "rm -rf /opt/conda/pkgs/*" -\n ], -\n [ -\n "user", -\n "neuro" -\n ], -\n [ -\n "run", -\n "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" -\n ], -\n [ -\n "workdir", -\n "/home/neuro/nipype_tutorial" -\n ] -\n ] -\n}' > /neurodocker/neurodocker_specs.json - -%environment -export LANG="en_US.UTF-8" -export LC_ALL="en_US.UTF-8" -export ND_ENTRYPOINT="/neurodocker/startup.sh" -export FORCE_SPMMCR="1" -export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" -export MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab" -export CONDA_DIR="/opt/miniconda-latest" -export PATH="/opt/miniconda-latest/bin:$PATH" -export LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:" - -%files -. /home/neuro/nipype_tutorial - -%runscript -/neurodocker/startup.sh "$@"
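A minimal local sketch of the CI flow introduced in patches 3-8, assuming Docker is installed and the commands are run from the repository root:

# Regenerate the Dockerfile from the Neurodocker call in generate.sh,
# as the "build" job now does once the checked-in Dockerfile/Singularity
# files are removed (patches 7-8).
bash generate.sh

# Build the tutorial image with the tag the workflow uses.
docker build . --file Dockerfile -t nipype_tutorial:latest

# Run one of the three notebook test groups; the workflow runs groups 1-3
# in separate jobs and drops -it because the CI runner has no TTY (patches 4-6).
docker run --rm nipype_tutorial:latest \
    python /home/neuro/nipype_tutorial/test_notebooks.py 1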