From d73d3ca180f2cfe07326982d9a399cf6ffd72143 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Boris=20Cl=C3=A9net?=
Date: Fri, 19 Apr 2024 13:52:42 +0200
Subject: [PATCH] Correlation update

---
 Dockerfile                                | 320 +----------------------
 narps_open/utils/correlation/__main__.py  |   2 +-
 2 files changed, 4 insertions(+), 318 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 13b22194..b4956310 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,319 +1,5 @@
-# Generated by: Neurodocker version 0.7.0+0.gdc97516.dirty
-# Latest release: Neurodocker version 0.7.0
-# Timestamp: 2021/11/09 11:04:47 UTC
-#
-# Thank you for using Neurodocker. If you discover any issues
-# or ways to improve this software, please submit an issue or
-# pull request on our GitHub repository:
-#
-#     https://github.com/ReproNim/neurodocker
-
-FROM neurodebian:stretch-non-free
-
+FROM nipype/nipype:py38
+COPY . /work
 USER root
-
-ARG DEBIAN_FRONTEND="noninteractive"
-
-ENV LANG="en_US.UTF-8" \
-    LC_ALL="en_US.UTF-8" \
-    ND_ENTRYPOINT="/neurodocker/startup.sh"
-RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \
-    && apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           apt-utils \
-           bzip2 \
-           ca-certificates \
-           curl \
-           locales \
-           unzip \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \
-    && dpkg-reconfigure --frontend=noninteractive locales \
-    && update-locale LANG="en_US.UTF-8" \
-    && chmod 777 /opt && chmod a+s /opt \
-    && mkdir -p /neurodocker \
-    && if [ ! -f "$ND_ENTRYPOINT" ]; then \
-         echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \
-         && echo 'set -e' >> "$ND_ENTRYPOINT" \
-         && echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" \
-         && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \
-       fi \
-    && chmod -R 777 /neurodocker && chmod a+s /neurodocker
-
-ENTRYPOINT ["/neurodocker/startup.sh"]
-
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           git \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-
-ENV FSLDIR="/opt/fsl-6.0.3" \
-    PATH="/opt/fsl-6.0.3/bin:$PATH" \
-    FSLOUTPUTTYPE="NIFTI_GZ" \
-    FSLMULTIFILEQUIT="TRUE" \
-    FSLTCLSH="/opt/fsl-6.0.3/bin/fsltclsh" \
-    FSLWISH="/opt/fsl-6.0.3/bin/fslwish" \
-    FSLLOCKDIR="" \
-    FSLMACHINELIST="" \
-    FSLREMOTECALL="" \
-    FSLGECUDAQ="cuda.q"
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           bc \
-           dc \
-           file \
-           libfontconfig1 \
-           libfreetype6 \
-           libgl1-mesa-dev \
-           libgl1-mesa-dri \
-           libglu1-mesa-dev \
-           libgomp1 \
-           libice6 \
-           libxcursor1 \
-           libxft2 \
-           libxinerama1 \
-           libxrandr2 \
-           libxrender1 \
-           libxt6 \
-           sudo \
-           wget \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && echo "Downloading FSL ..." \
-    && mkdir -p /opt/fsl-6.0.3 \
-    && curl -fsSL --retry 5 https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-6.0.3-centos6_64.tar.gz \
-    | tar -xz -C /opt/fsl-6.0.3 --strip-components 1 \
-    && sed -i '$iecho Some packages in this Docker container are non-free' $ND_ENTRYPOINT \
-    && sed -i '$iecho If you are considering commercial use of this container, please consult the relevant license:' $ND_ENTRYPOINT \
-    && sed -i '$iecho https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence' $ND_ENTRYPOINT \
-    && sed -i '$isource $FSLDIR/etc/fslconf/fsl.sh' $ND_ENTRYPOINT \
-    && echo "Installing FSL conda environment ..." \
-    && bash /opt/fsl-6.0.3/etc/fslconf/fslpython_install.sh -f /opt/fsl-6.0.3
-
-ENV PATH="/opt/afni-latest:$PATH" \
-    AFNI_PLUGINPATH="/opt/afni-latest"
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           ed \
-           gsl-bin \
-           libglib2.0-0 \
-           libglu1-mesa-dev \
-           libglw1-mesa \
-           libgomp1 \
-           libjpeg62 \
-           libnlopt-dev \
-           libxm4 \
-           netpbm \
-           python \
-           python3 \
-           r-base \
-           r-base-dev \
-           tcsh \
-           xfonts-base \
-           xvfb \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://snapshot.debian.org/archive/debian-security/20160113T213056Z/pool/updates/main/libp/libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && apt-get install -f \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \
-    && if [ -n "$gsl2_path" ]; then \
-         ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \
-       fi \
-    && ldconfig \
-    && echo "Downloading AFNI ..." \
-    && mkdir -p /opt/afni-latest \
-    && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \
-    | tar -xz -C /opt/afni-latest --strip-components 1 \
-    && PATH=$PATH:/opt/afni-latest rPkgsInstall -pkgs ALL
-
-ENV FORCE_SPMMCR="1" \
-    SPM_HTML_BROWSER="0" \
-    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" \
-    MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab"
-RUN export TMPDIR="$(mktemp -d)" \
-    && apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           bc \
-           libncurses5 \
-           libxext6 \
-           libxmu6 \
-           libxpm-dev \
-           libxt6 \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && echo "Downloading MATLAB Compiler Runtime ..." \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && apt-get install -f \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin \
-    && chmod +x "$TMPDIR/MCRInstaller.bin" \
-    && "$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" \
-    && rm -rf "$TMPDIR" \
-    && unset TMPDIR \
-    && echo "Downloading standalone SPM ..." \
-    && curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7771_R2010a.zip \
-    && unzip -q /tmp/spm12.zip -d /tmp \
-    && mkdir -p /opt/spm12-r7771 \
-    && mv /tmp/spm12/* /opt/spm12-r7771/ \
-    && chmod -R 777 /opt/spm12-r7771 \
-    && rm -rf /tmp/spm* \
-    && /opt/spm12-r7771/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \
-    && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7771/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT
-
-RUN test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro
+RUN /bin/bash -c "source activate neuro && pip install /work"
 USER neuro
-
-WORKDIR /home
-
-ENV CONDA_DIR="/opt/miniconda-latest" \
-    PATH="/opt/miniconda-latest/bin:$PATH"
-RUN export PATH="/opt/miniconda-latest/bin:$PATH" \
-    && echo "Downloading Miniconda installer ..." \
-    && conda_installer="/tmp/miniconda.sh" \
-    && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
-    && bash "$conda_installer" -b -p /opt/miniconda-latest \
-    && rm -f "$conda_installer" \
-    && conda update -yq -nbase conda \
-    && conda config --system --prepend channels conda-forge \
-    && conda config --system --set auto_update_conda false \
-    && conda config --system --set show_channel_urls true \
-    && sync && conda clean -y --all && sync \
-    && conda create -y -q --name neuro \
-    && conda install -y -q --name neuro \
-           "python=3.8" \
-           "traits" \
-           "jupyter" \
-           "nilearn" \
-           "graphviz" \
-           "nipype" \
-           "scikit-image" \
-    && sync && conda clean -y --all && sync \
-    && bash -c "source activate neuro \
-         && pip install --no-cache-dir \
-              "matplotlib"" \
-    && rm -rf ~/.cache/pip/* \
-    && sync \
-    && sed -i '$isource activate neuro' $ND_ENTRYPOINT
-
-ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:"
-
-RUN bash -c 'source activate neuro'
-
-USER root
-
-RUN chmod 777 -Rf /home
-
-RUN chown -R neuro /home
-
-USER neuro
-
-RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py
-
-RUN echo '{ \
-    \n  "pkg_manager": "apt", \
-    \n  "instructions": [ \
-    \n    [ \
-    \n      "base", \
-    \n      "neurodebian:stretch-non-free" \
-    \n    ], \
-    \n    [ \
-    \n      "install", \
-    \n      [ \
-    \n        "git" \
-    \n      ] \
-    \n    ], \
-    \n    [ \
-    \n      "fsl", \
-    \n      { \
-    \n        "version": "6.0.3" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "afni", \
-    \n      { \
-    \n        "version": "latest", \
-    \n        "method": "binaries", \
-    \n        "install_r": "true", \
-    \n        "install_r_pkgs": "true", \
-    \n        "install_python2": "true", \
-    \n        "install_python3": "true" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "spm12", \
-    \n      { \
-    \n        "version": "r7771", \
-    \n        "method": "binaries" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "workdir", \
-    \n      "/home" \
-    \n    ], \
-    \n    [ \
-    \n      "miniconda", \
-    \n      { \
-    \n        "create_env": "neuro", \
-    \n        "conda_install": [ \
-    \n          "python=3.8", \
-    \n          "traits", \
-    \n          "jupyter", \
-    \n          "nilearn", \
-    \n          "graphviz", \
-    \n          "nipype", \
-    \n          "scikit-image" \
-    \n        ], \
-    \n        "pip_install": [ \
-    \n          "matplotlib" \
-    \n        ], \
-    \n        "activate": true \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "env", \
-    \n      { \
-    \n        "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "run_bash", \
-    \n      "source activate neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "root" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "chmod 777 -Rf /home" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "chown -R neuro /home" \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \
-    \n    ] \
-    \n  ] \
-    \n}' > /neurodocker/neurodocker_specs.json
diff --git a/narps_open/utils/correlation/__main__.py b/narps_open/utils/correlation/__main__.py
index d086499b..0ccf80c2 100644
--- a/narps_open/utils/correlation/__main__.py
+++ b/narps_open/utils/correlation/__main__.py
@@ -19,7 +19,7 @@ def main():
     parser = ArgumentParser(description = 'Compare reproduced files to original results.')
     parser.add_argument('-t', '--team', type = str, required = True,
         help = 'the team ID', choices = get_implemented_pipelines())
-    subjects.add_argument('-n', '--nsubjects', type=str, required = True,
+    parser.add_argument('-n', '--nsubjects', type = int, required = True,
         help='the number of subjects to be selected')
 
     arguments = parser.parse_args()
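With the narps_open package pip-installed into the image's neuro environment, a rough
usage sketch follows. The image tag, team ID and subject count below are illustrative
placeholders, and whether plain `python` resolves to the neuro environment at runtime
depends on the entrypoint of the nipype/nipype:py38 base image:

    docker build -t narps-open .
    docker run -it narps-open python -m narps_open.utils.correlation -t <team_id> -n 40

Because --nsubjects is now declared on the parser with type = int, argparse rejects
non-numeric values up front instead of handing a raw string to the correlation code.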