Skip to content

Commit

Permalink
Merge branch 'main' into output-spaces
Browse files Browse the repository at this point in the history
  • Loading branch information
tsalo committed Sep 21, 2024
2 parents 07e904f + 24a0dcf commit 3cad316
Show file tree
Hide file tree
Showing 9 changed files with 92 additions and 88 deletions.
10 changes: 5 additions & 5 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ RUN python -m build /src
#

# Utilities for downloading packages
FROM ${BASE_IMAGE} as downloader
FROM ${BASE_IMAGE} AS downloader
# Bump the date to current to refresh curl/certificates/etc
RUN echo "2023.07.20"
RUN apt-get update && \
Expand All @@ -53,7 +53,7 @@ RUN apt-get update && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# AFNI
FROM downloader as afni
FROM downloader AS afni
# Bump the date to current to update AFNI
RUN echo "2023.07.20"
RUN mkdir -p /opt/afni-latest \
Expand All @@ -70,7 +70,7 @@ RUN mkdir -p /opt/afni-latest \
&& find /opt/afni-latest -type f -not -name "3dTshift" -delete

# Connectome Workbench 1.5.0
FROM downloader as workbench
FROM downloader AS workbench
RUN mkdir /opt/workbench && \
curl -sSLO https://www.humanconnectome.org/storage/app/media/workbench/workbench-linux64-v1.5.0.zip && \
unzip workbench-linux64-v1.5.0.zip -d /opt && \
Expand All @@ -79,7 +79,7 @@ RUN mkdir /opt/workbench && \
strip --remove-section=.note.ABI-tag /opt/workbench/libs_linux64/libQt5Core.so.5

# Micromamba
FROM downloader as micromamba
FROM downloader AS micromamba

# Install a C compiler to build extensions when needed.
# traits<6.4 wheels are not available for Python 3.11+, but build easily.
Expand Down Expand Up @@ -108,7 +108,7 @@ RUN npm install -g svgo@^3.2.0 bids-validator@^1.14.0 && \
#
# Main stage
#
FROM ${BASE_IMAGE} as fmripost_aroma
FROM ${BASE_IMAGE} AS fmripost_aroma

# Configure apt
ENV DEBIAN_FRONTEND="noninteractive" \
Expand Down
12 changes: 6 additions & 6 deletions src/fmripost_aroma/data/io_spec.json
Original file line number Diff line number Diff line change
Expand Up @@ -158,13 +158,13 @@
}
],
"patterns": [
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_desc-{desc}]_{suffix<bold|boldref|dseg|mask>}.{extension<nii|nii.gz|json>|nii.gz}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing>}.{extension<nii|nii.gz|json>|nii.gz}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing>}.{extension<tsv|json>|tsv}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_part-{part}][_desc-{desc}]_{suffix<metrics>}.{extension<tsv|json>|tsv}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_desc-{desc}]_{suffix<bold|boldref|dseg|mask>}.{extension<nii|nii.gz|json>|nii.gz}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing>}.{extension<nii|nii.gz|json>|nii.gz}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing>}.{extension<tsv|json>|tsv}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix<metrics>}.{extension<tsv|json>|tsv}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_part-{part}][_desc-{desc}]_{suffix<timeseries>}.{extension<tsv|json>}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_hemi-{hemi}][_space-{space}][_res-{res}][_den-{density}][_label-{label}][_desc-{desc}]_{suffix<|boldref|dseg|mask>}.{extension<dtseries.nii|dtseries.json>}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_hemi-{hemi}][_space-{space}][_res-{res}][_den-{density}][_label-{label}][_desc-{desc}]_{suffix<|boldref|dseg|mask>}.{extension<dtseries.nii|dtseries.json>}",
"sub-{subject}[/ses-{session}]/{datatype<func>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}]_from-{from}_to-{to}_mode-{mode<image|points>|image}_{suffix<xfm>|xfm}.{extension<txt|h5>}",
"sub-{subject}[/ses-{session}]/{datatype<func|figures>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing|bold>}.{extension<svg|html>|svg}"
"sub-{subject}[/ses-{session}]/{datatype<func|figures>|func}/sub-{subject}[_ses-{session}]_task-{task}[_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_run-{run}][_echo-{echo}][_part-{part}][_space-{space}][_res-{res}][_stat-{statistic}][_desc-{desc}]_{suffix<components|mixing|bold>}.{extension<svg|html>|svg}"
]
}
28 changes: 18 additions & 10 deletions src/fmripost_aroma/interfaces/confounds.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,41 +72,49 @@ def _get_ica_confounds(mixing, aroma_features, skip_vols, newpath=None):
aroma_features_df['classification'] != 'rejected'
].index.values
mixing_arr = np.loadtxt(mixing, ndmin=2)
n_comps = mixing_arr.shape[1]
if n_comps != aroma_features_df.shape[0]:
raise ValueError('Mixing matrix and AROMA features do not match')

# Prepare output paths
mixing_out = os.path.join(newpath, 'mixing.tsv')
aroma_confounds = os.path.join(newpath, 'AROMAAggrCompAROMAConfounds.tsv')

# pad mixing_arr with rows of zeros corresponding to number non steady-state volumes
# pad mixing_arr with rows of zeros corresponding to number of non steady-state volumes
padded_mixing_arr = mixing_arr.copy()
if skip_vols > 0:
zeros = np.zeros([skip_vols, mixing_arr.shape[1]])
mixing_arr = np.vstack([zeros, mixing_arr])
padded_mixing_arr = np.vstack([zeros, mixing_arr])

# save mixing_arr
np.savetxt(mixing_out, mixing_arr, delimiter='\t')
np.savetxt(mixing_out, padded_mixing_arr, delimiter='\t')

# Return dummy list of ones if no noise components were found
if motion_ics.size == 0:
config.loggers.interfaces.warning('No noise components were classified')
return None, mixing_out

# return dummy lists of zeros if no signal components were found
good_ic_arr = np.delete(mixing_arr, motion_ics, 1).T
if good_ic_arr.size == 0:
config.loggers.interfaces.warning('No signal components were classified')
return None, mixing_out
if signal_ics.size == 0:
raise Exception('No signal components were classified')

# Select the mixing matrix rows corresponding to the motion ICs
aggr_mixing_arr = mixing_arr[motion_ics, :].T
# Select the mixing matrix columns corresponding to the motion ICs
aggr_mixing_arr = mixing_arr[:, motion_ics]

# Regress the good components out of the bad time series to get "pure evil" regressors
signal_mixing_arr = mixing_arr[signal_ics, :].T
signal_mixing_arr = mixing_arr[:, signal_ics]
aggr_mixing_arr_z = stats.zscore(aggr_mixing_arr, axis=0)
signal_mixing_arr_z = stats.zscore(signal_mixing_arr, axis=0)
betas = np.linalg.lstsq(signal_mixing_arr_z, aggr_mixing_arr_z, rcond=None)[0]
pred_bad_timeseries = np.dot(signal_mixing_arr_z, betas)
orthaggr_mixing_arr = aggr_mixing_arr_z - pred_bad_timeseries

# pad confounds with rows of zeros corresponding to number of non steady-state volumes
if skip_vols > 0:
zeros = np.zeros([skip_vols, aggr_mixing_arr.shape[1]])
aggr_mixing_arr = np.vstack([zeros, aggr_mixing_arr])
orthaggr_mixing_arr = np.vstack([zeros, orthaggr_mixing_arr])

# add one to motion_ic_indices to match melodic report.
aggr_confounds_df = pd.DataFrame(
aggr_mixing_arr,
Expand Down
54 changes: 54 additions & 0 deletions src/fmripost_aroma/interfaces/misc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
"""Miscellaneous interfaces for fmriprep-aroma."""

from nipype.interfaces.base import (
isdefined,
traits,
)
from nipype.utils.filemanip import fname_presuffix
from niworkflows.interfaces.fixes import (
FixHeaderApplyTransforms,
_FixTraitApplyTransformsInputSpec,
)


class _ApplyTransformsInputSpec(_FixTraitApplyTransformsInputSpec):
    # Nipype's version doesn't have GenericLabel
    # Re-declare the interpolation trait to append ANTs' 'GenericLabel' mode,
    # which this package prefers over 'MultiLabel' for resampling label/segmentation
    # images. NOTE(review): with traits.Enum the first listed value is the default
    # (applied because usedefault=True), so the order of the original entries
    # must not be changed — 'Linear' must stay first.
    interpolation = traits.Enum(
        'Linear',
        'NearestNeighbor',
        'CosineWindowedSinc',
        'WelchWindowedSinc',
        'HammingWindowedSinc',
        'LanczosWindowedSinc',
        'MultiLabel',
        'Gaussian',
        'BSpline',
        'GenericLabel',
        argstr='%s',
        usedefault=True,
    )


class ApplyTransforms(FixHeaderApplyTransforms):
    """A modified version of FixHeaderApplyTransforms from niworkflows.

    The niworkflows version of ApplyTransforms "fixes the resampled image header
    to match the xform of the reference image".
    This modification overrides the allowed interpolation values,
    since FixHeaderApplyTransforms doesn't support GenericLabel,
    which is preferred over MultiLabel.
    """

    input_spec = _ApplyTransformsInputSpec

    def _run_interface(self, runtime):
        # If the caller did not name an output image, derive one from the
        # input filename (e.g. ``bold.nii`` -> ``bold_trans.nii.gz``) and
        # place it in the node's working directory.
        if not isdefined(self.inputs.output_image):
            default_name = fname_presuffix(
                self.inputs.input_image,
                suffix='_trans.nii.gz',
                newpath=runtime.cwd,
                use_ext=False,
            )
            self.inputs.output_image = default_name

        # Delegate the actual resampling (and header fix) to niworkflows.
        return super()._run_interface(runtime)
57 changes: 0 additions & 57 deletions src/fmripost_aroma/interfaces/resampler.py

This file was deleted.

6 changes: 2 additions & 4 deletions src/fmripost_aroma/workflows/aroma.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,9 +277,7 @@ def init_ica_aroma_wf(

# extract the confound ICs from the results
ica_aroma_confound_extraction = pe.Node(
ICAConfounds(
err_on_aroma_warn=config.workflow.err_on_warn,
),
ICAConfounds(err_on_aroma_warn=config.workflow.err_on_warn),
name='ica_aroma_confound_extraction',
)
workflow.connect([
Expand Down Expand Up @@ -311,7 +309,7 @@ def init_ica_aroma_wf(
niu.Function(function=_convert_to_tsv, output_names=['out_file']),
name='convert_to_tsv',
)
workflow.connect([(select_melodic_files, convert_to_tsv, [('mixing', 'in_file')])])
workflow.connect([(ica_aroma_confound_extraction, convert_to_tsv, [('mixing', 'in_file')])])

ds_mixing = pe.Node(
DerivativesDataSink(
Expand Down
5 changes: 3 additions & 2 deletions src/fmripost_aroma/workflows/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -377,10 +377,11 @@ def init_single_run_wf(bold_file):
# Resample to MNI152NLin6Asym:res-2, for ICA-AROMA classification
from fmriprep.workflows.bold.apply import init_bold_volumetric_resample_wf
from fmriprep.workflows.bold.stc import init_bold_stc_wf
from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
from niworkflows.interfaces.header import ValidateImage
from templateflow.api import get as get_template

from fmripost_aroma.interfaces.misc import ApplyTransforms

workflow.__desc__ += """\
Raw BOLD series were resampled to MNI152NLin6Asym:res-2, for ICA-AROMA classification.
"""
Expand Down Expand Up @@ -452,7 +453,7 @@ def init_single_run_wf(bold_file):
# Warp the mask as well
mask_to_mni6 = pe.Node(
ApplyTransforms(
interpolation='MultiLabel',
interpolation='GenericLabel',
input_image=functional_cache['bold_mask_native'],
reference_image=mni6_mask,
transforms=[
Expand Down
4 changes: 2 additions & 2 deletions src/fmripost_aroma/workflows/confounds.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,11 @@ def init_carpetplot_wf(
from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe
from niworkflows.engine.workflows import LiterateWorkflow as Workflow
from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
from templateflow.api import get as get_template

from fmripost_aroma.config import DEFAULT_MEMORY_MIN_GB
from fmripost_aroma.interfaces.bids import DerivativesDataSink
from fmripost_aroma.interfaces.misc import ApplyTransforms

inputnode = pe.Node(
niu.IdentityInterface(
Expand Down Expand Up @@ -154,7 +154,7 @@ def init_carpetplot_wf(
),
),
],
interpolation='MultiLabel',
interpolation='GenericLabel',
args='-u int',
),
name='resample_parc',
Expand Down
4 changes: 2 additions & 2 deletions src/fmripost_aroma/workflows/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,11 @@
from fmriprep.utils.bids import dismiss_echo
from nipype.interfaces import utility as niu
from nipype.pipeline import engine as pe
from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
from niworkflows.utils.images import dseg_label

from fmripost_aroma.config import DEFAULT_MEMORY_MIN_GB
from fmripost_aroma.interfaces.bids import DerivativesDataSink
from fmripost_aroma.interfaces.misc import ApplyTransforms


def init_func_fit_reports_wf(
Expand Down Expand Up @@ -82,7 +82,7 @@ def init_func_fit_reports_wf(

# Warp the tissue segmentation to MNI
dseg_to_mni6 = pe.Node(
ApplyTransforms(interpolation='MultiLabel'),
ApplyTransforms(interpolation='GenericLabel'),
name='dseg_to_mni6',
mem_gb=1,
)
Expand Down

0 comments on commit 3cad316

Please sign in to comment.