From 827420557f588dabdf4fc2767374ebd928e0e011 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Fri, 30 Aug 2024 18:15:04 -0400 Subject: [PATCH 1/9] start adding anatomical conforming --- ingress2qsirecon/utils/functions.py | 18 +++++ ingress2qsirecon/utils/interfaces.py | 112 +++++++++++++++++++++++++++ ingress2qsirecon/utils/workflows.py | 26 ++++--- 3 files changed, 147 insertions(+), 9 deletions(-) diff --git a/ingress2qsirecon/utils/functions.py b/ingress2qsirecon/utils/functions.py index 1242387..38eeeb7 100644 --- a/ingress2qsirecon/utils/functions.py +++ b/ingress2qsirecon/utils/functions.py @@ -12,6 +12,8 @@ from pathlib import Path from warnings import warn +import nibabel as nb + # Files: file paths relative to subject input folder # MNI: MNI template version # DIR_PATTERN: regex pattern for subject ID within input folder @@ -228,3 +230,19 @@ def create_layout(input_dir: Path, output_dir: Path, input_pipeline: str, partic raise ValueError("No subjects found in layout.") return layout + + +def to_lps(input_img, new_axcodes=("L", "P", "S")): + if isinstance(input_img, str): + input_img = nb.load(input_img) + input_axcodes = nb.aff2axcodes(input_img.affine) + # Is the input image oriented how we want? + if not input_axcodes == new_axcodes: + # Re-orient + input_orientation = nb.orientations.axcodes2ornt(input_axcodes) + desired_orientation = nb.orientations.axcodes2ornt(new_axcodes) + transform_orientation = nb.orientations.ornt_transform(input_orientation, desired_orientation) + reoriented_img = input_img.as_reoriented(transform_orientation) + return reoriented_img + else: + return input_img diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index 43ca884..fc54def 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -21,6 +21,8 @@ ) from nipype.interfaces.workbench.base import WBCommand +from ingress2qsirecon.utils.functions import to_lps + LOGGER = logging.getLogger("nipype.interface") @@ -352,3 +354,113 @@ def _run_interface(self, runtime): self._results["b0_average"] = output_mean_fname return runtime + + +class _ConformInputSpec(BaseInterfaceInputSpec): + in_file = File(mandatory=True, desc="Input image") + out_file = File(mandatory=True, genfile=True, desc="Conformed image") + target_zooms = traits.Tuple(traits.Float, traits.Float, traits.Float, desc="Target zoom information") + target_shape = traits.Tuple(traits.Int, traits.Int, traits.Int, desc="Target shape information") + deoblique_header = traits.Bool(False, usedfault=True) + + +class _ConformOutputSpec(TraitedSpec): + out_file = File(exists=True, desc="Conformed image") + # transform = File(exists=True, desc="Conformation transform") + # report = File(exists=True, desc='reportlet about orientation') + + +class Conform(SimpleInterface): + """Conform a series of T1w images to enable merging. + + Performs two basic functions: + + 1. Orient to LPS (right-left, anterior-posterior, inferior-superior) + 2. 
Resample to target zooms (voxel sizes) and shape (number of voxels) + + """ + + input_spec = _ConformInputSpec + output_spec = _ConformOutputSpec + + def _run_interface(self, runtime): + # Load image, orient as LPS + fname = self.inputs.in_file + orig_img = nb.load(fname) + reoriented = to_lps(orig_img) + + # Set target shape information + target_zooms = np.array(self.inputs.target_zooms) + target_shape = np.array(self.inputs.target_shape) + target_span = target_shape * target_zooms + + zooms = np.array(reoriented.header.get_zooms()[:3]) + shape = np.array(reoriented.shape[:3]) + + # Reconstruct transform from orig to reoriented image + ornt_xfm = nb.orientations.inv_ornt_aff(nb.io_orientation(reoriented.affine), orig_img.shape) + # Identity unless proven otherwise + target_affine = reoriented.affine.copy() + conform_xfm = np.eye(4) + # conform_xfm = np.diag([-1, -1, 1, 1]) + + xyz_unit = reoriented.header.get_xyzt_units()[0] + if xyz_unit == "unknown": + # Common assumption; if we're wrong, unlikely to be the only thing that breaks + xyz_unit = "mm" + + # Set a 0.05mm threshold to performing rescaling + atol = {"meter": 1e-5, "mm": 0.01, "micron": 10}[xyz_unit] + + # Rescale => change zooms + # Resize => update image dimensions + rescale = not np.allclose(zooms, target_zooms, atol=atol) + resize = not np.all(shape == target_shape) + if rescale or resize: + if rescale: + scale_factor = target_zooms / zooms + target_affine[:3, :3] = reoriented.affine[:3, :3].dot(np.diag(scale_factor)) + + if resize: + # The shift is applied after scaling. + # Use a proportional shift to maintain relative position in dataset + size_factor = target_span / (zooms * shape) + # Use integer shifts to avoid unnecessary interpolation + offset = reoriented.affine[:3, 3] * size_factor - reoriented.affine[:3, 3] + target_affine[:3, 3] = reoriented.affine[:3, 3] + offset.astype(int) + + data = nim.resample_img(reoriented, target_affine, target_shape).get_fdata() + conform_xfm = np.linalg.inv(reoriented.affine).dot(target_affine) + reoriented = reoriented.__class__(data, target_affine, reoriented.header) + + if self.inputs.deoblique_header: + is_oblique = np.any(np.abs(nb.affines.obliquity(reoriented.affine)) > 0) + if is_oblique: + LOGGER.warning("Removing obliquity from image affine") + new_affine = reoriented.affine.copy() + new_affine[:, :-1] = 0 + new_affine[(0, 1, 2), (0, 1, 2)] = reoriented.header.get_zooms()[:3] * np.sign( + reoriented.affine[(0, 1, 2), (0, 1, 2)] + ) + reoriented = nb.Nifti1Image(reoriented.get_fdata(), new_affine, reoriented.header) + + # Save image + out_name = self.inputs.out_file + reoriented.to_filename(out_name) + + # Image may be reoriented, rescaled, and/or resized + if reoriented is not orig_img: + + transform = ornt_xfm.dot(conform_xfm) + if not np.allclose(orig_img.affine.dot(transform), target_affine): + LOGGER.warning("Check alignment of anatomical image.") + + else: + transform = np.eye(4) + + # mat_name = fname_presuffix(fname, suffix=".mat", newpath=runtime.cwd, use_ext=False) + # np.savetxt(mat_name, transform, fmt="%.08f") + # self._results["transform"] = mat_name + self._results["out_file"] = out_name + + return runtime diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 30e75fc..2a31b32 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -9,6 +9,7 @@ from nipype.pipeline.engine import Workflow from ingress2qsirecon.utils.interfaces import ( + Conform, ConformDwi, ConvertWarpfield, ExtractB0s, 
@@ -73,13 +74,6 @@ def create_single_subject_wf(subject_layout): os.makedirs(Path(bids_base / "anat").resolve()) os.makedirs(Path(bids_base / "dwi").resolve()) - # Some files, like anatomicals, can just be copied over to the output BIDS directory LPS+ THESE FILES (ConformImage) - for key in ["t1w_brain", "brain_mask"]: - bids_key = "bids_" + key - if key in subject_layout.keys(): - if not os.path.exists(subject_layout[bids_key]): - shutil.copyfile(subject_layout[key], subject_layout[bids_key]) - # Create single subject workflow wf_name = f"ingress2qsirecon_single_subject_{subject_name}_wf" wf = Workflow(name=wf_name) @@ -100,6 +94,10 @@ def create_single_subject_wf(subject_layout): # Create node to conform DWI and save to BIDS layout conform_dwi_node = Node(ConformDwi(), name='conform_dwi') + # Create nodes to conform anatomicals and save to BIDS layout + conform_t1w_node = Node(Conform(), name="conform_t1w") + conform_mask_node = Node(Conform(), name="conform_mask") + # Connect nodes wf.connect( [ (input_node, parse_layout_node, [('subject_layout', 'subject_layout')]), @@ -115,11 +113,21 @@ def create_single_subject_wf(subject_layout): ("bids_bvecs", "bvec_out_file"), ], ), + # Add the zooms here + ( + parse_layout_node, + conform_t1w_node, + [("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")], + ), + ( + parse_layout_node, + conform_mask_node, + [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], + ), ] ) - # Write out .b and .bmtxt (DIPY standard) - # LPS+ anatomicals + # Write out .b and .bmtxt (DIPY standard) # If subject does not have DWIREF, run node to extract mean b0 if "dwiref" not in subject_layout.keys(): From 1519f1716deeafc6b4ab0eda476083b6db8424e1 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Fri, 30 Aug 2024 18:25:44 -0400 Subject: [PATCH 2/9] make anatomical conform optional because anatomicals optional --- ingress2qsirecon/utils/workflows.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 2a31b32..6ae64e2 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -94,9 +94,7 @@ def create_single_subject_wf(subject_layout): # Create node to conform DWI and save to BIDS layout conform_dwi_node = Node(ConformDwi(), name='conform_dwi') - # Create nodes to conform anatomicals and save to BIDS layout - conform_t1w_node = Node(Conform(), name="conform_t1w") - conform_mask_node = Node(Conform(), name="conform_mask") + # Connect nodes wf.connect( [ @@ -113,19 +111,29 @@ def create_single_subject_wf(subject_layout): ("bids_bvecs", "bvec_out_file"), ], ), - # Add the zooms here + ] + ) + + # Create nodes to conform anatomicals and save to BIDS layout + # Add the zooms here for them to work + if "t1w_brain" in subject_layout.keys(): + conform_t1w_node = Node(Conform(), name="conform_t1w") + wf.connect( ( parse_layout_node, conform_t1w_node, [("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")], - ), + ) + ) + if "brain_mask" in subject_layout.keys(): + conform_mask_node = Node(Conform(), name="conform_mask") + wf.connect( ( parse_layout_node, conform_mask_node, [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], ), - ] - ) + ) # Write out .b and .bmtxt (DIPY standard) From 0a9d0068ff2b9303dc0f8246189909bfb4470b0c Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Sat, 31 Aug 2024 13:34:57 -0400 Subject: [PATCH 3/9] start templateflow conform function --- ingress2qsirecon/utils/interfaces.py 
| 36 ++++++++++++++++++++++++++++ ingress2qsirecon/utils/workflows.py | 3 +++ 2 files changed, 39 insertions(+) diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index fc54def..cb196f8 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -12,9 +12,11 @@ from nilearn import image as nim from nipype import logging from nipype.interfaces.base import ( + BaseInterface, BaseInterfaceInputSpec, CommandLineInputSpec, File, + InputMultiPath, SimpleInterface, TraitedSpec, traits, @@ -464,3 +466,37 @@ def _run_interface(self, runtime): self._results["out_file"] = out_name return runtime + +# Define the input specification +class _ComposeTransformsInputSpec(BaseInterfaceInputSpec): + warp_files = InputMultiPath(File(exists=True), desc="List of warp files in .h5 format", mandatory=True) + output_warp = File(mandatory=True, genfile=True, desc="Output composed warp file") + +# Define the output specification +class _ComposeTransformsOutputSpec(TraitedSpec): + output_warp = File(exists=True, desc="Output composed warp file") + +# Define the custom Nipype interface +class ComposeTransforms(BaseInterface): + input_spec = _ComposeTransformsInputSpec + output_spec = _ComposeTransformsOutputSpec + + def _run_interface(self, runtime): + # Create a CompositeTransform object + composite_transform = sitk.CompositeTransform(3) + + # Iterate over the list of warp files and add them to the composite transform + for warp_file in self.inputs.warp_files: + transform = sitk.ReadTransform(warp_file) + composite_transform.AddTransform(transform) + + # Write the composite transform to the output file + output_path = os.path.abspath(self.inputs.output_warp) + sitk.WriteTransform(composite_transform, output_path) + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs["output_warp"] = os.path.abspath(self.inputs.output_warp) + return outputs \ No newline at end of file diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 6ae64e2..f796dbd 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -7,8 +7,10 @@ from pathlib import Path from nipype.pipeline.engine import Workflow +from templateflow import api as tflow from ingress2qsirecon.utils.interfaces import ( + ComposeTransforms, Conform, ConformDwi, ConvertWarpfield, @@ -177,6 +179,7 @@ def create_single_subject_wf(subject_layout): ), ] ) + if subject_layout["MNI_"] # Then MNI2Subject if "MNI2subject" in subject_layout.keys(): From 065a4d629aec2164940b1e5b9da52bdb69970513 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Sun, 1 Sep 2024 13:44:56 -0400 Subject: [PATCH 4/9] continue writing templateflow functions --- ingress2qsirecon/utils/interfaces.py | 5 ++++- ingress2qsirecon/utils/workflows.py | 26 ++++++++++++++++++++++++-- 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index cb196f8..82e4eaa 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -467,15 +467,18 @@ def _run_interface(self, runtime): return runtime + # Define the input specification class _ComposeTransformsInputSpec(BaseInterfaceInputSpec): warp_files = InputMultiPath(File(exists=True), desc="List of warp files in .h5 format", mandatory=True) output_warp = File(mandatory=True, genfile=True, desc="Output composed warp file") + # Define the output specification class 
_ComposeTransformsOutputSpec(TraitedSpec): output_warp = File(exists=True, desc="Output composed warp file") + # Define the custom Nipype interface class ComposeTransforms(BaseInterface): input_spec = _ComposeTransformsInputSpec @@ -499,4 +502,4 @@ def _run_interface(self, runtime): def _list_outputs(self): outputs = self._outputs().get() outputs["output_warp"] = os.path.abspath(self.inputs.output_warp) - return outputs \ No newline at end of file + return outputs diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index f796dbd..69fb7dd 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -152,7 +152,7 @@ def create_single_subject_wf(subject_layout): ] ) - # Convert FNIRT nii warps to ITK nii, then ITK nii to ITK H5, then get to MNI2009cAsym space if needed + # Convert FNIRT nii warps to ITK nii, then ITK nii to ITK H5 # Start with subject2MNI if "subject2MNI" in subject_layout.keys(): convert_warpfield_node_subject2MNI = Node(ConvertWarpfield(), name="convert_warpfield_subject2MNI") @@ -179,7 +179,6 @@ def create_single_subject_wf(subject_layout): ), ] ) - if subject_layout["MNI_"] # Then MNI2Subject if "MNI2subject" in subject_layout.keys(): @@ -208,6 +207,29 @@ def create_single_subject_wf(subject_layout): ] ) + # Now get transform to MNI2009cAsym + if ( + subject_layout["MNI_TEMPLATE"] == "MNI152NLin6Asym" + ): # tpl-MNI152NLin6Asym_from-MNI152NLin2009cAsym_mode-image_xfm.h5 + MNI2009cAsym_to_MNINLin6 = tflow.get('MNI152NLin6Asym', desc=None, suffix='xfm', extension='h5') + MNINLin6_to_MNI2009cAsym = tflow.get('MNI152NLin2009cAsym', desc=None, suffix='xfm', extension='h5') + compose_transforms_node_MNI2subject = Node(ComposeTransforms(), name="compose_transforms_MNI2subject") + compose_transforms_node_MNI2subject.inputs.transforms = [ + nii_to_h5_node_subject2MNI.outputs.xfm_h5_out, + MNINLin6_to_MNI2009cAsym, + ] + compose_transforms_node_MNI2subject.inputs.output_warp = str(subject_layout["bids_MNI2subject"]).replace( + subject_layout["MNI_TEMPLATE"], "NI152NLin2009cAsym" + ) + compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") + compose_transforms_node_subject2MNI.inputs.transforms = [ + nii_to_h5_node_MNI2subject.outputs.xfm_h5_out, + MNINLin6_to_MNI2009cAsym, + ] + compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( + subject_layout["MNI_TEMPLATE"], "NI152NLin2009cAsym" + ) + return wf From 1b645ffe979a02f661eac95e9c73b4306fa53534 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Sun, 1 Sep 2024 16:28:18 -0400 Subject: [PATCH 5/9] struggling getting the ants compose to work --- ingress2qsirecon/utils/workflows.py | 102 +++++++++++++++++++--------- 1 file changed, 71 insertions(+), 31 deletions(-) diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 69fb7dd..6135dbf 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -81,7 +81,7 @@ def create_single_subject_wf(subject_layout): wf = Workflow(name=wf_name) # Define input node for the single subject workflow - input_node = Node(IdentityInterface(fields=['subject_layout']), name='input_node') + input_node = Node(IdentityInterface(fields=['subject_layout',"MNI2009cAsym_to_MNINLin6","MNINLin6_to_MNI2009cAsym"]), name='input_node') input_node.inputs.subject_layout = subject_layout # Create node to parse the input dictionary into its individual components @@ -118,24 +118,26 @@ 
def create_single_subject_wf(subject_layout): # Create nodes to conform anatomicals and save to BIDS layout # Add the zooms here for them to work - if "t1w_brain" in subject_layout.keys(): - conform_t1w_node = Node(Conform(), name="conform_t1w") - wf.connect( - ( - parse_layout_node, - conform_t1w_node, - [("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")], + # TMP If false because does not work yet + if False: + if "t1w_brain" in subject_layout.keys(): + conform_t1w_node = Node(Conform(), name="conform_t1w") + wf.connect( + ( + parse_layout_node, + conform_t1w_node, + [("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")], + ) + ) + if "brain_mask" in subject_layout.keys(): + conform_mask_node = Node(Conform(), name="conform_mask") + wf.connect( + ( + parse_layout_node, + conform_mask_node, + [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], + ), ) - ) - if "brain_mask" in subject_layout.keys(): - conform_mask_node = Node(Conform(), name="conform_mask") - wf.connect( - ( - parse_layout_node, - conform_mask_node, - [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], - ), - ) # Write out .b and .bmtxt (DIPY standard) @@ -208,28 +210,66 @@ def create_single_subject_wf(subject_layout): ) # Now get transform to MNI2009cAsym + MNI_template = subject_layout["MNI_template"] if ( - subject_layout["MNI_TEMPLATE"] == "MNI152NLin6Asym" - ): # tpl-MNI152NLin6Asym_from-MNI152NLin2009cAsym_mode-image_xfm.h5 + MNI_template == "MNI152NLin6Asym" + ): + # Get the relevant transforms from templateflow MNI2009cAsym_to_MNINLin6 = tflow.get('MNI152NLin6Asym', desc=None, suffix='xfm', extension='h5') + input_node.inputs.MNI2009cAsym_to_MNINLin6 = MNI2009cAsym_to_MNINLin6 MNINLin6_to_MNI2009cAsym = tflow.get('MNI152NLin2009cAsym', desc=None, suffix='xfm', extension='h5') + input_node.inputs.MNINLin6_to_MNI2009cAsym = MNINLin6_to_MNI2009cAsym + + + # Define a function to make a list of two warp files for input to ComposeTransforms + def combine_warp_files(file1, file2): + return [file1, file2] + + # Create a Function node to make a list of warp files for MNI2subject + warp_files_list_MNI2subject = Node(Function( + input_names=['file1', 'file2'], + output_names=['combined_files'], + function=combine_warp_files + ), name='list_warp_files_MNI2subject') + + # Create a Function node to make a list of warp files for subject2MNI + warp_files_list_subject2MNI = Node(Function( + input_names=['file1', 'file2'], + output_names=['combined_files'], + function=combine_warp_files + ), name='list_warp_files_subject2MNI') + + + # Make the compute nodes for combining transforms compose_transforms_node_MNI2subject = Node(ComposeTransforms(), name="compose_transforms_MNI2subject") - compose_transforms_node_MNI2subject.inputs.transforms = [ - nii_to_h5_node_subject2MNI.outputs.xfm_h5_out, - MNINLin6_to_MNI2009cAsym, - ] compose_transforms_node_MNI2subject.inputs.output_warp = str(subject_layout["bids_MNI2subject"]).replace( - subject_layout["MNI_TEMPLATE"], "NI152NLin2009cAsym" + MNI_template, "MNI152NLin2009cAsym" ) compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") - compose_transforms_node_subject2MNI.inputs.transforms = [ - nii_to_h5_node_MNI2subject.outputs.xfm_h5_out, - MNINLin6_to_MNI2009cAsym, - ] compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( - subject_layout["MNI_TEMPLATE"], "NI152NLin2009cAsym" + MNI_template, "MNI152NLin2009cAsym" ) - + + # Connect the nodes + wf.connect([ + # For 
MNI2subject + (nii_to_h5_node_MNI2subject, warp_files_list_MNI2subject, [("xfm_h5_out", "file1")]), + (input_node, warp_files_list_MNI2subject, [("MNI2009cAsym_to_MNINLin6", "file2")]), + (warp_files_list_MNI2subject, compose_transforms_node_MNI2subject, [("combined_files", "warp_files")]), + + # For subject2MNI + (input_node, warp_files_list_subject2MNI, [("MNINLin6_to_MNI2009cAsym", "file1")]), + (nii_to_h5_node_subject2MNI, warp_files_list_subject2MNI, [("xfm_h5_out", "file2")]), + (warp_files_list_subject2MNI, compose_transforms_node_subject2MNI, [("combined_files", "warp_files")]), + ]) + + # Then Subject --> MNI + #compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") + #compose_transforms_node_subject2MNI.inputs.warp_files = [MNINLin6_to_MNI2009cAsym, subject_layout["bids_subject2MNI"]] + #compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( + # MNI_template, "MNI152NLin2009cAsym" + #) + return wf From 1af363f3ccd0cf0d41726e67b027d7a6b3392a26 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Wed, 4 Sep 2024 10:54:10 -0400 Subject: [PATCH 6/9] conform anatomicals, compose xfms --- ingress2qsirecon/utils/interfaces.py | 39 ++++++++-- ingress2qsirecon/utils/workflows.py | 103 ++++++++++++++++----------- 2 files changed, 94 insertions(+), 48 deletions(-) diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index 82e4eaa..f6c4dea 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -4,8 +4,10 @@ import os import shutil +import tempfile from textwrap import indent +import itk import nibabel as nb import numpy as np import SimpleITK as sitk @@ -479,8 +481,30 @@ class _ComposeTransformsOutputSpec(TraitedSpec): output_warp = File(exists=True, desc="Output composed warp file") +class ComposeTransforms2(BaseInterface): + + input_spec = _ComposeTransformsInputSpec + output_spec = _ComposeTransformsOutputSpec + + def _run_interface(self, runtime): + # Create a CompositeTransform object + + # Iterate over the list of warp files and add them to the composite transform + output_warp = os.path.abspath(self.inputs.output_warp) + transforms = [itk.transformread(warp_file) for warp_file in self.inputs.warp_files] + itk.transformwrite(transforms, output_warp) + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs["output_warp"] = os.path.abspath(self.inputs.output_warp) + return outputs + + # Define the custom Nipype interface class ComposeTransforms(BaseInterface): + input_spec = _ComposeTransformsInputSpec output_spec = _ComposeTransformsOutputSpec @@ -491,11 +515,16 @@ def _run_interface(self, runtime): # Iterate over the list of warp files and add them to the composite transform for warp_file in self.inputs.warp_files: transform = sitk.ReadTransform(warp_file) - composite_transform.AddTransform(transform) - - # Write the composite transform to the output file - output_path = os.path.abspath(self.inputs.output_warp) - sitk.WriteTransform(composite_transform, output_path) + try: + # If composite, add each transform in the list + for i in range(transform.GetNumberOfTransforms()): + composite_transform.AddTransform(transform.GetNthTransform(i)) + except: + # If not, just add the transform + composite_transform.AddTransform(transform) + + # Write the composite transform to the temporary file + sitk.WriteTransform(composite_transform, self.inputs.output_warp) return runtime diff --git 
a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 6135dbf..d0189cd 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -7,6 +7,7 @@ from pathlib import Path from nipype.pipeline.engine import Workflow +from niworkflows.interfaces.images import TemplateDimensions from templateflow import api as tflow from ingress2qsirecon.utils.interfaces import ( @@ -81,7 +82,10 @@ def create_single_subject_wf(subject_layout): wf = Workflow(name=wf_name) # Define input node for the single subject workflow - input_node = Node(IdentityInterface(fields=['subject_layout',"MNI2009cAsym_to_MNINLin6","MNINLin6_to_MNI2009cAsym"]), name='input_node') + input_node = Node( + IdentityInterface(fields=['subject_layout', "MNI2009cAsym_to_MNINLin6", "MNINLin6_to_MNI2009cAsym"]), + name='input_node', + ) input_node.inputs.subject_layout = subject_layout # Create node to parse the input dictionary into its individual components @@ -117,26 +121,44 @@ def create_single_subject_wf(subject_layout): ) # Create nodes to conform anatomicals and save to BIDS layout - # Add the zooms here for them to work # TMP If false because does not work yet - if False: - if "t1w_brain" in subject_layout.keys(): - conform_t1w_node = Node(Conform(), name="conform_t1w") - wf.connect( + if "t1w_brain" in subject_layout.keys(): + template_dimensions_node = Node(TemplateDimensions(), name="template_dimensions") + conform_t1w_node = Node(Conform(), name="conform_t1w") + wf.connect( + [ + ( + parse_layout_node, + template_dimensions_node, + [("t1w_brain", "anat_list")], + ), + ( + template_dimensions_node, + conform_t1w_node, + [("target_shape", "target_shape"), ("target_zooms", "target_zooms")], + ), ( parse_layout_node, conform_t1w_node, [("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")], - ) - ) + ), + ] + ) if "brain_mask" in subject_layout.keys(): conform_mask_node = Node(Conform(), name="conform_mask") wf.connect( - ( - parse_layout_node, - conform_mask_node, - [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], - ), + [ + ( + parse_layout_node, + conform_mask_node, + [("brain_mask", "in_file"), ("bids_brain_mask", "out_file")], + ), + ( + template_dimensions_node, + conform_mask_node, + [("target_shape", "target_shape"), ("target_zooms", "target_zooms")], + ), + ] ) # Write out .b and .bmtxt (DIPY standard) @@ -211,34 +233,28 @@ def create_single_subject_wf(subject_layout): # Now get transform to MNI2009cAsym MNI_template = subject_layout["MNI_template"] - if ( - MNI_template == "MNI152NLin6Asym" - ): + if MNI_template == "MNI152NLin6Asym": # Get the relevant transforms from templateflow MNI2009cAsym_to_MNINLin6 = tflow.get('MNI152NLin6Asym', desc=None, suffix='xfm', extension='h5') input_node.inputs.MNI2009cAsym_to_MNINLin6 = MNI2009cAsym_to_MNINLin6 MNINLin6_to_MNI2009cAsym = tflow.get('MNI152NLin2009cAsym', desc=None, suffix='xfm', extension='h5') input_node.inputs.MNINLin6_to_MNI2009cAsym = MNINLin6_to_MNI2009cAsym - # Define a function to make a list of two warp files for input to ComposeTransforms def combine_warp_files(file1, file2): return [file1, file2] # Create a Function node to make a list of warp files for MNI2subject - warp_files_list_MNI2subject = Node(Function( - input_names=['file1', 'file2'], - output_names=['combined_files'], - function=combine_warp_files - ), name='list_warp_files_MNI2subject') + warp_files_list_MNI2subject = Node( + Function(input_names=['file1', 'file2'], output_names=['combined_files'], 
function=combine_warp_files), + name='list_warp_files_MNI2subject', + ) # Create a Function node to make a list of warp files for subject2MNI - warp_files_list_subject2MNI = Node(Function( - input_names=['file1', 'file2'], - output_names=['combined_files'], - function=combine_warp_files - ), name='list_warp_files_subject2MNI') - + warp_files_list_subject2MNI = Node( + Function(input_names=['file1', 'file2'], output_names=['combined_files'], function=combine_warp_files), + name='list_warp_files_subject2MNI', + ) # Make the compute nodes for combining transforms compose_transforms_node_MNI2subject = Node(ComposeTransforms(), name="compose_transforms_MNI2subject") @@ -251,24 +267,25 @@ def combine_warp_files(file1, file2): ) # Connect the nodes - wf.connect([ - # For MNI2subject - (nii_to_h5_node_MNI2subject, warp_files_list_MNI2subject, [("xfm_h5_out", "file1")]), - (input_node, warp_files_list_MNI2subject, [("MNI2009cAsym_to_MNINLin6", "file2")]), - (warp_files_list_MNI2subject, compose_transforms_node_MNI2subject, [("combined_files", "warp_files")]), - - # For subject2MNI - (input_node, warp_files_list_subject2MNI, [("MNINLin6_to_MNI2009cAsym", "file1")]), - (nii_to_h5_node_subject2MNI, warp_files_list_subject2MNI, [("xfm_h5_out", "file2")]), - (warp_files_list_subject2MNI, compose_transforms_node_subject2MNI, [("combined_files", "warp_files")]), - ]) + wf.connect( + [ + # For MNI2subject + (nii_to_h5_node_MNI2subject, warp_files_list_MNI2subject, [("xfm_h5_out", "file1")]), + (input_node, warp_files_list_MNI2subject, [("MNI2009cAsym_to_MNINLin6", "file2")]), + (warp_files_list_MNI2subject, compose_transforms_node_MNI2subject, [("combined_files", "warp_files")]), + # For subject2MNI + (input_node, warp_files_list_subject2MNI, [("MNINLin6_to_MNI2009cAsym", "file1")]), + (nii_to_h5_node_subject2MNI, warp_files_list_subject2MNI, [("xfm_h5_out", "file2")]), + (warp_files_list_subject2MNI, compose_transforms_node_subject2MNI, [("combined_files", "warp_files")]), + ] + ) # Then Subject --> MNI - #compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") - #compose_transforms_node_subject2MNI.inputs.warp_files = [MNINLin6_to_MNI2009cAsym, subject_layout["bids_subject2MNI"]] - #compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( + # compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") + # compose_transforms_node_subject2MNI.inputs.warp_files = [MNINLin6_to_MNI2009cAsym, subject_layout["bids_subject2MNI"]] + # compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( # MNI_template, "MNI152NLin2009cAsym" - #) + # ) return wf From fd9082226e1dcbe004ccfb588e2fce38a1946955 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Wed, 4 Sep 2024 11:47:04 -0400 Subject: [PATCH 7/9] clean and READMA --- README.md | 50 +++++++++-------------------- ingress2qsirecon/utils/workflows.py | 7 ---- 2 files changed, 15 insertions(+), 42 deletions(-) diff --git a/README.md b/README.md index acd9a5b..3ddd4b8 100644 --- a/README.md +++ b/README.md @@ -2,45 +2,25 @@ A tool for ingressing outputs from other processing pipelines (e.g., HCP, UKB) for use in QSIRecon. ## Overview -Your description here +This tool can be used to import data from bespoke and widely-used DWI preprocessing pipelines (e.g., Human Connectome Project and UK Biobank) into QSIRecon for post-processing. -## Usage -This project is set up using poetry. 
To install the dependencies, run `poetry install` from the root of the project.
-
-```shell
-poetry install
+## Installation
+This project can be installed from PyPI. In your virtual environment, run:
 ```
-
-To add a new dependency, run `poetry add ` from the root of the project.
-
-```shell
-poetry add 
+pip install Ingress2QSIRecon
 ```
-
-### Pre-Commit Hooks
-This project uses [pre-commit](https://pre-commit.com/) to run linting and formatting tools before each commit. To install the pre-commit hooks, run `pre-commit install` from the root of the project.
-
-```shell
-poetry run pre-commit install
+For the master/development branch, you can clone the GitHub repo and run from within it:
 ```
-
-To run the pre-commit hooks manually, run `pre-commit run --all-files` from the root of the project.
-
-```shell
-poetry run pre-commit run --all-files
+pip install -e .
 ```
-
-### Testing
-This project uses [pytest](https://docs.pytest.org/en/stable/) for testing. To run the tests, run `pytest` from the root of the project in the poetry shell.
-
-```shell
-poetry run pytest
-```
-
-There are sensible defaults for pytest setup in the `pyproject.toml` file. You can override these defaults by passing in command line arguments. For example, to run the tests with debug logging enabled, run `pytest --log-cli-level=DEBUG` from the root of the project.
-
-```shell
-poetry run pytest --log-cli-level=DEBUG
+## Usage
+Assuming you have the data to ingress locally available, you can run the following:
 ```
-
+ingress2qsirecon \
+    /PATH/TO/INPUT/DATA \ # E.g., path to HCP1200 directory
+    /PATH/TO/OUTPUT \ # BIDS directory with ingressed data will be made here
+    PIPELINE_NAME \ # Currently supports "hcpya" and "ukb"
+    -w /PATH/TO/WORKING/DIR \ # Where intermediate files and workflow information will be stored
+    --participant-label 100307 # Name(s) of folder within /PATH/TO/INPUT/DATA. 
If not specified, will process everyone +``` \ No newline at end of file diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index d0189cd..394060d 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -280,13 +280,6 @@ def combine_warp_files(file1, file2): ] ) - # Then Subject --> MNI - # compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI") - # compose_transforms_node_subject2MNI.inputs.warp_files = [MNINLin6_to_MNI2009cAsym, subject_layout["bids_subject2MNI"]] - # compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace( - # MNI_template, "MNI152NLin2009cAsym" - # ) - return wf From 0307b2f842a145ab6c7d5b261720f6ae99853ee6 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Wed, 4 Sep 2024 14:30:38 -0400 Subject: [PATCH 8/9] .b and .bmtxt --- ingress2qsirecon/utils/functions.py | 2 + ingress2qsirecon/utils/interfaces.py | 119 ++++++++++++++++++++++----- ingress2qsirecon/utils/workflows.py | 26 +++++- 3 files changed, 123 insertions(+), 24 deletions(-) diff --git a/ingress2qsirecon/utils/functions.py b/ingress2qsirecon/utils/functions.py index 38eeeb7..90fd6c8 100644 --- a/ingress2qsirecon/utils/functions.py +++ b/ingress2qsirecon/utils/functions.py @@ -111,6 +111,7 @@ def make_bids_file_paths(subject_layout: dict) -> dict: bids_bval_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bval") bids_bvec_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bvec") bids_b_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.b") + bids_bmtxt_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bmtxt") bids_dwiref_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwiref.nii.gz") bids_file_paths = { @@ -118,6 +119,7 @@ def make_bids_file_paths(subject_layout: dict) -> dict: "bids_bvals": Path(bids_bval_file), "bids_bvecs": Path(bids_bvec_file), "bids_b": Path(bids_b_file), + "bids_bmtxt": Path(bids_bmtxt_file), "bids_dwiref": Path(bids_dwiref_file), } diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index f6c4dea..2e423bc 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -481,27 +481,6 @@ class _ComposeTransformsOutputSpec(TraitedSpec): output_warp = File(exists=True, desc="Output composed warp file") -class ComposeTransforms2(BaseInterface): - - input_spec = _ComposeTransformsInputSpec - output_spec = _ComposeTransformsOutputSpec - - def _run_interface(self, runtime): - # Create a CompositeTransform object - - # Iterate over the list of warp files and add them to the composite transform - output_warp = os.path.abspath(self.inputs.output_warp) - transforms = [itk.transformread(warp_file) for warp_file in self.inputs.warp_files] - itk.transformwrite(transforms, output_warp) - - return runtime - - def _list_outputs(self): - outputs = self._outputs().get() - outputs["output_warp"] = os.path.abspath(self.inputs.output_warp) - return outputs - - # Define the custom Nipype interface class ComposeTransforms(BaseInterface): @@ -532,3 +511,101 @@ def _list_outputs(self): outputs = self._outputs().get() outputs["output_warp"] = os.path.abspath(self.inputs.output_warp) return outputs + + +class _FSLBVecsToTORTOISEBmatrixInputSpec(BaseInterfaceInputSpec): + bvals_file = File(exists=True, desc='Full path to bvals file', mandatory=True) + bvecs_file = File(exists=True, desc='Full path to bvecs 
file', mandatory=True) + bmtxt_file = File(mandatory=True, desc='Output B-matrix file', genfile=True) + + +class _FSLBVecsToTORTOISEBmatrixOutputSpec(TraitedSpec): + bmtxt_file = File(exists=True, genfile=True, desc='Output B-matrix file') + + +class FSLBVecsToTORTOISEBmatrix(BaseInterface): + input_spec = _FSLBVecsToTORTOISEBmatrixInputSpec + output_spec = _FSLBVecsToTORTOISEBmatrixOutputSpec + + def _run_interface(self, runtime): + bvals_file = self.inputs.bvals_file + bvecs_file = self.inputs.bvecs_file + + # Load bvals and bvecs + try: + bvals = np.loadtxt(bvals_file) + except OSError: + raise RuntimeError(f"Bvals file does not exist: {bvals_file}") + + try: + bvecs = np.loadtxt(bvecs_file) + except OSError: + raise RuntimeError(f"Bvecs file does not exist: {bvecs_file}") + + # Ensure bvecs has 3 rows and bvals has 1 row + if bvecs.shape[0] != 3: + bvecs = bvecs.T + if bvals.shape[0] != 1: + bvals = bvals.reshape(1, -1) + + Nvols = bvecs.shape[1] + Bmatrix = np.zeros((Nvols, 6)) + + for i in range(Nvols): + vec = bvecs[:, i].reshape(3, 1) + nrm = np.linalg.norm(vec) + if nrm > 1e-3: + vec /= nrm + + mat = bvals[0, i] * np.dot(vec, vec.T) + Bmatrix[i, 0] = mat[0, 0] + Bmatrix[i, 1] = 2 * mat[0, 1] + Bmatrix[i, 2] = 2 * mat[0, 2] + Bmatrix[i, 3] = mat[1, 1] + Bmatrix[i, 4] = 2 * mat[1, 2] + Bmatrix[i, 5] = mat[2, 2] + + dirname = os.path.dirname(bvals_file) + if dirname == "": + dirname = "." + + np.savetxt(self.inputs.bmtxt_file, Bmatrix) + + return runtime + + def _list_outputs(self): + outputs = self._outputs().get() + outputs['bmtxt_file'] = self.inputs.bmtxt_file + return outputs + + +class _MRTrixGradientTableInputSpec(BaseInterfaceInputSpec): + bval_file = File(exists=True, mandatory=True) + bvec_file = File(exists=True, mandatory=True) + b_file_out = File(genfile=True, mandatory=True) + + +class _MRTrixGradientTableOutputSpec(TraitedSpec): + b_file_out = File(exists=True) + + +class MRTrixGradientTable(BaseInterface): + input_spec = _MRTrixGradientTableInputSpec + output_spec = _MRTrixGradientTableOutputSpec + + def _run_interface(self, runtime): + _convert_fsl_to_mrtrix(self.inputs.bval_file, self.inputs.bvec_file, self.inputs.b_file_out) + return runtime + + def _list_outputs(self): + outputs = self.output_spec().get() + # Return the output filename which Nipype generates + outputs['b_file_out'] = self.inputs.b_file_out + return outputs + + +def _convert_fsl_to_mrtrix(bval_file, bvec_file, output_fname): + vecs = np.loadtxt(bvec_file) + vals = np.loadtxt(bval_file) + gtab = np.column_stack([vecs.T, vals]) * np.array([-1, -1, 1, 1]) + np.savetxt(output_fname, gtab, fmt=["%.8f", "%.8f", "%.8f", "%d"]) diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index 394060d..c03d91a 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -16,6 +16,8 @@ ConformDwi, ConvertWarpfield, ExtractB0s, + FSLBVecsToTORTOISEBmatrix, + MRTrixGradientTable, NIFTItoH5, ) @@ -100,7 +102,9 @@ def create_single_subject_wf(subject_layout): # Create node to conform DWI and save to BIDS layout conform_dwi_node = Node(ConformDwi(), name='conform_dwi') - + # Create node to make b-matrix and bfile from FSL bval/bvec + create_bmatrix_node = Node(FSLBVecsToTORTOISEBmatrix(), name="create_bmatrix") + create_bfile_node = Node(MRTrixGradientTable(), name="create_bfile") # Connect nodes wf.connect( [ @@ -117,6 +121,24 @@ def create_single_subject_wf(subject_layout): ("bids_bvecs", "bvec_out_file"), ], ), + ( + conform_dwi_node, + 
create_bmatrix_node, + [ + ("bval_out_file", "bvals_file"), + ("bvec_out_file", "bvecs_file"), + ], + ), + (parse_layout_node, create_bmatrix_node, [("bids_bmtxt", "bmtxt_file")]), + ( + conform_dwi_node, + create_bfile_node, + [ + ("bval_out_file", "bval_file"), + ("bvec_out_file", "bvec_file"), + ], + ), + (parse_layout_node, create_bfile_node, [("bids_b", "b_file_out")]), ] ) @@ -161,8 +183,6 @@ def create_single_subject_wf(subject_layout): ] ) - # Write out .b and .bmtxt (DIPY standard) - # If subject does not have DWIREF, run node to extract mean b0 if "dwiref" not in subject_layout.keys(): create_dwiref_node = Node(ExtractB0s(), name="create_dwiref") From bd2bcd3eb6af07c7cba9067722bdd8aa19769327 Mon Sep 17 00:00:00 2001 From: Steven Meisler Date: Wed, 4 Sep 2024 15:09:19 -0400 Subject: [PATCH 9/9] cleaning, dependencies --- ingress2qsirecon/utils/interfaces.py | 38 ++++++++++++++++++++++++++-- ingress2qsirecon/utils/workflows.py | 1 - pyproject.toml | 4 ++- 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/ingress2qsirecon/utils/interfaces.py b/ingress2qsirecon/utils/interfaces.py index 2e423bc..e41386c 100644 --- a/ingress2qsirecon/utils/interfaces.py +++ b/ingress2qsirecon/utils/interfaces.py @@ -3,11 +3,10 @@ """ import os +import pandas as pd import shutil -import tempfile from textwrap import indent -import itk import nibabel as nb import numpy as np import SimpleITK as sitk @@ -609,3 +608,38 @@ def _convert_fsl_to_mrtrix(bval_file, bvec_file, output_fname): vals = np.loadtxt(bval_file) gtab = np.column_stack([vecs.T, vals]) * np.array([-1, -1, 1, 1]) np.savetxt(output_fname, gtab, fmt=["%.8f", "%.8f", "%.8f", "%d"]) + +class _ScansTSVWriterInputSpec(BaseInterfaceInputSpec): + filenames = traits.List(traits.Str, mandatory=True, desc="List of filenames") + source_files = traits.List(traits.Str, mandatory=True, desc="List of source files") + out_file = File("output.tsv", usedefault=True, desc="Output TSV file") + +class _ScansTSVWriterOutputSpec(TraitedSpec): + out_file = File(desc="Output TSV file") + +class ScansTSVWriter(BaseInterface): + input_spec = _ScansTSVWriterInputSpec + output_spec = _ScansTSVWriterOutputSpec + + def _run_interface(self, runtime): + filenames = self.inputs.filenames + source_files = self.inputs.source_files + + # Check if lengths match + if len(filenames) != len(source_files): + raise ValueError("filenames and source_files must have the same length") + + # Create DataFrame + df = pd.DataFrame({ + "filename": filenames, + "source_file": source_files + }) + + # Write to TSV + df.to_csv(self.inputs.out_file, sep='\t', index=False) + return runtime + + def _list_outputs(self): + outputs = self.output_spec().get().copy() + outputs['out_file'] = self.inputs.out_file + return outputs \ No newline at end of file diff --git a/ingress2qsirecon/utils/workflows.py b/ingress2qsirecon/utils/workflows.py index c03d91a..a5e6a6d 100644 --- a/ingress2qsirecon/utils/workflows.py +++ b/ingress2qsirecon/utils/workflows.py @@ -3,7 +3,6 @@ """ import os -import shutil from pathlib import Path from nipype.pipeline.engine import Workflow diff --git a/pyproject.toml b/pyproject.toml index 5b3dd9b..db57b4c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "Ingress2QSIRecon" -version = "0.1.0" +version = "0.1.1" description = "Tool to ingress data from other pipelines for use in QSIRecon" authors = ["Steven Meisler "] readme = "README.md" @@ -18,9 +18,11 @@ loguru = "^0.7.2" nibabel = "^5.2.1" nilearn = "^0.10.4" nipype = "^1.8.6" 
+niworkflows = "^1.11.0" pydantic = "^2.8" python = "^3.10" SimpleITK = "^2.4.0" +templateflow = "^24.2.0" [tool.poetry.urls] homepage = "https://github.com/PennLINC/ingress2qsirecon"