Merge pull request #2 from PennLINC/enh/conform_metadata
Enh/conform metadata
smeisler authored Sep 4, 2024
2 parents ccd8fd1 + bd2bcd3 commit e229205
Showing 5 changed files with 451 additions and 48 deletions.
50 changes: 15 additions & 35 deletions README.md
@@ -2,45 +2,25 @@
A tool for ingressing outputs from other processing pipelines (e.g., HCP, UKB) for use in QSIRecon.

## Overview
-Your description here
+This tool can be used to import data from bespoke and widely used DWI preprocessing pipelines (e.g., Human Connectome Project and UK Biobank) into QSIRecon for post-processing.

-## Usage
-This project is set up using poetry. To install the dependencies, run `poetry install` from the root of the project.
-
-```shell
-poetry install
-```
-
-To add a new dependency, run `poetry add <dependency>` from the root of the project.
-
-```shell
-poetry add <dependency>
-```
-
-### Pre-Commit Hooks
-This project uses [pre-commit](https://pre-commit.com/) to run linting and formatting tools before each commit. To install the pre-commit hooks, run `pre-commit install` from the root of the project.
-
-```shell
-poetry run pre-commit install
-```
-
-To run the pre-commit hooks manually, run `pre-commit run --all-files` from the root of the project.
-
-```shell
-poetry run pre-commit run --all-files
-```
-
-### Testing
-This project uses [pytest](https://docs.pytest.org/en/stable/) for testing. To run the tests, run `pytest` from the root of the project in the poetry shell.
-
-```shell
-poetry run pytest
-```
-
-There are sensible defaults for pytest setup in the `pyproject.toml` file. You can override these defaults by passing in command line arguments. For example, to run the tests with debug logging enabled, run `pytest --log-cli-level=DEBUG` from the root of the project.
-
-```shell
-poetry run pytest --log-cli-level=DEBUG
-```
+## Installation
+This project can be installed via PyPI. In your virtual environment, run
+
+```
+pip install Ingress2QSIRecon
+```
+
+For the master/development branch, you can clone the GitHub repo and run from within it:
+
+```
+pip install -e .
+```
+
+## Usage
+Assuming you have the data to ingress locally available, you can run the following:
+
+```
+ingress2qsirecon \
+    /PATH/TO/INPUT/DATA \      # E.g., path to HCP1200 directory
+    /PATH/TO/OUTPUT \          # BIDS directory with ingressed data will be made here
+    PIPELINE_NAME \            # Currently supports "hcpya" and "ukb"
+    -w /PATH/TO/WORKING/DIR \  # Where intermediate files and workflow information will be stored
+    --participant-label 100307 # Name(s) of folder(s) within /PATH/TO/INPUT/DATA; if not specified, all subjects are processed
+```
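
Illustration (not part of the diff): the DWI filenames the ingress writes, following the `bids_*` paths assembled in `make_bids_file_paths` below (session entities and anatomical files omitted):

```
/PATH/TO/OUTPUT/sub-100307/dwi/
├── sub-100307_dwi.nii.gz      # conformed DWI series
├── sub-100307_dwi.bval        # FSL b-values
├── sub-100307_dwi.bvec        # FSL b-vectors
├── sub-100307_dwi.b           # MRtrix gradient table
├── sub-100307_dwi.bmtxt       # TORTOISE B-matrix (added in this PR)
└── sub-100307_dwiref.nii.gz   # b=0 reference
```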
20 changes: 20 additions & 0 deletions ingress2qsirecon/utils/functions.py
@@ -12,6 +12,8 @@
from pathlib import Path
from warnings import warn

import nibabel as nb

# Files: file paths relative to subject input folder
# MNI: MNI template version
# DIR_PATTERN: regex pattern for subject ID within input folder
@@ -109,13 +111,15 @@ def make_bids_file_paths(subject_layout: dict) -> dict:
bids_bval_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bval")
bids_bvec_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bvec")
bids_b_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.b")
bids_bmtxt_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwi.bmtxt")
bids_dwiref_file = os.path.join(bids_base, "dwi", sub_session_string + "_dwiref.nii.gz")

bids_file_paths = {
"bids_dwi": Path(bids_dwi_file),
"bids_bvals": Path(bids_bval_file),
"bids_bvecs": Path(bids_bvec_file),
"bids_b": Path(bids_b_file),
"bids_bmtxt": Path(bids_bmtxt_file),
"bids_dwiref": Path(bids_dwiref_file),
}

@@ -228,3 +232,19 @@ def create_layout(input_dir: Path, output_dir: Path, input_pipeline: str, partic
raise ValueError("No subjects found in layout.")

return layout


def to_lps(input_img, new_axcodes=("L", "P", "S")):
if isinstance(input_img, str):
input_img = nb.load(input_img)
input_axcodes = nb.aff2axcodes(input_img.affine)
# Is the input image oriented how we want?
if input_axcodes != new_axcodes:
# Re-orient
input_orientation = nb.orientations.axcodes2ornt(input_axcodes)
desired_orientation = nb.orientations.axcodes2ornt(new_axcodes)
transform_orientation = nb.orientations.ornt_transform(input_orientation, desired_orientation)
reoriented_img = input_img.as_reoriented(transform_orientation)
return reoriented_img
else:
return input_img
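
Illustration (not part of the diff): `to_lps` can be used standalone; the filenames here are hypothetical.

```python
import nibabel as nb

from ingress2qsirecon.utils.functions import to_lps

# Accepts a file path or a loaded image; returns the input unchanged
# if it is already ("L", "P", "S")-oriented.
lps_img = to_lps("sub-100307_dwi.nii.gz")
print(nb.aff2axcodes(lps_img.affine))  # ('L', 'P', 'S')
lps_img.to_filename("sub-100307_desc-lps_dwi.nii.gz")
```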
291 changes: 291 additions & 0 deletions ingress2qsirecon/utils/interfaces.py
@@ -3,6 +3,7 @@
"""

import os
import pandas as pd
import shutil
from textwrap import indent

@@ -12,15 +13,19 @@
from nilearn import image as nim
from nipype import logging
from nipype.interfaces.base import (
BaseInterface,
BaseInterfaceInputSpec,
CommandLineInputSpec,
File,
InputMultiPath,
SimpleInterface,
TraitedSpec,
traits,
)
from nipype.interfaces.workbench.base import WBCommand

from ingress2qsirecon.utils.functions import to_lps

LOGGER = logging.getLogger("nipype.interface")


@@ -352,3 +357,289 @@ def _run_interface(self, runtime):
self._results["b0_average"] = output_mean_fname

return runtime


class _ConformInputSpec(BaseInterfaceInputSpec):
in_file = File(mandatory=True, desc="Input image")
out_file = File(mandatory=True, genfile=True, desc="Conformed image")
target_zooms = traits.Tuple(traits.Float, traits.Float, traits.Float, desc="Target zoom information")
target_shape = traits.Tuple(traits.Int, traits.Int, traits.Int, desc="Target shape information")
deoblique_header = traits.Bool(False, usedefault=True)


class _ConformOutputSpec(TraitedSpec):
out_file = File(exists=True, desc="Conformed image")
# transform = File(exists=True, desc="Conformation transform")
# report = File(exists=True, desc='reportlet about orientation')


class Conform(SimpleInterface):
"""Conform a series of T1w images to enable merging.
Performs two basic functions:
1. Orient to LPS (right-left, anterior-posterior, inferior-superior)
2. Resample to target zooms (voxel sizes) and shape (number of voxels)
"""

input_spec = _ConformInputSpec
output_spec = _ConformOutputSpec

def _run_interface(self, runtime):
# Load image, orient as LPS
fname = self.inputs.in_file
orig_img = nb.load(fname)
reoriented = to_lps(orig_img)

# Set target shape information
target_zooms = np.array(self.inputs.target_zooms)
target_shape = np.array(self.inputs.target_shape)
target_span = target_shape * target_zooms

zooms = np.array(reoriented.header.get_zooms()[:3])
shape = np.array(reoriented.shape[:3])

# Reconstruct transform from orig to reoriented image
ornt_xfm = nb.orientations.inv_ornt_aff(nb.io_orientation(reoriented.affine), orig_img.shape)
# Identity unless proven otherwise
target_affine = reoriented.affine.copy()
conform_xfm = np.eye(4)
# conform_xfm = np.diag([-1, -1, 1, 1])

xyz_unit = reoriented.header.get_xyzt_units()[0]
if xyz_unit == "unknown":
# Common assumption; if we're wrong, unlikely to be the only thing that breaks
xyz_unit = "mm"

# Set a small tolerance (0.01 mm, expressed in the header's spatial unit) below which rescaling is skipped
atol = {"meter": 1e-5, "mm": 0.01, "micron": 10}[xyz_unit]

# Rescale => change zooms
# Resize => update image dimensions
rescale = not np.allclose(zooms, target_zooms, atol=atol)
resize = not np.all(shape == target_shape)
if rescale or resize:
if rescale:
scale_factor = target_zooms / zooms
target_affine[:3, :3] = reoriented.affine[:3, :3].dot(np.diag(scale_factor))

if resize:
# The shift is applied after scaling.
# Use a proportional shift to maintain relative position in dataset
size_factor = target_span / (zooms * shape)
# Use integer shifts to avoid unnecessary interpolation
offset = reoriented.affine[:3, 3] * size_factor - reoriented.affine[:3, 3]
target_affine[:3, 3] = reoriented.affine[:3, 3] + offset.astype(int)

data = nim.resample_img(reoriented, target_affine, target_shape).get_fdata()
conform_xfm = np.linalg.inv(reoriented.affine).dot(target_affine)
reoriented = reoriented.__class__(data, target_affine, reoriented.header)

if self.inputs.deoblique_header:
is_oblique = np.any(np.abs(nb.affines.obliquity(reoriented.affine)) > 0)
if is_oblique:
LOGGER.warning("Removing obliquity from image affine")
new_affine = reoriented.affine.copy()
new_affine[:, :-1] = 0
new_affine[(0, 1, 2), (0, 1, 2)] = reoriented.header.get_zooms()[:3] * np.sign(
reoriented.affine[(0, 1, 2), (0, 1, 2)]
)
reoriented = nb.Nifti1Image(reoriented.get_fdata(), new_affine, reoriented.header)

# Save image
out_name = self.inputs.out_file
reoriented.to_filename(out_name)

# Image may be reoriented, rescaled, and/or resized
if reoriented is not orig_img:

transform = ornt_xfm.dot(conform_xfm)
if not np.allclose(orig_img.affine.dot(transform), target_affine):
LOGGER.warning("Check alignment of anatomical image.")

else:
transform = np.eye(4)

# mat_name = fname_presuffix(fname, suffix=".mat", newpath=runtime.cwd, use_ext=False)
# np.savetxt(mat_name, transform, fmt="%.08f")
# self._results["transform"] = mat_name
self._results["out_file"] = out_name

return runtime
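
Illustration (not part of the diff): a minimal sketch of running `Conform` directly, with hypothetical filenames and target geometry.

```python
from ingress2qsirecon.utils.interfaces import Conform

conform = Conform(
    in_file="sub-100307_T1w.nii.gz",                  # hypothetical input
    out_file="sub-100307_desc-conformed_T1w.nii.gz",  # hypothetical output
    target_zooms=(1.0, 1.0, 1.0),                     # 1 mm isotropic voxels
    target_shape=(256, 256, 256),
)
result = conform.run()  # result.outputs.out_file is the conformed image
```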


# Define the input specification
class _ComposeTransformsInputSpec(BaseInterfaceInputSpec):
warp_files = InputMultiPath(File(exists=True), desc="List of warp files in .h5 format", mandatory=True)
output_warp = File(mandatory=True, genfile=True, desc="Output composed warp file")


# Define the output specification
class _ComposeTransformsOutputSpec(TraitedSpec):
output_warp = File(exists=True, desc="Output composed warp file")


# Define the custom Nipype interface
class ComposeTransforms(BaseInterface):

input_spec = _ComposeTransformsInputSpec
output_spec = _ComposeTransformsOutputSpec

def _run_interface(self, runtime):
# Create a CompositeTransform object
composite_transform = sitk.CompositeTransform(3)

# Iterate over the list of warp files and add them to the composite transform
for warp_file in self.inputs.warp_files:
transform = sitk.ReadTransform(warp_file)
try:
# If composite, add each transform in the list
for i in range(transform.GetNumberOfTransforms()):
composite_transform.AddTransform(transform.GetNthTransform(i))
except Exception:
# If not a composite, just add the single transform
composite_transform.AddTransform(transform)

# Write the composite transform to the output file
sitk.WriteTransform(composite_transform, self.inputs.output_warp)

return runtime

def _list_outputs(self):
outputs = self._outputs().get()
outputs["output_warp"] = os.path.abspath(self.inputs.output_warp)
return outputs


class _FSLBVecsToTORTOISEBmatrixInputSpec(BaseInterfaceInputSpec):
bvals_file = File(exists=True, desc='Full path to bvals file', mandatory=True)
bvecs_file = File(exists=True, desc='Full path to bvecs file', mandatory=True)
bmtxt_file = File(mandatory=True, desc='Output B-matrix file', genfile=True)


class _FSLBVecsToTORTOISEBmatrixOutputSpec(TraitedSpec):
bmtxt_file = File(exists=True, genfile=True, desc='Output B-matrix file')


class FSLBVecsToTORTOISEBmatrix(BaseInterface):
input_spec = _FSLBVecsToTORTOISEBmatrixInputSpec
output_spec = _FSLBVecsToTORTOISEBmatrixOutputSpec

def _run_interface(self, runtime):
bvals_file = self.inputs.bvals_file
bvecs_file = self.inputs.bvecs_file

# Load bvals and bvecs
try:
bvals = np.loadtxt(bvals_file)
except OSError:
raise RuntimeError(f"Bvals file does not exist: {bvals_file}")

try:
bvecs = np.loadtxt(bvecs_file)
except OSError:
raise RuntimeError(f"Bvecs file does not exist: {bvecs_file}")

# Ensure bvecs has 3 rows and bvals has 1 row
if bvecs.shape[0] != 3:
bvecs = bvecs.T
if bvals.shape[0] != 1:
bvals = bvals.reshape(1, -1)

Nvols = bvecs.shape[1]
Bmatrix = np.zeros((Nvols, 6))

for i in range(Nvols):
vec = bvecs[:, i].reshape(3, 1)
nrm = np.linalg.norm(vec)
if nrm > 1e-3:
vec /= nrm

mat = bvals[0, i] * np.dot(vec, vec.T)
Bmatrix[i, 0] = mat[0, 0]
Bmatrix[i, 1] = 2 * mat[0, 1]
Bmatrix[i, 2] = 2 * mat[0, 2]
Bmatrix[i, 3] = mat[1, 1]
Bmatrix[i, 4] = 2 * mat[1, 2]
Bmatrix[i, 5] = mat[2, 2]

dirname = os.path.dirname(bvals_file)
if dirname == "":
dirname = "."

np.savetxt(self.inputs.bmtxt_file, Bmatrix)

return runtime

def _list_outputs(self):
outputs = self._outputs().get()
outputs['bmtxt_file'] = self.inputs.bmtxt_file
return outputs


class _MRTrixGradientTableInputSpec(BaseInterfaceInputSpec):
bval_file = File(exists=True, mandatory=True)
bvec_file = File(exists=True, mandatory=True)
b_file_out = File(genfile=True, mandatory=True)


class _MRTrixGradientTableOutputSpec(TraitedSpec):
b_file_out = File(exists=True)


class MRTrixGradientTable(BaseInterface):
input_spec = _MRTrixGradientTableInputSpec
output_spec = _MRTrixGradientTableOutputSpec

def _run_interface(self, runtime):
_convert_fsl_to_mrtrix(self.inputs.bval_file, self.inputs.bvec_file, self.inputs.b_file_out)
return runtime

def _list_outputs(self):
outputs = self.output_spec().get()
# Return the output filename which Nipype generates
outputs['b_file_out'] = self.inputs.b_file_out
return outputs


def _convert_fsl_to_mrtrix(bval_file, bvec_file, output_fname):
vecs = np.loadtxt(bvec_file)
vals = np.loadtxt(bval_file)
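# Flip the x and y gradient components: the DWI data are conformed to LPS+,
# so FSL-convention bvecs are negated along those axes for the MRtrix table
# (sign rationale assumed from the LPS+ conform step above).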
gtab = np.column_stack([vecs.T, vals]) * np.array([-1, -1, 1, 1])
np.savetxt(output_fname, gtab, fmt=["%.8f", "%.8f", "%.8f", "%d"])

class _ScansTSVWriterInputSpec(BaseInterfaceInputSpec):
filenames = traits.List(traits.Str, mandatory=True, desc="List of filenames")
source_files = traits.List(traits.Str, mandatory=True, desc="List of source files")
out_file = File("output.tsv", usedefault=True, desc="Output TSV file")

class _ScansTSVWriterOutputSpec(TraitedSpec):
out_file = File(desc="Output TSV file")

class ScansTSVWriter(BaseInterface):
input_spec = _ScansTSVWriterInputSpec
output_spec = _ScansTSVWriterOutputSpec

def _run_interface(self, runtime):
filenames = self.inputs.filenames
source_files = self.inputs.source_files

# Check if lengths match
if len(filenames) != len(source_files):
raise ValueError("filenames and source_files must have the same length")

# Create DataFrame
df = pd.DataFrame({
"filename": filenames,
"source_file": source_files
})

# Write to TSV
df.to_csv(self.inputs.out_file, sep='\t', index=False)
return runtime

def _list_outputs(self):
outputs = self.output_spec().get().copy()
outputs['out_file'] = self.inputs.out_file
return outputs
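
Illustration (not part of the diff): a worked check of the B-matrix row that `FSLBVecsToTORTOISEBmatrix` computes for a single gradient.

```python
import numpy as np

b = 1000.0                           # b-value for this volume
g = np.array([[1.0], [0.0], [0.0]])  # unit gradient vector (column)
mat = b * np.dot(g, g.T)             # b times the outer product, as in _run_interface
row = [mat[0, 0], 2 * mat[0, 1], 2 * mat[0, 2],
       mat[1, 1], 2 * mat[1, 2], mat[2, 2]]  # upper triangle, off-diagonals doubled
print(row)  # [1000.0, 0.0, 0.0, 0.0, 0.0, 0.0]
```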
134 changes: 122 additions & 12 deletions ingress2qsirecon/utils/workflows.py
@@ -3,15 +3,20 @@
"""

import os
-import shutil
from pathlib import Path

from nipype.pipeline.engine import Workflow
+from niworkflows.interfaces.images import TemplateDimensions
+from templateflow import api as tflow

from ingress2qsirecon.utils.interfaces import (
+    ComposeTransforms,
+    Conform,
    ConformDwi,
    ConvertWarpfield,
    ExtractB0s,
+    FSLBVecsToTORTOISEBmatrix,
+    MRTrixGradientTable,
    NIFTItoH5,
)

@@ -73,19 +78,15 @@ def create_single_subject_wf(subject_layout):
os.makedirs(Path(bids_base / "anat").resolve())
os.makedirs(Path(bids_base / "dwi").resolve())

# Some files, like anatomicals, can just be copied over to the output BIDS directory LPS+ THESE FILES (ConformImage)
for key in ["t1w_brain", "brain_mask"]:
bids_key = "bids_" + key
if key in subject_layout.keys():
if not os.path.exists(subject_layout[bids_key]):
shutil.copyfile(subject_layout[key], subject_layout[bids_key])

# Create single subject workflow
wf_name = f"ingress2qsirecon_single_subject_{subject_name}_wf"
wf = Workflow(name=wf_name)

# Define input node for the single subject workflow
input_node = Node(IdentityInterface(fields=['subject_layout']), name='input_node')
input_node = Node(
IdentityInterface(fields=['subject_layout', "MNI2009cAsym_to_MNINLin6", "MNINLin6_to_MNI2009cAsym"]),
name='input_node',
)
input_node.inputs.subject_layout = subject_layout

# Create node to parse the input dictionary into its individual components
@@ -100,6 +101,10 @@ def create_single_subject_wf(subject_layout):

# Create node to conform DWI and save to BIDS layout
conform_dwi_node = Node(ConformDwi(), name='conform_dwi')
# Create node to make b-matrix and bfile from FSL bval/bvec
create_bmatrix_node = Node(FSLBVecsToTORTOISEBmatrix(), name="create_bmatrix")
create_bfile_node = Node(MRTrixGradientTable(), name="create_bfile")
# Connect nodes
wf.connect(
[
(input_node, parse_layout_node, [('subject_layout', 'subject_layout')]),
@@ -115,11 +120,67 @@ def create_single_subject_wf(subject_layout):
("bids_bvecs", "bvec_out_file"),
],
),
(
conform_dwi_node,
create_bmatrix_node,
[
("bval_out_file", "bvals_file"),
("bvec_out_file", "bvecs_file"),
],
),
(parse_layout_node, create_bmatrix_node, [("bids_bmtxt", "bmtxt_file")]),
(
conform_dwi_node,
create_bfile_node,
[
("bval_out_file", "bval_file"),
("bvec_out_file", "bvec_file"),
],
),
(parse_layout_node, create_bfile_node, [("bids_b", "b_file_out")]),
]
)
# Write out .b and .bmtxt (DIPY standard)

# LPS+ anatomicals
# Create nodes to conform anatomicals and save to BIDS layout
# TMP: this block was previously gated with `if False` because it did not work yet
if "t1w_brain" in subject_layout.keys():
template_dimensions_node = Node(TemplateDimensions(), name="template_dimensions")
conform_t1w_node = Node(Conform(), name="conform_t1w")
wf.connect(
[
(
parse_layout_node,
template_dimensions_node,
[("t1w_brain", "anat_list")],
),
(
template_dimensions_node,
conform_t1w_node,
[("target_shape", "target_shape"), ("target_zooms", "target_zooms")],
),
(
parse_layout_node,
conform_t1w_node,
[("t1w_brain", "in_file"), ("bids_t1w_brain", "out_file")],
),
]
)
if "brain_mask" in subject_layout.keys():
conform_mask_node = Node(Conform(), name="conform_mask")
wf.connect(
[
(
parse_layout_node,
conform_mask_node,
[("brain_mask", "in_file"), ("bids_brain_mask", "out_file")],
),
(
template_dimensions_node,
conform_mask_node,
[("target_shape", "target_shape"), ("target_zooms", "target_zooms")],
),
]
)

# If subject does not have DWIREF, run node to extract mean b0
if "dwiref" not in subject_layout.keys():
@@ -134,7 +195,7 @@ def create_single_subject_wf(subject_layout):
]
)

-# Convert FNIRT nii warps to ITK nii, then ITK nii to ITK H5, then get to MNI2009cAsym space if needed
+# Convert FNIRT nii warps to ITK nii, then ITK nii to ITK H5
# Start with subject2MNI
if "subject2MNI" in subject_layout.keys():
convert_warpfield_node_subject2MNI = Node(ConvertWarpfield(), name="convert_warpfield_subject2MNI")
@@ -189,6 +250,55 @@ def create_single_subject_wf(subject_layout):
]
)

# Now get transform to MNI2009cAsym
MNI_template = subject_layout["MNI_template"]
if MNI_template == "MNI152NLin6Asym":
# Get the relevant transforms from templateflow
MNI2009cAsym_to_MNINLin6 = tflow.get('MNI152NLin6Asym', desc=None, suffix='xfm', extension='h5')
input_node.inputs.MNI2009cAsym_to_MNINLin6 = MNI2009cAsym_to_MNINLin6
MNINLin6_to_MNI2009cAsym = tflow.get('MNI152NLin2009cAsym', desc=None, suffix='xfm', extension='h5')
input_node.inputs.MNINLin6_to_MNI2009cAsym = MNINLin6_to_MNI2009cAsym

# Define a function to make a list of two warp files for input to ComposeTransforms
def combine_warp_files(file1, file2):
return [file1, file2]

# Create a Function node to make a list of warp files for MNI2subject
warp_files_list_MNI2subject = Node(
Function(input_names=['file1', 'file2'], output_names=['combined_files'], function=combine_warp_files),
name='list_warp_files_MNI2subject',
)

# Create a Function node to make a list of warp files for subject2MNI
warp_files_list_subject2MNI = Node(
Function(input_names=['file1', 'file2'], output_names=['combined_files'], function=combine_warp_files),
name='list_warp_files_subject2MNI',
)

# Make the compute nodes for combining transforms
compose_transforms_node_MNI2subject = Node(ComposeTransforms(), name="compose_transforms_MNI2subject")
compose_transforms_node_MNI2subject.inputs.output_warp = str(subject_layout["bids_MNI2subject"]).replace(
MNI_template, "MNI152NLin2009cAsym"
)
compose_transforms_node_subject2MNI = Node(ComposeTransforms(), name="compose_transforms_subject2MNI")
compose_transforms_node_subject2MNI.inputs.output_warp = str(subject_layout["bids_subject2MNI"]).replace(
MNI_template, "MNI152NLin2009cAsym"
)

# Connect the nodes
wf.connect(
[
# For MNI2subject
(nii_to_h5_node_MNI2subject, warp_files_list_MNI2subject, [("xfm_h5_out", "file1")]),
(input_node, warp_files_list_MNI2subject, [("MNI2009cAsym_to_MNINLin6", "file2")]),
(warp_files_list_MNI2subject, compose_transforms_node_MNI2subject, [("combined_files", "warp_files")]),
# For subject2MNI
(input_node, warp_files_list_subject2MNI, [("MNINLin6_to_MNI2009cAsym", "file1")]),
(nii_to_h5_node_subject2MNI, warp_files_list_subject2MNI, [("xfm_h5_out", "file2")]),
(warp_files_list_subject2MNI, compose_transforms_node_subject2MNI, [("combined_files", "warp_files")]),
]
)

return wf
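
Illustration (not part of the diff): a sketch of driving the factory above, assuming `create_layout` (see `functions.py`) returns one layout dict per subject; its participant-label argument is truncated in this diff, so the call below is an assumption.

```python
from pathlib import Path

from ingress2qsirecon.utils.functions import create_layout
from ingress2qsirecon.utils.workflows import create_single_subject_wf

layouts = create_layout(
    Path("/PATH/TO/INPUT/DATA"), Path("/PATH/TO/OUTPUT"), "hcpya", ["100307"]
)
wf = create_single_subject_wf(layouts[0])
wf.base_dir = "/PATH/TO/WORKING/DIR"  # equivalent to the -w flag in the README
wf.run()
```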


4 changes: 3 additions & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "Ingress2QSIRecon"
version = "0.1.0"
version = "0.1.1"
description = "Tool to ingress data from other pipelines for use in QSIRecon"
authors = ["Steven Meisler <smeisler13@gmail.com>"]
readme = "README.md"
@@ -18,9 +18,11 @@ loguru = "^0.7.2"
nibabel = "^5.2.1"
nilearn = "^0.10.4"
nipype = "^1.8.6"
niworkflows = "^1.11.0"
pydantic = "^2.8"
python = "^3.10"
SimpleITK = "^2.4.0"
templateflow = "^24.2.0"

[tool.poetry.urls]
homepage = "https://github.com/PennLINC/ingress2qsirecon"
