Skip to content

Commit

Permalink
Merge pull request #28 from poldracklab/neuroscout
Browse files Browse the repository at this point in the history
Neuroscout changes / pybids updates
  • Loading branch information
effigies authored May 21, 2018
2 parents b8cf455 + 2d42118 commit 1c41dd0
Show file tree
Hide file tree
Showing 6 changed files with 99 additions and 49 deletions.
27 changes: 16 additions & 11 deletions fitlins/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
from scipy import stats as sps
import pandas as pd
from nilearn import plotting as nlp
import nistats.reporting # noqa: F401
from nistats import second_level_model as level2

import pkg_resources as pkgr
Expand Down Expand Up @@ -66,9 +65,16 @@ def expand_contrast_matrix(contrast_matrix, design_matrix):


def init(model_fname, bids_dir, preproc_dir):
    """Build and set up a BIDS-model analysis over one or two BIDS trees.

    Parameters
    ----------
    model_fname : str
        Path to the BIDS-Model JSON specification.
    bids_dir : str
        Root of the raw BIDS dataset.
    preproc_dir : str or None
        Root of the preprocessed derivatives; when ``None``, only
        ``bids_dir`` is indexed.

    Returns
    -------
    The configured ``ba.Analysis`` object, already ``setup()`` and with
    fitlins' path patterns prepended to its layout.
    """
    # Index the derivatives tree alongside the raw dataset only when one
    # was supplied; otherwise fall back to pybids' default configuration.
    if preproc_dir is not None:
        config = [('bids', [bids_dir, preproc_dir]),
                  ('derivatives', preproc_dir)]
    else:
        config = None

    layout = grabbids.BIDSLayout(bids_dir, config=config)

    analysis = ba.Analysis(model=model_fname, layout=layout)
    analysis.setup()
    # Prepend our patterns so fitlins naming wins over pybids defaults
    # when building output paths.
    analysis.layout.path_patterns[:0] = PATH_PATTERNS
    return analysis

Expand All @@ -79,13 +85,11 @@ def second_level(analysis, block, space, deriv_dir):
config=['bids', 'derivatives',
pkgr.resource_filename('fitlins', 'data/fitlins.json')])
fl_layout.path_patterns[:0] = PATH_PATTERNS

analyses = []

# pybids likes to give us a lot of extraneous columns
cnames = [contrast['name'] for contrast in block.contrasts]
fmri_glm = level2.SecondLevelModel()
for contrasts, idx, ents in block.get_contrasts(names=cnames):

for contrasts, idx, ents in block.get_contrasts():
if contrasts.empty:
continue

Expand Down Expand Up @@ -152,13 +156,14 @@ def second_level(analysis, block, space, deriv_dir):
paradigm = pd.DataFrame(cols)

fmri_glm.fit(data, design_matrix=paradigm)
stat_type = [c['type'] for c in block.contrasts if c['name'] == contrast][0]
stat_type = [c['type'] for c in block.contrasts if c['name'] == contrast] or ['T']
stat_type = stat_type[0]
stat = fmri_glm.compute_contrast(
cname,
second_level_stat_type={'T': 't', 'F': 'F'}[stat_type],
)
data = stat.get_data()
masked_vals = data[data != 0]
stat_data = stat.get_data()
masked_vals = stat_data[stat_data != 0]
if np.isnan(masked_vals).all():
raise ValueError("nistats was unable to perform this contrast")
stat.to_filename(stat_fname)
Expand Down
35 changes: 25 additions & 10 deletions fitlins/cli/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import os
import os.path as op
import time
import json
import logging
import warnings
from argparse import ArgumentParser
Expand Down Expand Up @@ -72,16 +73,20 @@ def get_parser():
parser.add_argument('-v', '--version', action='version', version=verstr)

g_bids = parser.add_argument_group('Options for filtering BIDS queries')
g_bids.add_argument('--participant-label', action='store', nargs='+', default=[],
g_bids.add_argument('--participant-label', action='store', nargs='+', default=None,
help='one or more participant identifiers (the sub- prefix can be '
'removed)')
g_bids.add_argument('-m', '--model', action='store', default='model.json',
help='location of BIDS model description (default bids_dir/model.json)')
g_bids.add_argument('-p', '--preproc-dir', action='store', default='fmriprep',
help='location of preprocessed data (default output_dir/fmriprep)')
g_bids.add_argument('-p', '--preproc-dir', action='store', default=None,
help='location of preprocessed data (default bids_dir/fmriprep)')
g_bids.add_argument('--space', action='store',
choices=['MNI152NLin2009cAsym'], default='MNI152NLin2009cAsym',
help='registered space of input datasets')
g_bids.add_argument('--include', action='store', default=None,
help='regex pattern to include files')
g_bids.add_argument('--exclude', action='store', default=None,
help='regex pattern to exclude files')

g_perfm = parser.add_argument_group('Options to handle performance')
g_perfm.add_argument('--debug', action='store_true', default=False,
Expand Down Expand Up @@ -109,11 +114,14 @@ def create_workflow(opts):

# First check that bids_dir looks like a BIDS folder
bids_dir = op.abspath(opts.bids_dir)
subject_list = bids.collect_participants(
bids_dir, participant_label=opts.participant_label)

if opts.participant_label is not None:
subject_list = bids.collect_participants(
bids_dir, participant_label=opts.participant_label)
else:
subject_list = opts.participant_label

output_dir = op.abspath(opts.output_dir)
os.makedirs(output_dir, exist_ok=True)

# Build main workflow
logger.log(25, INIT_MSG(
Expand All @@ -124,19 +132,26 @@ def create_workflow(opts):
model = default_path(opts.model, bids_dir, 'model.json')
if opts.model in (None, 'default') and not os.path.exists(model):
model = 'default'
preproc_dir = default_path(opts.preproc_dir, output_dir, 'fmriprep')
deriv_dir = op.join(output_dir, 'fitlins')
os.makedirs(deriv_dir, exist_ok=True)

desc = op.join(deriv_dir, 'dataset_description.json')
with open(desc, 'w') as fobj:
json.dump({'Name': 'FitLins output', 'BIDSVersion': '1.1.0'}, fobj)

# BIDS-Apps prefers 'participant', BIDS-Model prefers 'subject'
level = 'subject' if opts.analysis_level == 'participant' else opts.analysis_level

fitlins_wf = init_fitlins_wf(bids_dir, preproc_dir, deriv_dir, opts.space, model,
subject_list, base_dir=opts.work_dir)
fitlins_wf = init_fitlins_wf(
bids_dir, opts.preproc_dir, deriv_dir, opts.space, model=model,
participants=subject_list, base_dir=opts.work_dir,
include_pattern=opts.include, exclude_pattern=opts.exclude
)

try:
fitlins_wf.run(plugin='MultiProc')
if model != 'default':
retcode = run_model(model, opts.space, level, bids_dir, preproc_dir,
retcode = run_model(model, opts.space, level, bids_dir, opts.preproc_dir,
deriv_dir)
else:
retcode = 0
Expand Down
55 changes: 36 additions & 19 deletions fitlins/interfaces/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,9 +71,9 @@ def _ensure_model(model):


class ModelSpecLoaderInputSpec(BaseInterfaceInputSpec):
bids_dirs = InputMultiPath(Directory(exists=True),
mandatory=True,
desc='BIDS dataset root directories')
bids_dir = Directory(exists=True,
mandatory=True,
desc='BIDS dataset root directory')
model = traits.Either('default', InputMultiPath(File(exists=True)),
desc='Model filename')
selectors = traits.Dict(desc='Limit models to those with matching inputs')
Expand All @@ -90,7 +90,7 @@ class ModelSpecLoader(SimpleInterface):
def _run_interface(self, runtime):
models = self.inputs.model
if not isinstance(models, list):
layout = gb.BIDSLayout(self.inputs.bids_dirs)
layout = gb.BIDSLayout(self.inputs.bids_dir)

if not isdefined(models):
models = layout.get(type='model')
Expand All @@ -113,9 +113,11 @@ def _run_interface(self, runtime):


class LoadLevel1BIDSModelInputSpec(BaseInterfaceInputSpec):
bids_dirs = InputMultiPath(Directory(exists=True),
mandatory=True,
desc='BIDS dataset root directories')
bids_dir = Directory(exists=True,
mandatory=True,
desc='BIDS dataset root directory')
preproc_dir = Directory(exists=True,
desc='Optional preprocessed files directory')
model = traits.Dict(desc='Model specification', mandatory=True)
selectors = traits.Dict(desc='Limit collected sessions', usedefault=True)
include_pattern = InputMultiPath(
Expand All @@ -138,18 +140,23 @@ class LoadLevel1BIDSModel(SimpleInterface):

def _run_interface(self, runtime):
include = self.inputs.include_pattern
exclude = self.inputs.include_pattern
exclude = self.inputs.exclude_pattern
if not isdefined(include):
include = None
if not isdefined(exclude):
exclude = None
layout = gb.BIDSLayout(self.inputs.bids_dirs, include=include,
exclude=exclude)

if isdefined(self.inputs.preproc_dir):
config = [('bids', [self.inputs.bids_dir, self.inputs.preproc_dir]),
('derivatives', self.inputs.preproc_dir)]
else:
config = None
layout = gb.BIDSLayout(self.inputs.bids_dir, config=config,
include=include, exclude=exclude)

selectors = self.inputs.selectors

analysis = ba.Analysis(model=self.inputs.model, layout=layout)
selectors.update(analysis.model['input'])
analysis.setup(**selectors)
block = analysis.blocks[0]

Expand Down Expand Up @@ -220,9 +227,11 @@ def _run_interface(self, runtime):


class BIDSSelectInputSpec(BaseInterfaceInputSpec):
bids_dirs = InputMultiPath(Directory(exists=True),
mandatory=True,
desc='BIDS dataset root directories')
bids_dir = Directory(exists=True,
mandatory=True,
desc='BIDS dataset root directories')
preproc_dir = Directory(exists=True,
desc='Optional preprocessed files directory')
entities = InputMultiPath(traits.Dict(), mandatory=True)
selectors = traits.Dict(desc='Additional selectors to be applied',
usedefault=True)
Expand All @@ -239,7 +248,13 @@ class BIDSSelect(SimpleInterface):
output_spec = BIDSSelectOutputSpec

def _run_interface(self, runtime):
layout = gb.BIDSLayout(self.inputs.bids_dirs)
if isdefined(self.inputs.preproc_dir):
config = [('bids', [self.inputs.bids_dir, self.inputs.preproc_dir]),
('derivatives', self.inputs.preproc_dir)]
else:
config = None
layout = gb.BIDSLayout(self.inputs.bids_dir, config=config)

bold_files = []
mask_files = []
entities = []
Expand All @@ -250,23 +265,23 @@ def _run_interface(self, runtime):
if len(bold_file) == 0:
raise FileNotFoundError(
"Could not find BOLD file in {} with entities {}"
"".format(self.inputs.bids_dirs, selectors))
"".format(self.inputs.bids_dir, selectors))
elif len(bold_file) > 1:
raise ValueError(
"Non-unique BOLD file in {} with entities {}.\n"
"Matches:\n\t{}"
"".format(self.inputs.bids_dirs, selectors,
"".format(self.inputs.bids_dir, selectors,
"\n\t".join(
'{} ({})'.format(
f.filename,
layout.files[f.filename].entities)
for f in bold_file)))

# Select exactly matching mask file (may be over-cautious)
bold_ents = layout.parse_file_entities(bold_file[0].filename)
bold_ents = layout.parse_file_entities(
bold_file[0].filename)
bold_ents['type'] = 'brainmask'
mask_file = layout.get(extensions=['.nii', '.nii.gz'], **bold_ents)

bold_ents.pop('type')

bold_files.append(bold_file[0].filename)
Expand Down Expand Up @@ -330,6 +345,8 @@ class BIDSDataSink(IOBase):
input_spec = BIDSDataSinkInputSpec
output_spec = BIDSDataSinkOutputSpec

_always_run=True

def _list_outputs(self):
base_dir = self.inputs.base_directory

Expand Down
2 changes: 1 addition & 1 deletion fitlins/viz/reports.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def parse_directory(deriv_dir, analysis):
'contrasts_svg': contrast_svg.filename,
}
if ents.get('subject'):
job_desc['subject_id']: ents.get('subject')
job_desc['subject_id'] = ents.get('subject')
if correlation_matrix:
job_desc['correlation_matrix_svg'] = correlation_matrix[0].filename
if design_matrix:
Expand Down
25 changes: 19 additions & 6 deletions fitlins/workflows/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,46 @@
from ..interfaces.nistats import FirstLevelModel


def init_fitlins_wf(bids_dir, preproc_dir, out_dir, space,
model=None, participants='.*',
def init_fitlins_wf(bids_dir, preproc_dir, out_dir, space, exclude_pattern=None,
include_pattern=None, model=None, participants=None,
base_dir=None, name='fitlins_wf'):
wf = pe.Workflow(name=name, base_dir=base_dir)

specs = ModelSpecLoader(bids_dirs=bids_dir)
specs = ModelSpecLoader(bids_dir=bids_dir)
if model is not None:
specs.inputs.model = model

all_models = specs.run().outputs.model_spec
if not all_models:
raise RuntimeError("Unable to find or construct models")

selectors = {'subject': participants} if participants is not None else {}

loader = pe.Node(
LoadLevel1BIDSModel(bids_dirs=[bids_dir, preproc_dir],
selectors={'subject': participants}),
LoadLevel1BIDSModel(bids_dir=bids_dir,
selectors=selectors),
name='loader')

if preproc_dir is not None:
loader.inputs.preproc_dir = preproc_dir
if exclude_pattern is not None:
loader.inputs.exclude_pattern = exclude_pattern
if include_pattern is not None:
loader.inputs.include_pattern = include_pattern

if isinstance(all_models, list):
loader.iterables = ('model', all_models)
else:
loader.inputs.model = all_models

getter = pe.Node(
BIDSSelect(bids_dirs=preproc_dir,
BIDSSelect(bids_dir=bids_dir,
selectors={'type': 'preproc', 'space': space}),
name='getter')

if preproc_dir is not None:
getter.inputs.preproc_dir = preproc_dir

flm = pe.MapNode(
FirstLevelModel(),
iterfield=['session_info', 'contrast_info', 'bold_file', 'mask_file'],
Expand Down
4 changes: 2 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ seaborn>=0.7.1
numpy>=1.11
nilearn>=0.4.0
pandas>=0.19
pybids>=0.5.1
git+https://github.com/grabbles/grabbit.git@3fe38c7e7eb510a38e6c2d072bdc913aaa1b7389#egg=grabbit-0.1.2-dev
git+https://github.com/grabbles/grabbit.git@6fb00e51ebd51f18d280a9f70696d908ff1faec6#egg=grabbit
git+https://github.com/INCF/pybids.git@997f1dd707706580f9a15197c2f42cbe3fdf8c6d#egg=pybids
git+https://github.com/nistats/nistats.git@ce3695e8f34c6f34323766dc96a60a53b69d2729#egg=nistats-0.0.1b-dev

0 comments on commit 1c41dd0

Please sign in to comment.