From 829f119bc5f3bad48daa8562a0c51de4e7b5b20c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Boris=20Cl=C3=A9net?= Date: Tue, 6 Feb 2024 14:36:10 +0100 Subject: [PATCH] [PEP8] some linting + including the narps_open/pipelines dir to pylint --- .github/workflows/code_quality.yml | 2 +- narps_open/pipelines/__main__.py | 30 ------ narps_open/pipelines/team_0I4U_debug.py | 1 + narps_open/pipelines/team_1KB2_debug.py | 1 + narps_open/pipelines/team_43FJ_debug.py | 1 + narps_open/pipelines/team_4TQ6_wip.py | 1 + narps_open/pipelines/team_98BT.py | 1 + narps_open/pipelines/team_Q6O0.py | 63 ++++++++---- narps_open/pipelines/team_R9K3_wip.py | 1 + narps_open/pipelines/team_V55J.py | 1 + narps_open/pipelines/team_X19V.py | 1 + tests/conftest.py | 10 ++ tests/core/test_common.py | 7 +- tests/data/test_results.py | 6 +- tests/data/test_task.py | 4 +- tests/pipelines/test_team_08MQ.py | 20 ++-- tests/pipelines/test_team_2T6S.py | 12 +-- tests/pipelines/test_team_C88N.py | 18 ++-- tests/pipelines/test_team_J7F9.py | 129 ++++++++++++------------ tests/pipelines/test_team_Q6O0.py | 12 +-- tests/pipelines/test_team_T54A.py | 37 +++---- tests/test_conftest.py | 54 +++++++--- tests/test_runner.py | 13 ++- 23 files changed, 214 insertions(+), 211 deletions(-) delete mode 100644 narps_open/pipelines/__main__.py diff --git a/.github/workflows/code_quality.yml b/.github/workflows/code_quality.yml index a9248671..555396c3 100644 --- a/.github/workflows/code_quality.yml +++ b/.github/workflows/code_quality.yml @@ -46,7 +46,7 @@ jobs: - name: Analyse the code with pylint run: | - pylint --fail-under 8 --ignore-paths narps_open/pipelines/ narps_open > pylint_report_narps_open.txt + pylint --fail-under 8 narps_open > pylint_report_narps_open.txt pylint --fail-under 8 tests > pylint_report_tests.txt - name: Archive pylint results diff --git a/narps_open/pipelines/__main__.py b/narps_open/pipelines/__main__.py deleted file mode 100644 index 60fd5c76..00000000 --- a/narps_open/pipelines/__main__.py 
+++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/python -# coding: utf-8 - -""" Provide a command-line interface for the package narps_open.pipelines """ - -from argparse import ArgumentParser - -from narps_open.pipelines import get_implemented_pipelines - -def main(): - """ Entry-point for the command line tool narps_open_pipeline """ - - # Parse arguments - parser = ArgumentParser(description='Get description of a NARPS pipeline.') - parser.add_argument('-v', '--verbose', action='store_true', - help='verbose mode') - arguments = parser.parse_args() - - # Print header - print('NARPS Open Pipelines') - - # Print general information about NARS Open Pipelines - print('A codebase reproducing the 70 pipelines of the NARPS study (Botvinik-Nezer et al., 2020) shared as an open resource for the community.') - - # Print pipelines - implemented_pipelines = get_implemented_pipelines() - print(f'There are currently {len(implemented_pipelines)} implemented pipelines: {implemented_pipelines}') - -if __name__ == '__main__': - main() diff --git a/narps_open/pipelines/team_0I4U_debug.py b/narps_open/pipelines/team_0I4U_debug.py index bdd1b3e0..98b51837 100755 --- a/narps_open/pipelines/team_0I4U_debug.py +++ b/narps_open/pipelines/team_0I4U_debug.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.spm import (Coregister, Smooth, OneSampleTTestDesign, EstimateModel, EstimateContrast, Level1Design, TwoSampleTTestDesign, RealignUnwarp, FieldMap, NewSegment, Normalize12, Reslice) diff --git a/narps_open/pipelines/team_1KB2_debug.py b/narps_open/pipelines/team_1KB2_debug.py index d56b939b..d7699667 100755 --- a/narps_open/pipelines/team_1KB2_debug.py +++ b/narps_open/pipelines/team_1KB2_debug.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.fsl import (BET, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN, Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel, L2Model, Merge, FLAMEO, ContrastMgr,Cluster, FILMGLS, Randomise, MultipleRegressDesign) diff 
--git a/narps_open/pipelines/team_43FJ_debug.py b/narps_open/pipelines/team_43FJ_debug.py index 08be5ec9..011e4e63 100755 --- a/narps_open/pipelines/team_43FJ_debug.py +++ b/narps_open/pipelines/team_43FJ_debug.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.fsl import (BET, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN, MotionOutliers, Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel, L2Model, Merge, FLAMEO, ContrastMgr, FILMGLS, Randomise, MultipleRegressDesign) diff --git a/narps_open/pipelines/team_4TQ6_wip.py b/narps_open/pipelines/team_4TQ6_wip.py index 623df025..28e9aada 100755 --- a/narps_open/pipelines/team_4TQ6_wip.py +++ b/narps_open/pipelines/team_4TQ6_wip.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.fsl import (BET, ICA_AROMA, FAST, MCFLIRT, FLIRT, FNIRT, ApplyWarp, SUSAN, Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel, L2Model, Merge, FLAMEO, ContrastMgr,Cluster, FILMGLS, Randomise, MultipleRegressDesign) diff --git a/narps_open/pipelines/team_98BT.py b/narps_open/pipelines/team_98BT.py index 1c68e0ce..653593fc 100755 --- a/narps_open/pipelines/team_98BT.py +++ b/narps_open/pipelines/team_98BT.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.spm import (Coregister, Smooth, OneSampleTTestDesign, EstimateModel, EstimateContrast, Level1Design, TwoSampleTTestDesign, RealignUnwarp, NewSegment, SliceTiming, DARTEL, DARTELNorm2MNI, FieldMap) diff --git a/narps_open/pipelines/team_Q6O0.py b/narps_open/pipelines/team_Q6O0.py index 69cacc3c..176b84cf 100755 --- a/narps_open/pipelines/team_Q6O0.py +++ b/narps_open/pipelines/team_Q6O0.py @@ -468,11 +468,14 @@ def get_subject_level_outputs(self): ) # Formatting templates and returning it as a list of files - output_files = [contrast_map_template.format(**dict(zip(parameters.keys(), parameter_values)))\ + output_files = [ + contrast_map_template.format(**dict(zip(parameters.keys(), parameter_values)))\ for parameter_values in 
parameter_sets] - output_files += [mat_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\ + output_files += [ + mat_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\ for parameter_values in parameter_sets] - output_files += [spmt_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\ + output_files += [ + spmt_file_template.format(**dict(zip(parameters.keys(), parameter_values)))\ for parameter_values in parameter_sets] return output_files @@ -698,23 +701,41 @@ def get_hypotheses_outputs(self): """ Return all hypotheses output file names. """ nb_sub = len(self.subject_list) files = [ - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_gain', 'spmT_0001.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', 'spmT_0002.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', 'spmT_0002.nii'), - join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), - 
join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_equalRange_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii'), - join(f'l2_analysis_groupComp_nsub_{nb_sub}', '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), - join(f'l2_analysis_groupComp_nsub_{nb_sub}', '_model_type_loss', 'spmT_0001.nii') + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_gain', 'spmT_0001.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_gain', 'spmT_0001.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_gain', 'spmT_0001.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_gain', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_gain', 'spmT_0001.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_loss', 'spmT_0002.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_loss', '_threshold1', 'spmT_0002_thr.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_loss', 'spmT_0002.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_equalIndifference_nsub_{nb_sub}', + '_model_type_loss', 'spmT_0001.nii'), + join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), + 
join(f'l2_analysis_equalRange_nsub_{nb_sub}', + '_model_type_loss', 'spmT_0001.nii'), + join(f'l2_analysis_groupComp_nsub_{nb_sub}', + '_model_type_loss', '_threshold0', 'spmT_0001_thr.nii'), + join(f'l2_analysis_groupComp_nsub_{nb_sub}', + '_model_type_loss', 'spmT_0001.nii') ] return [join(self.directories.output_dir, f) for f in files] diff --git a/narps_open/pipelines/team_R9K3_wip.py b/narps_open/pipelines/team_R9K3_wip.py index 8294ec2b..65613e70 100755 --- a/narps_open/pipelines/team_R9K3_wip.py +++ b/narps_open/pipelines/team_R9K3_wip.py @@ -1,3 +1,4 @@ +# pylint: skip-file # THIS IS A TEMPLATE THAT CAN BE USE TO REPRODUCE A NEW PIPELINE import os diff --git a/narps_open/pipelines/team_V55J.py b/narps_open/pipelines/team_V55J.py index 9f930608..c451c77d 100755 --- a/narps_open/pipelines/team_V55J.py +++ b/narps_open/pipelines/team_V55J.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.spm import (Coregister, Smooth, OneSampleTTestDesign, EstimateModel, EstimateContrast, Level1Design, TwoSampleTTestDesign, RealignUnwarp, Normalize12, NewSegment, FieldMap) diff --git a/narps_open/pipelines/team_X19V.py b/narps_open/pipelines/team_X19V.py index 3e0108ef..b7d1875a 100755 --- a/narps_open/pipelines/team_X19V.py +++ b/narps_open/pipelines/team_X19V.py @@ -1,3 +1,4 @@ +# pylint: skip-file from nipype.interfaces.fsl import (Info, ImageMaths, IsotropicSmooth, Threshold, Level1Design, FEATModel, L2Model, Merge, FLAMEO, ContrastMgr, FILMGLS, MultipleRegressDesign, Cluster, BET, SmoothEstimate) from nipype.algorithms.modelgen import SpecifyModel diff --git a/tests/conftest.py b/tests/conftest.py index d46df024..73dd095e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from os.path import join, isfile from shutil import rmtree +from numpy import isclose from pytest import helpers from pathvalidate import is_valid_filepath @@ -23,6 +24,15 @@ # Init configuration, to ensure it is in testing mode Configuration(config_type='testing') 
+@helpers.register +def compare_float_2d_arrays(array_1, array_2): + """ Assert array_1 and array_2 are close enough """ + + assert len(array_1) == len(array_2) + for reference_array, test_array in zip(array_1, array_2): + assert len(reference_array) == len(test_array) + assert isclose(reference_array, test_array).all() + @helpers.register def test_pipeline_outputs(pipeline: Pipeline, number_of_outputs: list): """ Test the outputs of a Pipeline. diff --git a/tests/core/test_common.py b/tests/core/test_common.py index 3bfb69f8..1c362023 100644 --- a/tests/core/test_common.py +++ b/tests/core/test_common.py @@ -59,7 +59,7 @@ def test_remove_file(remove_test_dir): # Check file is removed assert not exists(test_file_path) - + @staticmethod @mark.unit_test def test_remove_directory(remove_test_dir): @@ -396,7 +396,7 @@ def test_node_list_to_file_1(): # Check file was created assert exists(out_file) - + # Check file was created with open(out_file, 'r', encoding = 'utf-8') as file: for list_element, file_element in zip(out_list, file.read().split('\n')): @@ -429,9 +429,8 @@ def test_node_list_to_file_2(): # Check file was created assert exists(out_file) - + # Check file was created with open(out_file, 'r', encoding = 'utf-8') as file: for list_element, file_element in zip(out_list, file.read().split('\n')): assert list_element == file_element - diff --git a/tests/data/test_results.py b/tests/data/test_results.py index 2465eb7d..d1a7a93c 100644 --- a/tests/data/test_results.py +++ b/tests/data/test_results.py @@ -12,8 +12,7 @@ """ from os.path import isdir, join -from shutil import rmtree, move, copytree -from time import sleep +from shutil import rmtree, copytree from checksumdir import dirhash from pytest import mark @@ -74,7 +73,8 @@ def fake_get_uid(_): # Mock the results path results_directory = Configuration()['directories']['narps_results'] - Configuration()['directories']['narps_results'] = Configuration()['directories']['test_runs'] + Configuration()[ + 
'directories']['narps_results'] = Configuration()['directories']['test_runs'] # Init & download the collection collection = ResultsCollection('2T6S') diff --git a/tests/data/test_task.py b/tests/data/test_task.py index 8b6860dd..229a1ddc 100644 --- a/tests/data/test_task.py +++ b/tests/data/test_task.py @@ -15,7 +15,7 @@ from pytest import mark, fixture from narps_open.utils.configuration import Configuration -import narps_open.data.task as task +from narps_open.data import task @fixture(scope='function', autouse=True) def mock_task_data(mocker): @@ -50,7 +50,7 @@ def test_singleton(): @mark.unit_test def test_derived(): """ Test the derived values of a TaskInformation object """ - + task_info = task.TaskInformation() assert task_info['NumberOfSlices'] == 6 assert task_info['AcquisitionTime'] == 1 / 6 diff --git a/tests/pipelines/test_team_08MQ.py b/tests/pipelines/test_team_08MQ.py index b962557f..b9baa35e 100644 --- a/tests/pipelines/test_team_08MQ.py +++ b/tests/pipelines/test_team_08MQ.py @@ -57,19 +57,11 @@ def test_outputs(): pipeline = PipelineTeam08MQ() # 1 - 1 subject outputs pipeline.subject_list = ['001'] - assert len(pipeline.get_preprocessing_outputs()) == 4*4 - assert len(pipeline.get_run_level_outputs()) == 8+4*3*4 - assert len(pipeline.get_subject_level_outputs()) == 4*3 - assert len(pipeline.get_group_level_outputs()) == 0 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [4*4, 8+4*3*4, 4*3, 0, 18]) # 2 - 4 subjects outputs pipeline.subject_list = ['001', '002', '003', '004'] - assert len(pipeline.get_preprocessing_outputs()) == 4*4*4 - assert len(pipeline.get_run_level_outputs()) == (8+4*3*4)*4 - assert len(pipeline.get_subject_level_outputs()) == 4*3*4 - assert len(pipeline.get_group_level_outputs()) == 0 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [4*4*4, (8+4*3*4)*4, 4*3*4, 0, 18]) @staticmethod @mark.unit_test @@ -130,10 +122,10 @@ def
test_two_sample_t_test_regressors(): ['002', '004'], # equalIndifference group ['001', '002', '003', '004'] # all subjects ) - assert regressors == dict( - equalRange = [1, 0, 1, 0], - equalIndifference = [0, 1, 0, 1] - ) + assert regressors == { + 'equalRange' : [1, 0, 1, 0], + 'equalIndifference' : [0, 1, 0, 1] + } assert groups == [1, 2, 1, 2] @staticmethod diff --git a/tests/pipelines/test_team_2T6S.py b/tests/pipelines/test_team_2T6S.py index 43c41773..fbe5a9ad 100644 --- a/tests/pipelines/test_team_2T6S.py +++ b/tests/pipelines/test_team_2T6S.py @@ -47,19 +47,11 @@ def test_outputs(): pipeline = PipelineTeam2T6S() # 1 - 1 subject outputs pipeline.subject_list = ['001'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 7 - assert len(pipeline.get_group_level_outputs()) == 63 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 7, 63, 18]) # 2 - 4 subjects outputs pipeline.subject_list = ['001', '002', '003', '004'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 28 - assert len(pipeline.get_group_level_outputs()) == 63 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 28, 63, 18]) @staticmethod @mark.pipeline_test diff --git a/tests/pipelines/test_team_C88N.py b/tests/pipelines/test_team_C88N.py index e6bb70fd..7fbdbe3e 100644 --- a/tests/pipelines/test_team_C88N.py +++ b/tests/pipelines/test_team_C88N.py @@ -52,19 +52,11 @@ def test_outputs(): pipeline = PipelineTeamC88N() # 1 - 1 subject outputs pipeline.subject_list = ['001'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 8 - assert len(pipeline.get_group_level_outputs()) 
== 53 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 8, 53, 18]) # 2 - 4 subjects outputs pipeline.subject_list = ['001', '002', '003', '004'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 32 - assert len(pipeline.get_group_level_outputs()) == 53 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 32, 53, 18]) @staticmethod @mark.unit_test @@ -72,7 +64,8 @@ def test_subject_information(): """ Test the get_subject_information method """ # Test with 'gain' - test_event_file = join(Configuration()['directories']['test_data'], 'pipelines', 'events.tsv') + test_event_file = join( + Configuration()['directories']['test_data'], 'pipelines', 'events.tsv') information = PipelineTeamC88N.get_subject_information( [test_event_file, test_event_file], 'gain' @@ -103,7 +96,8 @@ def test_subject_information(): assert isclose(reference_array, test_array).all() # Test with 'loss' - test_event_file = join(Configuration()['directories']['test_data'], 'pipelines', 'events.tsv') + test_event_file = join( + Configuration()['directories']['test_data'], 'pipelines', 'events.tsv') information = PipelineTeamC88N.get_subject_information( [test_event_file, test_event_file], 'loss' diff --git a/tests/pipelines/test_team_J7F9.py b/tests/pipelines/test_team_J7F9.py index 706c0b56..da37e990 100644 --- a/tests/pipelines/test_team_J7F9.py +++ b/tests/pipelines/test_team_J7F9.py @@ -16,7 +16,6 @@ from filecmp import cmp from pytest import helpers, mark, fixture -from numpy import isclose from nipype import Workflow from nipype.interfaces.base import Bunch @@ -34,14 +33,6 @@ def remove_test_dir(): yield # test runs here rmtree(TEMPORARY_DIR, ignore_errors = True) -def compare_float_2d_arrays(array_1, array_2): - """ Assert array_1 and array_2 are close enough """ - - assert 
len(array_1) == len(array_2) - for reference_array, test_array in zip(array_1, array_2): - assert len(reference_array) == len(test_array) - assert isclose(reference_array, test_array).all() - class TestPipelinesTeamJ7F9: """ A class that contains all the unit tests for the PipelineTeamJ7F9 class.""" @@ -73,19 +64,11 @@ def test_outputs(): pipeline = PipelineTeamJ7F9() # 1 - 1 subject outputs pipeline.subject_list = ['001'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 7 - assert len(pipeline.get_group_level_outputs()) == 63 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 7, 63, 18]) # 2 - 4 subjects outputs pipeline.subject_list = ['001', '002', '003', '004'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 28 - assert len(pipeline.get_group_level_outputs()) == 63 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 28, 63, 18]) @staticmethod @mark.unit_test @@ -106,80 +89,102 @@ def test_subject_information(): bunch = info_missed[0] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'missed'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [ + [4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - compare_float_2d_arrays(bunch.pmod[0].param, [[-8.4, 11.6, 15.6, 
-12.4, -6.4], [-8.2, -0.2, 4.8, 0.8, 2.8]]) - assert bunch.regressor_names == None - assert bunch.regressors == None + helpers.compare_float_2d_arrays(bunch.pmod[0].param, [ + [-8.4, 11.6, 15.6, -12.4, -6.4], [-8.2, -0.2, 4.8, 0.8, 2.8]]) + assert bunch.regressor_names is None + assert bunch.regressors is None bunch = info_missed[1] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'missed'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [ + [4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - compare_float_2d_arrays(bunch.pmod[0].param, [[-8.4, 11.6, 15.6, -12.4, -6.4], [-8.2, -0.2, 4.8, 0.8, 2.8]]) - assert bunch.regressor_names == None - assert bunch.regressors == None + helpers.compare_float_2d_arrays(bunch.pmod[0].param, [ + [-8.4, 11.6, 15.6, -12.4, -6.4], [-8.2, -0.2, 4.8, 0.8, 2.8]]) + assert bunch.regressor_names is None + assert bunch.regressors is None bunch = info_ok[0] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435]]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0]]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435]]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0]]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - 
compare_float_2d_arrays(bunch.pmod[0].param, [[-4.5, 15.5, -8.5, -2.5], [-7.0, 1.0, 2.0, 4.0]]) - assert bunch.regressor_names == None - assert bunch.regressors == None + helpers.compare_float_2d_arrays(bunch.pmod[0].param, [ + [-4.5, 15.5, -8.5, -2.5], [-7.0, 1.0, 2.0, 4.0]]) + assert bunch.regressor_names is None + assert bunch.regressors is None bunch = info_ok[1] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435]]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0]]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435]]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0]]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - compare_float_2d_arrays(bunch.pmod[0].param, [[-4.5, 15.5, -8.5, -2.5], [-7.0, 1.0, 2.0, 4.0]]) - assert bunch.regressor_names == None - assert bunch.regressors == None - + helpers.compare_float_2d_arrays(bunch.pmod[0].param, [ + [-4.5, 15.5, -8.5, -2.5], [-7.0, 1.0, 2.0, 4.0]]) + assert bunch.regressor_names is None + assert bunch.regressors is None + bunch = info_half[0] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'missed'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435], []]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0], []]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 27.535, 36.435], []]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0], []]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - compare_float_2d_arrays(bunch.pmod[0].param, [[-6.666666666666668, 
13.333333333333332, -10.666666666666668, -4.666666666666668], [-7.666666666666666, 0.3333333333333339, 1.333333333333334, 3.333333333333334]]) - assert bunch.regressor_names == None - assert bunch.regressors == None + helpers.compare_float_2d_arrays( + bunch.pmod[0].param, + [[-6.666666666666668, 13.333333333333332, -10.666666666666668, -4.666666666666668], + [-7.666666666666666, 0.3333333333333339, 1.333333333333334, 3.333333333333334]]) + assert bunch.regressor_names is None + assert bunch.regressors is None bunch = info_half[1] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'missed'] - compare_float_2d_arrays(bunch.onsets, [[4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) - compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) - assert bunch.amplitudes == None - assert bunch.tmod == None + helpers.compare_float_2d_arrays(bunch.onsets, [ + [4.071, 11.834, 19.535, 27.535, 36.435], [19.535]]) + helpers.compare_float_2d_arrays(bunch.durations, [[0.0, 0.0, 0.0, 0.0, 0.0], [0.0]]) + assert bunch.amplitudes is None + assert bunch.tmod is None assert bunch.pmod[0].name == ['gain', 'loss'] assert bunch.pmod[0].poly == [1, 1] - compare_float_2d_arrays(bunch.pmod[0].param, [[-6.666666666666668, 13.333333333333332, 17.333333333333332, -10.666666666666668, -4.666666666666668], [-7.666666666666666, 0.3333333333333339, 5.333333333333334, 1.333333333333334, 3.333333333333334]]) - assert bunch.regressor_names == None - assert bunch.regressors == None + helpers.compare_float_2d_arrays( + bunch.pmod[0].param, + [[ + -6.666666666666668, + 13.333333333333332, + 17.333333333333332, + -10.666666666666668, + -4.666666666666668], + [-7.666666666666666, + 0.3333333333333339, + 5.333333333333334, + 1.333333333333334, + 3.333333333333334]]) + assert bunch.regressor_names is None + assert bunch.regressors is None @staticmethod @mark.unit_test diff --git a/tests/pipelines/test_team_Q6O0.py b/tests/pipelines/test_team_Q6O0.py index 
639f609e..6fc7513c 100644 --- a/tests/pipelines/test_team_Q6O0.py +++ b/tests/pipelines/test_team_Q6O0.py @@ -47,19 +47,11 @@ def test_outputs(): pipeline = PipelineTeamQ6O0() # 1 - 1 subject outputs pipeline.subject_list = ['001'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 6 - assert len(pipeline.get_group_level_outputs()) == 37 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 6, 37, 18]) # 2 - 4 subjects outputs pipeline.subject_list = ['001', '002', '003', '004'] - assert len(pipeline.get_preprocessing_outputs()) == 0 - assert len(pipeline.get_run_level_outputs()) == 0 - assert len(pipeline.get_subject_level_outputs()) == 24 - assert len(pipeline.get_group_level_outputs()) == 37 - assert len(pipeline.get_hypotheses_outputs()) == 18 + helpers.test_pipeline_outputs(pipeline, [0, 0, 24, 37, 18]) @staticmethod @mark.pipeline_test diff --git a/tests/pipelines/test_team_T54A.py b/tests/pipelines/test_team_T54A.py index 05ecc003..8646760a 100644 --- a/tests/pipelines/test_team_T54A.py +++ b/tests/pipelines/test_team_T54A.py @@ -15,7 +15,6 @@ from shutil import rmtree from pytest import helpers, mark, fixture -from numpy import isclose from nipype import Workflow from nipype.interfaces.base import Bunch @@ -33,14 +32,6 @@ def remove_test_dir(): yield # test runs here rmtree(TEMPORARY_DIR, ignore_errors = True) -def compare_float_2d_arrays(array_1, array_2): - """ Assert array_1 and array_2 are close enough """ - - assert len(array_1) == len(array_2) - for reference_array, test_array in zip(array_1, array_2): - assert len(reference_array) == len(test_array) - assert isclose(reference_array, test_array).all() - class TestPipelinesTeamT54A: """ A class that contains all the unit tests for the PipelineTeamT54A class.""" @@ -101,7 +92,7 @@ def test_subject_information(): bunch = info_missed[0] assert 
isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'gain', 'loss', 'difficulty', 'response', 'missed'] - compare_float_2d_arrays(bunch.onsets, [ + helpers.compare_float_2d_arrays(bunch.onsets, [ [4.071, 11.834, 27.535, 36.435], [4.071, 11.834, 27.535, 36.435], [4.071, 11.834, 27.535, 36.435], @@ -109,7 +100,7 @@ def test_subject_information(): [6.459, 14.123, 29.615, 38.723], [19.535] ]) - compare_float_2d_arrays(bunch.durations, [ + helpers.compare_float_2d_arrays(bunch.durations, [ [2.388, 2.289, 2.08, 2.288], [2.388, 2.289, 2.08, 2.288], [2.388, 2.289, 2.08, 2.288], @@ -117,7 +108,7 @@ def test_subject_information(): [0.0, 0.0, 0.0, 0.0], [0.0] ]) - compare_float_2d_arrays(bunch.amplitudes, [ + helpers.compare_float_2d_arrays(bunch.amplitudes, [ [1.0, 1.0, 1.0, 1.0], [14.0, 34.0, 10.0, 16.0], [6.0, 14.0, 15.0, 17.0], @@ -125,35 +116,35 @@ def test_subject_information(): [1.0, 1.0, 1.0, 1.0], [1.0] ]) - assert bunch.regressor_names == None - assert bunch.regressors == None + assert bunch.regressor_names is None + assert bunch.regressors is None bunch = info_ok[0] assert isinstance(bunch, Bunch) assert bunch.conditions == ['trial', 'gain', 'loss', 'difficulty', 'response'] - compare_float_2d_arrays(bunch.onsets, [ + helpers.compare_float_2d_arrays(bunch.onsets, [ [4.071, 11.834, 27.535, 36.435], [4.071, 11.834, 27.535, 36.435], [4.071, 11.834, 27.535, 36.435], [4.071, 11.834, 27.535, 36.435], [6.459, 14.123, 29.615, 38.723] ]) - compare_float_2d_arrays(bunch.durations, [ + helpers.compare_float_2d_arrays(bunch.durations, [ [2.388, 2.289, 2.08, 2.288], [2.388, 2.289, 2.08, 2.288], [2.388, 2.289, 2.08, 2.288], [2.388, 2.289, 2.08, 2.288], [0.0, 0.0, 0.0, 0.0] ]) - compare_float_2d_arrays(bunch.amplitudes, [ + helpers.compare_float_2d_arrays(bunch.amplitudes, [ [1.0, 1.0, 1.0, 1.0], [14.0, 34.0, 10.0, 16.0], [6.0, 14.0, 15.0, 17.0], [1.0, 3.0, 10.0, 9.0], [1.0, 1.0, 1.0, 1.0] ]) - assert bunch.regressor_names == None - assert bunch.regressors == None + 
assert bunch.regressor_names is None + assert bunch.regressors is None @staticmethod @mark.unit_test @@ -195,10 +186,10 @@ def test_two_sample_t_test_regressors(): ['002', '004'], # equalIndifference group ['001', '002', '003', '004'] # all subjects ) - assert regressors == dict( - equalRange = [1, 0, 1, 0], - equalIndifference = [0, 1, 0, 1] - ) + assert regressors == { + 'equalRange': [1, 0, 1, 0], + 'equalIndifference': [0, 1, 0, 1] + } assert groups == [1, 2, 1, 2] @staticmethod diff --git a/tests/test_conftest.py b/tests/test_conftest.py index 658f5d89..4ea92fb5 100644 --- a/tests/test_conftest.py +++ b/tests/test_conftest.py @@ -25,17 +25,16 @@ from narps_open.utils.configuration import Configuration from narps_open.runner import PipelineRunner from narps_open.pipelines import Pipeline -from narps_open.data.results import ResultsCollection TEST_DIR = abspath(join(Configuration()['directories']['test_runs'], 'test_conftest')) @fixture def set_test_directory(scope = 'function'): + """ A fixture to remove temporary directory created by tests """ + rmtree(TEST_DIR, ignore_errors = True) makedirs(TEST_DIR, exist_ok = True) - yield - # Comment this line for debugging rmtree(TEST_DIR, ignore_errors = True) @@ -234,11 +233,34 @@ def get_file_urls(self): def download(self): """ Download the collection, file by file. 
""" - pass class TestConftest: """ A class that contains all the unit tests for the conftest module.""" + @staticmethod + @mark.unit_test + def test_compare_float_2d_arrays(): + """ Test the compare_float_2d_arrays helper """ + + array_1 = [[5.0, 0.0], [1.0, 2.0]] + array_2 = [[5.0, 0.0], [1.0]] + with raises(AssertionError): + helpers.compare_float_2d_arrays(array_1, array_2) + + array_1 = [[6.0, 0.0], [1.0]] + array_2 = [[6.0, 0.0], [1.0, 2.0]] + with raises(AssertionError): + helpers.compare_float_2d_arrays(array_1, array_2) + + array_1 = [[7.10001, 0.0], [1.0, 2.0]] + array_2 = [[7.10001, 0.0], [1.0, 2.00003]] + with raises(AssertionError): + helpers.compare_float_2d_arrays(array_1, array_2) + + array_1 = [[10.0000200, 15.10], [1.0, 2.0]] + array_2 = [[10.00002, 15.10000], [1.0, 2.000003]] + helpers.compare_float_2d_arrays(array_1, array_2) + @staticmethod @mark.unit_test def test_test_outputs(set_test_directory): @@ -247,7 +269,7 @@ def test_test_outputs(set_test_directory): # Test pipeline pipeline = MockupPipeline() pipeline.subject_list = ['001', '002'] - + # Wrong length for nb_of_outputs with raises(AssertionError): helpers.test_pipeline_outputs(pipeline, [1,2,3]) @@ -347,21 +369,21 @@ def test_test_pipeline_execution(mocker, set_test_directory): with open(join(TEST_DIR, 'test_conftest.txt'), 'r', encoding = 'utf-8') as file: assert file.readline() == '0\n' # First exec of preprocessing creates an exception (execution counter == 1) - assert file.readline() == f'TestConftest_preprocessing_workflow 4 1\n' + assert file.readline() == 'TestConftest_preprocessing_workflow 4 1\n' # Relaunching the workflow # Preprocessing files won't be created(execution counter == 2) - assert file.readline() == f'TestConftest_preprocessing_workflow 4 2\n' - assert file.readline() == f'TestConftest_run_level_workflow 4 3\n' - assert file.readline() == f'TestConftest_subject_level_workflow 4 4\n' + assert file.readline() == 'TestConftest_preprocessing_workflow 4 2\n' + assert 
file.readline() == 'TestConftest_run_level_workflow 4 3\n'
+        assert file.readline() == 'TestConftest_subject_level_workflow 4 4\n'
 
         # Relaunching the workflow
         # Everything's fine
-        assert file.readline() == f'TestConftest_preprocessing_workflow 4 5\n'
-        assert file.readline() == f'TestConftest_run_level_workflow 4 6\n'
-        assert file.readline() == f'TestConftest_subject_level_workflow 4 7\n'
-        assert file.readline() == f'TestConftest_preprocessing_workflow 3 8\n'
-        assert file.readline() == f'TestConftest_run_level_workflow 3 9\n'
-        assert file.readline() == f'TestConftest_subject_level_workflow 3 10\n'
-        assert file.readline() == f'TestConftest_group_level_workflow 7 11'
+        assert file.readline() == 'TestConftest_preprocessing_workflow 4 5\n'
+        assert file.readline() == 'TestConftest_run_level_workflow 4 6\n'
+        assert file.readline() == 'TestConftest_subject_level_workflow 4 7\n'
+        assert file.readline() == 'TestConftest_preprocessing_workflow 3 8\n'
+        assert file.readline() == 'TestConftest_run_level_workflow 3 9\n'
+        assert file.readline() == 'TestConftest_subject_level_workflow 3 10\n'
+        assert file.readline() == 'TestConftest_group_level_workflow 7 11'
 
     @staticmethod
     @mark.unit_test
diff --git a/tests/test_runner.py b/tests/test_runner.py
index bb2a62c3..3a3bdd8a 100644
--- a/tests/test_runner.py
+++ b/tests/test_runner.py
@@ -138,22 +138,24 @@ def get_hypotheses_outputs(self):
 class MockupWrongPipeline(Pipeline):
     """ A simple Pipeline class for test purposes """
 
-    def __init__(self):
-        super().__init__()
-
     def get_preprocessing(self):
+        """ Return a preprocessing workflow with wrong type """
         return 'Wrong_workflow_type'
 
     def get_run_level_analysis(self):
+        """ Return a run level analysis workflow """
        return None
 
     def get_subject_level_analysis(self):
+        """ Return a subject level analysis workflow """
        return None
 
     def get_group_level_analysis(self):
+        """ Return a group level analysis workflow """
        return None
 
     def get_hypotheses_outputs(self):
+        """ Return hypotheses """
return None
 
 class MockupWrongPipeline2(Pipeline):
@@ -163,18 +165,23 @@ def __init__(self):
         super().__init__()
 
     def get_preprocessing(self):
+        """ Return a preprocessing workflow list with wrong types inside """
         return ['Wrong_workflow_type', 'Wrong_workflow_type']
 
     def get_run_level_analysis(self):
+        """ Return a run level analysis workflow """
         return None
 
     def get_subject_level_analysis(self):
+        """ Return a subject level analysis workflow """
         return None
 
     def get_group_level_analysis(self):
+        """ Return a group level analysis workflow """
         return None
 
     def get_hypotheses_outputs(self):
+        """ Return hypotheses """
         return None
 
 class TestPipelineRunner: