diff --git a/handler/ezBIDS_core/ezBIDS_core.py b/handler/ezBIDS_core/ezBIDS_core.py
index dbe29c3e..04dd40e9 100755
--- a/handler/ezBIDS_core/ezBIDS_core.py
+++ b/handler/ezBIDS_core/ezBIDS_core.py
@@ -504,9 +504,11 @@ def modify_uploaded_dataset_list(uploaded_img_list):
         elif any(x.endswith(tuple(['.v', '.v.gz'])) for x in grouped_files):
             grouped_files = [x for x in grouped_files if not x.endswith(tuple(['.v', '.v.gz']))]
 
-        # If imaging file comes with additional data (JSON, bval/bvec) add them to list for processing
-        if len(grouped_files) > 1:
-            uploaded_files_list.append(grouped_files)
+        # Don't want this section if we're allowing only NIfTI files to be uploaded (group length will only be 1).
+        # # If imaging file comes with additional data (JSON, bval/bvec) add them to list for processing
+        # if len(grouped_files) > 1:
+        #     uploaded_files_list.append(grouped_files)
+        uploaded_files_list.append(grouped_files)
 
     # Flatten uploaded_files_list
     uploaded_files_list = natsorted([file for sublist in uploaded_files_list for file in sublist])
@@ -913,7 +915,9 @@ def generate_dataset_list(uploaded_files_list, exclude_data):
     for img_file in img_list:
 
         # Find file extension
         if img_file.endswith('.nii.gz'):
-            ext = 'nii.gz'
+            ext = '.nii.gz'
+        elif img_file.endswith('.v.gz'):
+            ext = '.v.gz'
         else:
             ext = Path(img_file).suffix
@@ -934,12 +938,12 @@ def generate_dataset_list(uploaded_files_list, exclude_data):
                 'ConversionSoftware': 'ezBIDS',
                 'ConversionSoftwareVersion': '1.0.0'
             }
-            if not os.path.exists(json_path):
-                with open(json_path, "w") as fp:
-                    json.dump(json_data, fp, indent=3)
-                corresponding_files_list = corresponding_files_list + [json_path]
-            json_data = open(json_path)
-            json_data = json.load(json_data, strict=False)
+            # if not os.path.exists(json_path):
+            #     with open(json_path, "w") as fp:
+            #         json.dump(json_data, fp, indent=3)
+            #     corresponding_files_list = corresponding_files_list + [json_path]
+            # json_data = open(json_path)
+            # json_data = json.load(json_data, strict=False)
 
         # Find ImageModality
         if "Modality" in json_data:
@@ -1085,6 +1089,14 @@ def generate_dataset_list(uploaded_files_list, exclude_data):
         image = nib.load(img_file)
         ndim = image.ndim
 
+        # If RepetitionTime (TR) is not in the JSON metadata, pull it from the NIfTI header and add it
+        if repetition_time == 0:
+            if len(image.header.get_zooms()) == 4:
+                repetition_time = image.header.get_zooms()[-1]
+                if not isinstance(repetition_time, int):
+                    repetition_time = round(float(repetition_time), 2)
+                json_data['RepetitionTime'] = repetition_time
+
         # Find how many volumes are in nifti file
         try:
             volume_count = image.shape[3]
@@ -1199,6 +1211,14 @@ def generate_dataset_list(uploaded_files_list, exclude_data):
         subject = re.sub("[^A-Za-z0-9]+", "", subject)
         session = re.sub("[^A-Za-z0-9]+", "", session)
 
+        # If uploaded data didn't contain JSON metadata, add it here
+        if not os.path.exists(json_path):
+            with open(json_path, "w") as fp:
+                json.dump(json_data, fp, indent=3)
+            corresponding_files_list = corresponding_files_list + [json_path]
+        json_data = open(json_path)
+        json_data = json.load(json_data, strict=False)
+
         """
         Organize all from individual SeriesNumber in dictionary
         """
@@ -2889,30 +2909,28 @@ def modify_objects_info(dataset_list):
             # Make items list (part of objects list)
            items = []
             for item in protocol["paths"]:
-                if ".bval" in item:
+                if item.endswith(".bval"):
                     items.append({"path": item,
                                   "name": "bval"})
-                elif ".bvec" in item:
+                elif item.endswith(".bvec"):
                     items.append({"path": item,
                                   "name": "bvec"})
-                elif ".json" in item:
+                elif item.endswith(".json"):
                     items.append({"path": item,
                                   "name": "json",
                                   "sidecar": protocol["sidecar"]})
                     if item.endswith("blood.json"):
                         path = item.split(".json")[0] + ".tsv"
                         headers = [x for x in pd.read_csv(path, sep="\t").columns]
-                        print(path)
-                        print(headers)
                         items.append({"path": path,
                                       "name": "tsv",
                                       "headers": headers})
-                elif ".nii.gz" in item:
+                elif item.endswith(".nii.gz"):
                     items.append({"path": item,
                                   "name": "nii.gz",
                                   "pngPaths": [],
                                   "headers": protocol["headers"]})
-                elif any(x in item for x in MEG_extensions):
+                elif item.endswith(tuple(MEG_extensions)):
                     name = Path(item).suffix
                     items.append({"path": item,
                                   "name": name,
diff --git a/handler/find_img_data.py b/handler/find_img_data.py
index a047f6a5..5f54cd55 100755
--- a/handler/find_img_data.py
+++ b/handler/find_img_data.py
@@ -64,8 +64,8 @@ def find_img_data(dir):
 
     # PET
+    # TODO - These two lines take several seconds to complete, maybe can be sped up?
     pet_folders = [str(folder) for folder in is_pet.pet_folder(Path(root).resolve())]
-
     pet_folders = [os.path.relpath(x, root) for x in pet_folders if x != '']
     pet_folders = [os.path.join('.', x) for x in pet_folders]
diff --git a/handler/preprocess.sh b/handler/preprocess.sh
index 6d24eca1..a881a882 100755
--- a/handler/preprocess.sh
+++ b/handler/preprocess.sh
@@ -19,12 +19,12 @@ fi
 root=$1
 echo "running preprocess.sh on root folder ${root}"
 
-echo "running expand.sh"
-./expand.sh $root
-
 echo "replace file paths that contain spaces or [@^()] characters"
 find $root -depth -name "*[ @^()]*" | parallel --linebuffer -j 6 -execdir rename "'s/[ @^()]/_/g'" "{}" \;
 
+echo "running expand.sh"
+./expand.sh $root
+
 # check to see if uploaded data is a BIDS-compliant dataset
 echo "Running bids-validator to check BIDS compliance"
@@ -92,9 +92,13 @@ if [ $bids_compliant == "true" ]; then
 
     touch $root/dcm2niix_output
     touch $root/dcm2niix_error
+
+    touch $root/pet2bids_output
+    touch $root/pet2bids_error
 
     # find products (NIfTI files)
-    (cd $root && find . -maxdepth 9 -type f \( -name "*.nii.gz" \) > list)
+    (cd $root && find . -maxdepth 9 -type f \( -name "*.nii.gz" \) > $root/list)
+    (cd $root && find . -maxdepth 9 -type f \( -name "*blood.json" \) >> $root/list)
 
     echo "running ezBIDS_core (may take several minutes, depending on size of data)"
     python3 "./ezBIDS_core/ezBIDS_core.py" $root
@@ -142,13 +146,15 @@ else
 
     # determine which uploaded files/folders are PET directories or ECAT files
     echo "Finding imaging directories and files"
-    if [ ! -f $root/list]; then
-        mkdir -p $root/list
+    if [ ! -f $root/list ]; then
+        touch $root/list
     fi
     ./find_img_data.py $root
 
     # sort $root/pet2bids_dcm.list, $root/pet2bids_ecat.list, and $root/dcm2niix.list for comm.
     # Then, remove pet directories from dcm2niix list
+    touch $root/pet2bids_output
+
     if [ -f $root/pet2bids_dcm.list ]; then
         sort -o $root/pet2bids_dcm.list $root/pet2bids_dcm.list
         echo "Removing PET directories from dcm2niix list"
@@ -216,17 +222,41 @@ else
         done
     fi
 
-    # Let's add the transformed raw data (i.e. to NIfTI or MEG formats) to the list file
-
-    (cd $root && find . -maxdepth 9 -type f \( -name "*.nii.gz" \) > list)
-    (cd $root && find . -maxdepth 9 -type f \( -name "*blood.json" \) >> list)
+    # Check for pet2bids errors
+    if [[ $PET2BIDS_RUN == "true" ]]; then
+        # pull pet2bids error information to log file
+        { grep -B 1 --group-separator=$'\n\n' Error $root/pet2bids_output || true; } > $root/pet2bids_error
+        # remove error message(s) about not finding any DICOMs in folder
+        line_nums=$(grep -n 'Error: Unable to find any DICOM images' $root/pet2bids_error | cut -d: -f1)
+
+        for line_num in ${line_nums[*]}
+        do
+            sed -i "$((line_num-1)), $((line_num+1))d" $root/pet2bids_error
+        done
+    fi
+
+    # Add all transformed data (e.g. NIfTI or MEG formats) to the list file
+    (cd $root && find . -maxdepth 9 -type f \( -name "*.nii.gz" \) > $root/list)
+    (cd $root && find . -maxdepth 9 -type f \( -name "*blood.json" \) >> $root/list)
 
     if [ -f $root/meg.list ]; then
         cat $root/meg.list >> $root/list
     fi
 
     if [ ! -s $root/list ]; then
-        echo "Could not find any MRI, PET, or MEG imaging files in upload, aborting"
+        err_file=''
+        if [ `grep 'Error' $root/dcm2niix_error | wc -l` -ne 0 ]; then
+            err_file='dcm2niix_error'
+        fi
+
+        if [ `grep 'Error' $root/pet2bids_error | wc -l` -ne 0 ]; then
+            err_file='pet2bids_error'
+        fi
+
+        echo ""
+        echo "Error: Could not find any MRI, PET, or MEG imaging files in upload."
+        echo "Please click the Debug section below and select Download ${err_file}"
+        echo "We recommend reaching out to the dcm2niix team for assistance: https://github.com/rordenlab/dcm2niix/issues"
         exit 1
     fi
diff --git a/ui/src/SeriesPage.vue b/ui/src/SeriesPage.vue
index e2b7f6fd..9f1b6051 100755
--- a/ui/src/SeriesPage.vue
+++ b/ui/src/SeriesPage.vue
@@ -409,9 +409,9 @@ export default defineComponent({
                     s.type
                 );
                 console.log(s.series_idx, s.type);
-                console.log('metadataAlertsFields', metadataAlertsFields);
+                // console.log('metadataAlertsFields', metadataAlertsFields);
                 if (metadataAlertsFields.length) {
-                    let warn: string = `'Required metadata is missing and/or provided metadata field values have improper
+                    let warn: string = `'Required metadata is missing, or provided metadata field values have an improper
                     format. Please click on the "Edit Metadata" button below to resolve. You may skip fields for which
                     you do not know the proper value, but you will not have a fully BIDS-compliant dataset.'`;
                     s.validationWarnings.push(warn);
diff --git a/ui/src/Upload.vue b/ui/src/Upload.vue
index 848cd232..4c94c122 100644
--- a/ui/src/Upload.vue
+++ b/ui/src/Upload.vue
@@ -149,7 +149,8 @@
-                        ezBIDS failed.. Please check the Debug logs and contact ezBIDS team (dlevitas@iu.edu).
+                        ezBIDS failed.. Please check the Debug logs and contact the ezBIDS team
+                        (dlevitas@iu.edu).
                         {{ session.status_msg }}
@@ -190,7 +191,7 @@
                             We couldn't find any objects. Please upload data that contains at least 1 object. Contact the
-                            ezBIDS team (dlevitas@iu.edu) or https://github.com/brainlife/ezbids/issues for
+                            ezBIDS team (dlevitas@iu.edu or https://github.com/brainlife/ezbids/issues) for
                             support
@@ -213,6 +214,9 @@
                         Download dcm2niix_error
+                        Download pet2bids_error
                         Download list
                         Download ezBIDS_core.json
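
Note on the new RepetitionTime fallback in ezBIDS_core.py: it relies on nibabel reporting the TR as the fourth entry of header.get_zooms() for 4D images. The sketch below only illustrates that assumption and is not part of the patch; the file and function names are hypothetical.

    # tr_fallback_sketch.py - illustration only, not ezBIDS code
    import nibabel as nib

    def tr_from_nifti_header(nifti_path: str) -> float:
        """Return the repetition time stored in a 4D NIfTI header, or 0.0 for 3D images."""
        image = nib.load(nifti_path)
        zooms = image.header.get_zooms()  # (x, y, z[, t]) voxel sizes; t holds the TR for 4D data
        if len(zooms) == 4:
            return round(float(zooms[-1]), 2)  # same rounding the patch applies before writing RepetitionTime
        return 0.0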