From 1b5dda922def376792e2104952a292a69640c1b8 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Tue, 7 Dec 2021 17:42:32 +0300 Subject: [PATCH 01/27] Create clean data processed folder. --- preprocessing/preprocess_data.sh | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 43d0e56..f69b7c4 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -145,7 +145,35 @@ fi # Crop the manual seg sct_crop_image -i ${file_gt1}.nii.gz -m ${file_seg_dil}.nii.gz -o ${file_gt1}_crop.nii.gz -# TODO: Create 'clean' output folder +# Go back to the root output path +cd $PATH_OUTPUT + +# Create and populate clean data processed folder for training +PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" +if [[ ! -d $PATH_DATA_PROCESSED_CLEAN ]]; then + rsync -avzh --exclude='*.nii.gz' $PATH_DATA_PROCESSED/. $PATH_DATA_PROCESSED_CLEAN +fi + +# Go to the clean subject folder for source images +cd ${PATH_DATA_PROCESSED_CLEAN}/${SUBJECT}/anat + +# Copy source images for training +rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}.json . +rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}_crop.nii.gz . + +# Go to clean subject folder for segmentation GTs +cd ${PATH_DATA_PROCESSED_CLEAN}/derivatives/labels/${SUBJECT}/anat + +# Copy segmentation GTs for training +rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json . +rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz . +# Copy the second rater GT and aggregated GTs if second rater is present +if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json . + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz . + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz . + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz . +fi # Display useful info for the log end=`date +%s` From 60fe4c1d05204453d34eabcf1a724f644c48f4c4 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Tue, 7 Dec 2021 10:17:37 -0500 Subject: [PATCH 02/27] Update preprocessing/preprocess_data.sh --- preprocessing/preprocess_data.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index f69b7c4..ad09b63 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -150,6 +150,7 @@ cd $PATH_OUTPUT # Create and populate clean data processed folder for training PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" +# Copy non-image files (eg: participants.tsv, etc.) if [[ ! -d $PATH_DATA_PROCESSED_CLEAN ]]; then rsync -avzh --exclude='*.nii.gz' $PATH_DATA_PROCESSED/. $PATH_DATA_PROCESSED_CLEAN fi From d88010878bbb8a9923ad72f1b91ef44023c302c8 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Wed, 8 Dec 2021 15:45:33 +0300 Subject: [PATCH 03/27] Added _crop to images and GT filenames for BIDS compliance. 
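
BIDS requires a JSON sidecar to share the basename of the image it describes, so the
sidecars are renamed alongside the cropped images instead of being copied under their
original names. As an illustrative sketch of the expected pairing after this change
(sub-001 is a placeholder subject, not a file from the dataset):

    sub-001_UNIT1_crop.nii.gz                <->  sub-001_UNIT1_crop.json
    sub-001_UNIT1_lesion-manual_crop.nii.gz  <->  sub-001_UNIT1_lesion-manual_crop.json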
--- preprocessing/preprocess_data.sh | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index ad09b63..52269eb 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -158,22 +158,29 @@ fi # Go to the clean subject folder for source images cd ${PATH_DATA_PROCESSED_CLEAN}/${SUBJECT}/anat -# Copy source images for training -rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}.json . +# Rename JSON for source image for BIDS compliance +mv ${file}.json ${file}_crop.json + +# Copy source image for training rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}_crop.nii.gz . # Go to clean subject folder for segmentation GTs cd ${PATH_DATA_PROCESSED_CLEAN}/derivatives/labels/${SUBJECT}/anat +# Rename JSON for first rater GT for BIDS compliance +mv ${file_gt1}.json ${file_gt1}_crop.json + # Copy segmentation GTs for training -rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json . -rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz . -# Copy the second rater GT and aggregated GTs if second rater is present +rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz . + if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json . - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz . - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz . - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz . + # Rename JSON for second rater GT for BIDS compliance if second rater is present + mv ${file_gt2}.json ${file_gt2}_crop.json + + # Copy the second rater GT and aggregated GTs if second rater is present + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz . + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz . + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz . fi # Display useful info for the log From 6242a12651021fa1d1434fed3d7b2986aa218bc1 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Wed, 8 Dec 2021 16:09:51 +0300 Subject: [PATCH 04/27] Fixed minor typo. --- preprocessing/preprocess_data.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 52269eb..8bc6156 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -109,7 +109,7 @@ file_seg="${FILESEG}" # Dilate spinal cord mask sct_maths -i ${file_seg}.nii.gz -dilate 5 -shape ball -o ${file_seg}_dilate.nii.gz -# Use dilated mask to crop the orginal image and manual MS segmentations +# Use dilated mask to crop the original image and manual MS segmentations sct_crop_image -i ${file}.nii.gz -m ${file_seg}_dilate.nii.gz -o ${file}_crop.nii.gz # Go to subject folder for segmentation GTs From bec1305cc7a2b7de7a63e9f1ea70872999583dbe Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Wed, 8 Dec 2021 16:16:14 +0300 Subject: [PATCH 05/27] Added version for SCT dependency. 
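
FSL is dropped from the dependency list since only SCT commands are called, and SCT is
pinned to the release the pipeline was developed against. A quick pre-flight check,
assuming sct_version is available on the PATH of the SCT installation:

    sct_version   # expected to report 5.4.0 for this pipeline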
--- preprocessing/preprocess_data.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 8bc6156..586995d 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -2,9 +2,8 @@ # # Preprocess data. # -# Dependencies: -# - FSL -# - SCT +# Dependencies (versions): +# - SCT (5.4.0) # # Usage: # ./preprocess_data.sh From a869c68d06009d99afcc39eaef355f2d21e18962 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Wed, 8 Dec 2021 16:52:01 +0300 Subject: [PATCH 06/27] Changed the centerline extraction method to SVM for SC segmentation. --- preprocessing/preprocess_data.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 586995d..e398349 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -53,7 +53,7 @@ segment_if_does_not_exist() { else echo "Not found. Proceeding with automatic segmentation." # Segment spinal cord - sct_deepseg_sc -i ${file}.nii.gz -c $contrast -brain 1 -centerline cnn -qc ${PATH_QC} -qc-subject ${SUBJECT} + sct_deepseg_sc -i ${file}.nii.gz -c $contrast -qc ${PATH_QC} -qc-subject ${SUBJECT} fi } From 35fa3d02db8cc9f702ac12eb8a85f5029020d5ea Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Wed, 8 Dec 2021 21:18:24 +0300 Subject: [PATCH 07/27] Removed _crop suffix from images and GTs for cleaned data processed folder. --- preprocessing/preprocess_data.sh | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index e398349..f534eca 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -157,29 +157,20 @@ fi # Go to the clean subject folder for source images cd ${PATH_DATA_PROCESSED_CLEAN}/${SUBJECT}/anat -# Rename JSON for source image for BIDS compliance -mv ${file}.json ${file}_crop.json - # Copy source image for training -rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}_crop.nii.gz . +rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}_crop.nii.gz ${file}.nii.gz # Go to clean subject folder for segmentation GTs cd ${PATH_DATA_PROCESSED_CLEAN}/derivatives/labels/${SUBJECT}/anat -# Rename JSON for first rater GT for BIDS compliance -mv ${file_gt1}.json ${file_gt1}_crop.json - # Copy segmentation GTs for training -rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz . +rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz ${file_gt1}.nii.gz if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then - # Rename JSON for second rater GT for BIDS compliance if second rater is present - mv ${file_gt2}.json ${file_gt2}_crop.json - # Copy the second rater GT and aggregated GTs if second rater is present - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz . - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz . - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz . 
+ rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz ${file_gt2}.nii.gz + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz ${file_gtc}.nii.gz + rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz ${file_soft}.nii.gz fi # Display useful info for the log From 625c99a62f9111f632cbab25759670775b94343c Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Wed, 8 Dec 2021 16:20:09 -0500 Subject: [PATCH 08/27] Refactored creation of clean dataset Fixes issues when some folders were not properly created. --- preprocessing/preprocess_data.sh | 33 ++++++++++++-------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index f534eca..ed7ab4b 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -53,7 +53,7 @@ segment_if_does_not_exist() { else echo "Not found. Proceeding with automatic segmentation." # Segment spinal cord - sct_deepseg_sc -i ${file}.nii.gz -c $contrast -qc ${PATH_QC} -qc-subject ${SUBJECT} + sct_deepseg_sc -i ${file}.nii.gz -c $contrast -brain 1 -centerline cnn -qc ${PATH_QC} -qc-subject ${SUBJECT} fi } @@ -149,28 +149,19 @@ cd $PATH_OUTPUT # Create and populate clean data processed folder for training PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" -# Copy non-image files (eg: participants.tsv, etc.) -if [[ ! -d $PATH_DATA_PROCESSED_CLEAN ]]; then - rsync -avzh --exclude='*.nii.gz' $PATH_DATA_PROCESSED/. $PATH_DATA_PROCESSED_CLEAN -fi - -# Go to the clean subject folder for source images -cd ${PATH_DATA_PROCESSED_CLEAN}/${SUBJECT}/anat - -# Copy source image for training -rsync -avzh ${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file}_crop.nii.gz ${file}.nii.gz - -# Go to clean subject folder for segmentation GTs -cd ${PATH_DATA_PROCESSED_CLEAN}/derivatives/labels/${SUBJECT}/anat - -# Copy segmentation GTs for training -rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz ${file_gt1}.nii.gz - +mkdir -p $PATH_DATA_PROCESSED_CLEAN $PATH_DATA_PROCESSED_CLEAN/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json +mkdir -p derivatives derivatives/labels derivatives/labels/${SUBJECT} derivatives/labels/${SUBJECT}/anat/ +rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz +rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json +# If second rater is present, copy the other files if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then # Copy the second rater GT and aggregated GTs if second rater is present - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz ${file_gt2}.nii.gz - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz ${file_gtc}.nii.gz - rsync -avzh ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz ${file_soft}.nii.gz + rsync -avzh 
$PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz fi # Display useful info for the log From c235a1d91b4f6b32592727f72d7702191e346ae3 Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Wed, 8 Dec 2021 16:26:55 -0500 Subject: [PATCH 09/27] Fixed wrong path --- preprocessing/preprocess_data.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index ed7ab4b..8fbf7a3 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -152,7 +152,7 @@ PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" mkdir -p $PATH_DATA_PROCESSED_CLEAN $PATH_DATA_PROCESSED_CLEAN/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json -mkdir -p derivatives derivatives/labels derivatives/labels/${SUBJECT} derivatives/labels/${SUBJECT}/anat/ +mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json # If second rater is present, copy the other files From 02084eed8576bab4af44ca2861d048343d8dc34e Mon Sep 17 00:00:00 2001 From: Julien Cohen-Adad Date: Wed, 8 Dec 2021 16:39:28 -0500 Subject: [PATCH 10/27] Forgot to copy the BIDS metadata files --- preprocessing/preprocess_data.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 8fbf7a3..8065367 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -150,6 +150,9 @@ cd $PATH_OUTPUT # Create and populate clean data processed folder for training PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" mkdir -p $PATH_DATA_PROCESSED_CLEAN $PATH_DATA_PROCESSED_CLEAN/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat +rsync -avzh $PATH_DATA_PROCESSED/dataset_description.json $PATH_DATA_PROCESSED_CLEAN/ +rsync -avzh $PATH_DATA_PROCESSED/participants.* $PATH_DATA_PROCESSED_CLEAN/ +rsync -avzh $PATH_DATA_PROCESSED/README $PATH_DATA_PROCESSED_CLEAN/ rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json mkdir -p 
$PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ From 943a5b17835898a1af3b049a3a9844b1b61111c9 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 10 Dec 2021 15:01:28 +0300 Subject: [PATCH 11/27] Added checks for JSON validity. --- preprocessing/preprocess_data.sh | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 8065367..581971e 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -99,6 +99,11 @@ cd ${SUBJECT}/anat # Define variables file="${SUBJECT}_UNIT1" +# Make sure the image metadata is a valid JSON object +if [[ ! -s ${file}.json ]]; then + echo "{}" >> ${file}.json +fi + # Spinal cord segmentation. Here, we are dealing with MP2RAGE contrast. We # specify t1 contrast because the cord is bright and the CSF is dark (like on # the traditional MPRAGE T1w data). @@ -124,8 +129,17 @@ file_soft="${SUBJECT}_UNIT1_lesion-manual-soft" # Redefine variable for final SC segmentation mask as path changed file_seg_dil=${PATH_DATA_PROCESSED}/${SUBJECT}/anat/${file_seg}_dilate +# Make sure the first rater metadata is a valid JSON object +if [[ ! -s ${file_gt1}.json ]]; then + echo "{}" >> ${file_gt1}.json +fi + # Aggregate multiple raters if second rater is present if [[ -f ${file_gt2}.nii.gz ]]; then + # Make sure the second rater metadata is a valid JSON object + if [[ ! -s ${file_gt2}.json ]]; then + echo "{}" >> ${file_gt2}.json + fi # Create consensus ground truth by majority vote sct_maths -i ${file_gt1}.nii.gz -add ${file_gt2}.nii.gz -o lesion_sum.nii.gz sct_maths -i lesion_sum.nii.gz -sub 1 -o lesion_sum_minusone.nii.gz From 996f3a57198c360169096a193a9a8170c983d8c6 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 10 Dec 2021 18:23:32 +0300 Subject: [PATCH 12/27] Added lesion QC for preprocessing. --- preprocessing/qc_preprocess.py | 76 ++++++++++++++++++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 preprocessing/qc_preprocess.py diff --git a/preprocessing/qc_preprocess.py b/preprocessing/qc_preprocess.py new file mode 100644 index 0000000..23cd8ff --- /dev/null +++ b/preprocessing/qc_preprocess.py @@ -0,0 +1,76 @@ +""" +Quality control for preprocessing step. +See `preprocess_data.sh` for the preprocessing pipeline. +""" + +import argparse +import os +from tqdm import tqdm +from collections import Counter + +import pandas as pd +import nibabel as nib +import numpy as np + +# Argument parsing +parser = argparse.ArgumentParser(description='Quality control for preprocessing.') +parser.add_argument('-s', '--sct_output_path', type=str, required=True, + help='Path to the folder generated by `sct_run_batch`. This folder should contain `data_processed` folder.') +args = parser.parse_args() + +# Quick checking of arguments +if not os.path.exists(args.sct_output_path): + raise NotADirectoryError('%s could NOT be found!' 
% args.sct_output_path) +else: + if not os.path.exists(os.path.join(args.sct_output_path, 'data_processed')): + raise NotADirectoryError('`data_processed` could NOT be found within %s' % args.sct_output_path) + +# Get all subjects +subjects_df = pd.read_csv(os.path.join(args.sct_output_path, 'data_processed', 'participants.tsv'), sep='\t') +subjects = subjects_df['participant_id'].values.tolist() + +# Log resolutions and sizes for data exploration +resolutions, sizes = [], [] + +# Perform QC for each subject +for subject in tqdm(subjects, desc='Iterating over Subjects'): + # Get paths + subject_images_path = os.path.join(args.sct_output_path, 'data_processed', subject, 'anat') + subject_labels_path = os.path.join(args.sct_output_path, 'data_processed', 'derivatives', 'labels', subject, 'anat') + + # Read cropped subject image (i.e. 3D volume) to be used for training + img_crop_fpath = os.path.join(subject_images_path, '%s_UNIT1_crop.nii.gz' % subject) + if not os.path.exists(img_crop_fpath): + print('Could not find cropped image for subject: %s' % subject) + continue + img_crop = nib.load(img_crop_fpath) + + # Get and log size and resolution for each subject image + size = img_crop.get_fdata().shape + resolution = tuple(img_crop.header['pixdim'].tolist()[1:4]) + resolution = tuple([np.round(r, 1) for r in list(resolution)]) + sizes.append(size) + resolutions.append(resolution) + + # Read original and cropped subject ground-truths (GT) + gt1_fpath = os.path.join(subject_labels_path, '%s_UNIT1_lesion-manual.nii.gz' % subject) + gt1_crop_fpath = os.path.join(subject_labels_path, '%s_UNIT1_lesion-manual_crop.nii.gz' % subject) + gt2_fpath = os.path.join(subject_labels_path, '%s_UNIT1_lesion-manual2.nii.gz' % subject) + gt2_crop_fpath = os.path.join(subject_labels_path, '%s_UNIT1_lesion-manual2_crop.nii.gz' % subject) + + gt1 = nib.load(gt1_fpath) + gt1_crop = nib.load(gt1_crop_fpath) + gt2 = nib.load(gt2_fpath) + gt2_crop = nib.load(gt2_crop_fpath) + + # Basic shape checks + if not img_crop.shape == gt1_crop.shape == gt2_crop.shape: + raise ValueError('Shape mismatch in images and GTs for subject: %s' % subject) + + # Check if the dilated SC mask leaves out any lesions from GTs (from each rater) + if not (np.allclose(np.sum(gt1.get_fdata()), np.sum(gt1_crop.get_fdata())) and + np.allclose(np.sum(gt2.get_fdata()), np.sum(gt2_crop.get_fdata()))): + print('\n\tALERT: Lesion(s) from raters cropped during preprocessing for subject: %s' % subject) + +print('RESOLUTIONS: ', Counter(resolutions)) +print('SIZES: ', Counter(sizes)) From 6e7352f8d208832a0d613030f02e1a9b4b549f64 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 10 Dec 2021 18:35:12 +0300 Subject: [PATCH 13/27] More clear logging of problematic subjects. --- preprocessing/qc_preprocess.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/preprocessing/qc_preprocess.py b/preprocessing/qc_preprocess.py index 23cd8ff..db700ea 100644 --- a/preprocessing/qc_preprocess.py +++ b/preprocessing/qc_preprocess.py @@ -32,7 +32,10 @@ # Log resolutions and sizes for data exploration resolutions, sizes = [], [] -# Perform QC for each subject +# Log problematic subjects for QC +failed_crop_subjects, shape_mismatch_subjects, left_out_lesion_subjects = [], [], [] + +# Perform QC on each subject for subject in tqdm(subjects, desc='Iterating over Subjects'): # Get paths subject_images_path = os.path.join(args.sct_output_path, 'data_processed', subject, 'anat') @@ -41,7 +44,7 @@ # Read cropped subject image (i.e. 
3D volume) to be used for training img_crop_fpath = os.path.join(subject_images_path, '%s_UNIT1_crop.nii.gz' % subject) if not os.path.exists(img_crop_fpath): - print('Could not find cropped image for subject: %s' % subject) + failed_crop_subjects.append(subject) continue img_crop = nib.load(img_crop_fpath) @@ -65,12 +68,17 @@ # Basic shape checks if not img_crop.shape == gt1_crop.shape == gt2_crop.shape: - raise ValueError('Shape mismatch in images and GTs for subject: %s' % subject) + shape_mismatch_subjects.append(subject) + continue # Check if the dilated SC mask leaves out any lesions from GTs (from each rater) if not (np.allclose(np.sum(gt1.get_fdata()), np.sum(gt1_crop.get_fdata())) and np.allclose(np.sum(gt2.get_fdata()), np.sum(gt2_crop.get_fdata()))): - print('\n\tALERT: Lesion(s) from raters cropped during preprocessing for subject: %s' % subject) + left_out_lesion_subjects.append(subject) print('RESOLUTIONS: ', Counter(resolutions)) print('SIZES: ', Counter(sizes)) + +print('Could not find cropped image for the following subjects: ', failed_crop_subjects) +print('Found shape mismatch in images and GTs for the following subjects: ', shape_mismatch_subjects) +print('ALERT: Lesion(s) from raters cropped during preprocessing for the following subjects: ', left_out_lesion_subjects) From 94bdeff33a46b1017eccecfdffc7058fe0a7bb03 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Mon, 13 Dec 2021 00:16:16 +0300 Subject: [PATCH 14/27] Added documentation for preprocessing QC in README. --- README.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 75c5821..5a6d40f 100644 --- a/README.md +++ b/README.md @@ -23,5 +23,13 @@ git clone https://github.com/ivadomed/model_seg_ms_mp2rage.git The data need to be preprocessed before training. Here is the syntax: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output ./data_basel-mp2rage -jobs -2 +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -jobs ~~~ + +After running the preprocessing, you can also run the quality-control (QC) script: +``` +python preprocessing/qc_preprocess.py -s +``` +which i) logs resolutions and sizes for each subject image for data exploration, +ii) performs basic shape checks for images and ground-truths (GTs), and most importantly +iii) checks if the dilated spinal-cord (SC) mask leaves out any lesions from the GT of each rater. From 2ad03ec62cc4c00111c0463d7308e68aaff5e9a3 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Mon, 13 Dec 2021 06:13:19 +0300 Subject: [PATCH 15/27] Added input param for centerline method (svm or default=cnn). --- preprocessing/preprocess_data.sh | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 581971e..d15dced 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -41,6 +41,7 @@ segment_if_does_not_exist() { ### local file="$1" local contrast="$2" + local centerline_method="$3" # Update global variable with segmentation file name FILESEG="${file}_seg" FILESEGMANUAL="${PATH_DATA}/derivatives/labels/${SUBJECT}/anat/${FILESEG}-manual.nii.gz" @@ -52,13 +53,21 @@ segment_if_does_not_exist() { sct_qc -i ${file}.nii.gz -s ${FILESEG}.nii.gz -p sct_deepseg_sc -qc ${PATH_QC} -qc-subject ${SUBJECT} else echo "Not found. Proceeding with automatic segmentation." 
- # Segment spinal cord - sct_deepseg_sc -i ${file}.nii.gz -c $contrast -brain 1 -centerline cnn -qc ${PATH_QC} -qc-subject ${SUBJECT} + # Segment spinal cord based on the specified centerline method + if [[ $centerline_method == "cnn" ]]; then + sct_deepseg_sc -i ${file}.nii.gz -c $contrast -brain 1 -centerline cnn -qc ${PATH_QC} -qc-subject ${SUBJECT} + elif [[ $centerline_method == "svm" ]]; then + sct_deepseg_sc -i ${file}.nii.gz -c $contrast -centerline svm -qc ${PATH_QC} -qc-subject ${SUBJECT} + else + echo "Centerline extraction method = ${centerline_method} is not recognized!" + exit 1 + fi fi } # Retrieve input params and other params SUBJECT=$1 +CENTERLINE_METHOD=${2:-"cnn"} # get starting time: start=`date +%s` @@ -107,7 +116,7 @@ fi # Spinal cord segmentation. Here, we are dealing with MP2RAGE contrast. We # specify t1 contrast because the cord is bright and the CSF is dark (like on # the traditional MPRAGE T1w data). -segment_if_does_not_exist ${file} t1 +segment_if_does_not_exist ${file} t1 ${CENTERLINE_METHOD} file_seg="${FILESEG}" # Dilate spinal cord mask From e1828f90eed0c8bca4c8cda00884cf1ab3398d85 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Thu, 16 Dec 2021 13:29:28 +0300 Subject: [PATCH 16/27] Added centerline param docs to README. --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5a6d40f..8a1fcd8 100644 --- a/README.md +++ b/README.md @@ -23,9 +23,12 @@ git clone https://github.com/ivadomed/model_seg_ms_mp2rage.git The data need to be preprocessed before training. Here is the syntax: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -jobs +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -script-args "" -jobs ~~~ +where `` is either `svm` or `cnn`. You can also leave out the `-script-args` argument in which case `cnn` will be used by default in the preprocessing script. +[#10](https://github.com/ivadomed/model_seg_ms_mp2rage/issues/10) is a related issue you can check. + After running the preprocessing, you can also run the quality-control (QC) script: ``` python preprocessing/qc_preprocess.py -s From a5964c0822a05c176439b7a67dd545e37b042cc9 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Thu, 16 Dec 2021 13:29:51 +0300 Subject: [PATCH 17/27] Updated example usage in comments with centerline param. --- preprocessing/preprocess_data.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index d15dced..8da426c 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -6,8 +6,10 @@ # - SCT (5.4.0) # # Usage: -# ./preprocess_data.sh +# ./preprocess_data.sh # +# is the name of the subject in BIDS convention (sub-XXX) +# is the method sct_deepseg_sc uses for centerline extraction (cnn or svm) # # Manual segmentations or labels should be located under: # PATH_DATA/derivatives/labels/SUBJECT// From 9f15b11953dfcaa9b3b8677bb136c1221ff2e716 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 17 Dec 2021 14:49:06 +0300 Subject: [PATCH 18/27] Implemented clean processed folder for SC segmentation task. 
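
The script now takes an optional third argument selecting the training task, defaulting
to lesionseg. For scseg, the raw subject images and spinal cord masks are copied into the
clean folder; for lesionseg, the cord-cropped images and lesion GTs are copied as before.
A hedged usage sketch with a placeholder subject (in practice sct_run_batch forwards
these arguments via -script-args):

    ./preprocess_data.sh sub-001 svm scseg        # SC segmentation task
    ./preprocess_data.sh sub-001 cnn lesionseg    # lesion segmentation task (default)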
--- preprocessing/preprocess_data.sh | 48 +++++++++++++++++++++++--------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 8da426c..1f529b9 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -6,10 +6,11 @@ # - SCT (5.4.0) # # Usage: -# ./preprocess_data.sh +# ./preprocess_data.sh # # is the name of the subject in BIDS convention (sub-XXX) # is the method sct_deepseg_sc uses for centerline extraction (cnn or svm) +# is the aimed training task which will guide preprocessing (scseg or lesionseg) # # Manual segmentations or labels should be located under: # PATH_DATA/derivatives/labels/SUBJECT// @@ -70,6 +71,7 @@ segment_if_does_not_exist() { # Retrieve input params and other params SUBJECT=$1 CENTERLINE_METHOD=${2:-"cnn"} +TASK=${3:-"lesionseg"} # get starting time: start=`date +%s` @@ -174,24 +176,44 @@ cd $PATH_OUTPUT # Create and populate clean data processed folder for training PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" + +# Copy over required BIDs files mkdir -p $PATH_DATA_PROCESSED_CLEAN $PATH_DATA_PROCESSED_CLEAN/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat rsync -avzh $PATH_DATA_PROCESSED/dataset_description.json $PATH_DATA_PROCESSED_CLEAN/ rsync -avzh $PATH_DATA_PROCESSED/participants.* $PATH_DATA_PROCESSED_CLEAN/ rsync -avzh $PATH_DATA_PROCESSED/README $PATH_DATA_PROCESSED_CLEAN/ -rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz -rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json -mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ -rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz -rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json -# If second rater is present, copy the other files -if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then - # Copy the second rater GT and aggregated GTs if second rater is present - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz + +if [[ $TASK == "lesionseg" ]]; then + # For lesion segmentation task, copy SC crops as inputs and lesion annotations as targets + rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json 
+ mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json + # If second rater is present, copy the other files + if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then + # Copy the second rater GT and aggregated GTs if second rater is present + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz + fi +elif [[ $TASK == "scseg" ]]; then + # For SC segmentation task, copy raw subject images as inputs and SC masks as targets + rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json + mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ + file_seg_gt="${file}_seg-manual" + rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_seg.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.nii.gz + # TODO: Get the correct JSON below, skipping for now. + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json +else + echo "Task = ${TASK} is not recognized!" + exit 1 fi + + # Display useful info for the log end=`date +%s` runtime=$((end-start)) From dbe089fdf30b14d93f3b4e76f180b91d9d5fd0b6 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 17 Dec 2021 14:49:22 +0300 Subject: [PATCH 19/27] Added documentation for multi-task preprocessing. --- README.md | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 8a1fcd8..5fb3047 100644 --- a/README.md +++ b/README.md @@ -20,14 +20,27 @@ git clone https://github.com/ivadomed/model_seg_ms_mp2rage.git ## Prepare the data -The data need to be preprocessed before training. Here is the syntax: +The data need to be preprocessed before training. 
The general syntax for preprocessing is: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -script-args "" -jobs +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -script-args "-centerline_method -task " -jobs ~~~ -where `` is either `svm` or `cnn`. You can also leave out the `-script-args` argument in which case `cnn` will be used by default in the preprocessing script. -[#10](https://github.com/ivadomed/model_seg_ms_mp2rage/issues/10) is a related issue you can check. +where `` is either `cnn` (default value) or `svm` and +`` is either `lesionseg` (default value) or `scseg`. You can leave the `-script-args` +argument empty to stick to the default values. + +To run preprocessing for the lesion segmentation task: + +~~~ +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-lesionseg -script-args "svm lesionseg" -jobs +~~~ + +To run preprocessing for the spinal cord (SC) segmentation task: + +~~~ +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-scseg -script-args "svm scseg" -jobs +~~~ After running the preprocessing, you can also run the quality-control (QC) script: ``` From cd31be31ddb8b064c96724a462e74be0bbabf6a3 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Fri, 17 Dec 2021 23:47:44 +0300 Subject: [PATCH 20/27] Inverted lines for SC seg and lesion seg in the documentation. --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 5fb3047..eb90969 100644 --- a/README.md +++ b/README.md @@ -30,16 +30,16 @@ where `` is either `cnn` (default value) or `svm` and `` is either `lesionseg` (default value) or `scseg`. You can leave the `-script-args` argument empty to stick to the default values. -To run preprocessing for the lesion segmentation task: +To run preprocessing for the spinal cord (SC) segmentation task: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-lesionseg -script-args "svm lesionseg" -jobs +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-scseg -script-args "svm scseg" -jobs ~~~ -To run preprocessing for the spinal cord (SC) segmentation task: +To run preprocessing for the lesion segmentation task: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-scseg -script-args "svm scseg" -jobs +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-lesionseg -script-args "svm lesionseg" -jobs ~~~ After running the preprocessing, you can also run the quality-control (QC) script: From 80e8cebfff69710dbd22adce13d57565a38ce735 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 00:29:24 +0300 Subject: [PATCH 21/27] Copying relevant JSON or creating a new one for SC seg. 
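
A manually-corrected cord segmentation already has an auto-generated sidecar, which is
copied as-is; when the mask comes from sct_deepseg_sc there is no sidecar, so a minimal
one is written. Based on the echo added below, the generated file is expected to look
like this (indentation and date are illustrative):

    {
        "Author": "Generated with sct_deepseg_sc",
        "Date": "2021-12-18 00:29:24"
    }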
--- preprocessing/preprocess_data.sh | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 1f529b9..0b18118 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -205,8 +205,14 @@ elif [[ $TASK == "scseg" ]]; then mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ file_seg_gt="${file}_seg-manual" rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_seg.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.nii.gz - # TODO: Get the correct JSON below, skipping for now. - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json + # Copy the relevant JSON: use auto-generated JSON for manually corrected and create new JSON for sct_deepseg_sc generated SC segs + if [[ -f $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json ]]; then + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json + else + datetime=$(date +'%Y-%m-%d %H:%M:%S') + echo -e "{\n\t\"Author\": \"Generated with sct_deepseg_sc\",\n\t\"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json + fi + else echo "Task = ${TASK} is not recognized!" exit 1 From 0e8527f98ae9920686b8feb8a81a4096726a299d Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 00:31:10 +0300 Subject: [PATCH 22/27] Set tabs to 4 spaces and added comment. --- preprocessing/preprocess_data.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 0b18118..e6f6a4b 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -209,7 +209,9 @@ elif [[ $TASK == "scseg" ]]; then if [[ -f $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json ]]; then rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json else + # Get current datetime and set tabs to 4 spaces datetime=$(date +'%Y-%m-%d %H:%M:%S') + tabs 4 echo -e "{\n\t\"Author\": \"Generated with sct_deepseg_sc\",\n\t\"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json fi From 4b9b332461c4dadc435fe9318cbd8458e1cf4700 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 00:45:02 +0300 Subject: [PATCH 23/27] Converted the 4-spaced tab back to 4 explicit spaces. 
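
Note that `tabs 4` only changes how tab characters are rendered by the terminal; it does
not alter the bytes written by `echo -e`, so the sidecar would still have contained
literal tabs. Writing the four spaces explicitly keeps the generated JSON indented the
same way in every environment.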
--- preprocessing/preprocess_data.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index e6f6a4b..5686ad2 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -211,8 +211,7 @@ elif [[ $TASK == "scseg" ]]; then else # Get current datetime and set tabs to 4 spaces datetime=$(date +'%Y-%m-%d %H:%M:%S') - tabs 4 - echo -e "{\n\t\"Author\": \"Generated with sct_deepseg_sc\",\n\t\"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json + echo -e "{\n \"Author\": \"Generated with sct_deepseg_sc\",\n \"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json fi else From be4d846cf4104371dd467436954c684c3e8dcd36 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 00:47:20 +0300 Subject: [PATCH 24/27] Deleted extra line. --- preprocessing/preprocess_data.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 5686ad2..422aad0 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -213,7 +213,6 @@ elif [[ $TASK == "scseg" ]]; then datetime=$(date +'%Y-%m-%d %H:%M:%S') echo -e "{\n \"Author\": \"Generated with sct_deepseg_sc\",\n \"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json fi - else echo "Task = ${TASK} is not recognized!" exit 1 From 814c9ae473f516a22d06bf4a5ddf0566cdf0bccb Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 15:57:38 +0300 Subject: [PATCH 25/27] Unified two tasks, removed centerline and task user args. --- preprocessing/preprocess_data.sh | 86 ++++++++++++++++---------------- 1 file changed, 42 insertions(+), 44 deletions(-) diff --git a/preprocessing/preprocess_data.sh b/preprocessing/preprocess_data.sh index 422aad0..5090053 100644 --- a/preprocessing/preprocess_data.sh +++ b/preprocessing/preprocess_data.sh @@ -70,8 +70,6 @@ segment_if_does_not_exist() { # Retrieve input params and other params SUBJECT=$1 -CENTERLINE_METHOD=${2:-"cnn"} -TASK=${3:-"lesionseg"} # get starting time: start=`date +%s` @@ -120,7 +118,7 @@ fi # Spinal cord segmentation. Here, we are dealing with MP2RAGE contrast. We # specify t1 contrast because the cord is bright and the CSF is dark (like on # the traditional MPRAGE T1w data). 
-segment_if_does_not_exist ${file} t1 ${CENTERLINE_METHOD} +segment_if_does_not_exist ${file} t1 svm file_seg="${FILESEG}" # Dilate spinal cord mask @@ -174,48 +172,48 @@ sct_crop_image -i ${file_gt1}.nii.gz -m ${file_seg_dil}.nii.gz -o ${file_gt1}_cr # Go back to the root output path cd $PATH_OUTPUT -# Create and populate clean data processed folder for training -PATH_DATA_PROCESSED_CLEAN="${PATH_DATA_PROCESSED}_clean" - -# Copy over required BIDs files -mkdir -p $PATH_DATA_PROCESSED_CLEAN $PATH_DATA_PROCESSED_CLEAN/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat -rsync -avzh $PATH_DATA_PROCESSED/dataset_description.json $PATH_DATA_PROCESSED_CLEAN/ -rsync -avzh $PATH_DATA_PROCESSED/participants.* $PATH_DATA_PROCESSED_CLEAN/ -rsync -avzh $PATH_DATA_PROCESSED/README $PATH_DATA_PROCESSED_CLEAN/ - -if [[ $TASK == "lesionseg" ]]; then - # For lesion segmentation task, copy SC crops as inputs and lesion annotations as targets - rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json - mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json - # If second rater is present, copy the other files - if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then - # Copy the second rater GT and aggregated GTs if second rater is present - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz - fi -elif [[ $TASK == "scseg" ]]; then - # For SC segmentation task, copy raw subject images as inputs and SC masks as targets - rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.nii.gz $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.nii.gz - rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_CLEAN/${SUBJECT}/anat/${file}.json - mkdir -p $PATH_DATA_PROCESSED_CLEAN/derivatives $PATH_DATA_PROCESSED_CLEAN/derivatives/labels $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/ - file_seg_gt="${file}_seg-manual" - rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_seg.nii.gz $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.nii.gz - # Copy the relevant JSON: use auto-generated JSON for manually corrected and create new JSON for 
sct_deepseg_sc generated SC segs - if [[ -f $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json ]]; then - rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json - else - # Get current datetime and set tabs to 4 spaces - datetime=$(date +'%Y-%m-%d %H:%M:%S') - echo -e "{\n \"Author\": \"Generated with sct_deepseg_sc\",\n \"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_CLEAN/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json - fi +# Create clean data processed folders for two tasks: spinal cord (SC) segmentation and lesion segmentation +PATH_DATA_PROCESSED_SCSEG="${PATH_DATA_PROCESSED}_scseg" +PATH_DATA_PROCESSED_LESIONSEG="${PATH_DATA_PROCESSED}_lesionseg" + +# Copy over required BIDs files to both folders +mkdir -p $PATH_DATA_PROCESSED_SCSEG $PATH_DATA_PROCESSED_SCSEG/${SUBJECT} $PATH_DATA_PROCESSED_SCSEG/${SUBJECT}/anat +mkdir -p $PATH_DATA_PROCESSED_LESIONSEG $PATH_DATA_PROCESSED_LESIONSEG/${SUBJECT} $PATH_DATA_PROCESSED_LESIONSEG/${SUBJECT}/anat +rsync -avzh $PATH_DATA_PROCESSED/dataset_description.json $PATH_DATA_PROCESSED_SCSEG/ +rsync -avzh $PATH_DATA_PROCESSED/dataset_description.json $PATH_DATA_PROCESSED_LESIONSEG/ +rsync -avzh $PATH_DATA_PROCESSED/participants.* $PATH_DATA_PROCESSED_SCSEG/ +rsync -avzh $PATH_DATA_PROCESSED/participants.* $PATH_DATA_PROCESSED_LESIONSEG/ +rsync -avzh $PATH_DATA_PROCESSED/README $PATH_DATA_PROCESSED_SCSEG/ +rsync -avzh $PATH_DATA_PROCESSED/README $PATH_DATA_PROCESSED_LESIONSEG/ + +# For SC segmentation task, copy raw subject images as inputs and SC masks as targets +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.nii.gz $PATH_DATA_PROCESSED_SCSEG/${SUBJECT}/anat/${file}.nii.gz +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_SCSEG/${SUBJECT}/anat/${file}.json +mkdir -p $PATH_DATA_PROCESSED_SCSEG/derivatives $PATH_DATA_PROCESSED_SCSEG/derivatives/labels $PATH_DATA_PROCESSED_SCSEG/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_SCSEG/derivatives/labels/${SUBJECT}/anat/ +file_seg_gt="${file}_seg-manual" +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_seg.nii.gz $PATH_DATA_PROCESSED_SCSEG/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.nii.gz +# Copy the relevant JSON: use auto-generated JSON for manually corrected and create new JSON for sct_deepseg_sc generated SC segs +if [[ -f $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json ]]; then + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json $PATH_DATA_PROCESSED_SCSEG/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json else - echo "Task = ${TASK} is not recognized!" 
- exit 1 + # Get current datetime and set tabs to 4 spaces + datetime=$(date +'%Y-%m-%d %H:%M:%S') + echo -e "{\n \"Author\": \"Generated with sct_deepseg_sc\",\n \"Date\": \"${datetime}\"\n}" >> $PATH_DATA_PROCESSED_SCSEG/derivatives/labels/${SUBJECT}/anat/${file_seg_gt}.json +fi + +# For lesion segmentation task, copy SC crops as inputs and lesion annotations as targets +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}_crop.nii.gz $PATH_DATA_PROCESSED_LESIONSEG/${SUBJECT}/anat/${file}.nii.gz +rsync -avzh $PATH_DATA_PROCESSED/${SUBJECT}/anat/${file}.json $PATH_DATA_PROCESSED_LESIONSEG/${SUBJECT}/anat/${file}.json +mkdir -p $PATH_DATA_PROCESSED_LESIONSEG/derivatives $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT} $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/ +rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}_crop.nii.gz $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_gt1}.nii.gz +rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_gt1}.json +# If second rater is present, copy the other files +if [[ -f ${PATH_DATA_PROCESSED}/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz ]]; then + # Copy the second rater GT and aggregated GTs if second rater is present + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}_crop.nii.gz $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_gt2}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_gt2}.json + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_gtc}_crop.nii.gz $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_gtc}.nii.gz + rsync -avzh $PATH_DATA_PROCESSED/derivatives/labels/${SUBJECT}/anat/${file_soft}_crop.nii.gz $PATH_DATA_PROCESSED_LESIONSEG/derivatives/labels/${SUBJECT}/anat/${file_soft}.nii.gz fi From 91e8104fd44bdf4f81b7b6b64f6a335c96b71d07 Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 15:57:54 +0300 Subject: [PATCH 26/27] Updated documentation on how to run preprocessing. --- README.md | 26 +++++++------------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index eb90969..ec6ca0d 100644 --- a/README.md +++ b/README.md @@ -20,32 +20,20 @@ git clone https://github.com/ivadomed/model_seg_ms_mp2rage.git ## Prepare the data -The data need to be preprocessed before training. The general syntax for preprocessing is: +The data need to be preprocessed before training. The preprocessing command is: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -script-args "-centerline_method -task " -jobs +sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -jobs ~~~ -where `` is either `cnn` (default value) or `svm` and -`` is either `lesionseg` (default value) or `scseg`. You can leave the `-script-args` -argument empty to stick to the default values. 
- -To run preprocessing for the spinal cord (SC) segmentation task: - -~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-scseg -script-args "svm scseg" -jobs -~~~ - -To run preprocessing for the lesion segmentation task: - -~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output basel-mp2rage-preprocessed-lesionseg -script-args "svm lesionseg" -jobs -~~~ +This command will create a `data_processed_scseg` folder for the SC segmentation task and a +`data_processed_lesionseg` folder for the lesion segmentation task inside the `` +you specified. Each of these two folders contain only the required files for their respective task. After running the preprocessing, you can also run the quality-control (QC) script: ``` python preprocessing/qc_preprocess.py -s ``` -which i) logs resolutions and sizes for each subject image for data exploration, -ii) performs basic shape checks for images and ground-truths (GTs), and most importantly +which i) logs resolutions and sizes for each SC-cropped subject image for data exploration, +ii) performs basic shape checks for SC-cropped images and ground-truths (GTs), and most importantly iii) checks if the dilated spinal-cord (SC) mask leaves out any lesions from the GT of each rater. From 04e4098843a801f08b9165fe0de07354df5c8dce Mon Sep 17 00:00:00 2001 From: Uzay Macar Date: Sat, 18 Dec 2021 20:36:17 +0300 Subject: [PATCH 27/27] Fixed script path in README. Co-authored-by: Julien Cohen-Adad --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ec6ca0d..15f88b0 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ git clone https://github.com/ivadomed/model_seg_ms_mp2rage.git The data need to be preprocessed before training. The preprocessing command is: ~~~ -sct_run_batch -script /model_seg_ms_mp2rage/preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -jobs +sct_run_batch -script /preprocessing/preprocess_data.sh -path-data /basel-mp2rage/ -path-output -jobs ~~~ This command will create a `data_processed_scseg` folder for the SC segmentation task and a