diff --git a/hippunfold/config/nnunet_model_urls.yml b/hippunfold/config/nnunet_model_urls.yml
deleted file mode 100644
index 6b8ee5d1..00000000
--- a/hippunfold/config/nnunet_model_urls.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-T1w: 'https://zenodo.org/record/4508747/files/trained_model.3d_fullres.Task101_hcp1200_T1w.nnUNetTrainerV2.model_best.tar'
-T2w: 'https://zenodo.org/record/4508747/files/trained_model.3d_fullres.Task102_hcp1200_T2w.nnUNetTrainerV2.model_best.tar'
-neonateT1w: 'https://zenodo.org/record/5733556/files/trained_model.3d_fullres.Task205_hcp1200_b1000_finetuneround2_dhcp_T1w.nnUNetTrainerV2.model_best.tar'
-neonateT1w_v2: 'https://zenodo.org/record/8209029/files/trained_model.3d_fullres.Task301_dhcp_T1w_synthseg_manuallycorrected.nnUNetTrainer.model_best.tar'
-hippb500: 'https://zenodo.org/record/5732291/files/trained_model.3d_fullres.Task110_hcp1200_b1000crop.nnUNetTrainerV2.model_best.tar'
-T1T2w: 'https://zenodo.org/record/4508747/files/trained_model.3d_fullres.Task103_hcp1200_T1T2w.nnUNetTrainerV2.model_best.tar'
-synthseg_v0.1: 'https://dropbox.com/s/asoanq94ofersv3/trained_model.3d_fullres.Task102_synsegGenDetailed.nnUNetTrainerV2.model_best.tar'
-synthseg_v0.2: 'https://www.dropbox.com/scl/fi/69yvmo38gdn3dj8aupq9z/trained_model.3d_fullres.Task203_synthseg.nnUNetTrainerV2.model_best.tar?rlkey=90km7u4t0cw8t15akr5tawxyl&dl=0'
diff --git a/hippunfold/config/snakebids.yml b/hippunfold/config/snakebids.yml
index 203c69ac..5f6b2402 100644
--- a/hippunfold/config/snakebids.yml
+++ b/hippunfold/config/snakebids.yml
@@ -337,43 +337,40 @@ cifti_metric_types:
 singularity:
   autotop: 'docker://khanlab/hippunfold_deps:v0.5.0'
 
-xfm_identity: resources/identity_xfm.txt
-template: CITI168
+xfm_identity: resources/etc/identity_xfm.txt
+
 template_files:
   CITI168:
-    T1w: resources/CITI168/T1w_head_700um.nii.gz
-    T2w: resources/CITI168/T2w_head_700um.nii.gz
-    xfm_corobl: resources/CITI168/CoronalOblique_rigid.txt
-    crop_ref: resources/CITI168/T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    crop_refT1w: resources/CITI168/T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    Mask_crop: resources/CITI168/Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
+    T1w: T1w_head_700um.nii.gz
+    T2w: T2w_head_700um.nii.gz
+    xfm_corobl: CoronalOblique_rigid.txt
+    crop_ref: T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
+    crop_refT1w: T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
+    Mask_crop: Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
   dHCP:
-    T1w: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T1w.nii.gz
-    T2w: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T2w.nii.gz
-    xfm_corobl: resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_to-corobl_affine.txt
-    crop_ref: resources/CITI168/T2w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    crop_refT1w: resources/CITI168/T1w_300umCoronalOblique_hemi-{hemi}.nii.gz
-    Mask_crop: resources/CITI168/Mask_300umCoronalOblique_hemi-{hemi}.nii.gz
-
-atlas:
-  - multihist7
+    T1w: tpl-dHCP_cohort-1_res-1_T1w.nii.gz
+    T2w: tpl-dHCP_cohort-1_res-1_T2w.nii.gz
+    xfm_corobl: tpl-dHCP_cohort-1_to-corobl_affine.txt
+    crop_ref: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_T2w.nii.gz
+    crop_refT1w: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_T1w.nii.gz
+    Mask_crop: tpl-dHCP_cohort-1_res-1_space-corobl_hemi-{hemi}_desc-hipp_mask.nii.gz
 
 atlas_files:
   multihist7:
-    label_nii: resources/multihist7/sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz
-    label_list: resources/multihist7/labellist.txt
-    thick: resources/multihist7/thickness.nii.gz
-    curv: resources/multihist7/curvature.nii.gz
-    gyr: resources/multihist7/gyrification.nii.gz
+    label_nii: sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz
+    label_list: labellist.txt
+    thickness: thickness.nii.gz
+    curvature: curvature.nii.gz
+    gyrification: gyrification.nii.gz
   bigbrain:
-    label_nii: resources/bigbrain/sub-bigbrain_hemi-{hemi}_label-hipp_desc-manualsubfields_dseg.nii.gz
-    label_list: resources/bigbrain/sub-bigbrain_labellist.txt
+    label_nii: sub-bigbrain_hemi-{hemi}_label-hipp_desc-manualsubfields_dseg.nii.gz
+    label_list: sub-bigbrain_labellist.txt
   magdeburg:
-    label_nii: resources/magdeburg/sub-all_hemi-{hemi}_label-hipp_desc-manualsubfields_maxprob.nii.gz
-    label_list: resources/magdeburg/magdeburg_labellist.txt
+    label_nii: sub-all_hemi-{hemi}_label-hipp_desc-manualsubfields_maxprob.nii.gz
+    label_list: magdeburg_labellist.txt
   freesurfer:
-    label_nii: resources/freesurfer/sub-all_hemi-{hemi}_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
-    label_list: resources/freesurfer/freesurfer_labellist.txt
+    label_nii: sub-all_hemi-{hemi}_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
+    label_list: freesurfer_labellist.txt
 
 
 #values to use for re-mapping tissue labels when combining with subfields.
@@ -407,18 +404,39 @@ no_reg_template: False
 
 modality: T2w
 
+template: CITI168
+
+atlas:
+  - multihist7
+
 #these will be downloaded to ~/.cache/hippunfold
-nnunet_model:
-  T1w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task101_hcp1200_T1w.nnUNetTrainerV2.model_best.tar'
-  T2w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task102_hcp1200_T2w.nnUNetTrainerV2.model_best.tar'
-  hippb500: 'zenodo.org/record/5732291/files/trained_model.3d_fullres.Task110_hcp1200_b1000crop.nnUNetTrainerV2.model_best.tar'
-  neonateT1w: 'zenodo.org/record/5733556/files/trained_model.3d_fullres.Task205_hcp1200_b1000_finetuneround2_dhcp_T1w.nnUNetTrainerV2.model_best.tar'
-  neonateT1w_v2: 'zenodo.org/record/8209029/files/trained_model.3d_fullres.Task301_dhcp_T1w_synthseg_manuallycorrected.nnUNetTrainer.model_best.tar'
-  T1T2w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task103_hcp1200_T1T2w.nnUNetTrainerV2.model_best.tar'
-  synthseg_v0.1: 'zenodo.org/record/8184230/files/trained_model.3d_fullres.Task102_synsegGenDetailed.nnUNetTrainerV2.model_best.tar'
-  synthseg_v0.2: 'zenodo.org/record/8184230/files/trained_model.3d_fullres.Task203_synthseg.nnUNetTrainerV2.model_best.tar'
+resource_urls:
+  nnunet_model:
+    T1w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task101_hcp1200_T1w.nnUNetTrainerV2.model_best.tar'
+    T2w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task102_hcp1200_T2w.nnUNetTrainerV2.model_best.tar'
+    hippb500: 'zenodo.org/record/5732291/files/trained_model.3d_fullres.Task110_hcp1200_b1000crop.nnUNetTrainerV2.model_best.tar'
+    neonateT1w: 'zenodo.org/record/5733556/files/trained_model.3d_fullres.Task205_hcp1200_b1000_finetuneround2_dhcp_T1w.nnUNetTrainerV2.model_best.tar'
+    neonateT1w_v2: 'zenodo.org/record/8209029/files/trained_model.3d_fullres.Task301_dhcp_T1w_synthseg_manuallycorrected.nnUNetTrainer.model_best.tar'
+    T1T2w: 'zenodo.org/record/4508747/files/trained_model.3d_fullres.Task103_hcp1200_T1T2w.nnUNetTrainerV2.model_best.tar'
+    synthseg_v0.1: 'zenodo.org/record/8184230/files/trained_model.3d_fullres.Task102_synsegGenDetailed.nnUNetTrainerV2.model_best.tar'
+    synthseg_v0.2: 'zenodo.org/record/8184230/files/trained_model.3d_fullres.Task203_synthseg.nnUNetTrainerV2.model_best.tar'
+  atlas:
+    multihist7: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395b782827451220b86dd8/?zip='
+    bigbrain: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395b8b13d27b123094c96f/?zip='
+    magdeburg: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395b8013d27b122f94c938/?zip='
+    freesurfer: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395b8513d27b123094c96a/?zip='
+  template:
+    CITI168: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395bf0282745121fb86a93/?zip='
+    dHCP: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395bff13d27b123094c9b4/?zip='
+    MBMv2: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395c0887852d133ca597dd/?zip='
+    MBMv3: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395c0e8a28b11240ffc6e9/?zip='
+    upenn: 'files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395c1613d27b122a94ca09/?zip='
+#to get hash, see https://github.com/CenterForOpenScience/osf.io/issues/8256#issuecomment-379833911
+
+
 
 crop_native_box: '256x256x256vox'
 crop_native_res: '0.2x0.2x0.2mm'
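Throughout the rules below, remote resources are looked up from this new nested `resource_urls` block as `config["resource_urls"][<resource_type>][<name>]`, with the URL scheme prepended at download time. A minimal sketch of that lookup (loading the YAML directly is illustrative; inside the workflow, Snakemake supplies `config` already parsed):

```python
import yaml

# parse the pipeline config (illustrative; Snakemake normally does this)
with open("hippunfold/config/snakebids.yml") as f:
    config = yaml.safe_load(f)

# same indexing the download rule's params lambda performs,
# e.g. resource_type="atlas", name="multihist7"
url = config["resource_urls"]["atlas"]["multihist7"]
print(f"https://{url}")  # the rules prepend https:// in their shell commands
```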
diff --git a/hippunfold/download_models.py b/hippunfold/download_models.py
deleted file mode 100644
index b8722d54..00000000
--- a/hippunfold/download_models.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import errno
-import os
-
-import requests
-import yaml
-from appdirs import AppDirs
-
-
-def get_model_dict():
-    # get list of model urls
-    model_cfg = os.path.join(
-        os.path.dirname(__file__), "config", "nnunet_model_urls.yml"
-    )
-    with open(model_cfg, "r") as cfg:
-        model_dict = yaml.load(cfg, Loader=yaml.FullLoader)
-    return model_dict
-
-
-def parse_args(model_dict):
-    parser = argparse.ArgumentParser(
-        prog="hippunfold_download_models",
-        description="Tool for downloading U-net models for hippunfold",
-    )
-
-    parser.add_argument("--models", nargs="+", dest="models", choices=model_dict.keys())
-    args = parser.parse_args()
-    return args
-
-
-def main():
-
-    # get the model dict first, so we know what to parse
-    model_dict = get_model_dict()
-    inputs = parse_args(model_dict)
-
-    if "HIPPUNFOLD_CACHE_DIR" in os.environ.keys():
-        print(
-            f"HIPPUNFOLD_CACHE_DIR defined, using: {os.environ['HIPPUNFOLD_CACHE_DIR']}"
-        )
-        download_dir = os.environ["HIPPUNFOLD_CACHE_DIR"]
-    else:
-        print(f"HIPPUNFOLD_CACHE_DIR not defined, using default location")
-        # create local download dir if it doesn't exist
-        dirs = AppDirs("hippunfold", "khanlab")
-        download_dir = dirs.user_cache_dir
-
-    try:
-        os.mkdir(download_dir)
-    except OSError as exc:
-        if exc.errno != errno.EEXIST:
-            raise
-        pass
-
-    if inputs.models == None:
-        models = model_dict.keys()
-    else:
-        models = inputs.models
-
-    for modality in models:
-        url = model_dict[modality]
-        tarfile = url.split("/")[-1]
-        local_path = os.path.join(download_dir, tarfile)
-
-        # add ?dl=1 to url
-        url = "".join([url, "?dl=1"])
-
-        # if it doesn't exist, download the file
-        if not os.path.exists(local_path):
-            # download it:
-            print(f"Downloading {modality} model...")
-            print(f"    url = {url}")
-            print(f"    dest = {local_path}")
-            r = requests.get(url, allow_redirects=True, stream=True)
-            with open(local_path, "wb") as f:
-                f.write(r.content)
-            print(" Download complete")
-        else:
-            print(f"Skipping {modality} model: already downloaded to {local_path}")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/hippunfold/resources/CITI168/CoronalOblique_rigid.txt b/hippunfold/resources/CITI168/CoronalOblique_rigid.txt
deleted file mode 100644
index 4475783b..00000000
--- a/hippunfold/resources/CITI168/CoronalOblique_rigid.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-#Insight Transform File V1.0
-#Transform 0
-Transform: MatrixOffsetTransformBase_double_3_3
-Parameters: 1 0 0 0 0.809017 -0.587785 0 0.587785 0.809017 -6.10623e-16 4.36795 -9.04318
-FixedParameters: 0 0 0
diff --git a/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-L.nii.gz b/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-L.nii.gz
deleted file mode 100755
index e4b9b9ec..00000000
Binary files a/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-L.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-R.nii.gz b/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-R.nii.gz
deleted file mode 100755
index d00d818d..00000000
Binary files a/hippunfold/resources/CITI168/Mask_300umCoronalOblique_hemi-R.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-L.nii.gz b/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-L.nii.gz
deleted file mode 100644
index a6132fa0..00000000
Binary files a/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-L.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-R.nii.gz b/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-R.nii.gz
deleted file mode 100644
index 8c202b0d..00000000
Binary files a/hippunfold/resources/CITI168/T1w_300umCoronalOblique_hemi-R.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T1w_head_700um.nii.gz b/hippunfold/resources/CITI168/T1w_head_700um.nii.gz
deleted file mode 100644
index 4b869380..00000000
Binary files a/hippunfold/resources/CITI168/T1w_head_700um.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-L.nii.gz b/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-L.nii.gz
deleted file mode 100644
index c3b238ad..00000000
Binary files a/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-L.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-R.nii.gz b/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-R.nii.gz
deleted file mode 100644
index 362db3ec..00000000
Binary files a/hippunfold/resources/CITI168/T2w_300umCoronalOblique_hemi-R.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/CITI168/T2w_head_700um.nii.gz b/hippunfold/resources/CITI168/T2w_head_700um.nii.gz
deleted file mode 100644
index 022c4d34..00000000
Binary files a/hippunfold/resources/CITI168/T2w_head_700um.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/Training_Instructions.md b/hippunfold/resources/Training_Instructions.md
deleted file mode 100644
index 76de956a..00000000
--- a/hippunfold/resources/Training_Instructions.md
+++ /dev/null
@@ -1,36 +0,0 @@
-This document contains step-by-step instructions for retraining or finetuning UNet for hippocampal segmentation.
-
-## 1) Run existing data through a previous model
-There's a chance that one of the existing models will perform well on your new dataset, if the data is similar enough. If performance is very good then no further fine-tuning is needed. If performance is poor on some samples, they can be manually corrected or else fully manually segmented. In either case, running the full pipeline end-to-end should produce images that are CorObl, which is the space that segmentations for training should be in.
-
-see `hippunfold -h`
-
-## 2) Collect training images and segmentations
-All training data should be manually inspected, and once the quality is good the CorObl image (eg. `outputdirectory/subjectID/hemi-L/img.nii`) and corresponding segmentation image (eg. `outputdirectory/subjectID/hemi-L/niftynet_lbl.nii` or a manually generated segmentation image) can be copied into a new clean directory (eg. `mynewdataset/training/`). Each new subject in the training directory should have a unique subjectID as a prefix, and either `_img` or `_lbl` for images and segmentations, repsectively.
-
-For example `ls mynewdataset/training` should produce something like this:
-```
-sub-001_img.nii.gz
-sub-001_lbl.nii.gz
-sub-002_img.nii.gz
-sub-002_lbl.nii.gz
-```
-It is also possible to fine-tune on only a subset of subjects (for example, only those that produced good performance on the first pass).
-
-## 3) Fine-tune an existing model, or train one from scratch
-Once you have populated your training data directory, you may train your model or fine-tune an existing model using `fineTune_UNet.sh`. This is a compute-intensive process. This can be run on a CPU, but it is recommended that you run on GPU with sufficient GPU memory (current models were trained on 8xV100 GPU nodes). By default, `fineTune_UNet.sh` will run 100k iterations which should take <\24h with these parameters.
-
-For example:
-`singularity exec --nv hippocampal_autotop_latest.sif bash /src/resources/fineTune_UNet.sh mynewdataset/training mynewdataset/newCNNmodel`
-(omit `--nv` if no GPU is available)
-
-This will perform 100k training iterations with data augmentation and using the same parameters as previous work. Training and validation progress can be viewed using tensorboard (eg. `tensorboard --logdir mynewdataset/newCNNmodel/models`). Once training is complete, inference will be performed on the remaining test data, which can then be inspected for quality. Further training iterations can be run using the same command as above (specifying the same output directory), or a new model can be trained using the same data by specifying a different directory.
-
-If you know what you are doing, you can open `mynewdataset/newCNNmodel/config.ini` and modify parameters before running additional training.
-
-## 4) Incremental learning
-If your dataset is very large, you may fine-tune on only a subset of new samples. In that case, you can re-run steps 1-3 which should now produce more good quality segmentations for use in further training.
-
-## 5) Share trained models and/or data
-Please consider sharing your data and/or trained models to improve generalizability to future studies.
-
diff --git a/hippunfold/resources/bigbrain/bigbrain_snaplabels.txt b/hippunfold/resources/bigbrain/bigbrain_snaplabels.txt
deleted file mode 100644
index 0a8c7b26..00000000
--- a/hippunfold/resources/bigbrain/bigbrain_snaplabels.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-################################################
-# ITK-SnAP Label Description File
-# File format:
-# IDX   -R-  -G-  -B-  -A--  VIS MSH  LABEL
-# Fields:
-#    IDX:   Zero-based index
-#    -R-:   Red color component (0..255)
-#    -G-:   Green color component (0..255)
-#    -B-:   Blue color component (0..255)
-#    -A-:   Label transparency (0.00 .. 1.00)
-#    VIS:   Label visibility (0 or 1)
-#    IDX:   Label mesh visibility (0 or 1)
-#  LABEL:   Label description
-################################################
-    0     0    0    0        0  0  0    "Clear Label"
-    1   255    0    0        1  1  1    "Subiculum"
-    2     0  255    0        1  1  1    "CA1"
-    3     0    0  255        1  1  1    "CA2"
-    4   255  255    0        1  1  1    "CA3"
-    5     0  255  255        1  1  1    "CA4"
-    6   255    0  255        1  1  1    "DG"
-    7   255  239  213        1  1  1    "SRLM"
-    8   240    86  224       1  1  1    "Cyst"
-
diff --git a/hippunfold/resources/bigbrain/sub-bigbrain_hemi-L_label-hipp_desc-manualsubfields_dseg.nii.gz b/hippunfold/resources/bigbrain/sub-bigbrain_hemi-L_label-hipp_desc-manualsubfields_dseg.nii.gz
deleted file mode 100644
index eef6cdc4..00000000
Binary files a/hippunfold/resources/bigbrain/sub-bigbrain_hemi-L_label-hipp_desc-manualsubfields_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/bigbrain/sub-bigbrain_hemi-R_label-hipp_desc-manualsubfields_dseg.nii.gz b/hippunfold/resources/bigbrain/sub-bigbrain_hemi-R_label-hipp_desc-manualsubfields_dseg.nii.gz
deleted file mode 100644
index fe232b3f..00000000
Binary files a/hippunfold/resources/bigbrain/sub-bigbrain_hemi-R_label-hipp_desc-manualsubfields_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/bigbrain/sub-bigbrain_labellist.txt b/hippunfold/resources/bigbrain/sub-bigbrain_labellist.txt
deleted file mode 100644
index 8bd58b3a..00000000
--- a/hippunfold/resources/bigbrain/sub-bigbrain_labellist.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Subiculum
-1 0 0 255 255
-CA1
-2 133 222 255 255
-CA2
-3 0 255 170 255
-CA3
-4 255 162 0 255
-CA4
-5 255 0 0 255
diff --git a/hippunfold/resources/desc-flipLR_type-itk_xfm.txt b/hippunfold/resources/desc-flipLR_type-itk_xfm.txt
deleted file mode 100644
index 335082b6..00000000
--- a/hippunfold/resources/desc-flipLR_type-itk_xfm.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-#Insight Transform File V1.0
-#Transform 0
-Transform: MatrixOffsetTransformBase_double_3_3
-Parameters: -1 0 0 0 1 0 0 0 1 0 0 0
-FixedParameters: 0 0 0
diff --git a/hippunfold/resources/identity_xfm.txt b/hippunfold/resources/etc/identity_xfm.txt
similarity index 100%
rename from hippunfold/resources/identity_xfm.txt
rename to hippunfold/resources/etc/identity_xfm.txt
diff --git a/hippunfold/resources/example_sbatch.sh b/hippunfold/resources/example_sbatch.sh
deleted file mode 100755
index e07a84aa..00000000
--- a/hippunfold/resources/example_sbatch.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-#SBATCH --account=rrg-lpalaniy
-#SBATCH --ntasks=1
-#SBATCH --gres=gpu:v100:8
-#SBATCH --exclusive
-#SBATCH --cpus-per-task=28
-#SBATCH --mem=86000M
-#SBATCH --time=24:00:00
-
-module load arch/avx512 StdEnv/2018.3
-nvidia-smi
-
-singularity exec --nv hippocampal_autotop_latest.sif bash /src/resources/fineTune_UNet.sh mynewdataset/training mynewdataset/newCNNmodel
diff --git a/hippunfold/resources/fineTune_UNet.sh b/hippunfold/resources/fineTune_UNet.sh
deleted file mode 100755
index d0d4bffc..00000000
--- a/hippunfold/resources/fineTune_UNet.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-trainingdir=$1 #'../training_data_b1000/'
-newmodeldir=$2 #'testmodel'
-
-if [ "$#" -lt 2 ]
-then
-  echo "This script can be used to incrementally train UNet. If this is the first time running this script for a new model, a new config file will be generated. Otherwise training will resume from the last iteration."
-  echo ""
-  echo "Usage: $0 [optional arguments]"
-  echo ""
-  echo "  -b  bootstrap existing model"
-  echo "  -i  number of new iterations (default 100k)"
-  echo ""
-
-  exit 1
-  fi
-
-shift 2
-iterations=100000
-
-while getopts "b:" options; do
-  case $options in
-    b ) echo "bootstrapping model from $OPTARG"
-        bootstrapmodel=$OPTARG;;
-    i ) echo "number of final iterations (after bootstrapping if included) $OPTARG"
-        iterations=$OPTARG;;
-    * ) usage
-        exit 1;;
-  esac
-done
-
-if [ -f "$newmodeldir/config.ini" ]
-then
-python write_config_NiftyNet.py $trainingdir $newmodeldir $iterations $bootstrapmodel
-else
-mv $newmodeldir/dataset_split_training.csv $newmodeldir/dataset_split.csv # resume past dataset_split.csv
-fi
-
-# TO BE RUN IN BASH
-# requires niftynet
-net_segment -c $newmodeldir/config.ini train
-net_segment -c $newmodeldir/config.ini inference
-net_segment -c $newmodeldir/config.ini evaluation
-
-# need to rename this file before AutoTops_transformAndRollOut.m
-mv $newmodeldir/dataset_split.csv $newmodeldir/dataset_split_training.csv
-
diff --git a/hippunfold/resources/freesurfer/README.md b/hippunfold/resources/freesurfer/README.md
deleted file mode 100644
index c14a3e9e..00000000
--- a/hippunfold/resources/freesurfer/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Freesurfer subfield atlas
-
-Generated using workflow from https://github.com/khanlab/hippunfold-create-atlas#freesurfer
-This creates probalistic and maxprob (used here) labels by running hippunfold & freesurfer subfield segmentation on ds002168 (openneuro, 48 subjects) and mapping the freesurfer labels to the unfolded space.
-
-
diff --git a/hippunfold/resources/freesurfer/freesurfer_labellist.txt b/hippunfold/resources/freesurfer/freesurfer_labellist.txt
deleted file mode 100644
index 9892045a..00000000
--- a/hippunfold/resources/freesurfer/freesurfer_labellist.txt
+++ /dev/null
@@ -1,88 +0,0 @@
-alveus
-201 255 204 153 255
-perforant_pathway
-202 255 128 128 255
-parasubiculum
-203 175 175 75 255
-presubiculum
-204 64 0 64 255
-subiculum
-205 0 0 255 255
-CA1
-206 255 0 0 255
-CA2
-207 128 128 255 255
-CA3
-208 0 128 0 255
-CA4
-209 196 160 128 255
-GC-DG
-210 32 200 255 255
-HATA
-211 128 255 128 255
-fimbria
-212 204 153 204 255
-lateral_ventricle
-213 121 17 136 255
-molecular_layer_HP
-214 128 0 0 255
-hippocampal_fissure
-215 128 32 255 255
-entorhinal_cortex
-216 255 204 102 255
-molecular_layer_subiculum
-217 128 128 128 255
-Amygdala
-218 104 255 255 255
-Cerebral_White_Matter
-219 0 226 0 255
-Cerebral_Cortex
-220 205 63 78 255
-Inf_Lat_Vent
-221 197 58 250 255
-Perirhinal
-222 33 150 250 255
-Cerebral_White_Matter_Edge
-223 226 0 0 255
-Background
-224 100 100 100 255
-Ectorhinal
-225 197 150 250 255
-HP_tail
-226 170 170 255 255
-Polymorphic-Layer
-227 128 255 128 255
-Intracellular-Space
-228 204 153 204 255
-HP_body
-231 0 255 0 255
-HP_head
-232 255 0 0 255
-presubiculum-head
-233 32 0 32 255
-presubiculum-body
-234 64 0 64 255
-subiculum-head
-235 0 0 175 255
-subiculum-body
-236 0 0 255 255
-CA1-head
-237 175 75 75 255
-CA1-body
-238 255 0 0 255
-CA3-head
-239 0 80 0 255
-CA3-body
-240 0 128 0 255
-CA4-head
-241 120 90 50 255
-CA4-body
-242 196 160 128 255
-GC-ML-DG-head
-243 75 125 175 255
-GC-ML-DG-body
-244 32 200 255 255
-molecular_layer_HP-head
-245 100 25 25 255
-molecular_layer_HP-body
-246 128 0 0 255
diff --git a/hippunfold/resources/freesurfer/maxprob_labellist.txt b/hippunfold/resources/freesurfer/maxprob_labellist.txt
deleted file mode 100644
index 72f0a842..00000000
--- a/hippunfold/resources/freesurfer/maxprob_labellist.txt
+++ /dev/null
@@ -1,88 +0,0 @@
-alveus
-1 255 204 153 255
-perforant_pathway
-2 255 128 128 255
-parasubiculum
-3 175 175 75 255
-presubiculum
-4 64 0 64 255
-subiculum
-5 0 0 255 255
-CA1
-6 255 0 0 255
-CA2
-7 128 128 255 255
-CA3
-8 0 128 0 255
-CA4
-9 196 160 128 255
-GC-DG
-10 32 200 255 255
-HATA
-11 128 255 128 255
-fimbria
-12 204 153 204 255
-lateral_ventricle
-13 121 17 136 255
-molecular_layer_HP
-14 128 0 0 255
-hippocampal_fissure
-15 128 32 255 255
-entorhinal_cortex
-16 255 204 102 255
-molecular_layer_subiculum
-17 128 128 128 255
-Amygdala
-18 104 255 255 255
-Cerebral_White_Matter
-19 0 226 0 255
-Cerebral_Cortex
-20 205 63 78 255
-Inf_Lat_Vent
-21 197 58 250 255
-Perirhinal
-22 33 150 250 255
-Cerebral_White_Matter_Edge
-23 226 0 0 255
-Background
-24 100 100 100 255
-Ectorhinal
-25 197 150 250 255
-HP_tail
-26 170 170 255 255
-Polymorphic-Layer
-27 128 255 128 255
-Intracellular-Space
-28 204 153 204 255
-HP_body
-29 0 255 0 255
-HP_head
-30 255 0 0 255
-presubiculum-head
-31 32 0 32 255
-presubiculum-body
-32 64 0 64 255
-subiculum-head
-33 0 0 175 255
-subiculum-body
-34 0 0 255 255
-CA1-head
-35 175 75 75 255
-CA1-body
-36 255 0 0 255
-CA3-head
-37 0 80 0 255
-CA3-body
-38 0 128 0 255
-CA4-head
-39 120 90 50 255
-CA4-body
-40 196 160 128 255
-GC-ML-DG-head
-41 75 125 175 255
-GC-ML-DG-body
-42 32 200 255 255
-molecular_layer_HP-head
-43 100 25 25 255
-molecular_layer_HP-body
-44 128 0 0 255
diff --git a/hippunfold/resources/freesurfer/sub-all_hemi-L_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz b/hippunfold/resources/freesurfer/sub-all_hemi-L_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
deleted file mode 100644
index cc948c0c..00000000
Binary files a/hippunfold/resources/freesurfer/sub-all_hemi-L_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/freesurfer/sub-all_hemi-R_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz b/hippunfold/resources/freesurfer/sub-all_hemi-R_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz
deleted file mode 100644
index e6eb8355..00000000
Binary files a/hippunfold/resources/freesurfer/sub-all_hemi-R_space-unfold_label-hipp_desc-freesurfersubfields_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/desc-subfields_atlas-bigbrain_dseg.tsv b/hippunfold/resources/label_lut/desc-subfields_atlas-bigbrain_dseg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_atlas-bigbrain_dseg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_atlas-bigbrain_dseg.tsv
diff --git a/hippunfold/resources/desc-subfields_atlas-freesurfer_dseg.tsv b/hippunfold/resources/label_lut/desc-subfields_atlas-freesurfer_dseg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_atlas-freesurfer_dseg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_atlas-freesurfer_dseg.tsv
diff --git a/hippunfold/resources/desc-subfields_atlas-magdeburg_dseg.tsv b/hippunfold/resources/label_lut/desc-subfields_atlas-magdeburg_dseg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_atlas-magdeburg_dseg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_atlas-magdeburg_dseg.tsv
diff --git a/hippunfold/resources/desc-subfields_atlas-multihist7_dseg.tsv b/hippunfold/resources/label_lut/desc-subfields_atlas-multihist7_dseg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_atlas-multihist7_dseg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_atlas-multihist7_dseg.tsv
diff --git a/hippunfold/resources/desc-subfields_dseg.tsv b/hippunfold/resources/label_lut/desc-subfields_dseg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_dseg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_dseg.tsv
diff --git a/hippunfold/resources/desc-subfields_freeview_desg.tsv b/hippunfold/resources/label_lut/desc-subfields_freeview_desg.tsv
similarity index 100%
rename from hippunfold/resources/desc-subfields_freeview_desg.tsv
rename to hippunfold/resources/label_lut/desc-subfields_freeview_desg.tsv
diff --git a/hippunfold/resources/dseg.tsv b/hippunfold/resources/label_lut/dseg.tsv
similarity index 100%
rename from hippunfold/resources/dseg.tsv
rename to hippunfold/resources/label_lut/dseg.tsv
diff --git a/hippunfold/resources/magdeburg/README.md b/hippunfold/resources/magdeburg/README.md
deleted file mode 100644
index 01c75bca..00000000
--- a/hippunfold/resources/magdeburg/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Magdeburg subfield atlas
-
-Generated using workflow from https://github.com/khanlab/hippunfold-create-atlas
-This creates probalistic and maxprob (used here) labels by running hippunfold on the dataset (35 subjects scanned at 7T), then sampling the manual labels in the unfolded space.
-
-
diff --git a/hippunfold/resources/magdeburg/magdeburg_labellist.txt b/hippunfold/resources/magdeburg/magdeburg_labellist.txt
deleted file mode 100644
index a930bc7d..00000000
--- a/hippunfold/resources/magdeburg/magdeburg_labellist.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-CA1
-1 255 0 0 255
-CA2
-2 0 255 0 255
-DG
-3 0 0 255 255
-CA3
-4 255 255 0 255
-Tail
-5 0 255 255 255
-Label_6
-6 255 0 255 255
-Label_7
-7 255 239 213 255
-Sub
-8 240 86 224 255
-ErC
-9 205 133 63 255
-A35
-10 102 205 170 255
-A36
-11 0 0 128 255
-PhC
-12 230 0 50 255
-Cysts
-13 55 160 230 255
-Label_17 255
-17 221 160 221 255
diff --git a/hippunfold/resources/magdeburg/magdeburg_snaplabels.txt b/hippunfold/resources/magdeburg/magdeburg_snaplabels.txt
deleted file mode 100644
index 10294f53..00000000
--- a/hippunfold/resources/magdeburg/magdeburg_snaplabels.txt
+++ /dev/null
@@ -1,29 +0,0 @@
-################################################
-# ITK-SnAP Label Description File
-# File format:
-# IDX   -R-  -G-  -B-  -A--  VIS MSH  LABEL
-# Fields:
-#    IDX:   Zero-based index
-#    -R-:   Red color component (0..255)
-#    -G-:   Green color component (0..255)
-#    -B-:   Blue color component (0..255)
-#    -A-:   Label transparency (0.00 .. 1.00)
-#    VIS:   Label visibility (0 or 1)
-#    IDX:   Label mesh visibility (0 or 1)
-#  LABEL:   Label description
-################################################
-    0     0    0    0        0  0  0    "Clear Label"
-    1   255    0    0        1  1  1    "CA1"
-    2     0  255    0        1  1  1    "CA2"
-    3     0    0  255        1  1  1    "DG"
-    4   255  255    0        1  1  1    "CA3"
-    5     0  255  255        1  1  1    "Tail"
-    6   255    0  255        1  1  1    "Label 6"
-    7   255  239  213        1  1  1    "Label 7"
-    8   240   86  224        1  1  1    "Sub"
-    9   205  133   63        1  1  1    "ErC"
-   10   102  205  170        1  1  1    "A35"
-   11     0    0  128        1  1  1    "A36"
-   12   230    0   50        1  1  1    "PhC"
-   13    55  160  230        1  1  1    "Cysts"
-   17   221  160  221        1  1  1    "Label 17"
diff --git a/hippunfold/resources/magdeburg/sub-all_hemi-L_label-hipp_desc-manualsubfields_maxprob.nii.gz b/hippunfold/resources/magdeburg/sub-all_hemi-L_label-hipp_desc-manualsubfields_maxprob.nii.gz
deleted file mode 100644
index be087930..00000000
Binary files a/hippunfold/resources/magdeburg/sub-all_hemi-L_label-hipp_desc-manualsubfields_maxprob.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/magdeburg/sub-all_hemi-R_label-hipp_desc-manualsubfields_maxprob.nii.gz b/hippunfold/resources/magdeburg/sub-all_hemi-R_label-hipp_desc-manualsubfields_maxprob.nii.gz
deleted file mode 100644
index 09067249..00000000
Binary files a/hippunfold/resources/magdeburg/sub-all_hemi-R_label-hipp_desc-manualsubfields_maxprob.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/multihist7/curvature.nii.gz b/hippunfold/resources/multihist7/curvature.nii.gz
deleted file mode 100644
index 74bd4361..00000000
Binary files a/hippunfold/resources/multihist7/curvature.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/multihist7/gyrification.nii.gz b/hippunfold/resources/multihist7/gyrification.nii.gz
deleted file mode 100644
index 92a0fcc9..00000000
Binary files a/hippunfold/resources/multihist7/gyrification.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/multihist7/labellist.txt b/hippunfold/resources/multihist7/labellist.txt
deleted file mode 100644
index 8bd58b3a..00000000
--- a/hippunfold/resources/multihist7/labellist.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-Subiculum
-1 0 0 255 255
-CA1
-2 133 222 255 255
-CA2
-3 0 255 170 255
-CA3
-4 255 162 0 255
-CA4
-5 255 0 0 255
diff --git a/hippunfold/resources/multihist7/snaplabels.txt b/hippunfold/resources/multihist7/snaplabels.txt
deleted file mode 100644
index 0a8c7b26..00000000
--- a/hippunfold/resources/multihist7/snaplabels.txt
+++ /dev/null
@@ -1,24 +0,0 @@
-################################################
-# ITK-SnAP Label Description File
-# File format:
-# IDX   -R-  -G-  -B-  -A--  VIS MSH  LABEL
-# Fields:
-#    IDX:   Zero-based index
-#    -R-:   Red color component (0..255)
-#    -G-:   Green color component (0..255)
-#    -B-:   Blue color component (0..255)
-#    -A-:   Label transparency (0.00 .. 1.00)
-#    VIS:   Label visibility (0 or 1)
-#    IDX:   Label mesh visibility (0 or 1)
-#  LABEL:   Label description
-################################################
-    0     0    0    0        0  0  0    "Clear Label"
-    1   255    0    0        1  1  1    "Subiculum"
-    2     0  255    0        1  1  1    "CA1"
-    3     0    0  255        1  1  1    "CA2"
-    4   255  255    0        1  1  1    "CA3"
-    5     0  255  255        1  1  1    "CA4"
-    6   255    0  255        1  1  1    "DG"
-    7   255  239  213        1  1  1    "SRLM"
-    8   240   86  224        1  1  1    "Cyst"
-
diff --git a/hippunfold/resources/multihist7/sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz b/hippunfold/resources/multihist7/sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz
deleted file mode 100644
index 16df5aec..00000000
Binary files a/hippunfold/resources/multihist7/sub-maxprob_label-hipp_desc-manualsubfieldsunfoldaligned_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/multihist7/thickness.nii.gz b/hippunfold/resources/multihist7/thickness.nii.gz
deleted file mode 100644
index 17cad99a..00000000
Binary files a/hippunfold/resources/multihist7/thickness.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/multihist7/unfolded2Dref.nii.gz b/hippunfold/resources/multihist7/unfolded2Dref.nii.gz
deleted file mode 100644
index 71c279bd..00000000
Binary files a/hippunfold/resources/multihist7/unfolded2Dref.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T1w.nii.gz b/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T1w.nii.gz
deleted file mode 100644
index 0863c67d..00000000
Binary files a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T1w.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T2w.nii.gz b/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T2w.nii.gz
deleted file mode 100644
index ed7cfafc..00000000
Binary files a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_res-1_T2w.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_to-corobl_affine.txt b/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_to-corobl_affine.txt
deleted file mode 100644
index cd87ab88..00000000
--- a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_cohort-1_to-corobl_affine.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-#Insight Transform File V1.0
-#Transform 0
-Transform: MatrixOffsetTransformBase_double_3_3
-Parameters: 0.6192172099526084 0.003547968758508144 -0.005716400566823027 -0.008399112382682927 0.4767321844018321 -0.40163865268059323 0.0031337214422971524 0.41092964620414707 0.5080721294551599 -1.8737867065805176 8.046749265675501 2.4210136564228932
-FixedParameters: 0 0 0
diff --git a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-L_space-cropT1w_desc-postproc_dseg.nii.gz b/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-L_space-cropT1w_desc-postproc_dseg.nii.gz
deleted file mode 100644
index c15c50d9..00000000
Binary files a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-L_space-cropT1w_desc-postproc_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-R_space-cropT1w_desc-postproc_dseg.nii.gz b/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-R_space-cropT1w_desc-postproc_dseg.nii.gz
deleted file mode 100644
index 15e48454..00000000
Binary files a/hippunfold/resources/tpl-dHCP/cohort-1/tpl-dHCP_hemi-R_space-cropT1w_desc-postproc_dseg.nii.gz and /dev/null differ
diff --git a/hippunfold/resources/tpl-dHCP/template_description.json b/hippunfold/resources/tpl-dHCP/template_description.json
deleted file mode 100644
index 461216ac..00000000
--- a/hippunfold/resources/tpl-dHCP/template_description.json
+++ /dev/null
@@ -1,129 +0,0 @@
-{
-    "Name": "Group average template for N=92 developing HCP neonatal T1w & T2w scans",
-    "Authors": [
-        "Ali Khan"
-    ],
-    "Acknowledgements": "Created by greedy_template_neonatal Snakemake workflow",
-    "BIDSVersion": [
-        "1.1.0"
-    ],
-    "License": "See LICENSE file",
-    "TemplateFlowVersion": "1.0.0",
-    "res": {
-        "01": {
-            "origin": [
-                51.5625,
-                -84.17188262939453,
-                -38.718753814697266
-            ],
-            "shape": [
-                201.0,
-                273.0,
-                216.0
-            ],
-            "zooms": [
-                0.5,
-                0.5,
-                0.5
-            ]
-        }
-    },
-    "cohort": {
-        "neonatal92T1T2": {
-            "participants": [
-                "CC00051XX02",
-                "CC00052XX03",
-                "CC00053XX04",
-                "CC00054XX05",
-                "CC00055XX06",
-                "CC00056XX07",
-                "CC00057XX08",
-                "CC00060XX03",
-                "CC00062XX05",
-                "CC00064XX07",
-                "CC00065XX08",
-                "CC00066XX09",
-                "CC00067XX10",
-                "CC00068XX11",
-                "CC00069XX12",
-                "CC00071XX06",
-                "CC00073XX08",
-                "CC00074XX09",
-                "CC00075XX10",
-                "CC00076XX11",
-                "CC00078XX13",
-                "CC00079XX14",
-                "CC00080XX07",
-                "CC00082XX09",
-                "CC00083XX10",
-                "CC00085XX12",
-                "CC00086XX13",
-                "CC00087BN14",
-                "CC00088XX15",
-                "CC00089XX16",
-                "CC00091XX10",
-                "CC00094AN13",
-                "CC00094BN13",
-                "CC00095XX14",
-                "CC00096XX15",
-                "CC00099AN18",
-                "CC00099BN18",
-                "CC00100XX01",
-                "CC00101XX02",
-                "CC00102XX03",
-                "CC00104XX05",
-                "CC00105XX06",
-                "CC00106XX07",
-                "CC00107XX08",
-                "CC00108XX09",
-                "CC00109XX10",
-                "CC00111XX04",
-                "CC00113XX06",
-                "CC00114XX07",
-                "CC00115XX08",
-                "CC00116XX09",
-                "CC00117XX10",
-                "CC00119XX12",
-                "CC00120XX05",
-                "CC00122XX07",
-                "CC00126XX11",
-                "CC00127XX12",
-                "CC00130XX07",
-                "CC00131XX08",
-                "CC00134XX11",
-                "CC00138XX15",
-                "CC00139XX16",
-                "CC00143BN12",
-                "CC00144XX13",
-                "CC00145XX14",
-                "CC00146XX15",
-                "CC00149XX18",
-                "CC00150AN02",
-                "CC00150BN02",
-                "CC00152AN04",
-                "CC00153XX05",
-                "CC00155XX07",
-                "CC00157XX09",
-                "CC00158XX10",
-                "CC00159XX11",
-                "CC00160XX04",
-                "CC00162XX06",
-                "CC00163XX07",
-                "CC00164XX08",
-                "CC00165XX09",
-                "CC00168XX12",
-                "CC00171XX07",
-                "CC00172AN08",
-                "CC00172BN08",
-                "CC00174XX10",
-                "CC00176XX12",
-                "CC00178XX14",
-                "CC00179XX15",
-                "CC00180XX08",
-                "CC00181XX09",
-                "CC00182XX10",
-                "CC00183XX11"
-            ]
-        }
-    }
-}
diff --git a/hippunfold/resources/write_config_NiftyNet.py b/hippunfold/resources/write_config_NiftyNet.py
deleted file mode 100755
index 6f567f61..00000000
--- a/hippunfold/resources/write_config_NiftyNet.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# write config file for NiftyNet highres3dnet network training using these default parameters. First argument is the training directory, second argument is the output directory, third argument specifies number of iterations, and fourht (optional) an existing model to bootstrap.
-
-import sys
-import configparser
-import os
-import shutil
-import glob
-
-# input arguments
-
-trainingdir = sys.argv[1]  #'../training_data_b1000/'
-newmodeldir = sys.argv[2]  #'testmodel'
-iterations = sys.argv[3]  #'testmodel'
-try:
-    os.mkdir(newmodeldir)
-except:
-    print("output directory already exists")
-
-# copy over bootstrapped CNN model
-if len(sys.argv) == 5:
-    bootstrapmodel = sys.argv[4]
-    shutil.copytree(bootstrapmodel + "/models/", newmodeldir + "/models/")
-    start_iter = "-1"
-    # add existing iterations to max iterations
-    fn = glob.glob(newmodeldir + "/models/*.index")[0]
-    i = fn.find("ckpt-")[0]
-    fn = fn[i + 5 : -6]
-    iterations = int(iterations) + int(fn)
-elif len(sys.argv) == 4:
-    bootstrapmodel = (
-        ""  # Optional. To resume training a timed out model, specify it here.
-    )
-    start_iter = "0"
-else:
-    print("Error wrong number of input arguments")
-
-# write config file with default values
-config = configparser.ConfigParser()
-
-config["IMG"] = {
-    "path_to_search": trainingdir,
-    "filename_contains": "img",
-    "spatial_window_size": "(64, 64, 64)",
-    "interp_order": "1",
-    "pixdim": "(0.3, 0.3, 0.3)",
-    "axcodes": "(R, A, S)",
-}
-config["LBL"] = {
-    "path_to_search": trainingdir,
-    "filename_contains": "lbl",
-    "spatial_window_size": "(64, 64, 64)",
-    "interp_order": "1",
-    "pixdim": "(0.3, 0.3, 0.3)",
-    "axcodes": "(R, A, S)",
-}
-
-config["SYSTEM"] = {"cuda_devices": '""', "model_dir": newmodeldir}
-
-config["NETWORK"] = {
-    "name": "highres3dnet_large",
-    "batch_size": "1",
-    "activation_function": "relu",
-    "volume_padding_size": "0",
-    "normalisation": "True",
-    "foreground_type": "mean_plus",
-    "cutoff": "(0.001, 0.999)",
-}
-
-config["TRAINING"] = {
-    "sample_per_volume": "5",
-    "lr": "0.001",
-    "loss_type": "Dice",
-    "starting_iter": start_iter,
-    "save_every_n": "1000",
-    "tensorboard_every_n": "100",
-    "max_iter": iterations,
-    "validation_every_n": "100",
-    "exclude_fraction_for_validation": "0.2",
-    "exclude_fraction_for_inference": "0.2",
-    "rotation_angle": "(-10.0,10.0)",
-    "random_flipping_axes": "0",
-    "do_elastic_deformation": "True",
-    "num_ctrl_points": "4",
-    "deformation_sigma": "15",
-    "proportion_to_deform": "0.75",
-    "bias_field_range": "(-5.0,5.0)",
-    "bf_order": "3",
-}
-
-
-config["INFERENCE"] = {
-    "border": "(16,16,16)",
-    "inference_iter": "-1",
-    "save_seg_dir": newmodeldir + "/parcellation_output",
-    "output_interp_order": "0",
-}
-
-config["SEGMENTATION"] = {
-    "image": "IMG",
-    "label": "LBL",
-    "label_normalisation": "False",
-    "output_prob": "False",
-    "num_classes": "9",
-}
-
-config["EVALUATION"] = {
-    "save_csv_dir": newmodeldir + "/eval",
-    "evaluations": "dice,average_distance",
-}
-
-with open(newmodeldir + "/config.ini", "w") as configfile:
-    config.write(configfile)
diff --git a/hippunfold/workflow/Snakefile b/hippunfold/workflow/Snakefile
index 4c9ff0ef..84859a31 100644
--- a/hippunfold/workflow/Snakefile
+++ b/hippunfold/workflow/Snakefile
@@ -110,6 +110,7 @@ work = os.path.join(config["root"], "work")
 
 include: "rules/common.smk"
+include: "rules/download.smk"
 include: "rules/preproc_t1.smk"
diff --git a/hippunfold/workflow/rules/common.smk b/hippunfold/workflow/rules/common.smk
index 301552bf..f2acdaf2 100644
--- a/hippunfold/workflow/rules/common.smk
+++ b/hippunfold/workflow/rules/common.smk
@@ -397,6 +397,16 @@ def get_work_dir(wildcards):
     return folder_without_file
 
 
+def get_download_dir():
+    if "HIPPUNFOLD_CACHE_DIR" in os.environ.keys():
+        download_dir = os.environ["HIPPUNFOLD_CACHE_DIR"]
+    else:
+        # create local download dir if it doesn't exist
+        dirs = AppDirs("hippunfold", "khanlab")
+        download_dir = dirs.user_cache_dir
+    return download_dir
+
+
 rule archive_work_after_final:
     input:
         get_final_subj_output(),
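The cache-directory precedence that `get_download_dir()` encodes is: an explicit `HIPPUNFOLD_CACHE_DIR` environment variable wins, otherwise the platform cache dir from `appdirs` (e.g. `~/.cache/hippunfold` on Linux). A standalone sketch of the same logic (`resolve_cache_dir` is a hypothetical name, not part of the workflow):

```python
import os

from appdirs import AppDirs


def resolve_cache_dir() -> str:
    # explicit override via environment variable takes priority
    if "HIPPUNFOLD_CACHE_DIR" in os.environ:
        return os.environ["HIPPUNFOLD_CACHE_DIR"]
    # fall back to the per-user cache location, e.g. ~/.cache/hippunfold
    return AppDirs("hippunfold", "khanlab").user_cache_dir


print(resolve_cache_dir())
```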
diff --git a/hippunfold/workflow/rules/download.smk b/hippunfold/workflow/rules/download.smk
new file mode 100644
index 00000000..07c1bcdd
--- /dev/null
+++ b/hippunfold/workflow/rules/download.smk
@@ -0,0 +1,21 @@
+# populate the HIPPUNFOLD_CACHE_DIR folder as needed
+
+download_dir = get_download_dir()
+
+
+rule download_extract_atlas_or_template:
+    params:
+        url=lambda wildcards: config["resource_urls"][wildcards.resource_type][
+            wildcards.atlas
+        ],
+    output:
+        unzip_dir=directory(
+            Path(download_dir) / "{resource_type,atlas|template}" / "{atlas}"
+        ),
+    container:
+        config["singularity"]["autotop"]
+    shadow:
+        "minimal"
+    shell:
+        "wget https://{params.url} -O temp.zip && "
+        " unzip -d {output.unzip_dir} temp.zip"
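Outside Snakemake, this fetch-and-extract step can be reproduced with the standard library alone; a sketch assuming the URL is one of the OSF `resource_urls` entries above and that `dest` mirrors the rule's `unzip_dir` layout (both values here are illustrative):

```python
import urllib.request
import zipfile
from pathlib import Path

# one of the config's resource_urls (scheme prepended, as in the rule)
url = "https://files.ca-1.osf.io/v1/resources/v8acf/providers/osfstorage/65395b782827451220b86dd8/?zip="
dest = Path.home() / ".cache" / "hippunfold" / "atlas" / "multihist7"  # hypothetical cache path
dest.mkdir(parents=True, exist_ok=True)

# equivalent of `wget ... -O temp.zip && unzip -d {output.unzip_dir} temp.zip`
tmp_zip, _ = urllib.request.urlretrieve(url)
with zipfile.ZipFile(tmp_zip) as zf:
    zf.extractall(dest)
```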
"antsApplyTransforms -d 3 -n MultiLabel -i {params.atlas} -r {input.refvol} -o {output.label_nii} -v &> {log}" rule nii_to_label_gii: @@ -1116,12 +1107,10 @@ rule nii_to_label_gii: "unfold_template_hipp", "tpl-avg_space-unfold_den-{density}_midthickness.surf.gii", ), - label_list=lambda wildcards: os.path.join( - workflow.basedir, - "..", - config["atlas_files"][wildcards.atlas]["label_list"], - ), + atlas_dir=lambda wildcards: Path(download_dir) / "atlas" / wildcards.atlas, params: + label_list=lambda wildcards, input: Path(input.atlas_dir) + / config["atlas_files"][wildcards.atlas]["label_list"], structure_type=lambda wildcards: hemi_to_structure[wildcards.hemi], output: label_gii=bids( @@ -1142,8 +1131,8 @@ rule nii_to_label_gii: shadow: "minimal" shell: - "wb_command -volume-to-surface-mapping {input.label_nii} {input.surf} temp.shape.gii -enclosing && " - "wb_command -metric-label-import temp.shape.gii {input.label_list} {output.label_gii} && " + "wb_command -volume-to-surface-mapping {params.label_list} {input.surf} temp.shape.gii -enclosing && " + "wb_command -metric-label-import temp.shape.gii {params.label_list} {output.label_gii} && " "wb_command -set-structure {output.label_gii} {params.structure_type}" diff --git a/hippunfold/workflow/rules/nnunet.smk b/hippunfold/workflow/rules/nnunet.smk index 054d1d20..84e76103 100644 --- a/hippunfold/workflow/rules/nnunet.smk +++ b/hippunfold/workflow/rules/nnunet.smk @@ -43,23 +43,30 @@ def get_nnunet_input(wildcards): def get_model_tar(): - if "HIPPUNFOLD_CACHE_DIR" in os.environ.keys(): - download_dir = os.environ["HIPPUNFOLD_CACHE_DIR"] - else: - # create local download dir if it doesn't exist - dirs = AppDirs("hippunfold", "khanlab") - download_dir = dirs.user_cache_dir - if config["force_nnunet_model"]: model_name = config["force_nnunet_model"] else: model_name = config["modality"] - local_tar = config["nnunet_model"].get(model_name, None) + local_tar = config["resource_urls"]["nnunet_model"].get(model_name, None) if local_tar == None: print(f"ERROR: {model_name} does not exist in nnunet_model in the config file") - return os.path.abspath(os.path.join(download_dir, local_tar.split("/")[-1])) + return (Path(download_dir) / "model" / Path(local_tar).name).absolute() + + +rule download_nnunet_model: + params: + url=config["resource_urls"]["nnunet_model"][config["force_nnunet_model"]] + if config["force_nnunet_model"] + else config["resource_urls"]["nnunet_model"][config["modality"]], + model_dir=Path(download_dir) / "model", + output: + model_tar=get_model_tar(), + container: + config["singularity"]["autotop"] + shell: + "mkdir -p {params.model_dir} && wget https://{params.url} -O {output}" def parse_task_from_tar(wildcards, input): @@ -89,19 +96,6 @@ def parse_trainer_from_tar(wildcards, input): return trainer -rule download_model: - params: - url=config["nnunet_model"][config["force_nnunet_model"]] - if config["force_nnunet_model"] - else config["nnunet_model"][config["modality"]], - output: - model_tar=get_model_tar(), - container: - config["singularity"]["autotop"] - shell: - "wget https://{params.url} -O {output}" - - rule run_inference: """ This rule uses either GPU or CPU . 
 rule run_inference:
     """ This rule uses either GPU or CPU .  It also runs in an isolated folder (shadow), with symlinks to inputs in that folder, copying over outputs once complete, so temp files are not retained"""
 
 
@@ -206,22 +200,17 @@ rule unflip_nnunet_nii:
         " {input.unflip_ref} -push FLIPPED -copy-transform -o {output.nnunet_seg} "
 
 
-def get_f3d_ref(wildcards):
+def get_f3d_ref(wildcards, input):
+
     if config["modality"] == "T2w":
         nii = (
-            os.path.join(
-                workflow.basedir,
-                "..",
-                config["template_files"][config["template"]]["crop_ref"],
-            ),
+            Path(input.template_dir)
+            / config["template_files"][config["template"]]["crop_ref"]
         )
     elif config["modality"] == "T1w":
         nii = (
-            os.path.join(
-                workflow.basedir,
-                "..",
-                config["template_files"][config["template"]]["crop_refT1w"],
-            ),
+            Path(input.template_dir)
+            / config["template_files"][config["template"]]["crop_refT1w"]
         )
     else:
         raise ValueError("modality not supported for nnunet!")
@@ -250,6 +239,8 @@ rule qc_nnunet_f3d:
             space="corobl",
             hemi="{hemi}"
         ),
+        template_dir=Path(download_dir) / "template" / config["template"],
+    params:
         ref=get_f3d_ref,
     output:
         cpp=bids(
@@ -293,8 +284,8 @@ rule qc_nnunet_f3d:
     group:
         "subj"
     shell:
-        "reg_f3d -flo {input.img} -ref {input.ref} -res {output.res} -cpp {output.cpp} &> {log} && "
-        "reg_resample -flo {input.seg} -cpp {output.cpp} -ref {input.ref} -res {output.res_mask} -inter 0 &> {log}"
+        "reg_f3d -flo {input.img} -ref {params.ref} -res {output.res} -cpp {output.cpp} &> {log} && "
+        "reg_resample -flo {input.seg} -cpp {output.cpp} -ref {params.ref} -res {output.res_mask} -inter 0 &> {log}"
 
 
 rule qc_nnunet_dice:
@@ -308,13 +299,11 @@ rule qc_nnunet_dice:
             space="template",
             hemi="{hemi}"
         ),
-        ref=os.path.join(
-            workflow.basedir,
-            "..",
-            config["template_files"][config["template"]]["Mask_crop"],
-        ),
+        template_dir=Path(download_dir) / "template" / config["template"],
     params:
         hipp_lbls=[1, 2, 7, 8],
+        ref=lambda wildcards, input: Path(input.template_dir)
+        / config["template_files"][config["template"]]["Mask_crop"],
     output:
         dice=report(
             bids(
diff --git a/hippunfold/workflow/rules/preproc_seg.smk b/hippunfold/workflow/rules/preproc_seg.smk
index d5e6861b..1f9afab0 100644
--- a/hippunfold/workflow/rules/preproc_seg.smk
+++ b/hippunfold/workflow/rules/preproc_seg.smk
@@ -39,11 +39,10 @@ rule warp_seg_to_corobl_crop:
             desc="affine",
             type_="itk"
         ),
-        ref=os.path.join(
-            workflow.basedir,
-            "..",
-            config["template_files"][config["template"]]["crop_ref"],
-        ),
+        template_dir=Path(download_dir) / "template" / config["template"],
+    params:
+        ref=lambda wildcards, input: Path(input.template_dir)
+        / config["template_files"][config["template"]]["crop_ref"],
     output:
         nii=bids(
             root=work,
             datatype="anat",
             **config["subj_wildcards"],
             suffix="dseg.nii.gz",
             space="corobl",
-            hemi="{hemi}",
+            hemi="{hemi,L|R}",
             from_="{space}"
         ),
     container:
         config["singularity"]["autotop"]
     group:
         "subj"
     shell:
         "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={threads} "
-        "antsApplyTransforms -d 3 --interpolation MultiLabel -i {input.nii} -o {output.nii} -r {input.ref} -t {input.xfm}"
+        "antsApplyTransforms -d 3 --interpolation MultiLabel -i {input.nii} -o {output.nii} -r {params.ref} -t {input.xfm}"
 
 
 rule lr_flip_seg:
config["template_files"][config["template"]][wildcards.modality], + ) if config["no_reg_template"]: - cmd = f"reg_resample -flo {input.flo} -ref {input.ref} -res {output.warped_subj} -aff {input.xfm_identity}; cp {input.xfm_identity} {output.xfm_ras}" + cmd = f"reg_resample -flo {input.flo} -ref {ref} -res {output.warped_subj} -aff {input.xfm_identity}; cp {input.xfm_identity} {output.xfm_ras}" elif config["rigid_reg_template"]: - cmd = f"reg_aladin -flo {input.flo} -ref {input.ref} -res {output.warped_subj} -aff {output.xfm_ras} -rigOnly" + cmd = f"reg_aladin -flo {input.flo} -ref {ref} -res {output.warped_subj} -aff {output.xfm_ras} -rigOnly" else: - cmd = f"reg_aladin -flo {input.flo} -ref {input.ref} -res {output.warped_subj} -aff {output.xfm_ras}" + cmd = f"reg_aladin -flo {input.flo} -ref {ref} -res {output.warped_subj} -aff {output.xfm_ras}" return cmd rule reg_to_template: + """ generic for T1w or T2w right now """ input: flo=bids( root=root, datatype="anat", **config["subj_wildcards"], desc="preproc", - suffix="T1w.nii.gz" - ), - ref=os.path.join( - workflow.basedir, "..", config["template_files"][config["template"]]["T1w"] + suffix="{modality}.nii.gz" ), xfm_identity=os.path.join(workflow.basedir, "..", config["xfm_identity"]), + template_dir=Path(download_dir) / "template" / config["template"], params: cmd=reg_to_template_cmd, output: @@ -100,7 +104,7 @@ rule reg_to_template: root=work, datatype="anat", **config["subj_wildcards"], - suffix="T1w.nii.gz", + suffix="{modality,T1w|T2w}.nii.gz", space=config["template"], desc="affine" ), @@ -109,7 +113,17 @@ rule reg_to_template: datatype="warps", **config["subj_wildcards"], suffix="xfm.txt", - from_="T1w", + from_="{modality,T1w|T2w}", + to=config["template"], + desc="affine", + type_="ras" + ), + log: + bids( + root="logs", + **config["subj_wildcards"], + suffix="reg.txt", + from_="{modality,T1w|T2w}", to=config["template"], desc="affine", type_="ras" @@ -166,11 +180,10 @@ rule compose_template_xfm_corobl: desc="affine", type_="itk" ), - std_to_cor=os.path.join( - workflow.basedir, - "..", - config["template_files"][config["template"]]["xfm_corobl"], - ), + template_dir=Path(download_dir) / "template" / config["template"], + params: + std_to_cor=lambda wildcards, input: Path(input.template_dir) + / config["template_files"][config["template"]]["xfm_corobl"], output: sub_to_cor=bids( root=work, @@ -187,7 +200,7 @@ rule compose_template_xfm_corobl: group: "subj" shell: - "c3d_affine_tool -itk {input.sub_to_std} -itk {input.std_to_cor} -mult -oitk {output}" + "c3d_affine_tool -itk {input.sub_to_std} -itk {params.std_to_cor} -mult -oitk {output}" rule invert_template_xfm_itk2ras: @@ -272,16 +285,10 @@ rule warp_t1_to_corobl_crop: desc="affine", type_="itk" ), - ref=os.path.join( - workflow.basedir, - "..", - config["template_files"][config["template"]]["crop_ref"], - ), - std_to_cor=os.path.join( - workflow.basedir, - "..", - config["template_files"][config["template"]]["xfm_corobl"], - ), + template_dir=Path(download_dir) / "template" / config["template"], + params: + ref=lambda wildcards, input: Path(input.template_dir) + / config["template_files"][config["template"]]["crop_ref"], output: t1=bids( root=work, @@ -298,7 +305,7 @@ rule warp_t1_to_corobl_crop: "subj" shell: "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={threads} " - "antsApplyTransforms -d 3 --interpolation Linear -i {input.t1} -o {output.t1} -r {input.ref} -t {input.xfm}" + "antsApplyTransforms -d 3 --interpolation Linear -i {input.t1} -o {output.t1} -r {params.ref} -t 
{input.xfm}" rule lr_flip_t1: diff --git a/hippunfold/workflow/rules/preproc_t2.smk b/hippunfold/workflow/rules/preproc_t2.smk index f7c1809c..0573498b 100644 --- a/hippunfold/workflow/rules/preproc_t2.smk +++ b/hippunfold/workflow/rules/preproc_t2.smk @@ -247,58 +247,6 @@ rule reg_t2_to_t1: "c3d_affine_tool {output.xfm_ras} -oitk {output.xfm_itk}" -rule reg_t2_to_template: - input: - flo=bids( - root=root, - datatype="anat", - **config["subj_wildcards"], - suffix="T2w.nii.gz", - desc="preproc" - ), - ref=os.path.join( - workflow.basedir, "..", config["template_files"][config["template"]]["T2w"] - ), - xfm_identity=os.path.join(workflow.basedir, "..", config["xfm_identity"]), - params: - cmd=reg_to_template_cmd, - output: - warped_subj=bids( - root=work, - datatype="anat", - **config["subj_wildcards"], - suffix="T2w.nii.gz", - space=config["template"], - desc="affine" - ), - xfm_ras=bids( - root=work, - datatype="warps", - **config["subj_wildcards"], - suffix="xfm.txt", - from_="T2w", - to=config["template"], - desc="affine", - type_="ras" - ), - log: - bids( - root="logs", - **config["subj_wildcards"], - suffix="reg.txt", - from_="T2w", - to=config["template"], - desc="affine", - type_="ras" - ), - container: - config["singularity"]["autotop"] - group: - "subj" - shell: - "{params.cmd}" + " &> {log}" - - def get_inputs_compose_t2_xfm_corobl(wildcards): if config["t1_reg_template"]: # xfm0: t2 to t1 @@ -346,20 +294,34 @@ def get_inputs_compose_t2_xfm_corobl(wildcards): ) # xfm1: template to corobl - std_to_cor = ( - os.path.join( - workflow.basedir, - "..", - config["template_files"][config["template"]]["xfm_corobl"], - ), + template_dir = Path(download_dir) / "template" / config["template"] + return {"t2_to_std": t2_to_std, "template_dir": template_dir} + + +def get_cmd_compose_t2_xfm_corobl(wildcards, input): + if config["t1_reg_template"]: + # xfm0: t2 to t1 + xfm0 = input.t2_to_t1 + # xfm1: t1 to corobl + xfm1 = input.t1_to_cor + else: + # xfm0: t2 to template + xfm0 = input.t2_to_std + # xfm1: template to corobl + xfm1 = ( + Path(input.template_dir) + / config["template_files"][config["template"]]["xfm_corobl"] ) - return {"t2_to_std": t2_to_std, "std_to_cor": std_to_cor} + + return "c3d_affine_tool -itk {xfm0} -itk {xfm1} -mult -oitk {output}" # now have t2 to t1 xfm, compose this with t1 to corobl xfm rule compose_t2_xfm_corobl: input: unpack(get_inputs_compose_t2_xfm_corobl), + params: + cmd=get_cmd_compose_t2_xfm_corobl, output: t2_to_cor=bids( root=work, @@ -371,12 +333,22 @@ rule compose_t2_xfm_corobl: desc="affine", type_="itk" ), + log: + bids( + root="logs", + **config["subj_wildcards"], + suffix="composecorobl.txt", + from_="T2w", + to="corobl", + desc="affine", + type_="itk" + ), container: config["singularity"]["autotop"] group: "subj" shell: - "c3d_affine_tool -itk {input[0]} -itk {input[1]} -mult -oitk {output}" + "{params.cmd} > {log}" # if already have t2w in T1w space, then we don't need to use composed xfm: @@ -419,11 +391,10 @@ rule warp_t2_to_corobl_crop: desc="preproc" ), xfm=get_xfm_to_corobl(), - ref=os.path.join( - workflow.basedir, - "..", - config["template_files"][config["template"]]["crop_ref"], - ), + template_dir=Path(download_dir) / "template" / config["template"], + params: + ref=lambda wildcards, input: Path(input.template_dir) + / config["template_files"][config["template"]]["crop_ref"], output: nii=bids( root=work, @@ -432,7 +403,7 @@ rule warp_t2_to_corobl_crop: suffix="T2w.nii.gz", space="corobl", desc="preproc", - hemi="{hemi}" + hemi="{hemi,L|R}" 
         ),
     container:
         config["singularity"]["autotop"]
@@ -440,7 +411,7 @@ rule warp_t2_to_corobl_crop:
         "subj"
     shell:
         "ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS={threads} "
-        "antsApplyTransforms -d 3 --interpolation Linear -i {input.nii} -o {output.nii} -r {input.ref} -t {input.xfm}"
+        "antsApplyTransforms -d 3 --interpolation Linear -i {input.nii} -o {output.nii} -r {params.ref} -t {input.xfm}"
 
 
 rule lr_flip_t2:
diff --git a/hippunfold/workflow/rules/qc.smk b/hippunfold/workflow/rules/qc.smk
index 5d44ec60..bf9c8650 100644
--- a/hippunfold/workflow/rules/qc.smk
+++ b/hippunfold/workflow/rules/qc.smk
@@ -1,10 +1,5 @@
 rule qc_reg_to_template:
     input:
-        ref=lambda wildcards: os.path.join(
-            workflow.basedir,
-            "..",
-            config["template_files"][config["template"]][wildcards.native_modality],
-        ),
         flo=bids(
             root=work,
             datatype="anat",
@@ -13,6 +8,10 @@ rule qc_reg_to_template:
             space=config["template"],
             desc="affine"
         ),
+        template_dir=Path(download_dir) / "template" / config["template"],
+    params:
+        ref=lambda wildcards, input: Path(input.template_dir)
+        / config["template_files"][config["template"]][wildcards.native_modality],
     output:
         png=report(
             bids(
@@ -51,12 +50,10 @@ rule get_subfield_vols_subj:
             hemi=config["hemi"],
             allow_missing=True,
         ),
-        lookup_tsv=os.path.join(
-            workflow.basedir,
-            "..",
-            "resources",
-            "desc-subfields_atlas-{atlas}_dseg.tsv",
-        ),
+        lookup_tsv=Path(workflow.basedir).parent
+        / "resources"
+        / "label_lut"
+        / "desc-subfields_atlas-{atlas}_dseg.tsv",
     group:
         "subj"
     output:
diff --git a/hippunfold/workflow/scripts/dice.py b/hippunfold/workflow/scripts/dice.py
index abc9bfee..14122738 100644
--- a/hippunfold/workflow/scripts/dice.py
+++ b/hippunfold/workflow/scripts/dice.py
@@ -1,7 +1,7 @@
 import nibabel as nib
 import numpy as np
 
-r = nib.load(snakemake.input.ref)
+r = nib.load(snakemake.params.ref)
 ref_mask = r.get_fdata()
 n = nib.load(snakemake.input.res_mask)
 nnunet_rois = n.get_fdata()
diff --git a/hippunfold/workflow/scripts/vis_regqc.py b/hippunfold/workflow/scripts/vis_regqc.py
index 49c88579..a0e64ad1 100644
--- a/hippunfold/workflow/scripts/vis_regqc.py
+++ b/hippunfold/workflow/scripts/vis_regqc.py
@@ -7,6 +7,6 @@ matplotlib.use("Agg")
 
 display = plotting.plot_anat(snakemake.input.flo, display_mode="ortho", dim=-0.5)
-display.add_contours(snakemake.input.ref, colors="r")
+display.add_contours(snakemake.params.ref, colors="r")
 display.savefig(snakemake.output.png)
 display.close()
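For context on the `dice.py` change above: the script compares the warped nnU-Net labels against the template hippocampus mask. A minimal sketch of the overlap metric being reported (file names are placeholders; the label values come from `hipp_lbls` in `qc_nnunet_dice`):

```python
import nibabel as nib
import numpy as np

ref_mask = nib.load("template_mask.nii.gz").get_fdata() > 0  # snakemake.params.ref
rois = nib.load("warped_dseg.nii.gz").get_fdata()            # snakemake.input.res_mask
seg_mask = np.isin(rois, [1, 2, 7, 8])                       # hippocampal labels

# Dice = 2|A intersect B| / (|A| + |B|)
dice = 2 * np.logical_and(ref_mask, seg_mask).sum() / (ref_mask.sum() + seg_mask.sum())
print(f"Dice overlap: {dice:.3f}")
```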