Commit
After the bug fix listed below, CABINET ran postBIBSnet on multiple subjects with no sessions and on multiple subjects with multiple sessions. The postBIBSnet outputs look good.
- Fixed bug where preBIBSnet used "_crop_" and postBIBSnet used "_full_crop_"
- Added a validation check so that running stages out of order causes an immediate crash
- Removed some finished TODOs, added some other TODOs
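The filename bug in plain terms: preBIBSnet and postBIBSnet each built the name of the crop-to-template .mat transform independently, one with "_crop_" and the other with "_full_crop_", so the later stage looked for a file the earlier stage never wrote. Below is a minimal sketch of the direction the in-code TODOs point toward, defining the basename once and sharing it; the helper name and paths are hypothetical, not CABINET's actual API.

```python
import os

def crop2BIBS_mat_basename(t):
    """Hypothetical shared helper: one template string for the transform
    filename, so the stage that writes the .mat file and the stage that
    later reads it can never disagree on its name."""
    return "crop_T{}w_to_BIBS_template.mat".format(t)

# Both the writer (preBIBSnet) and the reader (postBIBSnet) would call the helper:
out_dir = "/path/to/work/dir"  # placeholder directory
written = os.path.join(out_dir, crop2BIBS_mat_basename(1))
expected = os.path.join(out_dir, crop2BIBS_mat_basename(1))
assert written == expected  # the mismatch this commit fixes cannot recur
```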
Showing 2 changed files with 23 additions and 30 deletions.
@@ -5,7 +5,7 @@
Connectome ABCD-XCP niBabies Imaging nnu-NET (CABINET)
Greg Conan: [email protected]
Created: 2021-11-12
Updated: 2022-08-19
Updated: 2022-08-23
"""
# Import standard libraries
import argparse

@@ -70,7 +70,6 @@ def main():
run_all_stages(STAGES, sub_ses_IDs, json_args["stage_names"]["start"],
json_args["stage_names"]["end"], json_args, logger)
# TODO default to running all stages if not specified by the user
# TODO add error if end is given as a stage that happens before start

# Show user how long the pipeline took and end the pipeline here
exit_with_time_info(start_time)

@@ -123,8 +122,8 @@ def get_params_from_JSON(stage_names, logger):
parser.add_argument(
"-jargs", "-params", "--parameter-json", dest="parameter_json",
type=valid_readable_json, required=True,
help=("Valid path to existing readable parameter .JSON file. See "
"README.md and example parameter .JSON files for more "
help=("Required. Valid path to existing readable parameter .JSON "
"file. See README.md and example parameter .JSON files for more "
"information on parameters.")
# TODO: Add description of all nibabies and XCP-D parameters to the README?
# TODO: In the README.md file, mention which arguments are required and which are optional (with defaults)

@@ -210,6 +209,12 @@ def validate_cli_args(cli_args, stage_names, parser, logger):
j_args["meta"] = {script_dir_attr: SCRIPT_DIR,
"slurm": bool(cli_args[script_dir_attr])}

# Crash immediately if the end is given as a stage that happens before start
if (stage_names.index(cli_args["start"])
> stage_names.index(cli_args["end"])):
parser.error("Error: {} stage must happen before {} stage."
.format(cli_args["start"], cli_args["end"]))

# Add command-line arguments to j_args
j_args["stage_names"] = {"start": cli_args["start"],
"end": cli_args["end"]} # TODO Maybe save the stage_names list in here too to replace optional_out_dirs use cases?
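For context on the stage-order check added in the hunk above: it compares the list indices of the requested start and end stages, then calls argparse's parser.error(), which prints the message and exits. A standalone sketch of the same logic follows; the stage list is an assumed ordering for illustration only.

```python
stage_names = ["prebibsnet", "bibsnet", "postbibsnet", "nibabies", "xcpd"]  # assumed order
start, end = "postbibsnet", "bibsnet"  # deliberately out of order

# Same index comparison as the new validate_cli_args check
if stage_names.index(start) > stage_names.index(end):
    # parser.error() would print this and exit with status 2;
    # SystemExit stands in for it in this self-contained sketch
    raise SystemExit("Error: {} stage must happen before {} stage.".format(start, end))
```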
@@ -420,7 +425,6 @@ def run_preBIBSnet(j_args, logger):
:param logger: logging.Logger object to show messages and raise warnings
:return: j_args, but with preBIBSnet working directory names added
"""
# sub_ses = get_subj_ID_and_session(j_args)
completion_msg = "The anatomical images have been {} for use in BIBSnet"
preBIBSnet_paths = get_and_make_preBIBSnet_work_dirs(j_args)

@@ -483,14 +487,10 @@ def run_BIBSnet(j_args, logger):
# Import BIBSnet functionality from BIBSnet/run.py
parent_BIBSnet = os.path.dirname(j_args["bibsnet"]["code_dir"])
logger.info("Importing BIBSnet from {}".format(parent_BIBSnet))
sys.path.append(parent_BIBSnet)
sys.path.append(parent_BIBSnet) #sys.path.append("/home/cabinet/SW/BIBSnet")
from BIBSnet.run import run_nnUNet_predict

# TODO test functionality of importing BIBSNet function via params json (j_args)
#parent_BIBSnet = os.path.dirname(j_args["bibsnet"]["code_dir"])
#logger.info("Importing BIBSnet from {}".format(parent_BIBSnet))
#sys.path.append("/home/cabinet/SW/BIBSnet")
#from BIBSnet.run import run_nnUNet_predict

try: # Run BIBSnet
inputs_BIBSnet = {"model": j_args["bibsnet"]["model"],

@@ -516,7 +516,7 @@
sys.exit(e)

# Remove unneeded empty directories
for unneeded_dir_name in ("nnUNet_cropped_image", "nnUNet_raw_data"):
for unneeded_dir_name in ("nnUNet_cropped_data", "nnUNet_raw_data"):
unneeded_dir_path = os.path.join(
j_args["optional_out_dirs"]["derivatives"], unneeded_dir_name
)
@@ -5,7 +5,7 @@
Common source for utility functions used by CABINET :)
Greg Conan: [email protected]
Created: 2021-11-12
Updated: 2022-08-19
Updated: 2022-08-23
"""
# Import standard libraries
import argparse

@@ -80,8 +80,6 @@ def align_ACPC_1_img(j_args, logger, xfm_ACPC_vars, crop2full, output_var, t,
run_FSL_sh_script(j_args, logger, "aff2rigid", mats["full2acpc"],
mats["acpc2rigidbody"])

# run_FSL_sh_script(j_args, logger, "convert_xfm", "-inverse", mats["rigidbody2acpc"], "-omat", mats["acpc2rigidbody"])

# Apply ACPC alignment to the data
# Create a resampled image (ACPC aligned) using spline interpolation # TODO Only run this command in debug mode
# if j_args["common"]["debug"]:
@@ -242,8 +240,9 @@ def correct_chirality(nifti_input_file_path, segment_lookup_table,
seg_to_T1w_nat = os.path.join(chiral_out_dir, "seg_reg_to_T1w_native.mat")
preBIBSnet_mat = os.path.join(
j_args["optional_out_dirs"]["postbibsnet"], *sub_ses,
"preBIBSnet_full_crop_T1w_to_BIBS_template.mat"
) # "preBIBSnet_T1w_final.mat") crop_T{}w_to_BIBS_template.mat
"preBIBSnet_crop_T1w_to_BIBS_template.mat" # TODO Name this outside of pre- and postBIBSnet then pass it to both
) # preBIBSnet_mat_glob =
# preBIBSnet_mat = glob(preBIBSnet_mat_glob).pop() # NOTE CABINET ran without error using this on 2022-08-23
run_FSL_sh_script(j_args, logger, "convert_xfm", "-omat",
seg_to_T1w_nat, "-inverse", preBIBSnet_mat) # TODO Define preBIBSnet_mat path outside of stages because it's used by preBIBSnet and postBIBSnet
@@ -579,12 +578,10 @@ def get_optimal_resized_paths(sub_ses, bibsnet_out_dir):

def get_spatial_resolution_of(image_fpath, j_args, logger, fn_name="fslinfo"):
"""
Run any FSL function in a Bash subprocess, unless its outputs exist and the
parameter file said not to overwrite outputs
:param j_args: Dictionary containing all args from parameter .JSON file
:param logger: logging.Logger object to show messages and raise warnings
:param fsl_fn_name: String naming the FSL function which is an
executable file in j_args[common][fsl_bin_path]
:param fn_name: String naming the function which is an
executable file in j_args[common][fsl_bin_path]
""" # TODO Do we even need this function?
# FSL command to run in a subprocess
to_run = [os.path.join(j_args["common"]["fsl_bin_path"], fn_name),

@@ -739,6 +736,8 @@ def optimal_realigned_imgs(xfm_imgs_non_ACPC, xfm_imgs_ACPC_and_reg, j_args, log
"""
if not os.path.exists(out_mat_fpath):
shutil.copy2(concat_mat, out_mat_fpath)
if j_args["common"]["verbose"]:
logger.info("Copying {} to {}".format(concat_mat, out_mat_fpath))
return optimal_resize
@@ -992,7 +991,7 @@ def resize_images(cropped_imgs, output_dir, ref_image, ident_mx,
# registration_T2w_to_T1w's cropT2tocropT1.mat, and then non-ACPC
# registration_T2w_to_T1w's crop_T1_to_BIBS_template.mat)
preBIBS_nonACPC_out["T{}w_crop2BIBS_mat".format(t)] = os.path.join(
xfm_non_ACPC_vars["out_dir"], "full_crop_T{}w_to_BIBS_template.mat".format(t)
xfm_non_ACPC_vars["out_dir"], "crop_T{}w_to_BIBS_template.mat".format(t)
)
full2cropT1w_mat = os.path.join(xfm_non_ACPC_vars["out_dir"],
"full2cropT1w.mat")

@@ -1082,15 +1081,15 @@ def run_all_stages(all_stages, sub_ses_IDs, start, end,
running = False
for dict_with_IDs in sub_ses_IDs:

# ...make a j_args copy with its subject ID, session ID, and age # TODO Add brain_z_size into j_args[ID]
# ...make a j_args copy with its subject ID, session ID, and age
sub_ses_j_args = ubiquitous_j_args.copy()
sub_ses_j_args["ID"] = dict_with_IDs
sub_ses = get_subj_ID_and_session(sub_ses_j_args)
sub_ses_j_args["optimal_resized"] = get_optimal_resized_paths(
sub_ses, ubiquitous_j_args["optional_out_dirs"]["bibsnet"]
)

# Check that all required input files exist for the stages to run
# ...check that all required input files exist for the stages to run
verify_CABINET_inputs_exist(sub_ses, sub_ses_j_args, logger)

# ...run all stages that the user said to run
@@ -1397,18 +1396,12 @@ def verify_CABINET_inputs_exist(sub_ses, j_args, logger):
"bibsnet": list(j_args["optimal_resized"].values()),
"postbibsnet": [out_BIBSnet_seg, *subject_heads],
"nibabies": out_paths_BIBSnet,
"xcpd": []}
"xcpd": list()}

# For each stage that will be run, verify that its prereq input files exist
all_stages = [s for s in stage_prerequisites.keys()]

# required_files = stage_prerequisites[j_args["stage_names"]["start"]]
start_ix = all_stages.index(j_args["stage_names"]["start"])
for stage in all_stages[:start_ix+1]:

# if stage == j_args["stage_names"]["start"]:
# if will_run_stage(stage, j_args["stage_names"]["start"], j_args["stage_names"]["end"], all_stages):

missing_files = list()
for globbable in stage_prerequisites[stage]:
if not glob(globbable):