From be9771c2a51a8b0bc3c3d487d62172eb1fab1457 Mon Sep 17 00:00:00 2001
From: Martin Reuter
Date: Thu, 2 May 2024 14:32:56 +0200
Subject: [PATCH 01/19] stop automatic run

---
 .github/workflows/deploy.yml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index 61a89be0..c81df851 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -1,9 +1,10 @@
 name: deploy-docker
 
 on:
-  release:
-    types:
-      - published
+#  release:
+#    types:
+#      - published
+  workflow_dispatch:
 
 jobs:
   deploy-gpu:

From 2b2b75a138e538723ec924e7e00eaf9b61a02a7a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?David=20K=C3=BCgler?=
Date: Fri, 3 May 2024 21:30:48 +0200
Subject: [PATCH 02/19] Update checkpoint.py

Add additional debug information if the download of checkpoints fails.
---
 FastSurferCNN/utils/checkpoint.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/FastSurferCNN/utils/checkpoint.py b/FastSurferCNN/utils/checkpoint.py
index 635b5db4..7579eaab 100644
--- a/FastSurferCNN/utils/checkpoint.py
+++ b/FastSurferCNN/utils/checkpoint.py
@@ -346,8 +346,10 @@ def download_checkpoint(
     except requests.exceptions.HTTPError as e:
         LOGGER.info(f"Server {url} not reachable.")
+        LOGGER.exception(e)
         LOGGER.warn(f"Response code: {e.response.status_code}")
     except requests.exceptions.RequestException as e:
+        LOGGER.exception(e)
         LOGGER.warn(f"Server {url} not reachable.")
 
     if response is None:

From a16a6a031bab302d3a2d0d399dbda5ebac791b3e Mon Sep 17 00:00:00 2001
From: Taha Abdullah
Date: Fri, 24 May 2024 18:53:16 +0200
Subject: [PATCH 03/19] fixing issue with get_cmap on matplotlib 3.9.0

- importing get_cmap from matplotlib.pyplot instead of matplotlib.cm
---
 FastSurferCNN/utils/mapper.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/FastSurferCNN/utils/mapper.py b/FastSurferCNN/utils/mapper.py
index 02198bc7..3c664ef2 100644
--- a/FastSurferCNN/utils/mapper.py
+++ b/FastSurferCNN/utils/mapper.py
@@ -49,7 +49,7 @@
 import numpy as np
 import pandas
 import torch
-from matplotlib.cm import get_cmap
+from matplotlib.pyplot import get_cmap
 from matplotlib.colors import Colormap
 from numpy import typing as npt

From 2f350e9965dc19fa9bcc1b53ee0934ed7c54ea53 Mon Sep 17 00:00:00 2001
From: Taha Abdullah
Date: Fri, 24 May 2024 18:53:16 +0200
Subject: [PATCH 04/19] Adding in [MISSING] data in docstrings

Added missing descriptions in the docstrings of the following files:
CerebNet/datasets/load_data.py
CerebNet/datasets/utils.py
CerebNet/utils/meters.py
CerebNet/utils/metrics.py
CerebNet/utils/misc.py
FastSurferCNN/config/global_var.py
FastSurferCNN/data_loader/augmentation.py
FastSurferCNN/data_loader/data_utils.py
FastSurferCNN/data_loader/dataset.py
FastSurferCNN/inference.py
FastSurferCNN/models/interpolation_layer.py
FastSurferCNN/models/losses.py
FastSurferCNN/models/networks.py
FastSurferCNN/models/sub_module.py
FastSurferCNN/quick_qc.py
FastSurferCNN/reduce_to_aseg.py
FastSurferCNN/run_model.py
FastSurferCNN/segstats.py
FastSurferCNN/train.py
FastSurferCNN/utils/checkpoint.py
FastSurferCNN/utils/common.py
FastSurferCNN/utils/mapper.py
FastSurferCNN/utils/meters.py
FastSurferCNN/utils/metrics.py
FastSurferCNN/utils/misc.py
FastSurferCNN/utils/parser_defaults.py
recon_surf/N4_bias_correct.py
recon_surf/align_points.py
recon_surf/image_io.py
---
 CerebNet/datasets/load_data.py | 23 +++-
 CerebNet/datasets/utils.py | 12 +-
 CerebNet/utils/meters.py | 134 ++++++++++++++++++--
 CerebNet/utils/metrics.py | 22 ++--
CerebNet/utils/misc.py | 35 ++++- FastSurferCNN/config/global_var.py | 11 +- FastSurferCNN/data_loader/augmentation.py | 22 ++-- FastSurferCNN/data_loader/data_utils.py | 6 +- FastSurferCNN/data_loader/dataset.py | 2 +- FastSurferCNN/inference.py | 10 +- FastSurferCNN/models/interpolation_layer.py | 8 +- FastSurferCNN/models/losses.py | 2 +- FastSurferCNN/models/networks.py | 8 +- FastSurferCNN/models/sub_module.py | 6 +- FastSurferCNN/quick_qc.py | 6 +- FastSurferCNN/reduce_to_aseg.py | 25 ++-- FastSurferCNN/run_model.py | 6 +- FastSurferCNN/segstats.py | 106 +++++++++------- FastSurferCNN/train.py | 4 +- FastSurferCNN/utils/checkpoint.py | 4 +- FastSurferCNN/utils/common.py | 20 ++- FastSurferCNN/utils/mapper.py | 36 +++--- FastSurferCNN/utils/meters.py | 36 +++--- FastSurferCNN/utils/metrics.py | 24 +++- FastSurferCNN/utils/misc.py | 6 +- FastSurferCNN/utils/parser_defaults.py | 4 +- recon_surf/N4_bias_correct.py | 6 +- recon_surf/align_points.py | 2 +- recon_surf/image_io.py | 3 +- 29 files changed, 388 insertions(+), 201 deletions(-) diff --git a/CerebNet/datasets/load_data.py b/CerebNet/datasets/load_data.py index 38687df7..40818edd 100644 --- a/CerebNet/datasets/load_data.py +++ b/CerebNet/datasets/load_data.py @@ -65,7 +65,26 @@ def _process_segm_volumes( def _load_volumes(self, subject_path, store_talairach=False): """ - [MISSING]. + Loads the original image and cerebellum sub-segmentation from the given subject path. + Also loads the Talairach coordinates if store_talairach is set to True. + + Parameters + ---------- + subject_path : str + The path to the subject's data directory. + store_talairach : bool, optional + If True, the method will attempt to load the Talairach coordinates. Defaults to False. + + Returns + ------- + orig : np.ndarray + The original image. + cereb_subseg : np.ndarray + The cerebellum sub-segmentation loaded from the subject's data directory. + img_meta_data : dict + Dictionary containing the affine transformation and header from cereb_subseg file. + If store_talairach is True and Talairach coordinates file exists, also contains the + Talairach coordinates. """ orig_path = join(subject_path, self.cfg.IMAGE_NAME) subseg_path = join(subject_path, self.cfg.CEREB_SUBSEG_NAME) @@ -181,7 +200,7 @@ def load_subject(self, current_subject, store_talairach=False, load_aux_data=Fal Parameters ---------- - current_subject : [MISSING] + current_subject : str Subject ID. store_talairach : bool, optional Whether to store Talairach coordinates. Defaults to False. diff --git a/CerebNet/datasets/utils.py b/CerebNet/datasets/utils.py index be5bd08d..4d848f4a 100644 --- a/CerebNet/datasets/utils.py +++ b/CerebNet/datasets/utils.py @@ -634,15 +634,15 @@ def _crop_transform_pad_fn(image, pad_tuples, pad): Parameters ---------- - image : [MISSING] - [MISSING]. - pad_tuples : [MISSING] - [MISSING]. + image : np.ndarray, torch.Tensor + Input image. + pad_tuples : List[Tuple[int, int]] + List of padding tuples for each axis. Returns ------- - [MISSING TYPE] - [MISSING Discription]. + partial + A partial function to pad the image. """ if all(p1 == 0 and p2 == 0 for p1, p2 in pad_tuples): return None diff --git a/CerebNet/utils/meters.py b/CerebNet/utils/meters.py index 6b50a909..67d7ebdd 100644 --- a/CerebNet/utils/meters.py +++ b/CerebNet/utils/meters.py @@ -36,6 +36,11 @@ class TestMeter: def __init__(self, classname_to_ids): """ Constructor function. + + Parameters + ---------- + classname_to_ids : dict + Dictionary containing class names and their corresponding ids. 
""" # class_id: class_name self.classname_to_ids = classname_to_ids @@ -46,7 +51,20 @@ def __init__(self, classname_to_ids): def _compute_hd(self, pred_bin, gt_bin): """ - [MISSING]. + Compute the Hausdorff Distance (HD) between the predicted binary segmentation map + and the ground truth binary segmentation map. + + Parameters + ---------- + pred_bin : np.array + Predicted binary segmentation map. + gt_bin : np.array + Ground truth binary segmentation map. + + Returns + ------- + hd_dict : dict + Dictionary containing the maximum HD and 95th percentile HD. """ hd_dict = {} if np.count_nonzero(pred_bin) == 0: @@ -61,14 +79,38 @@ def _compute_hd(self, pred_bin, gt_bin): def _get_binray_map(self, lbl_map, class_names): """ - [MISSING]. + Generate binary map based on the label map and class names. + + Parameters + ---------- + lbl_map : np.array + Label map where each pixel/voxel is assigned a class label. + class_names : list + List of class names to be considered in the binary map. + + Returns + ------- + bin_map : np.array + Binary map where True represents class and False represents its absence. """ bin_map = np.logical_or.reduce(list(map(lambda l: lbl_map == l, class_names))) return bin_map def metrics_per_class(self, pred, gt): """ - [MISSING]. + Compute metrics for each class in the predicted and ground truth segmentation maps. + + Parameters + ---------- + pred : np.array + Predicted segmentation map. + gt : np.array + Ground truth segmentation map. + + Returns + ------- + metrics : dict + Dict containing metrics for each class. """ metrics = {"Label": [], "Dice": [], "HD95": [], "HD_Max": [], "VS": []} for lbl_name, lbl_id in self.classname_to_ids.items(): @@ -116,8 +158,27 @@ def __init__( device=None, writer=None, ): - """" + """ Constructor function. + + Parameters + ---------- + cfg : object + Configuration object containing all the configuration parameters. + mode : str + Mode of operation ("Train" or "Val"). + global_step : int + The global step count. + total_iter : int, optional + Total number of iterations. + total_epoch : int, optional + Total number of epochs. + class_names : list, optional + List of class names. + device : str, optional + Device to be used for computation. + writer : object, optional + Writer object for tensorboard. """ self._cfg = cfg self.mode = mode.capitalize() @@ -144,6 +205,15 @@ def reset(self): def update_stats(self, pred, labels, loss_dict=None): """ Update stats. + + Parameters + ---------- + pred : torch.Tensor + Predicted labels. + labels : torch.Tensor + Ground truth labels. + loss_dict : dict, optional + Dictionary containing loss values. """ self.dice_score.update((pred, labels)) if loss_dict is None: @@ -154,6 +224,11 @@ def update_stats(self, pred, labels, loss_dict=None): def write_summary(self, loss_dict): """ Write summary. + + Parameters + ---------- + loss_dict : dict + Dictionary containing loss values. """ if self.writer is None: return @@ -161,11 +236,22 @@ def write_summary(self, loss_dict): self.writer.add_scalar(f"{self.mode}/{name}", loss.item(), self.global_iter) self.global_iter += 1 - def prediction_visualize( - self, cur_iter, cur_epoch, img_batch, label_batch, pred_batch - ): + def prediction_visualize(self, cur_iter, cur_epoch, img_batch, label_batch, pred_batch): """ - [MISSING]. + Visualize prediction results for current iteration and epoch. + + Parameters + ---------- + cur_iter : int + Current iteration number. + cur_epoch : int + Current epoch number. + img_batch : torch.Tensor + Input image batch. 
+ label_batch : torch.Tensor + Ground truth label batch. + pred_batch : torch.Tensor + Predicted label batch. """ if self.writer is None: return @@ -179,7 +265,14 @@ def prediction_visualize( def log_iter(self, cur_iter, cur_epoch): """ - [MISSING]. + Log training or validation progress at each iteration. + + Parameters + ---------- + cur_iter : int + The current iteration number. + cur_epoch : int + The current epoch number. """ if (cur_iter + 1) % self._cfg.TRAIN.LOG_INTERVAL == 0: out_losses = {} @@ -203,7 +296,16 @@ def log_iter(self, cur_iter, cur_epoch): def log_lr(self, lr, step=None): """ - [MISSING]. + Log learning rate at each step. + + Parameters + ---------- + lr : list + Learning rate at the current step. Expected to be a list where the first + element is the learning rate. + step : int, optional + Current step number. If not provided, the global iteration + number is used. """ if step is None: step = self.global_iter @@ -211,7 +313,17 @@ def log_lr(self, lr, step=None): def log_epoch(self, cur_epoch): """ - [MISSING]. + Log mean Dice score and confusion matrix at the end of each epoch. + + Parameters + ---------- + cur_epoch : int + Current epoch number. + + Returns + ------- + dice_score : float + The mean Dice score for the non-background classes. """ dice_score_per_class, confusion_mat = self.dice_score.compute(per_class=True) dice_score = dice_score_per_class[1:].mean() diff --git a/CerebNet/utils/metrics.py b/CerebNet/utils/metrics.py index 0c48a4af..b76d1d20 100644 --- a/CerebNet/utils/metrics.py +++ b/CerebNet/utils/metrics.py @@ -68,7 +68,7 @@ def __init__( def reset(self): """ - [MISSING]. + Reset the state of the object. """ self.union = torch.zeros(self.n_classes, self.n_classes) self.intersection = torch.zeros(self.n_classes, self.n_classes) @@ -84,7 +84,7 @@ def _check_output_type(self, output): """ if not (isinstance(output, tuple)): raise TypeError( - "Output should a tuple consist of of torch.Tensors, but given {}".format( + "Output should be a tuple consisting of torch.Tensors, but given {}".format( type(output) ) ) @@ -93,15 +93,13 @@ def _update_union_intersection(self, batch_output, labels_batch): """ Update the union and intersection matrices based on batch predictions and labels. - [MISSING DESCRIPTION] - Parameters: ----------- - batch_output : [MISSING TYPE] - [MISSING DESCRIPTION] + batch_output : torch.Tensor + Batch predictions from the model. - labels_batch : [MISSING TYPE] - [MISSING DESCRIPTION] + labels_batch : np.ndarray or torch.Tensor + Batch labels from the dataset. """ # self.union.to(batch_output.device) # self.intersection.to(batch_output.device) @@ -120,8 +118,8 @@ def update(self, output): Parameters ---------- - output : [MISSING] - [MISSING DESCRIPTION]. + output : tuple of torch.Tensor + Tuple of predictions and labels. """ self._check_output_type(output) @@ -144,7 +142,7 @@ def update(self, output): def compute(self, per_class=False, class_idxs=None): """ - [MISSING]. + Compute the Dice score. """ dice_cm_mat = self._dice_confusion_matrix(class_idxs) dice_score_per_class = dice_cm_mat.diagonal() @@ -156,7 +154,7 @@ def compute(self, per_class=False, class_idxs=None): def _dice_confusion_matrix(self, class_idxs): """ - [MISSING]. + Compute the Dice score confusion matrix. 
""" dice_intersection = self.intersection.cpu().numpy() dice_union = self.union.cpu().numpy() diff --git a/CerebNet/utils/misc.py b/CerebNet/utils/misc.py index a8e0ede0..52d142b8 100644 --- a/CerebNet/utils/misc.py +++ b/CerebNet/utils/misc.py @@ -79,6 +79,29 @@ def plot_confusion_matrix( figsize=(20, 20), file_save_name=None, ): + """ + This function prints and plots the confusion matrix. + + Parameters + ---------- + cm : np.ndarray + Confusion matrix. + classes : list + List of classes. + title : str + Title of the confusion matrix (Default value = "Confusion matrix"). + cmap : plt.cm + Color map (Default value = plt.cm.Blues). + figsize : tuple + Figure size (Default value = (20, 20)). + file_save_name : str + File save name (Default value = None). + + Returns + ------- + fig : plt.Figure + Figure object. + """ n_classes = len(classes) fig, ax = plt.subplots(figsize=figsize) @@ -179,8 +202,8 @@ def set_summary_path(cfg): Parameters ---------- - cfg : [MISSING TYPE] - [MISSING]. + cfg : yacs.config.CfgNode + Configuration node. """ summary_path = check_path(os.path.join(cfg.LOG_DIR, "summary")) cfg.EXPR_NUM = str(find_latest_experiment(os.path.join(cfg.LOG_DIR, "summary")) + 1) @@ -209,8 +232,8 @@ def update_results_dir(cfg): Parameters ---------- - cfg : [MISSING TYPE] - [MISSING]. + cfg : yacs.config.CfgNode + Configuration node. """ cfg.EXPR_NUM = str(find_latest_experiment(cfg.TEST.RESULTS_DIR) + 1) cfg.TEST.RESULTS_DIR = check_path( @@ -224,8 +247,8 @@ def update_split_path(cfg): Parameters ---------- - cfg : [MISSING TYPE] - [MISSING]. + cfg : yacs.config.CfgNode + Configuration node. """ from os.path import split, join diff --git a/FastSurferCNN/config/global_var.py b/FastSurferCNN/config/global_var.py index eee3c30b..88e5580d 100644 --- a/FastSurferCNN/config/global_var.py +++ b/FastSurferCNN/config/global_var.py @@ -147,14 +147,15 @@ def get_class_names(plane, options): Parameters ---------- - plane : - [MISSING] - options : - [MISSING] + plane : str + Plane of the MRI scan. + options : List[str] + List of classes to include. Returns ------- - [MISSING] + selection : List[str] + List of class names. """ selection = [] diff --git a/FastSurferCNN/data_loader/augmentation.py b/FastSurferCNN/data_loader/augmentation.py index a1688765..4e8a5407 100644 --- a/FastSurferCNN/data_loader/augmentation.py +++ b/FastSurferCNN/data_loader/augmentation.py @@ -350,10 +350,12 @@ class AugmentationPadImage(object): Attributes ---------- - pad_size_imag - [missing]. - pad_size_mask - [missing]. + pad_size_image : tuple + The padding size for the image. + pad_size_mask : tuple + The padding size for the mask. + pad_type : str + The type of padding to be applied. Methods ------- @@ -369,12 +371,12 @@ def __init__( """ Construct object. - Attributes + Parameters ---------- - pad_size - [MISSING]. - pad_type - [MISSING]. + pad_size : tuple + The padding size. + pad_type : str + The type of padding to be applied. """ assert isinstance(pad_size, (int, tuple)) @@ -425,7 +427,7 @@ def __init__(self, output_size: Union[int, Tuple], crop_type: str = 'Random'): output_size Size of the output image either an integer or a tuple. crop_type - [MISSING]. + The type of crop to be performed. 
""" assert isinstance(output_size, (int, tuple)) diff --git a/FastSurferCNN/data_loader/data_utils.py b/FastSurferCNN/data_loader/data_utils.py index 9bf75dc8..b5e9abd0 100644 --- a/FastSurferCNN/data_loader/data_utils.py +++ b/FastSurferCNN/data_loader/data_utils.py @@ -413,11 +413,11 @@ def filter_blank_slices_thick( Returns ------- filtered img_vol : np.ndarray - [MISSING]. + Image volume with blank slices removed. label_vol : np.ndarray - [MISSING]. + Label volume with blank slices removed. weight_vol : np.ndarray - [MISSING]. + Weight volume with blank slices removed. """ # Get indices of all slices with more than threshold labels/pixels select_slices = np.sum(label_vol, axis=(0, 1)) > threshold diff --git a/FastSurferCNN/data_loader/dataset.py b/FastSurferCNN/data_loader/dataset.py index fc68f1cb..5efe6ee3 100644 --- a/FastSurferCNN/data_loader/dataset.py +++ b/FastSurferCNN/data_loader/dataset.py @@ -256,7 +256,7 @@ def _get_scale_factor( img_zoom : torch.Tensor Image zoom factor. scale_aug : torch.Tensor - [MISSING]. + Scale augmentation factor. Returns ------- diff --git a/FastSurferCNN/inference.py b/FastSurferCNN/inference.py index 6a1b7f83..6a0cab30 100644 --- a/FastSurferCNN/inference.py +++ b/FastSurferCNN/inference.py @@ -55,9 +55,9 @@ class Inference: model_name : str Name of the model alpha : Dict[str, float] - [MISSING] + Alpha values for different planes post_prediction_mapping_hook - [MISSING] + Hook for post prediction mapping Methods ------- @@ -98,7 +98,7 @@ def __init__( ckpt : str String or os.PathLike object containing the name to the checkpoint file (Default value = ""). lut : Union[None, str, np.ndarray, DataFrame] - [MISSING] (Default value = None). + Lookup table for mapping (Default value = None). """ # Set random seed from configs. np.random.seed(cfg.RNG_SEED) @@ -162,7 +162,7 @@ def setup_model(self, cfg=None, device: torch.device = None): def set_cfg(self, cfg: yacs.config.CfgNode): """ - [MISSING]. + Set the configuration node. Parameters ---------- @@ -332,7 +332,7 @@ def eval( val_loader : DataLoader Validation loader. out_scale : Optional - [MISSING] (Default value = None). + Output scale (Default value = None). out : Optional[torch.Tensor] Previous prediction tensor (Default value = None). diff --git a/FastSurferCNN/models/interpolation_layer.py b/FastSurferCNN/models/interpolation_layer.py index b656b803..ccafd4da 100644 --- a/FastSurferCNN/models/interpolation_layer.py +++ b/FastSurferCNN/models/interpolation_layer.py @@ -286,18 +286,18 @@ def _calculate_crop_pad( alignment: str, ) -> _T.Tuple[slice, T_Scale, _T.Tuple[int, int], int]: """ - Return start- and end- coordinate given sizes, the updated scale factor [MISSING]. + Return start- and end- coordinate given sizes, the updated scale factor. Parameters ---------- in_shape : _T.Sequence[int] - [MISSING]. + Input shape. scale_factor : T_Scale - [MISSING]. + Scale factor. dim : int Dimension to be cropped. alignment : str - [MISSING]. + Alignment of the cropping. Returns ------- diff --git a/FastSurferCNN/models/losses.py b/FastSurferCNN/models/losses.py index de667f6e..7b25820d 100644 --- a/FastSurferCNN/models/losses.py +++ b/FastSurferCNN/models/losses.py @@ -169,7 +169,7 @@ def forward( self, inputx: Tensor, target: Tensor, weight: Tensor ) -> Tuple[Tensor, Tensor, Tensor]: """ - [MISSING]. + Calculate the total loss, dice loss and cross entropy value for the given input. 
Parameters ---------- diff --git a/FastSurferCNN/models/networks.py b/FastSurferCNN/models/networks.py index cdfdf326..312a8197 100644 --- a/FastSurferCNN/models/networks.py +++ b/FastSurferCNN/models/networks.py @@ -105,7 +105,7 @@ def forward( scale_factor : Tensor, optional [N, 1] Defaults to None. scale_factor_out : Tensor, optional - [Missing]. + Tensor representing the scale factor for the output. Defaults to None. Returns ------- @@ -195,7 +195,7 @@ def forward( scale_factor : Tensor, optional [N, 1] Defaults to None. scale_factor_out : Tensor, optional - [Missing]. + Tensor representing the scale factor for the output. Defaults to None. Returns ------- @@ -337,9 +337,9 @@ def forward( x : Tensor Input image [N, C, H, W]. scale_factor : Tensor - [MISSING] [N, 1]. + Tensor of shape [N, 1] representing the scale factor for each image in the batch. scale_factor_out : Tensor, Optional - [MISSING]. + Tensor representing the scale factor for the output. Defaults to None. Returns ------- diff --git a/FastSurferCNN/models/sub_module.py b/FastSurferCNN/models/sub_module.py index 2f84da4e..7f38cf20 100644 --- a/FastSurferCNN/models/sub_module.py +++ b/FastSurferCNN/models/sub_module.py @@ -119,7 +119,7 @@ def forward(self, x: Tensor) -> Tensor: Returns ------- out : Tensor - [MISSING]. + Output image (processed feature map). """ # Input batch normalization x0_bn = self.bn0(x) @@ -448,9 +448,9 @@ def __init__(self, sigma: float = 0.1, device: str = "cuda"): Parameters ---------- sigma : float - [MISSING] (Default value = 0.1). + Standard deviation of the GaussianNoise (Default value = 0.1). device : str - [MISSING] (Default value = "cuda"). + Device to run the model on (Default value = "cuda"). """ super().__init__() self.sigma = sigma diff --git a/FastSurferCNN/quick_qc.py b/FastSurferCNN/quick_qc.py index 8c64f59b..8b37f17f 100644 --- a/FastSurferCNN/quick_qc.py +++ b/FastSurferCNN/quick_qc.py @@ -76,11 +76,11 @@ def check_volume(asegdkt_segfile:np.ndarray, voxvol: float, thres: float = 0.70) Parameters ---------- asegdkt_segfile : np.ndarray - [MISSING]. + The segmentation file. voxvol : float - [MISSING]. + The volume of a voxel. thres : float - [MISSING]. + The threshold for the total volume (Default value = 0.70). Returns ------- diff --git a/FastSurferCNN/reduce_to_aseg.py b/FastSurferCNN/reduce_to_aseg.py index ec0ad400..87d14f88 100644 --- a/FastSurferCNN/reduce_to_aseg.py +++ b/FastSurferCNN/reduce_to_aseg.py @@ -92,17 +92,18 @@ def options_parse(): def reduce_to_aseg(data_inseg): """ - [MISSING]. + Reduce the input segmentation to a simpler segmentation. Parameters ---------- data_inseg : - - [MISSING]. + The input segmentation. This should be a 3D array where the value at each position represents the segmentation + label for that position. Returns ------- - [MISSING] - Returns reduced_aseg. + data_inseg : - + The reduced segmentation. """ print("Reducing to aseg ...") # replace 2000... with 42 @@ -119,11 +120,11 @@ def create_mask(aseg_data, dnum, enum): Parameters ---------- aseg_data : - - [MISSING]. + The input segmentation data. dnum : - - [MISSING]. + The number of iterations for the dilation operation. enum : - - [MISSING]. + The number of iterations for the erosion operation. Returns ------- @@ -169,17 +170,17 @@ def create_mask(aseg_data, dnum, enum): def flip_wm_islands(aseg_data): """ - [MISSING]. + Flip labels of disconnected white matter islands to the other hemisphere. Parameters ---------- - aseg_data : - - [MISSING]. 
+ aseg_data : numpy.ndarray + The input segmentation data. Returns ------- - flip_data - [MISSING]. + flip_data : numpy.ndarray + The segmentation data with flipped WM labels. """ # Sometimes WM is far in the other hemisphere, but with a WM label from the other hemi # These are usually islands, not connected to the main hemi WM component diff --git a/FastSurferCNN/run_model.py b/FastSurferCNN/run_model.py index 230edd0a..e4894726 100644 --- a/FastSurferCNN/run_model.py +++ b/FastSurferCNN/run_model.py @@ -58,8 +58,12 @@ def make_parser() -> argparse.ArgumentParser: return parser + def main(args): - """[MISSING] First set variables and then runs the trainer model.""" + """ + First sets variables and then runs the trainer model. + """ + args = setup_options() cfg = get_config(args) if args.aug is not None: diff --git a/FastSurferCNN/segstats.py b/FastSurferCNN/segstats.py index 1223f88c..28786e4c 100644 --- a/FastSurferCNN/segstats.py +++ b/FastSurferCNN/segstats.py @@ -767,17 +767,18 @@ def seg_borders( def _laplace(data): """ - [MISSING]. + Helper function to compute the Laplacian of the data, and return a + boolean array where the Laplacian is not zero. Parameters ---------- data : - [MISSING]. + Input data. Returns ------- bool - [MISSING]. + Boolean array where Laplacian is not zero. """ return laplace(data.astype(cmp_dtype)) != np.asarray(0.0, dtype=cmp_dtype) @@ -802,20 +803,22 @@ def borders( Parameters ---------- _array : _ArrayType - [MISSING]. + Input labeled array or binary image. labels : Union[Iterable[np.int], bool] - [MISSING]. + List of labels for which borders will be computed. + If labels is True, _array is treated as a binary mask. max_label : Optional[np.int], Optional - [MISSING]. + The maximum label ot consider. If None, the maximum label in the array is used. six_connected : bool - [MISSING]. + If True, 6-connected borders are computed, + otherwise 26-connected borders are computed. out : Optional[_ArrayType] - [MISSING]. + Output array to store the computed borders (Optional). Returns ------- _ArrayType - [MISSING]. + A binary image where borders are marked as True. """ dim = _array.ndim array_alloc = partial(np.full, dtype=_array.dtype) @@ -910,16 +913,16 @@ def grow_patch( Parameters ---------- patch : Sequence[slice] - [MISSING]. + A sequence of slices. whalf : int - [MISSING]. + Integer that specifies the amount to grow/ungrow the patch. img_size : Union[np.ndarray, Sequence[float]] - [MISSING]. + Size of the image. Returns ------- Tuple[Tuple[slice, ...], Tuple[slice, ...]] - [MISSING]. + A tuple containing the grown patch and the ungrown patch. """ # patch start/stop _patch = np.asarray([(s.start, s.stop) for s in patch]) @@ -952,20 +955,20 @@ def uniform_filter( Parameters ---------- arr : _ArrayType - [MISSING]. + Input array. filter_size : int - [MISSING]. + Size of the uniform filter. fillval : float - [MISSING]. + Fill value when the filter is outside the array. patch : Optional[Tuple[slice, ...]] - [MISSING]. + Sub-region of the array to apply filter to (Default: None). out : Optional[_ArrayType] - [MISSING]. + Output array to store the result (Default: None). Returns ------- _ArrayType - [MISSING]. + The filtered array. """ _patch = (slice(None),) if patch is None else patch arr = arr.astype(float) @@ -1313,27 +1316,29 @@ def global_stats( ], ]: """ - Compute Label, Number of voxels, 'robust' number of voxels, norm minimum, maximum, sum, sum of squares and 6-connected border of label lab (out references the border). 
+ Compute Label, Number of voxels, 'robust' number of voxels, norm minimum, maximum, sum, + sum of squares and 6-connected border of label lab (out references the border). Parameters ---------- lab : _IntType - [MISSING]. + Label to compute statistics for. norm : pt.NDArray[_NumberType] - [MISSING]. + Normalized image. seg : npt.NDArray[_IntType] - [MISSING]. + Segmentation image. out : npt.NDArray[bool], Optional - [MISSING]. + Output array to store the computed borders (Optional). robust_percentage : float, Optional - [MISSING]. + Percentage of values to keep for robust statistics (Default: None). Returns ------- _IntType and int - [MISSING]. + Label and number of voxels. or _IntType, int, int, _NumberType, _NumberType, float, float, float and npt.NDArray[bool] - [MISSING]. + Label, number of voxels, 'robust' number of voxels, norm minimum, maximum, sum, + sum of squares, volume and border. """ bin_array = cast(npt.NDArray[bool], seg == lab) data = norm[bin_array].astype( @@ -1381,20 +1386,20 @@ def patch_filter( Parameters ---------- pos : Tuple[int, int, int] - [MISSING]. + Starting position of the patch. mask : npt.NDArray[bool] - [MISSING]. + Mask to crop to. global_crop : Tuple[slice, ...] - [MISSING]. + Global cropping context. patch_size : int - [MISSING]. Defaults to 32. + Size of patch. Defaults to 32. Returns ------- bool - [MISSING]. + Whether there are mask-True voxels in the patch. Sequence[slice] - [MISSING]. + Cropped patch. """ # create slices for current patch context (constrained by the global_crop) patch = [ @@ -1425,9 +1430,9 @@ def crop_patch_to_mask( Returns ------- bool - [MISSING]. + Whether there are mask-True voxels in the patch. Sequence[slice] - [MISSING]. + Cropped patch. Notes ----- @@ -1492,36 +1497,39 @@ def pv_calc_patch( Parameters ---------- patch : Tuple[slice, ...] - [MISSING]. + Patch to calculate PV for. global_crop : Tuple[slice, ...] - [MISSING]. + Global cropping context. loc_border : Dict[_IntType, npt.NDArray[bool]] - [MISSING]. + Dictionary mapping labels to their borders. seg : npt.NDArray[_IntType] - [MISSING]. + Segmentation image. norm : np.ndarray - [MISSING]. + Normalized image. border : npt.NDArray[bool] - [MISSING]. + Border of the patch. full_pv : npt.NDArray[float], Optional - [MISSING]. + Array to store the partial volume for each voxel in the patch (Optional). full_ipv : npt.NDArray[float], Optional - [MISSING]. + Array to store the inverse partial volume for each voxel in the patch (Optional). full_nbr_label : npt.NDArray[_IntType], Optional - [MISSING]. + Array to store the label for each neighboring voxel that contributes to the + partial volume calculation. (Optional). full_seg_mean : npt.NDArray[float], Optional - [MISSING]. + Array to store the mean intensity of the segmentation label for each voxel in + the patch (Optional). full_nbr_mean : npt.NDArray[float], Optional - [MISSING]. + Array to store the mean intensity of the neighboring voxels that contribute to + the partial volume calculation for each voxel in the patch (Optional). eps : float - [MISSING]. Defaults to 1e-6. + Epsilon. Defaults to 1e-6. legacy_freesurfer : bool - [MISSING]. + Whether to use a freesurfer legacy compatibility mode to exactly replicate freesurfer. Returns ------- Dict[_IntType, float] - [MISSING]. + Partial and inverse partial volumes for each label in the patch. 
""" log_eps = -int(np.log10(eps)) diff --git a/FastSurferCNN/train.py b/FastSurferCNN/train.py index 4724fad0..8e30ebd4 100644 --- a/FastSurferCNN/train.py +++ b/FastSurferCNN/train.py @@ -112,9 +112,9 @@ def train( scheduler : None, scheduler.StepLR, scheduler.CosineAnnealingWarmRestarts LR scheduler for the training. train_meter : Meter - [MISSING]. + Meter to keep track of the training stats. epoch : int - [MISSING]. + Current epoch. """ self.model.train() diff --git a/FastSurferCNN/utils/checkpoint.py b/FastSurferCNN/utils/checkpoint.py index 7579eaab..3a6b86b3 100644 --- a/FastSurferCNN/utils/checkpoint.py +++ b/FastSurferCNN/utils/checkpoint.py @@ -128,7 +128,7 @@ def create_checkpoint_dir(expr_dir: Union[os.PathLike], expr_num: int): expr_dir : Union[os.PathLike] Directory to create. expr_num : int - Number of expr [MISSING]. + Experiment number. Returns ------- @@ -277,7 +277,7 @@ def save_checkpoint( scheduler : Optional[Scheduler] Used network scheduler. Optional (Default value = None). best : bool - Whether this was the best checkpoint so far [MISSING] (Default value = False). + Whether this was the best checkpoint so far (Default value = False). """ save_name = f"Epoch_{epoch:05d}_training_state.pkl" saving_model = model.module if num_gpus > 1 else model diff --git a/FastSurferCNN/utils/common.py b/FastSurferCNN/utils/common.py index 873c7527..d2577b85 100644 --- a/FastSurferCNN/utils/common.py +++ b/FastSurferCNN/utils/common.py @@ -467,7 +467,7 @@ def copy_orig_name(self, _copy_orig_name: str): Parameters ---------- _copy_orig_name : str - [MISSING]. + The copy of the orig name. Returns ------- @@ -487,7 +487,7 @@ def conf_name(self) -> Path: Returns ------- Path - [MISSING]. + The path to the conformed image file. """ assert ( hasattr(self, "_conf_name") or "The conf_name attribute has not been set!" @@ -497,17 +497,13 @@ def conf_name(self) -> Path: @conf_name.setter def conf_name(self, _conf_name: str): """ - [MISSING]. + Set the path to the conformed image. Parameters ---------- _conf_name : str - [MISSING]. + Path to the conformed image. - Returns - ------- - str - [MISSING]. """ self._conf_name = _conf_name @@ -534,7 +530,7 @@ def segfile(self, _segfile: str): Parameters ---------- _segfile : str - [MISSING]. + Path to the segmentation file. """ self._segfile = _segfile @@ -1001,12 +997,14 @@ def __getitem__(self, item: int | str) -> SubjectDirectory: Parameters ---------- item : int, str - [MISSING]. + The index or name of the subject. + If integer, it is treated as an index and corresponding subject is returned. + If string, it is treated as the subject. Returns ------- SubjectDirectory - [MISSING]. + A SubjectDirectory object corresponding to the provided index or name. """ if isinstance(item, int): if item < 0 or item >= self._num_subjects: diff --git a/FastSurferCNN/utils/mapper.py b/FastSurferCNN/utils/mapper.py index 3c664ef2..fa3550d1 100644 --- a/FastSurferCNN/utils/mapper.py +++ b/FastSurferCNN/utils/mapper.py @@ -226,15 +226,15 @@ def update( Parameters ---------- - other : "Mapper[KT, VT]" - [MISSING]. + other : Mapper[KT, VT] + The other Mapper object whose key-value pairs are to be added to this Mapper object. overwrite : bool - [MISSING] (Default value = True). + Flag to overwrite value if key already exists in Mapper object (Default value = True). Returns ------- - "Mapper[KT, VT]" - [MISSING]. + Mapper[KT, VT] + Mapper after merge. 
""" for key, value in iter(other): if overwrite or key not in self._map_dict: @@ -264,7 +264,7 @@ def map(self, image: AT, out: Optional[AT] = None) -> AT: Returns ------- AT - [MISSING]. + Data after being mapped to the internal space. """ # torch sparse tensors can't index with images # self._map = _b.torch.sparse_coo_tensor(src_labels, labels, (self._max_label,) + self._label_shape) @@ -351,7 +351,7 @@ def _map_py(self, image: AT, out: Optional[AT] = None) -> AT: Returns ------- AT - [MISSING]. + Image data after being mapped. """ out_type = image if out is None else out if out is None: @@ -434,7 +434,7 @@ def __getitem__(self, item: KT) -> VT: def __iter__(self) -> Iterator[Tuple[KT, VT]]: """ - [MISSING]. + Create an iterator for the Mapper object. """ return iter(self._map_dict.items()) @@ -518,7 +518,7 @@ def make_classmapper( Returns ------- "Mapper[int, int]" - [MISSING]. + A Mapper object that provides a mapping from one label space to another. Raises ------ @@ -756,7 +756,7 @@ def __getitem__(self, key: KT) -> Tuple[int, KT, Tuple[int, int, int, int], Any] Parameters ---------- key : KT - [MISSING]. + The key for which the information is to be retrieved. Raises ------- @@ -797,7 +797,7 @@ def get_color_by_index(self, index: int, base: NT = 1.0) -> Tuple[NT, NT, NT, NT def colormap(self) -> Mapper[KT, ColorTuple]: """ - [MISSING]. + Generate a Mapper object that maps classes to their corresponding colors. """ if self._color_palette is None: raise RuntimeError("No color_palette set") @@ -833,7 +833,7 @@ def labelname2id(self) -> Mapper[KT, Any]: class JsonColorLookupTable(ColorLookupTable[KT]): """ - [MISSING]. + This class extends the ColorLookupTable to handle JSON data. """ _data: Any @@ -913,7 +913,7 @@ def _get_labels(self) -> Union[Dict[KT, Any], Iterable[KT]]: def dataframe(self) -> pandas.DataFrame: """ - [MISSING]. + Converts the labels from the internal data dictionary to a pandas DataFrame. """ if isinstance(self._data, dict) and "labels" in self._data: return pandas.DataFrame.from_dict(self._data["labels"]) @@ -952,7 +952,7 @@ def labelname2id(self) -> Mapper[KT, Any]: class TSVLookupTable(ColorLookupTable[str]): """ - [MISSING]. + This class extends the ColorLookupTable to handle TSV (Tab Separated Values) data. """ _data: pandas.DataFrame @@ -1036,13 +1036,13 @@ def getitem_by_index( Returns ------- index : int - [MISSING]. + The index of the entry. key : str - [MISSING]. + The label name associated with the entry. color : Tuple[int, int, int, int] - [MISSING]. + The RGBA color label associated with the entry. int - [MISSING]. + The data index associated with the entry. """ index, key, color, _ = super(TSVLookupTable, self).getitem_by_index(index) return index, key, color, self._data.iloc[index].name diff --git a/FastSurferCNN/utils/meters.py b/FastSurferCNN/utils/meters.py index 6e8500be..e0a7b902 100644 --- a/FastSurferCNN/utils/meters.py +++ b/FastSurferCNN/utils/meters.py @@ -28,7 +28,9 @@ class Meter: - """[MISSING].""" + """ + Meter class to keep track of the losses and scores during training and validation. + """ def __init__( self, @@ -47,21 +49,21 @@ def __init__( Parameters ---------- cfg - [MISSING] + Configuration Node. mode - [MISSING] + Meter mode (Train or Val). global_step - [MISSING] + Global step. total_iter - [MISSING] + Total iterations (Default value = None). total_epoch - [MISSING] + Total epochs (Default value = None). class_names - [MISSING] + Class names (Default value = None). device - [MISSING] + Device (Default value = None). 
writer - [MISSING] + Writer (Default value = None). """ self._cfg = cfg @@ -87,19 +89,19 @@ def reset(self): def enable_confusion_mat(self): """ - [MISSING]. + Enable confusion matrix. """ self.confusion_mat = True def disable_confusion_mat(self): """ - [MISSING]. + Disable confusion matrix. """ self.confusion_mat = False def update_stats(self, pred, labels, batch_loss): """ - [MISSING]. + Update the statistics. """ self.dice_score.update((pred, labels), self.confusion_mat) self.batch_losses.append(batch_loss.item()) @@ -110,14 +112,14 @@ def write_summary(self, loss_total, lr=None, loss_ce=None, loss_dice=None): Parameters ---------- - loss_total : [MISSING] - [MISSING]. + loss_total : torch.Tensor + Total loss. lr : default = None - [MISSING] (Default value = None). + Learning rate (Default value = None). loss_ce : default = None - [MISSING] (Default value = None). + Cross entropy loss (Default value = None). loss_dice : default = None - [MISSING] (Default value = None). + Dice loss (Default value = None). """ self.writer.add_scalar( f"{self.mode}/total_loss", loss_total.item(), self.global_iter diff --git a/FastSurferCNN/utils/metrics.py b/FastSurferCNN/utils/metrics.py index 3c0dca5e..0b0cadf2 100644 --- a/FastSurferCNN/utils/metrics.py +++ b/FastSurferCNN/utils/metrics.py @@ -80,9 +80,11 @@ def precision_recall( Returns ------- np.ndarray - [MISSING]. + An array containing the number of true positives for each class. np.ndarray - [MISSING]. + An array containing the sum of true positives and false negatives for each class. + np.ndarray + An array containing the sum of true positives and false positives for each class. """ tpos_fneg = [] tpos_fpos = [] @@ -137,7 +139,7 @@ def __init__( def reset(self): """ - [MISSING]. + Reset the union and intersection matrices to zero. """ self.union = torch.zeros(self.n_classes, self.n_classes, device=self._device) self.intersection = torch.zeros( @@ -239,7 +241,14 @@ def comput_dice_cnf(self): def _dice_calculation(self): """ - [MISSING]. + Calculate the Dice Score. + + The Dice Score is calculated as 2 * (intersection / union). + + Returns + ------- + dsc : torch.Tensor + The Dice Score for each class. """ intersection = self.intersection.diagonal() union = self.union.diagonal() @@ -248,7 +257,12 @@ def _dice_calculation(self): def _dice_confusion_matrix(self): """ - [MISSING]. + Calculate the Dice confusion matrix. + + Returns + ------- + dice_cnf_matrix : numpy.ndarray + The Dice confusion matrix for each class. """ dice_intersection = self.intersection.cpu().numpy() dice_union = self.union.cpu().numpy() diff --git a/FastSurferCNN/utils/misc.py b/FastSurferCNN/utils/misc.py index cbfd1cb0..310caeaa 100644 --- a/FastSurferCNN/utils/misc.py +++ b/FastSurferCNN/utils/misc.py @@ -107,7 +107,7 @@ def plot_confusion_matrix( Returns ------- fig : matplotlib.figure.Figure - [MISSING]. + Matplotlib Figure object with the confusion matrix plot. """ n_classes = len(classes) @@ -195,8 +195,8 @@ def update_num_steps( Parameters ---------- dataloader : FastSurferCNN.data_loader.loader.DataLoader - [MISSING]. + The dataloader object that contains the training data. cfg : yacs.config.CfgNode - [MISSING]. + The configuration object that contains the training configuration. 
""" cfg.TRAIN.NUM_STEPS = len(dataloader) diff --git a/FastSurferCNN/utils/parser_defaults.py b/FastSurferCNN/utils/parser_defaults.py index 63d1a84a..767c990d 100644 --- a/FastSurferCNN/utils/parser_defaults.py +++ b/FastSurferCNN/utils/parser_defaults.py @@ -51,12 +51,12 @@ class CanAddArguments(Protocol): """ - [MISSING]. + """ def add_argument(self, *args, **kwargs): """ - [MISSING]. + Add an argument to the object. """ ... diff --git a/recon_surf/N4_bias_correct.py b/recon_surf/N4_bias_correct.py index 637b0e04..0998c52a 100644 --- a/recon_surf/N4_bias_correct.py +++ b/recon_surf/N4_bias_correct.py @@ -318,7 +318,7 @@ def normalize_wm_mask_ball( itk_mask : sitk.Image, optional Image mask. radius : float | int - Defaults to 50 [MISSING]. + Radius of ball around centroid. Defaults to 50. centroid : np.ndarray Brain centroid. target_wm : float | int @@ -389,6 +389,10 @@ def normalize_wm_aseg( Image mask. itk_aseg : sitk.Image Aseg-like segmentation image to find WM. + radius : float | int + Radius of ball around centroid. Defaults to 50. + centroid : Optional[np.ndarray] + Image centroid. Defaults to None. target_wm : float | int Target white matter intensity. Defaults to 110. target_bg : float | int diff --git a/recon_surf/align_points.py b/recon_surf/align_points.py index d1a4a256..e94a8f1c 100755 --- a/recon_surf/align_points.py +++ b/recon_surf/align_points.py @@ -132,7 +132,7 @@ def find_rotation(p_mov: npt.NDArray, p_dst: npt.NDArray) -> np.ndarray: def find_rigid(p_mov: npt.NDArray, p_dst: npt.NDArray) -> np.ndarray: """ - [MISSING]. + Find rigid transformation matrix between two point sets. Parameters ---------- diff --git a/recon_surf/image_io.py b/recon_surf/image_io.py index 3e3306bd..44e53100 100644 --- a/recon_surf/image_io.py +++ b/recon_surf/image_io.py @@ -181,7 +181,8 @@ def writeITKimage( filename: str, header: Optional[nib.freesurfer.mghformat.MGHHeader] = None ) -> None: - """[MISSING]. + """ + Writes the given ITK image to a file. Parameters ---------- From 3190c90d504b4bb216abc4076e8deb323e76f27d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20K=C3=BCgler?= Date: Fri, 31 May 2024 18:53:08 +0200 Subject: [PATCH 05/19] Fixing docstrings and typing Apply suggestions from code review --- CerebNet/datasets/load_data.py | 2 +- CerebNet/utils/misc.py | 16 ++++++++-------- FastSurferCNN/inference.py | 6 +++--- FastSurferCNN/models/sub_module.py | 4 ++-- FastSurferCNN/quick_qc.py | 2 +- FastSurferCNN/reduce_to_aseg.py | 14 +++++++------- FastSurferCNN/segstats.py | 26 +++++++++++++------------- FastSurferCNN/utils/checkpoint.py | 2 +- FastSurferCNN/utils/mapper.py | 2 +- recon_surf/N4_bias_correct.py | 6 +++--- 10 files changed, 40 insertions(+), 40 deletions(-) diff --git a/CerebNet/datasets/load_data.py b/CerebNet/datasets/load_data.py index 40818edd..500362fb 100644 --- a/CerebNet/datasets/load_data.py +++ b/CerebNet/datasets/load_data.py @@ -72,7 +72,7 @@ def _load_volumes(self, subject_path, store_talairach=False): ---------- subject_path : str The path to the subject's data directory. - store_talairach : bool, optional + store_talairach : bool, default=False If True, the method will attempt to load the Talairach coordinates. Defaults to False. Returns diff --git a/CerebNet/utils/misc.py b/CerebNet/utils/misc.py index 52d142b8..6e7ed790 100644 --- a/CerebNet/utils/misc.py +++ b/CerebNet/utils/misc.py @@ -88,14 +88,14 @@ def plot_confusion_matrix( Confusion matrix. classes : list List of classes. 
- title : str - Title of the confusion matrix (Default value = "Confusion matrix"). - cmap : plt.cm - Color map (Default value = plt.cm.Blues). - figsize : tuple - Figure size (Default value = (20, 20)). - file_save_name : str - File save name (Default value = None). + title : str, default="Confusion matrix" + Title of the confusion matrix. + cmap : plt.cm, default=matplotlib.pyplot.cm.Blues + Color map. + figsize : tuple, default=(20, 20) + Figure size. + file_save_name : str, optional + File save name. Returns ------- diff --git a/FastSurferCNN/inference.py b/FastSurferCNN/inference.py index 6a0cab30..cdf5b169 100644 --- a/FastSurferCNN/inference.py +++ b/FastSurferCNN/inference.py @@ -55,9 +55,9 @@ class Inference: model_name : str Name of the model alpha : Dict[str, float] - Alpha values for different planes + Alpha values for different planes. post_prediction_mapping_hook - Hook for post prediction mapping + Hook for post prediction mapping. Methods ------- @@ -97,7 +97,7 @@ def __init__( Device specification for distributed computation usage. ckpt : str String or os.PathLike object containing the name to the checkpoint file (Default value = ""). - lut : Union[None, str, np.ndarray, DataFrame] + lut : str, np.ndarray, DataFrame, optional Lookup table for mapping (Default value = None). """ # Set random seed from configs. diff --git a/FastSurferCNN/models/sub_module.py b/FastSurferCNN/models/sub_module.py index 7f38cf20..67c687d9 100644 --- a/FastSurferCNN/models/sub_module.py +++ b/FastSurferCNN/models/sub_module.py @@ -447,9 +447,9 @@ def __init__(self, sigma: float = 0.1, device: str = "cuda"): Parameters ---------- - sigma : float + sigma : float, default=0.1 Standard deviation of the GaussianNoise (Default value = 0.1). - device : str + device : str, default="cuda" Device to run the model on (Default value = "cuda"). """ super().__init__() diff --git a/FastSurferCNN/quick_qc.py b/FastSurferCNN/quick_qc.py index 8b37f17f..313580ff 100644 --- a/FastSurferCNN/quick_qc.py +++ b/FastSurferCNN/quick_qc.py @@ -79,7 +79,7 @@ def check_volume(asegdkt_segfile:np.ndarray, voxvol: float, thres: float = 0.70) The segmentation file. voxvol : float The volume of a voxel. - thres : float + thres : float, default=0.7 The threshold for the total volume (Default value = 0.70). Returns diff --git a/FastSurferCNN/reduce_to_aseg.py b/FastSurferCNN/reduce_to_aseg.py index 87d14f88..74cfd550 100644 --- a/FastSurferCNN/reduce_to_aseg.py +++ b/FastSurferCNN/reduce_to_aseg.py @@ -90,19 +90,19 @@ def options_parse(): return options -def reduce_to_aseg(data_inseg): +def reduce_to_aseg(data_inseg: np.ndarray) -> np.ndarray: """ Reduce the input segmentation to a simpler segmentation. Parameters ---------- - data_inseg : - + data_inseg : np.ndarray, torch.Tensor The input segmentation. This should be a 3D array where the value at each position represents the segmentation label for that position. Returns ------- - data_inseg : - + data_inseg : np.ndarray, torch.Tensor The reduced segmentation. """ print("Reducing to aseg ...") @@ -119,11 +119,11 @@ def create_mask(aseg_data, dnum, enum): Parameters ---------- - aseg_data : - + aseg_data : npt.NDArray[int] The input segmentation data. - dnum : - + dnum : int The number of iterations for the dilation operation. - enum : - + enum : int The number of iterations for the erosion operation. 
Returns @@ -168,7 +168,7 @@ def create_mask(aseg_data, dnum, enum): return aseg_data -def flip_wm_islands(aseg_data): +def flip_wm_islands(aseg_datanp.ndarray) -> np.ndarray: """ Flip labels of disconnected white matter islands to the other hemisphere. diff --git a/FastSurferCNN/segstats.py b/FastSurferCNN/segstats.py index 28786e4c..f2b93d52 100644 --- a/FastSurferCNN/segstats.py +++ b/FastSurferCNN/segstats.py @@ -772,12 +772,12 @@ def _laplace(data): Parameters ---------- - data : + data : np.ndarray Input data. Returns ------- - bool + npt.NDArray[bool] Boolean array where Laplacian is not zero. """ return laplace(data.astype(cmp_dtype)) != np.asarray(0.0, dtype=cmp_dtype) @@ -804,15 +804,15 @@ def borders( ---------- _array : _ArrayType Input labeled array or binary image. - labels : Union[Iterable[np.int], bool] + labels : Iterable[np.int], bool List of labels for which borders will be computed. If labels is True, _array is treated as a binary mask. - max_label : Optional[np.int], Optional + max_label : np.int, optional The maximum label ot consider. If None, the maximum label in the array is used. - six_connected : bool + six_connected : bool, default=True If True, 6-connected borders are computed, otherwise 26-connected borders are computed. - out : Optional[_ArrayType] + out : _ArrayType, optional Output array to store the computed borders (Optional). Returns @@ -916,12 +916,12 @@ def grow_patch( A sequence of slices. whalf : int Integer that specifies the amount to grow/ungrow the patch. - img_size : Union[np.ndarray, Sequence[float]] + img_size : np.ndarray, Sequence[float] Size of the image. Returns ------- - Tuple[Tuple[slice, ...], Tuple[slice, ...]] + tuple[tuple[slice, ...], tuple[slice, ...]] A tuple containing the grown patch and the ungrown patch. """ # patch start/stop @@ -960,9 +960,9 @@ def uniform_filter( Size of the uniform filter. fillval : float Fill value when the filter is outside the array. - patch : Optional[Tuple[slice, ...]] + patch : tuple[slice, ...], optional Sub-region of the array to apply filter to (Default: None). - out : Optional[_ArrayType] + out : _ArrayType, optional Output array to store the result (Default: None). Returns @@ -1391,7 +1391,7 @@ def patch_filter( Mask to crop to. global_crop : Tuple[slice, ...] Global cropping context. - patch_size : int + patch_size : int, default=32 Size of patch. Defaults to 32. Returns @@ -1521,9 +1521,9 @@ def pv_calc_patch( full_nbr_mean : npt.NDArray[float], Optional Array to store the mean intensity of the neighboring voxels that contribute to the partial volume calculation for each voxel in the patch (Optional). - eps : float + eps : float, default=1e-6 Epsilon. Defaults to 1e-6. - legacy_freesurfer : bool + legacy_freesurfer : bool, default=False Whether to use a freesurfer legacy compatibility mode to exactly replicate freesurfer. Returns diff --git a/FastSurferCNN/utils/checkpoint.py b/FastSurferCNN/utils/checkpoint.py index 3a6b86b3..ef472074 100644 --- a/FastSurferCNN/utils/checkpoint.py +++ b/FastSurferCNN/utils/checkpoint.py @@ -276,7 +276,7 @@ def save_checkpoint( Used network optimizer. scheduler : Optional[Scheduler] Used network scheduler. Optional (Default value = None). - best : bool + best : bool, default=False Whether this was the best checkpoint so far (Default value = False). 
""" save_name = f"Epoch_{epoch:05d}_training_state.pkl" diff --git a/FastSurferCNN/utils/mapper.py b/FastSurferCNN/utils/mapper.py index fa3550d1..fa96a83f 100644 --- a/FastSurferCNN/utils/mapper.py +++ b/FastSurferCNN/utils/mapper.py @@ -228,7 +228,7 @@ def update( ---------- other : Mapper[KT, VT] The other Mapper object whose key-value pairs are to be added to this Mapper object. - overwrite : bool + overwrite : bool, default=True Flag to overwrite value if key already exists in Mapper object (Default value = True). Returns diff --git a/recon_surf/N4_bias_correct.py b/recon_surf/N4_bias_correct.py index 0998c52a..95a65c06 100644 --- a/recon_surf/N4_bias_correct.py +++ b/recon_surf/N4_bias_correct.py @@ -317,7 +317,7 @@ def normalize_wm_mask_ball( N-dimensional itk image. itk_mask : sitk.Image, optional Image mask. - radius : float | int + radius : float, int, default=50 Radius of ball around centroid. Defaults to 50. centroid : np.ndarray Brain centroid. @@ -389,9 +389,9 @@ def normalize_wm_aseg( Image mask. itk_aseg : sitk.Image Aseg-like segmentation image to find WM. - radius : float | int + radius : float, int, default=50 Radius of ball around centroid. Defaults to 50. - centroid : Optional[np.ndarray] + centroid : np.ndarray, optional Image centroid. Defaults to None. target_wm : float | int Target white matter intensity. Defaults to 110. From 3622c13c049a367e72ca9c0db8332c0493101db7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20K=C3=BCgler?= Date: Fri, 31 May 2024 18:57:55 +0200 Subject: [PATCH 06/19] Fix typo in typing --- FastSurferCNN/reduce_to_aseg.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FastSurferCNN/reduce_to_aseg.py b/FastSurferCNN/reduce_to_aseg.py index 74cfd550..c7c3cc69 100644 --- a/FastSurferCNN/reduce_to_aseg.py +++ b/FastSurferCNN/reduce_to_aseg.py @@ -168,7 +168,7 @@ def create_mask(aseg_data, dnum, enum): return aseg_data -def flip_wm_islands(aseg_datanp.ndarray) -> np.ndarray: +def flip_wm_islands(aseg_data : np.ndarray) -> np.ndarray: """ Flip labels of disconnected white matter islands to the other hemisphere. From 1aa8bbcaaa11d5c499df23af5a97b8ed1befc0bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20K=C3=BCgler?= Date: Mon, 17 Jun 2024 12:36:13 -0400 Subject: [PATCH 07/19] Remove -debug flag for timing If `fs_time` fails (this would usually not happen in Linux), `timecmd` is set to empty (no timing is performed) and the `-debug` flag in line 38 becomes the "command". This raises an error. --- recon_surf/functions.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recon_surf/functions.sh b/recon_surf/functions.sh index b7560d83..0b719a09 100644 --- a/recon_surf/functions.sh +++ b/recon_surf/functions.sh @@ -35,7 +35,7 @@ function RunIt() echo "if [ \${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi" >> $CMDF else echo $cmd |& tee -a $LF - $timecmd -debug $cmd |& tee -a $LF + $timecmd $cmd |& tee -a $LF if [ ${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi fi } From b8fa9d0d80ffc91b0648f3f605b2dfcadd0f5359 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 18 Jun 2024 16:07:07 +0200 Subject: [PATCH 08/19] flexible versioining for mac --- requirements.mac.txt | 203 ++++--------------------------------------- 1 file changed, 18 insertions(+), 185 deletions(-) diff --git a/requirements.mac.txt b/requirements.mac.txt index f1afd67f..9de44fa4 100644 --- a/requirements.mac.txt +++ b/requirements.mac.txt @@ -1,185 +1,18 @@ -# -# This file is manually created from the autogenerated -# requirements.txt . 
It is experimental to support MAC -# (intel, apple silicon and gpus via mps). For this we -# currently need the nightly torch and torchvision. -# ---extra-index-url https://download.pytorch.org/whl/nightly/cpu - -absl-py==1.2.0 - # via tensorboard -cachetools==5.2.0 - # via google-auth -certifi==2022.6.15 - # via requests -charset-normalizer==2.1.0 - # via requests -click==8.1.3 - # via torchio -cycler==0.11.0 - # via matplotlib -deprecated==1.2.13 - # via torchio -fonttools==4.34.4 - # via matplotlib -google-auth==2.9.1 - # via - # google-auth-oauthlib - # tensorboard -google-auth-oauthlib==0.4.6 - # via tensorboard -grpcio==1.47.0 - # via tensorboard -h5py==3.7.0 - # via -r requirements.in -humanize==4.2.3 - # via torchio -idna==3.3 - # via requests -imageio==2.19.5 - # via scikit-image -importlib-metadata==4.12.0 - # via markdown -joblib==1.2.0 - # via scikit-learn -kiwisolver==1.4.4 - # via matplotlib -lapy==0.4.1 - # via -r requirements.in -markdown==3.4.1 - # via tensorboard -matplotlib==3.5.1 - # via -r requirements.in -networkx==2.8.5 - # via scikit-image -nibabel==3.2.2 - # via - # -r requirements.in - # torchio -numpy==1.23.5 - # via - # -r requirements.in - # h5py - # imageio - # lapy - # matplotlib - # nibabel - # pandas - # pywavelets - # scikit-image - # scikit-learn - # scipy - # tensorboard - # tifffile - # torchio - # torchvision -oauthlib==3.2.0 - # via requests-oauthlib -packaging==21.3 - # via - # matplotlib - # nibabel - # scikit-image -pandas==1.4.3 - # via -r requirements.in -pillow==9.2.0 - # via - # -r requirements.in - # imageio - # matplotlib - # scikit-image - # torchvision -plotly==5.9.0 - # via lapy -protobuf==3.19.4 - # via tensorboard -pyasn1==0.4.8 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.2.8 - # via google-auth -pyparsing==3.0.9 - # via - # matplotlib - # packaging -python-dateutil==2.8.2 - # via - # -r requirements.in - # matplotlib - # pandas -pytz==2022.1 - # via pandas -pywavelets==1.3.0 - # via scikit-image -pyyaml==6.0 - # via - # -r requirements.in - # yacs -requests==2.28.1 - # via - # requests-oauthlib - # tensorboard - # torchvision -requests-oauthlib==1.3.1 - # via google-auth-oauthlib -rsa==4.8 - # via google-auth -scikit-image==0.19.2 - # via -r requirements.in -scikit-learn==1.1.2 - # via -r requirements.in -scipy==1.8.0 - # via - # -r requirements.in - # lapy - # scikit-image - # scikit-learn - # torchio -simpleitk==2.1.1 - # via - # -r requirements.in - # torchio -six==1.16.0 - # via - # google-auth - # grpcio - # python-dateutil -tenacity==8.0.1 - # via plotly -tensorboard==2.9.1 - # via -r requirements.in -tensorboard-data-server==0.6.1 - # via tensorboard -tensorboard-plugin-wit==1.8.1 - # via tensorboard -threadpoolctl==3.1.0 - # via scikit-learn -tifffile==2022.5.4 - # via scikit-image -torch>=1.13.0.dev20220815 - # manually set nighly -torchio==0.18.83 - # via -r requirements.in -torchvision>=0.14.0.dev20220815 - # manually set nighly -tqdm==4.64 - # via - # -r requirements.in - # torchio -typing-extensions==4.3.0 - # via - # torch - # torchvision -urllib3==1.26.10 - # via requests -werkzeug==2.1.2 - # via tensorboard -wheel==0.37.1 - # via tensorboard -wrapt==1.14.1 - # via deprecated -yacs==0.1.8 - # via -r requirements.in -zipp==3.8.1 - # via importlib-metadata +h5py>=3.7 +lapy>=0.4.1 +matplotlib>=3.5.1 +nibabel>=3.2.2 +numpy>=1.21,<2 +pandas>=1.4.3 +torch>=2.0.0 +pyyaml>=6.0 +requests>=2.31.0 +scipy>=1.8.0 +yacs>=0.1.8 +simpleitk>=2.1.1 +scikit-image>=0.19.3 +scipy>=1.8.0,!=1.13.0 +tensorboard>=2.9.1 +torchio>=0.18.83 
+torchvision>=0.15.2+cu117 +tqdm>=4.64 From b3250feb9d8be2250ad52a9a623418c6ce6eee4a Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 18 Jun 2024 16:39:54 +0200 Subject: [PATCH 09/19] revert pipe amp to bash < v4 --- recon_surf/functions.sh | 30 +++--- recon_surf/recon-surf.sh | 186 ++++++++++++++++++------------------ recon_surf/recon-surfreg.sh | 70 +++++++------- 3 files changed, 143 insertions(+), 143 deletions(-) diff --git a/recon_surf/functions.sh b/recon_surf/functions.sh index 0b719a09..0ce21c8b 100644 --- a/recon_surf/functions.sh +++ b/recon_surf/functions.sh @@ -30,12 +30,12 @@ function RunIt() if [[ $# -eq 3 ]] then local CMDF=$3 - echo "echo ${cmd@Q}" |& tee -a $CMDF - echo "$timecmd $cmd" |& tee -a $CMDF + echo "echo ${cmd@Q}" 2>&1 | tee -a $CMDF + echo "$timecmd $cmd" 2>&1 | tee -a $CMDF echo "if [ \${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi" >> $CMDF else - echo $cmd |& tee -a $LF - $timecmd $cmd |& tee -a $LF + echo $cmd 2>&1 | tee -a $LF + $timecmd $cmd 2>&1 | tee -a $LF if [ ${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi fi } @@ -107,21 +107,21 @@ function softlink_or_copy() if [[ $# -eq 4 ]] then local CMDF=$4 - echo "echo \"$ln_cmd\" " |& tee -a $CMDF - echo "$timecmd $ln_cmd " |& tee -a $CMDF - echo "if [ \${PIPESTATUS[0]} -ne 0 ]" |& tee -a $CMDF - echo "then " |& tee -a $CMDF - echo " echo \"$cp_cmd\" " |& tee -a $CMDF - echo " $timecmd $cp_cmd " |& tee -a $CMDF + echo "echo \"$ln_cmd\" " 2>&1 | tee -a $CMDF + echo "$timecmd $ln_cmd " 2>&1 | tee -a $CMDF + echo "if [ \${PIPESTATUS[0]} -ne 0 ]" 2>&1 | tee -a $CMDF + echo "then " 2>&1 | tee -a $CMDF + echo " echo \"$cp_cmd\" " 2>&1 | tee -a $CMDF + echo " $timecmd $cp_cmd " 2>&1 | tee -a $CMDF echo " if [ \${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi" >> $CMDF - echo "fi" |& tee -a $CMDF + echo "fi" 2>&1 | tee -a $CMDF else - echo $ln_cmd |& tee -a $LF - $timecmd $ln_cmd |& tee -a $LF + echo $ln_cmd 2>&1 | tee -a $LF + $timecmd $ln_cmd 2>&1 | tee -a $LF if [ ${PIPESTATUS[0]} -ne 0 ] then - echo $cp_cmd |& tee -a $LF - $timecmd $cp_cmd |& tee -a $LF + echo $cp_cmd 2>&1 | tee -a $LF + $timecmd $cp_cmd 2>&1 | tee -a $LF if [ ${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi fi fi diff --git a/recon_surf/recon-surf.sh b/recon_surf/recon-surf.sh index 8b17efba..1b31ee75 100755 --- a/recon_surf/recon-surf.sh +++ b/recon_surf/recon-surf.sh @@ -389,37 +389,37 @@ if [ $DoneFile != /dev/null ] ; then rm -f $DoneFile ; fi LF=$SUBJECTS_DIR/$subject/scripts/recon-surf.log if [ $LF != /dev/null ] ; then rm -f $LF ; fi echo "Log file for recon-surf.sh" >> $LF -date |& tee -a $LF -echo "" |& tee -a $LF -echo "export SUBJECTS_DIR=$SUBJECTS_DIR" |& tee -a $LF -echo "cd `pwd`" |& tee -a $LF -echo $0 ${inputargs[*]} |& tee -a $LF -echo "" |& tee -a $LF -cat $FREESURFER_HOME/build-stamp.txt |& tee -a $LF -echo $VERSION |& tee -a $LF -uname -a |& tee -a $LF - -echo " " |& tee -a $LF -echo "================== Checking validity of inputs =================================" |& tee -a $LF -echo " " |& tee -a $LF +date 2>&1 | tee -a $LF +echo "" 2>&1 | tee -a $LF +echo "export SUBJECTS_DIR=$SUBJECTS_DIR" 2>&1 | tee -a $LF +echo "cd `pwd`" 2>&1 | tee -a $LF +echo $0 ${inputargs[*]} 2>&1 | tee -a $LF +echo "" 2>&1 | tee -a $LF +cat $FREESURFER_HOME/build-stamp.txt 2>&1 | tee -a $LF +echo $VERSION 2>&1 | tee -a $LF +uname -a 2>&1 | tee -a $LF + +echo " " 2>&1 | tee -a $LF +echo "================== Checking validity of inputs =================================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # Print parallelization parameters -echo " " 
|& tee -a $LF +echo " " 2>&1 | tee -a $LF if [ "$DoParallel" == "1" ] then - echo " RUNNING both hemis in PARALLEL " |& tee -a $LF + echo " RUNNING both hemis in PARALLEL " 2>&1 | tee -a $LF else - echo " RUNNING both hemis SEQUENTIALLY " |& tee -a $LF + echo " RUNNING both hemis SEQUENTIALLY " 2>&1 | tee -a $LF fi -echo " RUNNING $OMP_NUM_THREADS number of OMP THREADS " |& tee -a $LF -echo " RUNNING $ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS number of ITK THREADS " |& tee -a $LF -echo " " |& tee -a $LF +echo " RUNNING $OMP_NUM_THREADS number of OMP THREADS " 2>&1 | tee -a $LF +echo " RUNNING $ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS number of ITK THREADS " 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # Check input segmentation quality -echo "Checking Input Segmentation Quality ..." |& tee -a "$LF" +echo "Checking Input Segmentation Quality ..." 2>&1 | tee -a "$LF" cmd="$python $FASTSURFER_HOME/FastSurferCNN/quick_qc.py --asegdkt_segfile $asegdkt_segfile" RunIt "$cmd" "$LF" -echo "" |& tee -a "$LF" +echo "" 2>&1 | tee -a "$LF" @@ -427,9 +427,9 @@ echo "" |& tee -a "$LF" ########################################## START ######################################################## -echo " " |& tee -a $LF -echo "================== Creating orig and rawavg from input =========================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "================== Creating orig and rawavg from input =========================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF CONFORM_LF=$SUBJECTS_DIR/$subject/scripts/conform.log if [ $CONFORM_LF != /dev/null ] ; then rm -f $CONFORM_LF ; fi @@ -450,12 +450,12 @@ RunIt "$cmd" $LF if (( $(echo "$vox_size < $hires_voxsize_threshold" | bc -l) )) then - echo "The voxel size $vox_size is less than $hires_voxsize_threshold, so we are proceeding with hires options." |& tee -a $LF + echo "The voxel size $vox_size is less than $hires_voxsize_threshold, so we are proceeding with hires options." 2>&1 | tee -a $LF hiresflag="-hires" noconform_if_hires=" -noconform" hires_surface_suffix=".predec" else - echo "The voxel size $vox_size is not less than $hires_voxsize_threshold, so we are proceeding with standard options." |& tee -a $LF + echo "The voxel size $vox_size is not less than $hires_voxsize_threshold, so we are proceeding with standard options." 2>&1 | tee -a $LF hiresflag="" noconform_if_hires="" hires_surface_suffix="" @@ -477,9 +477,9 @@ popd ### ---------- if [ ! -f "$mask" ] || [ ! 
-f "$mdir/aseg.auto_noCCseg.mgz" ] ; then # Mask or aseg.auto_noCCseg not found; create them - echo " " |& tee -a $LF - echo "============= Creating aseg.auto_noCCseg (map aparc labels back) ===============" |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo "============= Creating aseg.auto_noCCseg (map aparc labels back) ===============" 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF # reduce labels to aseg, then create mask (dilate 5, erode 4, largest component), also mask aseg to remove outliers # output will be uchar (else mri_cc will fail below) @@ -489,9 +489,9 @@ fi ### END SUPERSEDED BY SEGMENTATION PIPELINE, will be removed in the future ### ---------- -echo " " |& tee -a $LF -echo "============= Computing Talairach Transform and NU (bias corrected) ============" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "============= Computing Talairach Transform and NU (bias corrected) ============" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF ### START SUPERSEDED BY SEGMENTATION PIPELINE, will be removed in the future ### ---------- @@ -519,13 +519,13 @@ fi ### ---------- if [[ ! -f "$mdir/transforms/talairach.lta" ]] || [[ ! -f "$mdir/transforms/talairach_with_skull.lta" ]]; then - echo "\"$binpath/talairach-reg.sh\" \"$mdir\" \"$atlas3T\" \"$LF\"" |& tee -a "$LF" + echo "\"$binpath/talairach-reg.sh\" \"$mdir\" \"$atlas3T\" \"$LF\"" 2>&1 | tee -a "$LF" "$binpath/talairach-reg.sh" "$mdir" "$atlas3T" "$LF" fi -echo " " |& tee -a $LF -echo "============ Creating brainmask from aseg and norm, and update aseg ============" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "============ Creating brainmask from aseg and norm, and update aseg ============" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # create norm by masking nu cmd="mri_mask $mdir/nu.mgz $mdir/mask.mgz $mdir/norm.mgz" @@ -557,9 +557,9 @@ RunIt "$cmd" $LF cmd="$python ${binpath}paint_cc_into_pred.py -in_cc $mdir/aseg.auto.mgz -in_pred $asegdkt_segfile -out $mdir/aparc.DKTatlas+aseg.deep.withCC.mgz" RunIt "$cmd" $LF -echo " " |& tee -a $LF -echo "========= Creating filled from brain (brainfinalsurfs, wm.asegedit, wm) =======" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "========= Creating filled from brain (brainfinalsurfs, wm.asegedit, wm) =======" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF cmd="recon-all -s $subject -asegmerge -normalization2 -maskbfs -segmentation -fill $hiresflag $fsthreads" RunIt "$cmd" $LF @@ -577,9 +577,9 @@ CMDFS="$CMDFS $CMDF" rm -rf $CMDF echo "#!/bin/bash" > $CMDF -echo "echo " |& tee -a $CMDF -echo "echo \"================== Creating surfaces $hemi - orig.nofix ==================\"" |& tee -a $CMDF -echo "echo " |& tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF +echo "echo \"================== Creating surfaces $hemi - orig.nofix ==================\"" 2>&1 | tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF if [ "$fstess" == "1" ] @@ -610,8 +610,8 @@ else # Check if the surfaceRAS was correctly set and exit otherwise (sanity check in case nibabel changes their default header behaviour) cmd="mris_info $outmesh | tr -s ' ' | grep -q 'vertex locs : surfaceRAS'" - echo "echo \"$cmd\" " |& tee -a $CMDF - echo "$timecmd $cmd " |& tee -a $CMDF + echo "echo \"$cmd\" " 2>&1 | tee -a $CMDF + echo "$timecmd $cmd " 2>&1 | tee -a $CMDF echo "if [ \${PIPESTATUS[1]} -ne 0 ] ; then echo \"Incorrect header information detected in $outmesh: vertex locs is not set to surfaceRAS. Exiting... 
\"; exit 1 ; fi" >> $CMDF # Reduce to largest component (usually there should only be one) @@ -637,9 +637,9 @@ fi -echo "echo " |& tee -a $CMDF -echo "echo \"=================== Creating surfaces $hemi - qsphere ====================\"" |& tee -a $CMDF -echo "echo " |& tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF +echo "echo \"=================== Creating surfaces $hemi - qsphere ====================\"" 2>&1 | tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF #surface inflation (54sec both hemis) (needed for qsphere and for topo-fixer) cmd="recon-all -subject $subject -hemi $hemi -inflate1 -no-isrunning $hiresflag $fsthreads" @@ -665,9 +665,9 @@ else fi -echo "echo " |& tee -a $CMDF -echo "echo \"=================== Creating surfaces $hemi - fix ========================\"" |& tee -a $CMDF -echo "echo " |& tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF +echo "echo \"=================== Creating surfaces $hemi - fix ========================\"" 2>&1 | tee -a $CMDF +echo "echo " 2>&1 | tee -a $CMDF ## -fix cmd="recon-all -subject $subject -hemi $hemi -fix -autodetgwstats -white-preaparc -cortex-label -no-isrunning $hiresflag $fsthreads" @@ -678,9 +678,9 @@ RunIt "$cmd" $LF $CMDF -echo "echo \" \"" |& tee -a $CMDF -echo "echo \"================== Creating surfaces $hemi - inflate2 ====================\"" |& tee -a $CMDF -echo "echo \" \"" |& tee -a $CMDF +echo "echo \" \"" 2>&1 | tee -a $CMDF +echo "echo \"================== Creating surfaces $hemi - inflate2 ====================\"" 2>&1 | tee -a $CMDF +echo "echo \" \"" 2>&1 | tee -a $CMDF # create nicer inflated surface from topo fixed (not needed, just later for visualization) @@ -688,9 +688,9 @@ cmd="recon-all -subject $subject -hemi $hemi -smooth2 -inflate2 -curvHK -no-isru RunIt "$cmd" $LF $CMDF -echo "echo \" \"" |& tee -a $CMDF -echo "echo \"=========== Creating surfaces $hemi - map input asegdkt_segfile to surf ===============\"" |& tee -a $CMDF -echo "echo \" \"" |& tee -a $CMDF +echo "echo \" \"" 2>&1 | tee -a $CMDF +echo "echo \"=========== Creating surfaces $hemi - map input asegdkt_segfile to surf ===============\"" 2>&1 | tee -a $CMDF +echo "echo \" \"" 2>&1 | tee -a $CMDF # sample input segmentation (aparc.DKTatlas+aseg orig) onto wm surface: # map input aparc to surface (requires thickness (and thus pail) to compute projfrac 0.5), here we do projmm which allows us to compute based only on white @@ -706,9 +706,9 @@ echo "echo \" \"" |& tee -a $CMDF # if we segment with FS or if surface registration is requested do it here: if [ "$fsaparc" == "1" ] || [ "$fssurfreg" == "1" ] ; then - echo "echo \" \"" |& tee -a $CMDF - echo "echo \"============ Creating surfaces $hemi - FS sphere, surfreg ===============\"" |& tee -a $CMDF - echo "echo \" \"" |& tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF + echo "echo \"============ Creating surfaces $hemi - FS sphere, surfreg ===============\"" 2>&1 | tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF # Surface registration for cross-subject correspondence (registration to fsaverage) cmd="recon-all -subject $subject -hemi $hemi -sphere $hiresflag -no-isrunning $fsthreads" @@ -746,9 +746,9 @@ fi if [ "$fsaparc" == "1" ] ; then - echo "echo \" \"" |& tee -a $CMDF - echo "echo \"============ Creating surfaces $hemi - FS asegdkt_segfile..pial ===============\"" |& tee -a $CMDF - echo "echo \" \"" |& tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF + echo "echo \"============ Creating surfaces $hemi - FS asegdkt_segfile..pial ===============\"" 2>&1 | tee -a $CMDF + echo "echo \" \"" 
2>&1 | tee -a $CMDF # 20-25 min for traditional surface segmentation (each hemi) # this creates aparc and creates pial using aparc, also computes jacobian @@ -759,9 +759,9 @@ if [ "$fsaparc" == "1" ] ; then else - echo "echo \" \"" |& tee -a $CMDF - echo "echo \"================ Creating surfaces $hemi - white and pial direct ===================\"" |& tee -a $CMDF - echo "echo \" \"" |& tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF + echo "echo \"================ Creating surfaces $hemi - white and pial direct ===================\"" 2>&1 | tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF # 4 min compute white : @@ -805,9 +805,9 @@ RunIt "$cmd" $LF "$CMDF" if [ "$DoParallel" == "0" ] ; then - echo " " |& tee -a $LF - echo " RUNNING $hemi sequentially ... " |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo " RUNNING $hemi sequentially ... " 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF chmod u+x $CMDF RunIt "$CMDF" $LF fi @@ -818,17 +818,17 @@ done # hemi loop ---------------------------------- if [ "$DoParallel" == 1 ] ; then - echo " " |& tee -a $LF - echo " RUNNING HEMIs in PARALLEL !!! " |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo " RUNNING HEMIs in PARALLEL !!! " 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF RunBatchJobs $LF $CMDFS fi -echo " " |& tee -a $LF -echo "============================ Creating surfaces - ribbon ===========================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "============================ Creating surfaces - ribbon ===========================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # -cortribbon 4 minutes, ribbon is used in mris_anatomical stats to remove voxels from surface based volumes that should not be cortex # anatomical stats can run without ribon, but will omit some surface based measures then # wmparc needs ribbon, probably other stuff (aparc to aseg etc). 
@@ -840,9 +840,9 @@ echo " " |& tee -a $LF if [ "$fsaparc" == "1" ] ; then - echo " " |& tee -a $LF - echo "============= Creating surfaces - other FS asegdkt_segfile and stats =======================" |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo "============= Creating surfaces - other FS asegdkt_segfile and stats =======================" 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF cmd="recon-all -subject $subject -cortparc2 -cortparc3 -pctsurfcon -hyporelabel $hiresflag $fsthreads" RunIt "$cmd" $LF @@ -854,9 +854,9 @@ if [ "$fsaparc" == "1" ] ; then fi # (FS-APARC) -echo " " |& tee -a $LF -echo "===================== Creating surfaces - mapped stats =========================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "===================== Creating surfaces - mapped stats =========================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # 2x18sec create stats from mapped aparc @@ -868,9 +868,9 @@ done if [ "$fsaparc" == "0" ] ; then - echo " " |& tee -a $LF - echo "============= Creating surfaces - pctsurfcon, hypo, segstats ====================" |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo "============= Creating surfaces - pctsurfcon, hypo, segstats ====================" 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF # pctsurfcon (has no way to specify which annot to use, so we need to link ours as aparc is not available) pushd $ldir @@ -911,9 +911,9 @@ fi -echo " " |& tee -a $LF -echo "===================== Creating wmparc from mapped =======================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "===================== Creating wmparc from mapped =======================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # 1m 11sec also create stats for aseg.presurf.hypos (which is basically the aseg derived from the input with CC and hypos) # difference between this and the surface improved one above are probably tiny, so the surface improvement above can probably be skipped to save time @@ -962,9 +962,9 @@ fi -echo " " |& tee -a $LF -echo "================= DONE =========================================================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "================= DONE =========================================================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # Collect info EndTime=`date` @@ -972,9 +972,9 @@ tSecEnd=`date '+%s'` tRunHours=`echo \($tSecEnd - $tSecStart\)/3600|bc -l` tRunHours=`printf %6.3f $tRunHours` -echo "Started at $StartTime " |& tee -a $LF -echo "Ended at $EndTime" |& tee -a $LF -echo "#@#%# recon-surf-run-time-hours $tRunHours" |& tee -a $LF +echo "Started at $StartTime " 2>&1 | tee -a $LF +echo "Ended at $EndTime" 2>&1 | tee -a $LF +echo "#@#%# recon-surf-run-time-hours $tRunHours" 2>&1 | tee -a $LF # Create the Done File echo "------------------------------" > $DoneFile @@ -991,7 +991,7 @@ echo "VERSION $VERSION" >> $DoneFile echo "CMDPATH $0" >> $DoneFile echo "CMDARGS ${inputargs[*]}" >> $DoneFile -echo "recon-surf.sh $subject finished without error at `date`" |& tee -a $LF +echo "recon-surf.sh $subject finished without error at `date`" 2>&1 | tee -a $LF cmd="$python ${binpath}utils/extract_recon_surf_time_info.py -i $LF -o $SUBJECTS_DIR/$subject/scripts/recon-surf_times.yaml" RunIt "$cmd" "/dev/null" diff --git a/recon_surf/recon-surfreg.sh b/recon_surf/recon-surfreg.sh index 546478e0..b3260165 100755 --- a/recon_surf/recon-surfreg.sh +++ b/recon_surf/recon-surfreg.sh @@ 
-109,12 +109,12 @@ function RunIt() if [[ $# -eq 3 ]] then CMDF=$3 - echo "echo \"$cmd\" " |& tee -a $CMDF - echo "$timecmd $cmd " |& tee -a $CMDF + echo "echo \"$cmd\" " 2>&1 | tee -a $CMDF + echo "$timecmd $cmd " 2>&1 | tee -a $CMDF echo "if [ \${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi" >> $CMDF else - echo $cmd |& tee -a $LF - $timecmd $cmd |& tee -a $LF + echo $cmd 2>&1 | tee -a $LF + $timecmd $cmd 2>&1 | tee -a $LF #if [ ${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi fi } @@ -356,28 +356,28 @@ if [ $DoneFile != /dev/null ] ; then rm -f $DoneFile ; fi LF=$SUBJECTS_DIR/$subject/scripts/recon-surfreg.log if [ $LF != /dev/null ] ; then rm -f $LF ; fi echo "Log file for recon-surfreg.sh" >> $LF -date |& tee -a $LF -echo "" |& tee -a $LF -echo "export SUBJECTS_DIR=$SUBJECTS_DIR" |& tee -a $LF -echo "cd `pwd`" |& tee -a $LF -echo $0 ${inputargs[*]} |& tee -a $LF -echo "" |& tee -a $LF -cat $FREESURFER_HOME/build-stamp.txt |& tee -a $LF -echo $VERSION |& tee -a $LF -uname -a |& tee -a $LF +date 2>&1 | tee -a $LF +echo "" 2>&1 | tee -a $LF +echo "export SUBJECTS_DIR=$SUBJECTS_DIR" 2>&1 | tee -a $LF +echo "cd `pwd`" 2>&1 | tee -a $LF +echo $0 ${inputargs[*]} 2>&1 | tee -a $LF +echo "" 2>&1 | tee -a $LF +cat $FREESURFER_HOME/build-stamp.txt 2>&1 | tee -a $LF +echo $VERSION 2>&1 | tee -a $LF +uname -a 2>&1 | tee -a $LF # Print parallelization parameters -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF if [ "$DoParallel" == "1" ] then - echo " RUNNING both hemis in PARALLEL " |& tee -a $LF + echo " RUNNING both hemis in PARALLEL " 2>&1 | tee -a $LF else - echo " RUNNING both hemis SEQUENTIALLY " |& tee -a $LF + echo " RUNNING both hemis SEQUENTIALLY " 2>&1 | tee -a $LF fi -echo " RUNNING $OMP_NUM_THREADS number of OMP THREADS " |& tee -a $LF -echo " RUNNING $ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS number of ITK THREADS " |& tee -a $LF -echo " " |& tee -a $LF +echo " RUNNING $OMP_NUM_THREADS number of OMP THREADS " 2>&1 | tee -a $LF +echo " RUNNING $ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS number of ITK THREADS " 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF #if false; then @@ -396,9 +396,9 @@ for hemi in lh rh; do CMDFS="$CMDFS $CMDF" rm -rf $CMDF - echo "echo \" \"" |& tee -a $CMDF - echo "echo \"============ Creating surfaces $hemi - FS sphere, surfreg ===============\"" |& tee -a $CMDF - echo "echo \" \"" |& tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF + echo "echo \"============ Creating surfaces $hemi - FS sphere, surfreg ===============\"" 2>&1 | tee -a $CMDF + echo "echo \" \"" 2>&1 | tee -a $CMDF # Surface registration for cross-subject correspondence (registration to fsaverage) cmd="recon-all -subject $subject -hemi $hemi -sphere -no-isrunning $fsthreads" @@ -432,9 +432,9 @@ for hemi in lh rh; do # $SUBJECTS_DIR/$subject/label/${hemi}.aparc.DKTatlas-guided.annot" if [ "$DoParallel" == "0" ] ; then - echo " " |& tee -a $LF - echo " RUNNING $hemi sequentially ... " |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo " RUNNING $hemi sequentially ... " 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF chmod u+x $CMDF RunIt "$CMDF" $LF fi @@ -444,16 +444,16 @@ done # hemi loop ---------------------------------- if [ "$DoParallel" == 1 ] ; then - echo " " |& tee -a $LF - echo " RUNNING HEMIs in PARALLEL !!! " |& tee -a $LF - echo " " |& tee -a $LF + echo " " 2>&1 | tee -a $LF + echo " RUNNING HEMIs in PARALLEL !!! 
" 2>&1 | tee -a $LF + echo " " 2>&1 | tee -a $LF RunBatchJobs $LF $CMDFS fi -echo " " |& tee -a $LF -echo "================= DONE =========================================================" |& tee -a $LF -echo " " |& tee -a $LF +echo " " 2>&1 | tee -a $LF +echo "================= DONE =========================================================" 2>&1 | tee -a $LF +echo " " 2>&1 | tee -a $LF # Collect info EndTime=`date` @@ -461,9 +461,9 @@ tSecEnd=`date '+%s'` tRunHours=`echo \($tSecEnd - $tSecStart\)/3600|bc -l` tRunHours=`printf %6.3f $tRunHours` -echo "Started at $StartTime " |& tee -a $LF -echo "Ended at $EndTime" |& tee -a $LF -echo "#@#%# recon-surfreg-run-time-hours $tRunHours" |& tee -a $LF +echo "Started at $StartTime " 2>&1 | tee -a $LF +echo "Ended at $EndTime" 2>&1 | tee -a $LF +echo "#@#%# recon-surfreg-run-time-hours $tRunHours" 2>&1 | tee -a $LF # Create the Done File echo "------------------------------" > $DoneFile @@ -480,7 +480,7 @@ echo "VERSION $VERSION" >> $DoneFile echo "CMDPATH $0" >> $DoneFile echo "CMDARGS ${inputargs[*]}" >> $DoneFile -echo "recon-surfreg.sh $subject finished without error at `date`" |& tee -a $LF +echo "recon-surfreg.sh $subject finished without error at `date`" 2>&1 | tee -a $LF cmd="$python ${binpath}utils/extract_recon_surf_time_info.py -i $LF -o $SUBJECTS_DIR/$subject/scripts/recon-surfreg_times.yaml" RunIt "$cmd" "/dev/null" From 219df79d540b0e89d43344a50798516f1e0511d1 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 18 Jun 2024 23:35:03 +0200 Subject: [PATCH 10/19] replace deprecated delim_whitespace in pandas call --- FastSurferCNN/segstats.py | 2 +- FastSurferCNN/utils/mapper.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/FastSurferCNN/segstats.py b/FastSurferCNN/segstats.py index f2b93d52..ba467de2 100644 --- a/FastSurferCNN/segstats.py +++ b/FastSurferCNN/segstats.py @@ -725,7 +725,7 @@ def read_classes_from_lut(lut_file: str | Path): } return pd.read_csv( lut_file, - delim_whitespace=True, + sep='\s+', index_col=False, skip_blank_lines=True, comment="#", diff --git a/FastSurferCNN/utils/mapper.py b/FastSurferCNN/utils/mapper.py index fa96a83f..11b7bcc1 100644 --- a/FastSurferCNN/utils/mapper.py +++ b/FastSurferCNN/utils/mapper.py @@ -999,7 +999,7 @@ def __init__( self._data = pandas.read_csv( file_or_buffer, - delim_whitespace=True, + sep='\s+', index_col=0, skip_blank_lines=True, comment="#", From 9a070a2229eb37c0529c2060df8d91b44a43bb69 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Tue, 18 Jun 2024 23:36:39 +0200 Subject: [PATCH 11/19] replace |& to run on older bash --- run_fastsurfer.sh | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/run_fastsurfer.sh b/run_fastsurfer.sh index 694ad669..1e1c51cf 100755 --- a/run_fastsurfer.sh +++ b/run_fastsurfer.sh @@ -713,7 +713,7 @@ else log_existed="false" fi VERSION=$($python "$FASTSURFER_HOME/FastSurferCNN/version.py" "${version_args[@]}") -echo "Version: $VERSION" |& tee -a "$seg_log" +echo "Version: $VERSION" 2>&1 | tee -a "$seg_log" ### IF THE SCRIPT GETS TERMINATED, ADD A MESSAGE trap "{ echo \"run_fastsurfer.sh terminated via signal at \$(date -R)!\" >> \"$seg_log\" ; }" SIGINT SIGTERM @@ -733,8 +733,8 @@ if [[ "$run_seg_pipeline" == "1" ]] # "============= Running FastSurferCNN (Creating Segmentation aparc.DKTatlas.aseg.mgz) ===============" # use FastSurferCNN to create cortical parcellation + anatomical segmentation into 95 classes. 
echo "Log file for segmentation FastSurferCNN/run_prediction.py" >> "$seg_log" - date |& tee -a "$seg_log" - echo "" |& tee -a "$seg_log" + date 2>&1 | tee -a "$seg_log" + echo "" 2>&1 | tee -a "$seg_log" if [[ "$run_asegdkt_module" == "1" ]] then @@ -745,7 +745,7 @@ if [[ "$run_seg_pipeline" == "1" ]] --viewagg_device "$viewagg" --device "$device" "${allow_root[@]}") # specify the subject dir $sd, if asegdkt_segfile explicitly starts with it if [[ "$sd" == "${asegdkt_segfile:0:${#sd}}" ]]; then cmd=("${cmd[@]}" --sd "$sd"); fi - echo "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" "${cmd[@]}" exit_code="${PIPESTATUS[0]}" if [[ "${exit_code}" == 2 ]] @@ -766,7 +766,7 @@ if [[ "$run_seg_pipeline" == "1" ]] echo "INFO: Running N4 bias-field correction" | tee -a "$seg_log" cmd=($python "${reconsurfdir}/N4_bias_correct.py" "--in" "$conformed_name" --rescale "$norm_name" --aseg "$asegdkt_segfile" --threads "$threads") - echo "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" "${cmd[@]}" if [[ "${PIPESTATUS[0]}" -ne 0 ]] then @@ -778,7 +778,7 @@ if [[ "$run_seg_pipeline" == "1" ]] then echo "INFO: Running talairach registration" | tee -a "$seg_log" cmd=("$reconsurfdir/talairach-reg.sh" "$sd/$subject/mri" "$atlas3T" "$seg_log") - echo "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" "${cmd[@]}" if [[ "${PIPESTATUS[0]}" -ne 0 ]] then @@ -800,8 +800,8 @@ if [[ "$run_seg_pipeline" == "1" ]] 2014 2015 2016 2017 2018 2019 2020 2021 2022 2023 2024 2025 2026 2027 2028 2029 2030 2031 2034 2035 --lut "$fastsurfercnndir/config/FreeSurferColorLUT.txt") - echo "${cmd[@]}" |& tee -a "$seg_log" - "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" + "${cmd[@]}" 2>&1 | tee -a "$seg_log" if [[ "${PIPESTATUS[0]}" -ne 0 ]] then echo "ERROR: asegdkt statsfile generation failed" | tee -a "$seg_log" @@ -817,7 +817,7 @@ if [[ "$run_seg_pipeline" == "1" ]] cereb_flags=("${cereb_flags[@]}" --norm_name "$norm_name" --cereb_statsfile "$cereb_statsfile") else - echo "INFO: Running CerebNet without generating a statsfile, since biasfield correction deactivated '--no_biasfield'." |& tee -a $seg_log + echo "INFO: Running CerebNet without generating a statsfile, since biasfield correction deactivated '--no_biasfield'." 
2>&1 | tee -a $seg_log fi cmd=($python "$cerebnetdir/run_prediction.py" --t1 "$t1" @@ -827,11 +827,11 @@ if [[ "$run_seg_pipeline" == "1" ]] --threads "$threads" "${cereb_flags[@]}" "${allow_root[@]}") # specify the subject dir $sd, if asegdkt_segfile explicitly starts with it if [[ "$sd" == "${cereb_segfile:0:${#sd}}" ]] ; then cmd=("${cmd[@]}" --sd "$sd"); fi - echo "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" "${cmd[@]}" if [[ "${PIPESTATUS[0]}" -ne 0 ]] then - echo "ERROR: Cerebellum Segmentation failed" |& tee -a "$seg_log" + echo "ERROR: Cerebellum Segmentation failed" 2>&1 | tee -a "$seg_log" exit 1 fi fi @@ -850,7 +850,7 @@ if [[ "$run_surf_pipeline" == "1" ]] cmd=("./recon-surf.sh" --sid "$subject" --sd "$sd" --t1 "$conformed_name" --asegdkt_segfile "$asegdkt_segfile" --threads "$threads" --py "$python" "${surf_flags[@]}" "${allow_root[@]}") - echo "${cmd[@]}" |& tee -a "$seg_log" + echo "${cmd[@]}" 2>&1 | tee -a "$seg_log" "${cmd[@]}" if [[ "${PIPESTATUS[0]}" -ne 0 ]] ; then exit 1 ; fi popd || return From 8031e660d2c7d9deb5fec0aad720a1da6dfc2590 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 08:23:24 +0200 Subject: [PATCH 12/19] update versions to requirements.txt and add missing --- pyproject.toml | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3cf938d8..89d1d549 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,20 +32,22 @@ classifiers = [ ] dependencies = [ 'h5py>=3.7', - 'lapy>=0.4.1', - 'matplotlib>=3.5.1', - 'nibabel>=3.2.2', - 'numpy>=1.21', - 'pandas>=1.4.3', - 'torch>=1.12.0', + 'lapy>=1.0.1', + 'matplotlib>=3.7.1', + 'nibabel>=5.1.0', + 'numpy>=1.25,<2', + 'pandas>=1.5.3', 'pyyaml>=6.0', - 'scipy>=1.8.0', - 'yacs>=0.1.8', - 'simpleitk>=2.1.1', - 'scipy>=1.8.0', - 'tensorboard>=2.9.1', + 'scikit-image>=0.19.3', + 'scikit-learn>=1.2.2', + 'scipy>=1.10.1,!=1.13.0', + 'simpleitk>=2.2.1', + 'tensorboard>=2.12.1', + 'torch>=2.0.1+cu117', 'torchio>=0.18.83', - 'tqdm>=4.64', + 'torchvision>=0.15.2+cu117', + 'tqdm>=4.65', + 'yacs>=0.1.8', ] [project.optional-dependencies] From dca5774d697a67df5703d4416056c770015da5dc Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 08:28:00 +0200 Subject: [PATCH 13/19] remove bash requirement on mac --- doc/overview/INSTALL.md | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/doc/overview/INSTALL.md b/doc/overview/INSTALL.md index 746995d1..1e52a7bb 100644 --- a/doc/overview/INSTALL.md +++ b/doc/overview/INSTALL.md @@ -165,19 +165,17 @@ Continue with the example in [Example 1](EXAMPLES.md#example-1-fastsurfer-docker ### Native -On modern Macs with the Apple Silicon M1 or M2 ARM-based chips, we recommend a native installation as it runs much faster than Docker in our tests. The experimental support for the built-in AI Accelerator is also only available on native installations. Native installation also supports older Intel chips. +On modern Macs with the Apple Silicon M1 or M2 ARM-based chips, we recommend a native installation as it runs much faster than Docker in our tests. The experimental support for the built-in AI accelerator (MPS) is also only available on native installations. Native installation also supports older Intel chips. #### 1. Git and Bash -If you do not have git and a recent bash (version > 4.0 required!) installed, install them via the packet manager, e.g. brew. +If you do not have git you can install it via the packet manager, e.g. 
brew.
-This installs brew and then bash:
+This installs brew and then git:

 ```sh
 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-brew install bash
+brew install git
 ```

-Make sure you use this bash and not the older one provided with MacOS!
-
 #### 2. Python

 Create a python environment, activate it, and upgrade pip. Here we use pip, but you should also be able to use conda for python:

@@ -195,7 +193,6 @@ cd FastSurfer
 export PYTHONPATH="${PYTHONPATH}:$PWD"
 ```

-
 Install the FastSurfer requirements
 ```sh
 python3 -m pip install -r requirements.mac.txt
@@ -215,14 +212,12 @@ You can also download all network checkpoint files (this should be done if you a
 python3 FastSurferCNN/download_checkpoints.py --all
 ```

-Once all dependencies are installed, run the FastSurfer segmentation only (!!) by calling ```bash ./run_fastsurfer.sh --seg_only ....``` with the appropriate command line flags, see the [commandline documentation](../../README.md#usage).
-
-Note: You may always need to prepend the command with `bash` (i.e. `bash run_fastsurfer.sh <...>`) to ensure that bash 4.0 is used instead of the system default.
+Once all dependencies are installed, you can run the FastSurfer segmentation only by calling ```./run_fastsurfer.sh --seg_only ....``` with the appropriate command line flags; see the [commandline documentation](../../README.md#usage).

-To run the full pipeline, install and source also the supported FreeSurfer version according to their [Instructions](https://surfer.nmr.mgh.harvard.edu/fswiki/rel7downloads). There is a freesurfer email list, if you run into problems during this step.
+To run the full pipeline, also install and source the supported FreeSurfer version according to their [Instructions](https://surfer.nmr.mgh.harvard.edu/fswiki/rel7downloads). There is a FreeSurfer email list if you run into problems during this step. Note that FreeSurfer for macOS currently supports only Intel, not ARM, so on modern M-chips it may be slow due to emulation.

 #### 4. 
Apple AI Accelerator support -You can also try the experimental support for the Apple Silicon AI Accelerator by setting `PYTORCH_ENABLE_MPS_FALLBACK` and passing `--device mps`: +You can also try the experimental support for the Apple Silicon AI Accelerator by setting `PYTORCH_ENABLE_MPS_FALLBACK` and passing `--device mps` for the segmentation module to make use of the fast GPU: ```sh export PYTORCH_ENABLE_MPS_FALLBACK=1 From d05d21b046721a67bb8202c6243c46ca82fda7bf Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 08:28:32 +0200 Subject: [PATCH 14/19] adjust versions to be more flexible, as in pyproject toml --- requirements.mac.txt | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/requirements.mac.txt b/requirements.mac.txt index 9de44fa4..0ea4fd47 100644 --- a/requirements.mac.txt +++ b/requirements.mac.txt @@ -1,18 +1,18 @@ h5py>=3.7 -lapy>=0.4.1 -matplotlib>=3.5.1 -nibabel>=3.2.2 -numpy>=1.21,<2 -pandas>=1.4.3 -torch>=2.0.0 +lapy>=1.0.1 +matplotlib>=3.7.1 +nibabel>=5.1.0 +numpy>=1.25,<2 +pandas>=1.5.3 pyyaml>=6.0 -requests>=2.31.0 -scipy>=1.8.0 -yacs>=0.1.8 -simpleitk>=2.1.1 scikit-image>=0.19.3 -scipy>=1.8.0,!=1.13.0 -tensorboard>=2.9.1 +scikit-learn>=1.2.2 +scipy>=1.10.1,!=1.13.0 +simpleitk>=2.2.1 +tensorboard>=2.12.1 +torch>=2.0.1+cu117 torchio>=0.18.83 torchvision>=0.15.2+cu117 -tqdm>=4.64 +tqdm>=4.65 +yacs>=0.1.8 + From 699b2d325d1717b9e029025e1a5d1a467b366175 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 08:45:53 +0200 Subject: [PATCH 15/19] avoid @q expansion (bash 4.4) and use printf %q (bash 3.1) --- recon_surf/functions.sh | 3 ++- recon_surf/recon-surf.sh | 11 ++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/recon_surf/functions.sh b/recon_surf/functions.sh index 0ce21c8b..a5439418 100644 --- a/recon_surf/functions.sh +++ b/recon_surf/functions.sh @@ -30,7 +30,8 @@ function RunIt() if [[ $# -eq 3 ]] then local CMDF=$3 - echo "echo ${cmd@Q}" 2>&1 | tee -a $CMDF + printf -v tmp %q "$cmd" + echo "echo $tmp" 2>&1 | tee -a $CMDF echo "$timecmd $cmd" 2>&1 | tee -a $CMDF echo "if [ \${PIPESTATUS[0]} -ne 0 ] ; then exit 1 ; fi" >> $CMDF else diff --git a/recon_surf/recon-surf.sh b/recon_surf/recon-surf.sh index 1b31ee75..1bfe2a6f 100755 --- a/recon_surf/recon-surf.sh +++ b/recon_surf/recon-surf.sh @@ -44,10 +44,10 @@ else fi -# check bash version > 4 +# check bash version > 3.1 (needed for printf %q) function version { echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }'; } -if [ $(version ${BASH_VERSION}) -lt $(version "4.0.0") ]; then - echo "bash ${BASH_VERSION} is too old. Should be newer than 4.0, please upgrade!" +if [ $(version ${BASH_VERSION}) -lt $(version "3.1.0") ]; then + echo "bash ${BASH_VERSION} is too old. Should be newer than 3.1, please upgrade!" exit 1 fi @@ -91,7 +91,7 @@ FLAGS: etiv estimates for 3T MR images, default: 1.5T atlas). --parallel Run both hemispheres in parallel --threads Set openMP and ITK threads to - --py Command for python, default ${python@Q} + --py Command for python, default ${python} --fs_license Path to FreeSurfer license key file. 
Register at https://surfer.nmr.mgh.harvard.edu/registration.html for free to obtain it if you do not have FreeSurfer @@ -658,7 +658,8 @@ else # equivalent to -qsphere # (23sec) cmd="$python ${binpath}spherically_project_wrapper.py --hemi $hemi --sdir $sdir" - cmd="$cmd --subject $subject --threads=$threads --py ${python@Q} --binpath ${binpath}" + printf -v tmp %q "$python" + cmd="$cmd --subject $subject --threads=$threads --py ${tmp} --binpath ${binpath}" RunIt "$cmd" $LF $CMDF From 29a3fc5a8fea9684d82ba1a769e6c85ab2d9b65f Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 10:34:02 +0200 Subject: [PATCH 16/19] follow pep508: drop +cu --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 89d1d549..124bcad1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,9 +43,9 @@ dependencies = [ 'scipy>=1.10.1,!=1.13.0', 'simpleitk>=2.2.1', 'tensorboard>=2.12.1', - 'torch>=2.0.1+cu117', + 'torch>=2.0.1', 'torchio>=0.18.83', - 'torchvision>=0.15.2+cu117', + 'torchvision>=0.15.2', 'tqdm>=4.65', 'yacs>=0.1.8', ] From 7399d21c9c5eb5f9f0628dab6166f9a30031eed0 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 14:22:35 +0200 Subject: [PATCH 17/19] remove +cu* from torch etc --- requirements.mac.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.mac.txt b/requirements.mac.txt index 0ea4fd47..743a5903 100644 --- a/requirements.mac.txt +++ b/requirements.mac.txt @@ -10,9 +10,9 @@ scikit-learn>=1.2.2 scipy>=1.10.1,!=1.13.0 simpleitk>=2.2.1 tensorboard>=2.12.1 -torch>=2.0.1+cu117 +torch>=2.0.1 torchio>=0.18.83 -torchvision>=0.15.2+cu117 +torchvision>=0.15.2 tqdm>=4.65 yacs>=0.1.8 From 036f3f67fc202cce4d6f06e53a48c0faba87fe8d Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 14:54:24 +0200 Subject: [PATCH 18/19] adding requests needed for download checkpoints --- pyproject.toml | 1 + requirements.mac.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 124bcad1..2d2a06aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ dependencies = [ 'numpy>=1.25,<2', 'pandas>=1.5.3', 'pyyaml>=6.0', + 'requests>=2.31.0', 'scikit-image>=0.19.3', 'scikit-learn>=1.2.2', 'scipy>=1.10.1,!=1.13.0', diff --git a/requirements.mac.txt b/requirements.mac.txt index 743a5903..95af69a7 100644 --- a/requirements.mac.txt +++ b/requirements.mac.txt @@ -5,6 +5,7 @@ nibabel>=5.1.0 numpy>=1.25,<2 pandas>=1.5.3 pyyaml>=6.0 +requests>=2.31.0 scikit-image>=0.19.3 scikit-learn>=1.2.2 scipy>=1.10.1,!=1.13.0 From ae9460daa5af169cdc674f6e218918bf5876235f Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Wed, 19 Jun 2024 14:55:10 +0200 Subject: [PATCH 19/19] python3.10 required --- doc/overview/INSTALL.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/doc/overview/INSTALL.md b/doc/overview/INSTALL.md index 1e52a7bb..38be4204 100644 --- a/doc/overview/INSTALL.md +++ b/doc/overview/INSTALL.md @@ -61,9 +61,9 @@ sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test sudo apt install -y g++-11 ``` -You also need to have bash-4.0 or higher (check with `bash --version`). +You also need to have bash-3.2 or higher (check with `bash --version`). -You also need a working version of python3 (we recommend python 3.10 -- we do not support other versions). These packages should be sufficient to install python dependencies and then run the FastSurfer neural network segmentation. 
If you want to run the full pipeline, you also need a [working installation of FreeSurfer](https://surfer.nmr.mgh.harvard.edu/fswiki/rel7downloads) (including its dependencies).

 If you are using pip, make sure pip is updated as older versions will fail.
@@ -167,22 +167,22 @@

 ### Native

 On modern Macs with the Apple Silicon M1 or M2 ARM-based chips, we recommend a native installation as it runs much faster than Docker in our tests. The experimental support for the built-in AI accelerator (MPS) is also only available on native installations. Native installation also supports older Intel chips.

-#### 1. Git and Bash
-If you do not have git you can install it via the packet manager, e.g. brew.
-This installs brew and then git:
+#### 1. Dependency packages
+If you do not have git, python3.10 or bash (at least 3.2), you can install them via the package manager brew.
+This installs brew and then git and python3.10:

 ```sh
 /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-brew install git
+brew install git python@3.10
 ```

 #### 2. Python

 Create a python environment, activate it, and upgrade pip. Here we use pip, but you should also be able to use conda for python:

 ```sh
-python3 -m venv $HOME/python-envs/fastsurfer
+python3.10 -m venv $HOME/python-envs/fastsurfer
 source $HOME/python-envs/fastsurfer/bin/activate
-python3 -m pip install --upgrade pip
+python3.10 -m pip install --upgrade pip
 ```

 #### 3. FastSurfer and Requirements
@@ -195,7 +195,7 @@ cd FastSurfer
 export PYTHONPATH="${PYTHONPATH}:$PWD"
 ```

 Install the FastSurfer requirements
 ```sh
-python3 -m pip install -r requirements.mac.txt
+python3.10 -m pip install -r requirements.mac.txt
 ```

 If this step fails, you may need to edit ```requirements.mac.txt``` and adjust version number to what is available.
@@ -209,7 +209,7 @@ pip3 install --no-binary=h5py h5py

 You can also download all network checkpoint files (this should be done if you are installing for multiple users):
 ```sh
-python3 FastSurferCNN/download_checkpoints.py --all
+python3.10 FastSurferCNN/download_checkpoints.py --all
 ```

 Once all dependencies are installed, you can run the FastSurfer segmentation only by calling ```./run_fastsurfer.sh --seg_only ....``` with the appropriate command line flags; see the [commandline documentation](../../README.md#usage).
@@ -224,7 +224,7 @@ export PYTORCH_ENABLE_MPS_FALLBACK=1
 ./run_fastsurfer.sh --seg_only --device mps ....
 ```

-This will be at least twice as fast as `--device cpu`. The fallback environment variable is necessary as one function is not yet implemented for the GPU and will fall back to CPU.
+This will be at least twice as fast as `--device cpu`. The fallback environment variable is necessary as `aten::max_unpool2d` is not yet implemented for MPS and will fall back to CPU.

 ## Windows
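A minimal, self-contained sketch of the MPS fallback behavior that the last patch documents (this sketch is not part of the patches above or the FastSurfer codebase; the `pick_device` helper is hypothetical, and the only assumption is a `torch>=2.0` install from the requirements):

```python
import os
import torch

def pick_device(requested: str = "mps") -> torch.device:
    # Mirrors what passing --device mps requests: use the Apple Silicon GPU
    # when PyTorch reports MPS as available, otherwise fall back to the CPU.
    if requested == "mps" and torch.backends.mps.is_available():
        return torch.device("mps")
    return torch.device("cpu")

device = pick_device()
x = torch.rand(1, 3, 8, 8, device=device)
print(f"running on {device}, fallback={os.environ.get('PYTORCH_ENABLE_MPS_FALLBACK')}")

# Ops without an MPS kernel (such as aten::max_unpool2d, mentioned above)
# raise NotImplementedError on the mps device unless PYTORCH_ENABLE_MPS_FALLBACK=1
# was exported before starting Python; with the fallback set, PyTorch runs
# them on the CPU and copies the result back to the device.
```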