From f6aefb9ff9f25920ebd4e4f4ae8d2a26b5c48838 Mon Sep 17 00:00:00 2001 From: engrosamaali91 Date: Wed, 14 Feb 2024 17:39:23 +0100 Subject: [PATCH 1/2] Recon_surf and CerebNet API's --- CerebNet/apply_warp.py | 38 +++++++- CerebNet/inference.py | 12 +-- CerebNet/run_prediction.py | 30 +++++++ doc/api/index.rst | 2 +- doc/api/recon_surf.rst | 11 +++ doc/conf.py | 4 - recon_surf/N4_bias_correct.py | 105 ++++++++++++---------- recon_surf/fs_balabels.py | 24 ++--- recon_surf/lta.py | 18 ++-- recon_surf/map_surf_label.py | 69 +++++++------- recon_surf/paint_cc_into_pred.py | 32 ++++--- recon_surf/rewrite_mc_surface.py | 19 ++-- recon_surf/rotate_sphere.py | 24 ++--- recon_surf/sample_parc.py | 42 +++++---- recon_surf/smooth_aparc.py | 50 ++++++----- recon_surf/spherically_project_wrapper.py | 31 +++---- 16 files changed, 305 insertions(+), 206 deletions(-) diff --git a/CerebNet/apply_warp.py b/CerebNet/apply_warp.py index 5518b363..9bc2cb0c 100644 --- a/CerebNet/apply_warp.py +++ b/CerebNet/apply_warp.py @@ -14,20 +14,52 @@ # limitations under the License. # IMPORTS -from os.path import join import numpy as np import nibabel as nib +from os.path import join from CerebNet.datasets import utils def save_nii_image(img_data, save_path, header, affine): + """ + Save an image data array as a NIfTI file. + + Parameters + ---------- + img_data : ndarray + The image data to be saved. + save_path : str + The path (including file name) where the image will be saved. + header : nibabel.Nifti1Header + The header information for the NIfTI file. + affine : ndarray + The affine matrix for the NIfTI file. + """ + img_out = nib.Nifti1Image(img_data, header=header, affine=affine) print(f"Saving {save_path}") nib.save(img_out, save_path) -def store_warped_data(img_path, lbl_path, warp_path, result_path, patch_size): +def main(img_path, lbl_path, warp_path, result_path, patch_size): + + """ + Load, warp, crop, and save both an image and its corresponding label based on a given warp field. 
+ + Parameters + ---------- + img_path : str + Path to the T1-weighted MRI image to be warped. + lbl_path : str + Path to the label image corresponding to the T1 image, to be warped similarly. + warp_path : str + Path to the warp field file used to warp the images. + result_path : str + Directory path where the warped and cropped images will be saved. + patch_size : tuple of int + The dimensions (height, width, depth) cropped images after warping. + """ img, img_file = utils.load_reorient_rescale_image(img_path) @@ -78,7 +110,7 @@ def store_warped_data(img_path, lbl_path, warp_path, result_path, patch_size): args = parser.parse_args() warp_path = join(args.result_path, args.warp_filename) - store_warped_data(args.img_path, + main(args.img_path, args.lbl_path, warp_path=warp_path, result_path=args.result_path, diff --git a/CerebNet/inference.py b/CerebNet/inference.py index b470d630..d7165b0c 100644 --- a/CerebNet/inference.py +++ b/CerebNet/inference.py @@ -14,17 +14,16 @@ # IMPORTS import time +import nibabel as nib +import numpy as np +import torch + from os import makedirs from os.path import join, dirname, isfile from typing import Dict, List, Tuple, Optional from concurrent.futures import Future, ThreadPoolExecutor - -import nibabel as nib -import numpy as np -import torch from torch.utils.data import DataLoader from tqdm import tqdm - from FastSurferCNN.utils import logging from FastSurferCNN.utils.threads import get_num_threads from FastSurferCNN.utils.mapper import JsonColorLookupTable, TSVLookupTable @@ -44,6 +43,9 @@ class Inference: + """ + Manages inference operations, including batch processing, data loading, and model predictions for neuroimaging data. 
+ """ def __init__( self, cfg: "yacs.ConfigNode", diff --git a/CerebNet/run_prediction.py b/CerebNet/run_prediction.py index 5eae1776..62f6c6a3 100644 --- a/CerebNet/run_prediction.py +++ b/CerebNet/run_prediction.py @@ -30,6 +30,14 @@ def setup_options(): + """ + Configure and return an argument parser for the segmentation script. + + Returns + ------- + argparse.ArgumentParser + The configured argument parser. + """ # Training settings parser = argparse.ArgumentParser(description="Segmentation") @@ -94,6 +102,28 @@ def setup_options(): def main(args): + """ + Main function to run the inference based on the given command line arguments. + This implementation is inspired by methods described in CerebNet for cerebellum sub-segmentation. + + Parameters + ---------- + args : argparse.Namespace + Command line arguments parsed by `argparse.ArgumentParser`. + + Returns + ------- + int + Returns 0 upon successful execution to indicate success. + str + A message indicating the failure reason in case of an exception. + + References + ---------- + Faber J, Kuegler D, Bahrami E, et al. CerebNet: A fast and reliable deep-learning + pipeline for detailed cerebellum sub-segmentation. NeuroImage 264 (2022), 119703. 
+ https://doi.org/10.1016/j.neuroimage.2022.119703 + """ cfg = get_config(args) cfg.TEST.ENABLE = True cfg.TRAIN.ENABLE = False diff --git a/doc/api/index.rst b/doc/api/index.rst index 40ac8b38..be199570 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -9,4 +9,4 @@ FastSurfer API FastSurferCNN.utils.rst FastSurferCNN.data_loader.rst recon_surf.rst - + CerebNet.rst \ No newline at end of file diff --git a/doc/api/recon_surf.rst b/doc/api/recon_surf.rst index 4e683970..44febda7 100644 --- a/doc/api/recon_surf.rst +++ b/doc/api/recon_surf.rst @@ -10,6 +10,17 @@ API recon_surf References align_points align_seg create_annotation + fs_balabels + lta + map_surf_label + N4_bias_correct + paint_cc_into_pred + rewrite_mc_surface + rotate_sphere + sample_parc + smooth_aparc + spherically_project_wrapper + diff --git a/doc/conf.py b/doc/conf.py index 6a8c744f..289c92c1 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -21,10 +21,6 @@ import os sys.path.append(os.path.dirname(__file__) + '/..') sys.path.append(os.path.dirname(__file__) + '/../recon_surf') -# sys.path.insert(0, '..') #after - -# autodoc_mock_imports = ["torch", "yacs"] - project = 'FastSurfer' copyright = '2023, Martin Reuter' diff --git a/recon_surf/N4_bias_correct.py b/recon_surf/N4_bias_correct.py index 252cdf6c..87e56f96 100644 --- a/recon_surf/N4_bias_correct.py +++ b/recon_surf/N4_bias_correct.py @@ -15,19 +15,21 @@ # IMPORTS +# Group 1: Python native modules import argparse +import logging import sys from pathlib import Path from typing import Optional, cast, Tuple -import logging +# Group 2: External modules import SimpleITK as sitk import numpy as np from numpy import typing as npt +# Group 3: Internal modules import image_io as iio - HELPTEXT = """ Script to call SITK N4 Bias Correction @@ -100,13 +102,13 @@ def options_parse(): - """Command line option parser. + """ + Command line option parser. Returns ------- options - object holding options - + Namespace object holding options. 
""" parser = argparse.ArgumentParser( description=HELPTEXT, @@ -146,28 +148,28 @@ def itk_n4_bfcorrection( numiter: int = 50, thres: float = 0.0, ) -> sitk.Image: - """Perform the bias field correction. + """ + Perform the bias field correction. Parameters ---------- itk_image : sitk.Image - n-dimensional image + N-dimensional image. itk_mask : Optional[sitk.Image] - Image mask. Defaults to None. Optional + Image mask. Defaults to None. Optional. shrink : int - Shrink factors. Defaults to 4 + Shrink factors. Defaults to 4. levels : int - Number of levels for maximum number of iterations. Defaults to 4 + Number of levels for maximum number of iterations. Defaults to 4. numiter : int - Maximum number if iterations. Defaults 50 + Maximum number if iterations. Defaults 50. thres : float - Convergence threshold. Defaults to 0.0 + Convergence threshold. Defaults to 0.0. Returns ------- itk_bfcorr_image - Bias field corrected image - + Bias field corrected image. """ _logger = logging.getLogger(__name__ + ".itk_n4_bfcorrection") # if no mask is passed, create a simple mask from the image @@ -214,28 +216,28 @@ def normalize_wm_mask_ball( target_wm: float = 110., target_bg: float = 3. ) -> sitk.Image: - """Normalize WM image by Mask and optionally ball around talairach center. + """ + Normalize WM image by Mask and optionally ball around talairach center. Parameters ---------- itk_image : sitk.Image - n-dimensional itk image + N-dimensional itk image. itk_mask : sitk.Image, optional Image mask. radius : float | int - Defaults to 50 [MISSING] + Defaults to 50 [MISSING]. centroid : np.ndarray - brain centroid. + Brain centroid. target_wm : float | int Target white matter intensity. Defaults to 110. target_bg : float | int - target background intensity. Defaults to 3 (1% of 255) + Target background intensity. Defaults to 3 (1% of 255). Returns ------- normalized_image : sitk.Image - Normalized WM image - + Normalized WM image. 
""" _logger = logging.getLogger(__name__ + ".normalize_wm_mask_ball") _logger.info(f"- centroid: {centroid}") @@ -278,30 +280,31 @@ def normalize_wm_aseg( target_wm: float = 110., target_bg: float = 3. ) -> sitk.Image: - """Normalize WM image [MISSING]. + """ + Normalize WM image so the white matter has a mean + intensity of target_wm and the backgroud has intensity target_bg. Parameters ---------- itk_image : sitk.Image - n-dimensional itk image + N-dimensional itk image. itk_mask : sitk.Image | None Image mask. itk_aseg : sitk.Image - aseg-like segmentation image to find WM. + Aseg-like segmentation image to find WM. radius : float | int - Defaults to 50 [MISSING] + Defaults to 50 [MISSING]. centroid : Optional[np.ndarray] - Image centroid. Defaults to None + Image centroid. Defaults to None. target_wm : float | int - target white matter intensity. Defaults to 110 + Target white matter intensity. Defaults to 110. target_bg : float | int - target background intensity Defaults to 3 (1% of 255) + Target background intensity Defaults to 3 (1% of 255). Returns ------- normed : sitk.Image - Normalized WM image - + Normalized WM image. """ _logger = logging.getLogger(__name__ + ".normalize_wm_aseg") @@ -325,8 +328,8 @@ def normalize_wm_aseg( def normalize_img( itk_image: sitk.Image, itk_mask: Optional[sitk.Image], - source_intensity: Tuple[float, float], - target_intensity: Tuple[float, float] + source_intensity: tuple[float, float], + target_intensity: tuple[float, float] ) -> sitk.Image: """ Normalize image by source and target intensity values. @@ -334,13 +337,19 @@ def normalize_img( Parameters ---------- itk_image : sitk.Image + Input image to be normalized. itk_mask : sitk.Image | None - source_intensity : Tuple[float, float] - target_intensity : Tuple[float, float] + Brain mask, voxels inside the mask are guaranteed to be > 0, + None is optional. + source_intensity : tuple[float, float] + Source intensity range. 
+ target_intensity : tuple[float, float] + Target intensity range. Returns ------- - Rescaled image + sitk.Image + Rescaled image. """ _logger = logging.getLogger(__name__ + ".normalize_wm") # compute intensity transformation @@ -359,23 +368,23 @@ def normalize_img( def read_talairach_xfm(fname: Path | str) -> np.ndarray: - """Read Talairach transform. + """ + Read Talairach transform. Parameters ---------- fname : str - Filename to Talairach transform + Filename to Talairach transform. Returns ------- tal - Talairach transform matrix + Talairach transform matrix. Raises ------ ValueError if the file is of an invalid format. - """ _logger = logging.getLogger(__name__ + ".read_talairach_xfm") _logger.info(f"reading talairach transform from {fname}") @@ -398,7 +407,8 @@ def read_talairach_xfm(fname: Path | str) -> np.ndarray: def get_tal_origin_voxel(tal: npt.ArrayLike, image: sitk.Image) -> np.ndarray: - """Get the origin of Talairach space in voxel coordinates. + """ + Get the origin of Talairach space in voxel coordinates. Parameters ---------- @@ -410,8 +420,7 @@ def get_tal_origin_voxel(tal: npt.ArrayLike, image: sitk.Image) -> np.ndarray: Returns ------- vox_origin : np.ndarray - Voxel coordinate of Talairach origin - + Voxel coordinate of Talairach origin. """ tal_inv = np.linalg.inv(tal) tal_origin = np.array(tal_inv[0:3, 3]).ravel() @@ -453,13 +462,14 @@ def get_image_mean(image: sitk.Image, mask: Optional[sitk.Image] = None) -> floa Parameters ---------- image : sitk.Image - image to get mean of + Image to get mean of. mask : sitk.Image, optional - optional mask to apply first + Optional mask to apply first. Returns ------- mean : float + The mean value of the image. 
""" img = sitk.GetArrayFromImage(image) if mask is not None: @@ -470,16 +480,17 @@ def get_image_mean(image: sitk.Image, mask: Optional[sitk.Image] = None) -> floa def get_brain_centroid(itk_mask: sitk.Image) -> np.ndarray: """ - Get the brain centroid from the itk_mask + Get the brain centroid from a binary image. Parameters ---------- itk_mask : sitk.Image + Binary image to compute the centroid of its labeled region. Returns ------- - brain centroid - + np.ndarray + Brain centroid. """ _logger = logging.getLogger(__name__ + ".get_brain_centroid") _logger.debug("No talairach center passed, estimating center from mask.") diff --git a/recon_surf/fs_balabels.py b/recon_surf/fs_balabels.py index 69cb11ab..d19ddb4b 100755 --- a/recon_surf/fs_balabels.py +++ b/recon_surf/fs_balabels.py @@ -74,13 +74,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. """ parser = optparse.OptionParser( version="$Id:fs_balabels.py,v 1.0 2022/08/24 21:22:08 mreuter Exp $", @@ -110,27 +110,27 @@ def read_colortables( colappend: List[str], drop_unknown: bool = True ) -> Tuple[List, List, List]: - """Read multiple colortables and appends extensions, drops unknown by default. + """ + Read multiple colortables and appends extensions, drops unknown by default. Parameters ---------- colnames : List[str] - List of color-names + List of color-names. colappend : List[str] - List of appends for names + List of appends for names. drop_unknown : bool True if unknown colors should be dropped. - Defaults to True + Defaults to True. Returns ------- all_ids - List of all ids + List of all ids. all_names - List of all names + List of all names. all_cols - List of all colors - + List of all colors. 
""" pos = 0 all_names = [] diff --git a/recon_surf/lta.py b/recon_surf/lta.py index dd38e1dc..0ce1b682 100755 --- a/recon_surf/lta.py +++ b/recon_surf/lta.py @@ -28,28 +28,28 @@ def writeLTA( dst_fname: str, dst_header: Dict ) -> None: - """Write linear transform array info to a .lta file. + """ + Write linear transform array info to an .lta file. Parameters ---------- filename : str - File to write on + File to write on. T : npt.ArrayLike - Linear transform array to be saved + Linear transform array to be saved. src_fname : str - Source filename + Source filename. src_header : Dict - Source header + Source header. dst_fname : str - Destination filename + Destination filename. dst_header : Dict - Destination header + Destination header. Raises ------ ValueError - Header format missing field (Source or Destination) - + Header format missing field (Source or Destination). """ from datetime import datetime import getpass diff --git a/recon_surf/map_surf_label.py b/recon_surf/map_surf_label.py index 0e6b6737..404f7b0b 100755 --- a/recon_surf/map_surf_label.py +++ b/recon_surf/map_surf_label.py @@ -59,13 +59,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. """ parser = optparse.OptionParser( version="$Id:map_surf_label.py,v 1.0 2022/08/24 21:22:08 mreuter Exp $", @@ -99,30 +99,30 @@ def writeSurfLabel( values: npt.NDArray, surf: npt.NDArray ) -> None: - """Write a FS surface label file to filename (e.g. lh.labelname.label). + """ + Write a FS surface label file to filename (e.g. lh.labelname.label). Stores sid string in the header, then number of vertices and table of vertex index, RAS wm-surface coords (taken from surf) - and values (which can be zero) + and values (which can be zero). 
Parameters ---------- filename : str - File there surface label is written + File there surface label is written. sid : str - Subject id + Subject id. label : npt.NDArray[str] - List of label names + List of label names. values : npt.NDArray - List of values + List of values. surf : npt.NDArray - Surface coordinations + Surface coordinations. Raises ------ ValueError - Label and values should have same sizes - + If label and values are not the same size. """ if values is None: values = np.zeros(label.shape) @@ -151,32 +151,32 @@ def getSurfCorrespondence( trg_sphere: Union[str, Tuple, np.ndarray], tree: Optional[KDTree] = None ) -> Tuple[np.ndarray, np.ndarray, KDTree]: - """For each vertex in src_sphere find the closest vertex in trg_sphere. + """ + For each vertex in src_sphere find the closest vertex in trg_sphere. - Spheres are Nx3 arrays of coordinates on the sphere (usually R=100 FS format) + Spheres are Nx3 arrays of coordinates on the sphere (usually R=100 FS format). *_sphere can also be a file name of the sphere.reg files, then we load it. - The KDtree can be passed in cases where src moves around and trg stays fixed + The KDtree can be passed in cases where src moves around and trg stays fixed. Parameters ---------- src_sphere : Union[str, Tuple, np.ndarray] Either filepath (as str) or surface vertices - of source sphere + of source sphere. trg_sphere : Union[str, Tuple, np.ndarray] Either filepath (as str) or surface vertices - of target sphere + of target sphere. tree : Optional[KDTree] - Defaults to None + Defaults to None. Returns ------- mapping : np.ndarray - Surface mapping of the trg surface + Surface mapping of the trg surface. distances : np.ndarray - Surface distance of the trg surface + Surface distance of the trg surface. tree : KDTree - KDTree of the trg surface - + KDTree of the trg surface. 
""" # We can also work with file names instead of surface vertices if isinstance(src_sphere, str): @@ -204,36 +204,37 @@ def mapSurfLabel( trg_sid: str, rev_mapping: np.ndarray ) -> Tuple[np.ndarray, np.ndarray]: - """Map a label from src surface according to the correspondence. + """ + Map a label from src surface according to the correspondence. trg_surf is passed so that the label file will list the - correct coordinates (usually the white surface), can be vetrices or filename + correct coordinates (usually the white surface), can be vetrices or filename. Parameters ---------- src_label_name : str - Path to label file of source + Path to label file of source. out_label_name : str - Path to label file of output + Path to label file of output. trg_surf : Union[str, np.ndarray] - Numpy array of vertex coordinates or filepath to target surface + Numpy array of vertex coordinates or filepath to target surface. trg_sid : str - Subject id of the target subject (as stored in the output label file header) + Subject id of the target subject (as stored in the output label file header). rev_mapping : np.ndarray - a mapping from target to source, listing the corresponding src vertex for each vertex on the trg surface + A mapping from target to source, listing the corresponding src vertex + for each vertex on the trg surface. Returns ------- trg_label : np.ndarray - target labels + Target labels. trg_values : np.ndarray - target values + Target values. Raises ------ ValueError - Label and trg vertices should have same sizes - + If label and trg vertices are not of same sizes. 
""" print("Mapping label: {} ...".format(src_label_name)) src_label, src_values = fs.read_label(src_label_name, read_scalars=True) diff --git a/recon_surf/paint_cc_into_pred.py b/recon_surf/paint_cc_into_pred.py index eb805978..e81f5ad9 100644 --- a/recon_surf/paint_cc_into_pred.py +++ b/recon_surf/paint_cc_into_pred.py @@ -14,11 +14,12 @@ # IMPORTS -import numpy as np -from numpy import typing as npt -import nibabel as nib + import sys import argparse +import numpy as np +import nibabel as nib +from numpy import typing as npt HELPTEXT = """ Script to add corpus callosum segmentation (CC, FreeSurfer IDs 251-255) to @@ -42,13 +43,13 @@ def argument_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. """ parser = argparse.ArgumentParser(usage=HELPTEXT) parser.add_argument( @@ -79,22 +80,22 @@ def argument_parse(): def paint_in_cc(pred: npt.ArrayLike, aseg_cc: npt.ArrayLike) -> npt.ArrayLike: - """Paint corpus callosum segmentation into prediction. + """ + Paint corpus callosum segmentation into prediction. Note, that this function modifies the original array and does not create a copy. Parameters ---------- - pred : npt.ArrayLike - Deep-learning prediction + asegdkt : npt.ArrayLike + Deep-learning segmentation map. aseg_cc : npt.ArrayLike - Aseg segmentation with CC + Aseg segmentation with CC. Returns ------- - pred - Prediction with added CC - + asegdkt + Segmentation map with added CC. """ cc_mask = (aseg_cc >= 251) & (aseg_cc <= 255) pred[cc_mask] = aseg_cc[cc_mask] @@ -115,3 +116,6 @@ def paint_in_cc(pred: npt.ArrayLike, aseg_cc: npt.ArrayLike) -> npt.ArrayLike: pred_with_cc_fin.to_filename(options.output) sys.exit(0) + + +# TODO: Rename the file (paint_cc_into_asegdkt or similar) and functions. 
diff --git a/recon_surf/rewrite_mc_surface.py index 8372f94f..bfc0cf93 100644 --- a/recon_surf/rewrite_mc_surface.py +++ b/recon_surf/rewrite_mc_surface.py @@ -21,14 +21,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- options - object holding options - - + Namespace object holding options. """ parser = optparse.OptionParser( version="$Id: rewrite_mc_surface,v 1.1 2020/06/23 15:42:08 henschell $", @@ -54,19 +53,19 @@ def options_parse(): def resafe_surface(insurf: str, outsurf: str, pretess: str) -> None: - """Take path to insurf and rewrite it to outsurf thereby fixing vertex locs flag error. + """ + Take path to insurf and rewrite it to outsurf thereby fixing vertex locs flag error. - (scannerRAS instead of surfaceRAS after marching cube) + (scannerRAS instead of surfaceRAS after marching cube). Parameters ---------- insurf : str - Path and name of input surface + Path and name of input surface. outsurf : str - Path and name of output surface + Path and name of output surface. pretess : str - Path and name of file the input surface was created on (e.g. filled-pretess127.mgz) - + Path and name of file the input surface was created on (e.g. filled-pretess127.mgz). """ surf = fs.read_geometry(insurf, read_metadata=True) diff --git a/recon_surf/rotate_sphere.py b/recon_surf/rotate_sphere.py index 2d61f0c9..796eb04c 100644 --- a/recon_surf/rotate_sphere.py +++ b/recon_surf/rotate_sphere.py @@ -18,10 +18,12 @@ # IMPORTS import optparse -import numpy as np -from numpy import typing as npt import sys + +import numpy as np +from numpy import typing as npt import nibabel.freesurfer.io as fs + import align_points as align @@ -66,13 +68,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. 
Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. """ parser = optparse.OptionParser( version="$Id: rotate_sphere.py,v 1.0 2022/03/18 21:22:08 mreuter Exp $", @@ -106,7 +108,8 @@ def align_aparc_centroids( labels_dst: npt.ArrayLike, label_ids: npt.ArrayLike = [] ) -> np.ndarray: - """Align centroid of aparc parcels on the sphere (Attention mapping back to sphere!). + """ + Align centroid of aparc parcels on the sphere (Attention mapping back to sphere!). Parameters ---------- @@ -119,13 +122,12 @@ def align_aparc_centroids( labels_dst : npt.ArrayLike Labels of aparc parcelation for rotation destination. label_ids : npt.ArrayLike - Ids of the centroid to be aligned. Defaults to [] + Ids of the centroid to be aligned. Defaults to []. Returns ------- - R - Rotation Matrix - + R : npt.NDArray[float] + Rotation Matrix. """ #nferiorparietal, inferiortemporal, lateraloccipital, postcentral, posteriorsingulate # precentral, precuneus, superiorfrontal, supramarginal diff --git a/recon_surf/sample_parc.py b/recon_surf/sample_parc.py index 322fa2e7..71850d4f 100644 --- a/recon_surf/sample_parc.py +++ b/recon_surf/sample_parc.py @@ -19,13 +19,15 @@ # IMPORTS import optparse import sys + import numpy as np -from numpy import typing as npt import nibabel.freesurfer.io as fs import nibabel as nib +from numpy import typing as npt from scipy import sparse from scipy.sparse.csgraph import connected_components from lapy import TriaMesh + from smooth_aparc import smooth_aparc @@ -64,13 +66,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. 
""" parser = optparse.OptionParser( version="$Id: smooth_aparc,v 1.0 2018/06/24 11:34:08 mreuter Exp $", @@ -101,7 +103,8 @@ def options_parse(): return options def construct_adj_cluster(tria, annot): - """Adjacency matrix of edges from same annotation label only. + """ + Compute adjacency matrix of edges from same annotation label only. Operates only on triangles and removes edges that cross annotation label boundaries. @@ -132,7 +135,8 @@ def construct_adj_cluster(tria, annot): return sparse.csc_matrix((dat, (i, j)), shape=(n, n)) def find_all_islands(surf, annot): - """Find vertices in disconnected islands for all labels in surface annotation. + """ + Find vertices in disconnected islands for all labels in surface annotation. Parameters ---------- @@ -169,7 +173,8 @@ def find_all_islands(surf, annot): return vidx def sample_nearest_nonzero(img, vox_coords, radius=3.0): - """Sample closest non-zero value in a ball of radius around vox_coords. + """ + Sample closest non-zero value in a ball of radius around vox_coords. Parameters ---------- @@ -182,8 +187,8 @@ def sample_nearest_nonzero(img, vox_coords, radius=3.0): Returns ------- - samples : np.ndarray (n,) - Sampled values. Retruns zero for vertices where values are zero in ball. + samples : np.ndarray(n,) + Sampled values, returns zero for vertices where values are zero in ball. """ # check for isotropic voxels voxsize = img.header.get_zooms() @@ -248,7 +253,8 @@ def sample_nearest_nonzero(img, vox_coords, radius=3.0): def sample_img(surf, img, cortex=None, projmm=0.0, radius=None): - """Sample volume at a distance from the surface. + """ + Sample volume at a distance from the surface. Parameters ---------- @@ -261,10 +267,10 @@ def sample_img(surf, img, cortex=None, projmm=0.0, radius=None): Image to sample. If type is str, read image from file. cortex : np.ndarray | str - Filename of cortex label or np.array with cortex indices + Filename of cortex label or np.array with cortex indices. 
projmm : float Sample projmm mm along the surface vertex normals (default=0). - radius : float [optional] | None + radius : float, optional If given and if the sample is equal to zero, then consider all voxels inside this radius to find a non-zero value. @@ -326,7 +332,8 @@ def sample_img(surf, img, cortex=None, projmm=0.0, radius=None): def replace_labels(img_labels, img_lut, surf_lut): - """Replace image labels with corresponding surface labels or unknown. + """ + Replace image labels with corresponding surface labels or unknown. Parameters ---------- @@ -361,7 +368,8 @@ def replace_labels(img_labels, img_lut, surf_lut): def sample_parc (surf, seg, imglut, surflut, outaparc, cortex=None, projmm=0.0, radius=None): - """Sample cortical GM labels from image to surface and smooth. + """ + Sample cortical GM labels from image to surface and smooth. Parameters ---------- @@ -380,10 +388,10 @@ def sample_parc (surf, seg, imglut, surflut, outaparc, cortex=None, projmm=0.0, outaparc : str Filename for output surface parcellation. cortex : np.ndarray | str - Filename of cortex label or np.ndarray with cortex indices + Filename of cortex label or np.ndarray with cortex indices. projmm : float Sample projmm mm along the surface vertex normals (default=0). - radius : float [optional] | None + radius : float, optional If given and if the sample is equal to zero, then consider all voxels inside this radius to find a non-zero value. """ diff --git a/recon_surf/smooth_aparc.py b/recon_surf/smooth_aparc.py index 672f15c0..6ace3361 100644 --- a/recon_surf/smooth_aparc.py +++ b/recon_surf/smooth_aparc.py @@ -20,8 +20,8 @@ import optparse import sys import numpy as np -from numpy import typing as npt import nibabel.freesurfer.io as fs +from numpy import typing as npt from scipy import sparse @@ -54,13 +54,13 @@ def options_parse(): - """Command line option parser. + """ + Create a command line interface and return command line options. 
Returns ------- options - object holding options - + Namespace object holding options. """ parser = optparse.OptionParser( version="$Id: smooth_aparc,v 1.0 2018/06/24 11:34:08 mreuter Exp $", @@ -78,12 +78,14 @@ def options_parse(): return options -def get_adjM(trias: npt.NDArray, n: int): - """Create symmetric sparse adjacency matrix of triangle mesh. +def get_adjM(trias: npt.NDArray[int], n: int): + """ + Create symmetric sparse adjacency matrix of triangle mesh. Parameters ---------- - trias : np.ndarray shape (m, 3) int + trias : npt.NDArray[int](m, 3) + Triangle mesh matrix. n : int Shape of output (n,n) adjaceny matrix, where n>=m. @@ -92,7 +94,6 @@ def get_adjM(trias: npt.NDArray, n: int): ------- adjM : np.ndarray (bool) shape (n,n) Symmetric sparse CSR adjacency matrix, true corresponds to an edge. - """ I = trias J = I[:, [1, 2, 0]] @@ -108,7 +109,8 @@ def get_adjM(trias: npt.NDArray, n: int): def bincount2D_vectorized(a: npt.NDArray) -> np.ndarray: - """Count number of occurrences of each value in array of non-negative ints. + """ + Count number of occurrences of each value in array of non-negative ints. Parameters ---------- @@ -119,7 +121,6 @@ def bincount2D_vectorized(a: npt.NDArray) -> np.ndarray: ------- np.ndarray Array of counted values. - """ N = a.max() + 1 a_offs = a + np.arange(a.shape[0])[:, None] * N @@ -132,15 +133,16 @@ def mode_filter( fillonlylabel = None, novote: npt.ArrayLike = [] ) -> npt.NDArray[int]: - """Apply mode filter (smoothing) to integer labels on mesh vertices. + """ + Apply mode filter (smoothing) to integer labels on mesh vertices. Parameters ---------- adjM : sparse.csr_matrix[bool] - Symmetric adjacency matrix defining edges between vertices. - This determines what edges can vote so usually one adds the + Symmetric adjacency matrix defining edges between vertices, + this determines what edges can vote so usually one adds the identity to the adjacency matrix so that each vertex is included - in its own vote. 
+ in its own vote. labels : npt.NDArray[int] List of integer labels at each vertex of the mesh. fillonlylabel : int @@ -152,7 +154,6 @@ def mode_filter( ------- labels_new : npt.NDArray[int] New smoothed labels. - """ # make sure labels lengths equals adjM dimension n = labels.shape[0] @@ -250,7 +251,8 @@ def mode_filter( def smooth_aparc(surf, labels, cortex = None): - """Smoothes aparc and fills holes. + """ + Smooth aparc label regions on the surface and fill holes. First all labels with 0 and -1 unside cortex are filled via repeated mode filtering, then all labels are smoothed first with a wider and @@ -362,25 +364,25 @@ def smooth_aparc(surf, labels, cortex = None): return labels -def smooth_aparc_files( +def main( insurfname: str, inaparcname: str, incortexname: str, outaparcname: str ) -> None: - """Smoothes aparc. + """ + Read files, smooth the aparc labels on the surface and save the smoothed labels. Parameters ---------- insurfname : str - Suface filepath and name of source + Suface filepath and name of source. inaparcname : str - Annotation filepath and name of source + Annotation filepath and name of source. incortexname : str - Label filepath and name of source + Label filepath and name of source. outaparcname : str - Suface filepath and name of destination - + Surface filepath and name of destination. 
""" # read input files print("Reading in surface: {} ...".format(insurfname)) @@ -400,6 +402,6 @@ def smooth_aparc_files( # Command Line options are error checking done here options = options_parse() - smooth_aparc_files(options.insurf, options.inaparc, options.incort, options.outaparc) + main(options.insurf, options.inaparc, options.incort, options.outaparc) sys.exit(0) diff --git a/recon_surf/spherically_project_wrapper.py b/recon_surf/spherically_project_wrapper.py index e65904a7..a94192f2 100644 --- a/recon_surf/spherically_project_wrapper.py +++ b/recon_surf/spherically_project_wrapper.py @@ -14,20 +14,20 @@ # IMPORTS -from subprocess import Popen, PIPE import shlex import argparse from typing import Any +from subprocess import Popen, PIPE def setup_options(): - """Command line option parser. + """ + Create a command line interface and return command line options. Returns ------- - options - object holding options - + options : argparse.Namespace + Namespace object holding options. """ # Validation settings parser = argparse.ArgumentParser(description="Wrapper for spherical projection") @@ -46,22 +46,23 @@ def setup_options(): def call(command: str, **kwargs: Any) -> int: - """Run command with arguments. + """ + Run command with arguments. Wait for command to complete. Sends output to logging module. Parameters ---------- command : str - Command to call + Command to call. **kwargs : Any + Keyword arguments. Returns ------- int - Returncode of called command - + Returncode of called command. """ kwargs["stdout"] = PIPE kwargs["stderr"] = PIPE @@ -78,22 +79,22 @@ def call(command: str, **kwargs: Any) -> int: def spherical_wrapper(command1: str, command2: str, **kwargs: Any) -> int: - """Run the first command. If it fails the fallback command is run as well. + """ + Run the first command. If it fails the fallback command is run instead. Parameters ---------- command1 : str - Command to call + Command to call. 
command2 : str - Fallback command to call + Fallback command to call. **kwargs : Any - Arguments. The same as for the Popen constructor + Arguments. The same as for the Popen constructor. Returns ------- code_1 - Return code of command1. If command1 failed return code of command2 - + Return code of command1. If command1 failed return code of command2. """ # First try to run standard spherical project print("Running command: {}".format(command1)) From a93d76c21f1e86434fbae189c3f42c92cc587cf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20K=C3=BCgler?= Date: Wed, 21 Feb 2024 16:40:46 +0100 Subject: [PATCH 2/2] Update rewrite_mc_surface.py Update paint_cc_into_pred.py Update doc string --- recon_surf/paint_cc_into_pred.py | 2 +- recon_surf/rewrite_mc_surface.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/recon_surf/paint_cc_into_pred.py b/recon_surf/paint_cc_into_pred.py index e81f5ad9..1d41ae57 100644 --- a/recon_surf/paint_cc_into_pred.py +++ b/recon_surf/paint_cc_into_pred.py @@ -81,7 +81,7 @@ def argument_parse(): def paint_in_cc(pred: npt.ArrayLike, aseg_cc: npt.ArrayLike) -> npt.ArrayLike: """ - Paint corpus callosum segmentation into prediction. + Paint corpus callosum segmentation into aseg+dkt segmentation map. Note, that this function modifies the original array and does not create a copy. diff --git a/recon_surf/rewrite_mc_surface.py b/recon_surf/rewrite_mc_surface.py index bfc0cf93..bd13429e 100644 --- a/recon_surf/rewrite_mc_surface.py +++ b/recon_surf/rewrite_mc_surface.py @@ -56,7 +56,9 @@ def resafe_surface(insurf: str, outsurf: str, pretess: str) -> None: """ Take path to insurf and rewrite it to outsurf thereby fixing vertex locs flag error. - (scannerRAS instead of surfaceRAS after marching cube). + This function fixes header information not properly saved in marching cubes. + It makes sure the file header correctly references the scannerRAS instead of the surfaceRAS, + i.e. 
filename and volume are set to the correct data in the header.
 
     Parameters
     ----------