From f1943771af4bf1061f87dd79288fb464992e1b94 Mon Sep 17 00:00:00 2001 From: Jon Clucas Date: Mon, 8 Jul 2024 16:07:15 -0400 Subject: [PATCH] :recycle: Exclusively use custom `Function` Nodes + :rotating_light: Lint --- CPAC/anat_preproc/anat_preproc.py | 30 +++--- CPAC/anat_preproc/lesion_preproc.py | 28 ++++-- CPAC/anat_preproc/utils.py | 98 ++++++------------- .../distortion_correction.py | 14 +-- CPAC/distortion_correction/utils.py | 29 +++++- CPAC/easy_thresh/easy_thresh.py | 47 +++++---- CPAC/func_preproc/func_motion.py | 2 +- CPAC/func_preproc/func_preproc.py | 5 +- CPAC/group_analysis/group_analysis.py | 33 +++++-- .../longitudinal_preproc.py | 4 +- CPAC/median_angle/median_angle.py | 35 +++++-- CPAC/nuisance/nuisance.py | 13 +-- CPAC/nuisance/utils/utils.py | 2 +- CPAC/randomise/randomise.py | 10 +- CPAC/registration/output_func_to_standard.py | 6 +- CPAC/registration/registration.py | 57 +++++------ CPAC/reho/reho.py | 19 +++- CPAC/sca/sca.py | 20 ++-- CPAC/scrubbing/scrubbing.py | 49 ++++++---- CPAC/seg_preproc/seg_preproc.py | 38 ++++--- CPAC/surface/surf_preproc.py | 39 +++++--- CPAC/timeseries/timeseries_analysis.py | 8 +- CPAC/utils/interfaces/function/seg_preproc.py | 23 ++++- CPAC/utils/tests/test_datasource.py | 20 +++- CPAC/utils/utils.py | 10 +- 25 files changed, 385 insertions(+), 254 deletions(-) diff --git a/CPAC/anat_preproc/anat_preproc.py b/CPAC/anat_preproc/anat_preproc.py index 8e24b54b81..0f4e770f97 100644 --- a/CPAC/anat_preproc/anat_preproc.py +++ b/CPAC/anat_preproc/anat_preproc.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . -# from copy import deepcopy import os from nipype.interfaces import afni, ants, freesurfer, fsl @@ -36,6 +35,7 @@ ) from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge @@ -138,7 +138,7 @@ def acpc_alignment( aff_to_rig_imports = ["import os", "from numpy import *"] aff_to_rig = pe.Node( - util.Function( + Function( input_names=["in_xfm", "out_name"], output_names=["out_mat"], function=fsl_aff_to_rigid, @@ -319,7 +319,7 @@ def T1wmulT2w_brain_norm_s_string(sigma, in_file): return "-s %f -div %s" % (sigma, in_file) T1wmulT2w_brain_norm_s_string = pe.Node( - util.Function( + Function( input_names=["sigma", "in_file"], output_names=["out_str"], function=T1wmulT2w_brain_norm_s_string, @@ -378,7 +378,7 @@ def form_lower_string(mean, std): return "-thr %s -bin -ero -mul 255" % (lower) form_lower_string = pe.Node( - util.Function( + Function( input_names=["mean", "std"], output_names=["out_str"], function=form_lower_string, @@ -444,7 +444,7 @@ def file_to_a_list(infile_1, infile_2): return [infile_1, infile_2] file_to_a_list = pe.Node( - util.Function( + Function( input_names=["infile_1", "infile_2"], output_names=["out_list"], function=file_to_a_list, @@ -544,7 +544,7 @@ def afni_brain_connector(wf, cfg, strat_pool, pipe_num, opt): ) skullstrip_args = pe.Node( - util.Function( + Function( input_names=[ "spat_norm", "spat_norm_dxyz", @@ -762,7 +762,7 @@ def fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): anat_robustfov.inputs.output_type = "NIFTI_GZ" anat_pad_RobustFOV_cropped = pe.Node( - util.Function( + Function( input_names=["cropped_image_path", "target_image_path"], output_names=["padded_image_path"], function=pad, @@ -902,7 +902,7 @@ def unet_brain_connector(wf, cfg, strat_pool, pipe_num, opt): from 
CPAC.unet.function import predict_volumes unet_mask = pe.Node( - util.Function( + Function( input_names=["model_path", "cimg_in"], output_names=["out_path"], function=predict_volumes, @@ -1083,7 +1083,7 @@ def freesurfer_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # convert brain mask file from .mgz to .nii.gz fs_brain_mask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_brainmask_to_nifti_{pipe_num}", @@ -1119,7 +1119,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): Ref: https://github.com/DCAN-Labs/DCAN-HCP/blob/7927754/PostFreeSurfer/PostFreeSurferPipeline.sh#L151-L156 """ wmparc_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file", "reslice_like", "args"], output_names=["out_file"], function=mri_convert, @@ -1130,7 +1130,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # Register wmparc file if ingressing FreeSurfer data if strat_pool.check_rpool("pipeline-fs_xfm"): wmparc_to_native = pe.Node( - util.Function( + Function( input_names=["source_file", "target_file", "xfm", "out_file"], output_names=["transformed_file"], function=normalize_wmparc, @@ -1168,7 +1168,7 @@ def freesurfer_abcd_brain_connector(wf, cfg, strat_pool, pipe_num, opt): wf.connect(wmparc_to_nifti, "out_file", binary_mask, "in_file") wb_command_fill_holes = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=wb_command ), name=f"wb_command_fill_holes_{pipe_num}", @@ -1206,7 +1206,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/brainmask.mgz -ot nii brainmask.nii.gz convert_fs_brainmask_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_brainmask_to_nifti_{node_id}", @@ -1217,7 +1217,7 @@ def freesurfer_fsl_brain_connector(wf, cfg, strat_pool, pipe_num, opt): # mri_convert -it mgz ${SUBJECTS_DIR}/${subject}/mri/T1.mgz -ot nii T1.nii.gz convert_fs_T1_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"convert_fs_T1_to_nifti_{node_id}", @@ -2888,7 +2888,7 @@ def freesurfer_abcd_preproc(wf, cfg, strat_pool, pipe_num, opt=None): # fslmaths "$T1wImageFile"_1mm.nii.gz -div $Mean -mul 150 -abs "$T1wImageFile"_1mm.nii.gz normalize_head = pe.Node( - util.Function( + Function( input_names=["in_file", "number", "out_file_suffix"], output_names=["out_file"], function=fslmaths_command, diff --git a/CPAC/anat_preproc/lesion_preproc.py b/CPAC/anat_preproc/lesion_preproc.py index 2ef58c3d2a..07871ae32d 100644 --- a/CPAC/anat_preproc/lesion_preproc.py +++ b/CPAC/anat_preproc/lesion_preproc.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +# Copyright (C) 2019-2023 C-PAC Developers +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
+ +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import afni import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def inverse_lesion(lesion_path): - """ + """Replace non-zeroes with zeroes and zeroes with ones. + Check if the image contains more zeros than non-zeros, if so, replaces non-zeros by zeros and zeros by ones. @@ -38,13 +55,12 @@ def inverse_lesion(lesion_path): nii = nu.inverse_nifti_values(image=lesion_path) nib.save(nii, lesion_out) return lesion_out - else: - return lesion_out + return lesion_out def create_lesion_preproc(wf_name="lesion_preproc"): - """ - The main purpose of this workflow is to process lesions masks. + """Process lesions masks. + Lesion mask file is deobliqued and reoriented in the same way as the T1 in the anat_preproc function. @@ -95,7 +111,7 @@ def create_lesion_preproc(wf_name="lesion_preproc"): lesion_deoblique.inputs.deoblique = True lesion_inverted = pe.Node( - interface=util.Function( + interface=Function( input_names=["lesion_path"], output_names=["lesion_out"], function=inverse_lesion, diff --git a/CPAC/anat_preproc/utils.py b/CPAC/anat_preproc/utils.py index b3246fc41a..39904bbb66 100644 --- a/CPAC/anat_preproc/utils.py +++ b/CPAC/anat_preproc/utils.py @@ -1,73 +1,34 @@ # -*- coding: utf-8 -*- -from numpy import zeros -from nibabel import load as nib_load, Nifti1Image -import nipype.interfaces.utility as util - -from CPAC.pipeline import nipype_pipeline_engine as pe - - -def get_shape(nifti_image): - return nib_load(nifti_image).shape - - -def pad(cropped_image_path, target_image_path): - """ - Pad a cropped image to match the dimensions of a target image along the z-axis, - while keeping padded image aligned with target_image. - - Parameters - ---------- - - cropped_image_path (str): The file path to the cropped image (NIfTI format). - - target_image_path (str): The file path to the target image (NIfTI format). - - Returns - ------- - - str: The file path to the saved padded image (NIfTI format). +# Copyright (C) 2018-2023 C-PAC Developers - The function loads cropped and target iamges, calculates the z-dimension shift required for alignment such - that the mask generated from padded image will work correctly on the target image. The result padded image is - saved as an NIfTI file in the working directory/node and file path is returned as output. +# This file is part of C-PAC. - Note: The function assumes that the input images are in NIfTI format and have compatible dimensions. The cropped - and target image should only differ in z-axis dimension. - """ - from os import getcwd, path - from typing import Optional +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. - from numpy import asanyarray, ndarray, zeros_like - from nibabel import load, Nifti1Image, save +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. 
- cropped_image: Optional[ndarray] = asanyarray(load(cropped_image_path).dataobj) - target_image: Optional[ndarray] = asanyarray(load(target_image_path).dataobj) +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . +import os - # Taking 1 slice to calculate the z dimension shift from top - center_row: int = target_image.shape[0] // 2 - center_column: int = target_image.shape[1] // 2 - z_slice_cropped_image: Optional[ndarray] = cropped_image[ - center_row, center_column, : - ] - z_slice_target_image: Optional[ndarray] = target_image[center_row, center_column, :] - - for z_shift in range(len(z_slice_target_image) - len(z_slice_cropped_image) + 1): - if ( - z_slice_target_image[z_shift : z_shift + len(z_slice_cropped_image)] - == z_slice_cropped_image - ).all(): - break +from numpy import * +from nibabel import load as nib_load +from nipype.interfaces.base import CommandLineInputSpec, File, TraitedSpec +import nipype.interfaces.utility as util +from nipype.interfaces.workbench.base import WBCommand - padded_image_matrix: Optional[ndarray] = zeros_like(target_image) - padded_image_matrix[:, :, z_shift : cropped_image.shape[2] + z_shift] = ( - cropped_image - ) - padded_image_path: str = path.join(getcwd(), "padded_image_T1w.nii.gz") - cropped_image = load(cropped_image_path) - save( - Nifti1Image(padded_image_matrix, affine=cropped_image.affine), padded_image_path - ) - return padded_image_path +from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def get_shape(nifti_image): + """Return the shape of a NIfTI image.""" return nib_load(nifti_image).shape @@ -286,7 +247,7 @@ def split_hemi(multi_file): def split_hemi_interface() -> util.Function: """Return a function interface for split_hemi.""" - return util.Function( + return Function( input_names=["multi_file"], output_names=["lh", "rh"], function=split_hemi ) @@ -587,12 +548,9 @@ def normalize_wmparc(source_file, target_file, xfm, out_file): return os.path.join(os.getcwd(), out_file) -"""This module provides interfaces for workbench -volume-remove-islands commands""" -from nipype.interfaces.base import CommandLineInputSpec, File, TraitedSpec -from nipype.interfaces.workbench.base import WBCommand - - class VolumeRemoveIslandsInputSpec(CommandLineInputSpec): + """InputSpec for workbench -volume-remove-islands commands.""" + in_file = File( exists=True, mandatory=True, @@ -610,14 +568,14 @@ class VolumeRemoveIslandsInputSpec(CommandLineInputSpec): class VolumeRemoveIslandsOutputSpec(TraitedSpec): + """OutputSpec for workbench -volume-remove-islands commands.""" + out_file = File(exists=True, desc="the output ROI volume") class VolumeRemoveIslands(WBCommand): - """ - workbench - -volume-remove-islands - REMOVE ISLANDS FROM AN ROI VOLUME + """Remove islands from an ROI volume. + wb_command -volume-remove-islands - the input ROI volume - output - the output ROI volume.
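# --- Illustrative sketch, not part of this patch -----------------------------
# The recurring change in this diff swaps nipype.interfaces.utility.Function
# for C-PAC's drop-in Function interface. In isolation, the pattern looks like
# the snippet below; the toy helper and node names are hypothetical, and only
# the import paths and the call shape (input_names/output_names/function) are
# taken from the patch itself.
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.utils.interfaces import Function


def _double(x):
    """Toy helper used only for this sketch."""
    return 2 * x


double_node = pe.Node(
    Function(
        input_names=["x"],        # same keywords as util.Function
        output_names=["doubled"],
        function=_double,
    ),
    name="double_x",
)
double_node.inputs.x = 21
# double_node.run()  # executes like any util.Function node would
# ------------------------------------------------------------------------------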
diff --git a/CPAC/distortion_correction/distortion_correction.py b/CPAC/distortion_correction/distortion_correction.py index 79b8400bb1..91b379b0a7 100644 --- a/CPAC/distortion_correction/distortion_correction.py +++ b/CPAC/distortion_correction/distortion_correction.py @@ -131,7 +131,7 @@ def distcor_phasediff_fsl_fugue(wf, cfg, strat_pool, pipe_num, opt=None): == "AFNI" ): skullstrip_args = pe.Node( - util.Function( + Function( input_names=["shrink_fac"], output_names=["expr"], function=create_afni_arg, @@ -667,7 +667,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): "import sys", ] phase_encoding = pe.Node( - util.Function( + Function( input_names=[ "unwarp_dir", "phase_one", @@ -710,7 +710,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): topup_imports = ["import os", "import subprocess"] run_topup = pe.Node( - util.Function( + Function( input_names=["merged_file", "acqparams"], output_names=[ "out_fieldcoef", @@ -732,7 +732,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(phase_encoding, "acq_params", run_topup, "acqparams") choose_phase = pe.Node( - util.Function( + Function( input_names=["phase_imgs", "unwarp_dir"], output_names=["out_phase_image", "vnum"], function=choose_phase_image, @@ -746,7 +746,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, choose_phase, "unwarp_dir") vnum_base = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", @@ -797,7 +797,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): name = "PhaseTwo_aw" vnum_base_two = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", @@ -840,7 +840,7 @@ def distcor_blip_fsl_topup(wf, cfg, strat_pool, pipe_num, opt=None): name = "PhaseOne_aw" vnum_base_one = pe.Node( - util.Function( + Function( input_names=[ "vnum", "motion_mat_list", diff --git a/CPAC/distortion_correction/utils.py b/CPAC/distortion_correction/utils.py index 2b78dbfa4d..b76acba074 100644 --- a/CPAC/distortion_correction/utils.py +++ b/CPAC/distortion_correction/utils.py @@ -1,3 +1,19 @@ +# Copyright (C) 2021-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
import os import subprocess import sys @@ -12,6 +28,8 @@ import nipype.interfaces.utility as util from nipype.pipeline import engine as pe +from CPAC.utils.interfaces import Function + def run_HCP_gradient_unwarp(phase_vol, input_coeffs): import os @@ -49,7 +67,7 @@ def run_convertwarp(cw_trilinear, cw_fullWarp_abs): f"--warp1={cw_fullWarp_abs}", "--relout", f"--out={out_file}", - f"--j={jac_out}", + f"--j={out_jac}", ] subprocess.check_output(cmd) @@ -64,7 +82,7 @@ def gradient_distortion_correction(wf, inp_image, name): grad_unwarp_imports = ["import os", "import subprocess"] grad_unwarp = pe.Node( - util.Function( + Function( input_names=["phase_vol", "input_coeffs"], output_names=["trilinear", "abs_fullWarp"], function=run_HCP_gradient_unwarp, @@ -78,7 +96,7 @@ def gradient_distortion_correction(wf, inp_image, name): convertwarp_imports = ["import os", "import subprocess"] convert_warp = pe.Node( - util.Function( + Function( input_names=["cw_trilinear", "cw_fullWarp_abs"], output_names=["out_file_cw", "out_jac_cw"], function=run_convertwarp, @@ -248,8 +266,9 @@ def phase_encode( def z_pad(name="z_pad"): - """Pad in Z by one slice if odd so that topup does not complain - (slice consists of zeros that will be dilated by following step). + """Pad in Z by one slice if odd so that topup does not complain. + + (Slice consists of zeros that will be dilated by following step). """ wf = pe.Workflow(name=name) diff --git a/CPAC/easy_thresh/easy_thresh.py b/CPAC/easy_thresh/easy_thresh.py index d514d51c54..20918c08a9 100644 --- a/CPAC/easy_thresh/easy_thresh.py +++ b/CPAC/easy_thresh/easy_thresh.py @@ -1,3 +1,19 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import os import re import subprocess @@ -7,12 +23,11 @@ import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def easy_thresh(wf_name): - """ - Workflow for carrying out cluster-based thresholding - and colour activation overlaying. + """Carry out cluster-based thresholding and colour activation overlaying. 
Parameters ---------- @@ -213,7 +228,7 @@ def easy_thresh(wf_name): # or qform/sform info) from one image to another geo_imports = ["import subprocess"] copy_geometry = pe.MapNode( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=copy_geom, @@ -246,7 +261,7 @@ def easy_thresh(wf_name): cluster_imports = ["import os", "import re", "import subprocess"] cluster = pe.MapNode( - util.Function( + Function( input_names=[ "in_file", "volume", @@ -271,7 +286,7 @@ def easy_thresh(wf_name): # create tuple of z_threshold and max intensity value of threshold file create_tuple = pe.MapNode( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=get_tuple, @@ -299,7 +314,7 @@ def easy_thresh(wf_name): # as FSLDIR,MNI and voxel size get_bg_imports = ["import os", "import nibabel as nib"] get_backgroundimage = pe.MapNode( - util.Function( + Function( input_names=["in_file", "file_parameters"], output_names=["out_file"], function=get_standard_background_img, @@ -312,7 +327,7 @@ def easy_thresh(wf_name): # function node to get the standard fsl brain image # outputs single file get_backgroundimage2 = pe.Node( - util.Function( + Function( input_names=["in_file", "file_parameters"], output_names=["out_file"], function=get_standard_background_img, @@ -412,10 +427,9 @@ def call_cluster(in_file, volume, dlh, threshold, pthreshold, parameters): def copy_geom(infile_a, infile_b): - """ - Method to call fsl fslcpgeom command to copy - certain parts of the header information (image dimensions, - voxel dimensions, voxel dimensions units string, image + """Call fsl fslcpgeom command to copy certain parts of the header information. + + Copy (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) from one image to another. Parameters ---------- @@ -449,9 +463,7 @@ def copy_geom(infile_a, infile_b): def get_standard_background_img(in_file, file_parameters): - """ - Method to get the standard brain image from FSL - standard data directory. + """Get the standard brain image from FSL standard data directory. Parameters ---------- @@ -487,10 +499,7 @@ def get_standard_background_img(in_file, file_parameters): def get_tuple(infile_a, infile_b): - """ - Simple method to return tuple of z_threhsold - maximum intensity values of Zstatistic image - for input to the overlay. + """Return tuple of z_threshold and maximum intensity values of Zstatistic image for input to the overlay.
Parameters ---------- diff --git a/CPAC/func_preproc/func_motion.py b/CPAC/func_preproc/func_motion.py index 21fdb86a50..bea7d2e29c 100644 --- a/CPAC/func_preproc/func_motion.py +++ b/CPAC/func_preproc/func_motion.py @@ -423,7 +423,7 @@ def get_motion_ref(wf, cfg, strat_pool, pipe_num, opt=None): elif opt == "fmriprep_reference": func_get_RPI = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=estimate_reference_image, diff --git a/CPAC/func_preproc/func_preproc.py b/CPAC/func_preproc/func_preproc.py index 4d0fe73c9e..7004b4f025 100644 --- a/CPAC/func_preproc/func_preproc.py +++ b/CPAC/func_preproc/func_preproc.py @@ -23,6 +23,7 @@ from CPAC.func_preproc.utils import nullify from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.ants import ( AI, # niworkflows PrintHeader, @@ -343,7 +344,7 @@ def create_wf_edit_func(wf_name="edit_func"): # allocate a node to check that the requested edits are # reasonable given the data func_get_idx = pe.Node( - util.Function( + Function( input_names=["in_files", "stop_idx", "start_idx"], output_names=["stopidx", "startidx"], function=get_idx, @@ -877,7 +878,7 @@ def form_thr_string(thr): return "-thr %s" % (threshold_z) form_thr_string = pe.Node( - util.Function( + Function( input_names=["thr"], output_names=["out_str"], function=form_thr_string, diff --git a/CPAC/group_analysis/group_analysis.py b/CPAC/group_analysis/group_analysis.py index d3e78c4698..6da81ff37e 100644 --- a/CPAC/group_analysis/group_analysis.py +++ b/CPAC/group_analysis/group_analysis.py @@ -1,14 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import fsl import nipype.interfaces.utility as util from CPAC.easy_thresh import easy_thresh from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def get_operation(in_file): - """ - Method to create operation string - for fslmaths. + """Create operation string for fslmaths. Parameters ---------- @@ -39,7 +54,9 @@ def get_operation(in_file): def label_zstat_files(zstat_list, con_file): - """Take in the z-stat file outputs of FSL FLAME and rename them after the + """Rename z-stat file outputs from FSL FLAME using contrast labels. + + Take in the z-stat file outputs of FSL FLAME and rename them after the contrast labels of the contrasts provided. """ cons = [] @@ -64,9 +81,7 @@ def label_zstat_files(zstat_list, con_file): def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): - """ - FSL `FEAT `_ - BASED Group Analysis. + """Run FSL `FEAT `_ BASED Group Analysis. 
Parameters ---------- @@ -313,7 +328,7 @@ def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): # easier interpretation label_zstat_imports = ["import os"] label_zstat = pe.Node( - util.Function( + Function( input_names=["zstat_list", "con_file"], output_names=["new_zstat_list"], function=label_zstat_files, @@ -341,7 +356,7 @@ def create_fsl_flame_wf(ftest=False, wf_name="groupAnalysis"): # function node to get the operation string for fslmaths command get_opstring = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=get_operation ), name="get_opstring", diff --git a/CPAC/longitudinal_pipeline/longitudinal_preproc.py b/CPAC/longitudinal_pipeline/longitudinal_preproc.py index dfead14d59..9fbe31c6b5 100644 --- a/CPAC/longitudinal_pipeline/longitudinal_preproc.py +++ b/CPAC/longitudinal_pipeline/longitudinal_preproc.py @@ -24,9 +24,9 @@ import numpy as np import nibabel as nib from nipype.interfaces import fsl -import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import IFLOGGER from CPAC.utils.nifti_utils import nifti_image_input @@ -617,7 +617,7 @@ def subject_specific_template( ] if method == "flirt": template_gen_node = pe.Node( - util.Function( + Function( input_names=[ "input_brain_list", "input_skull_list", diff --git a/CPAC/median_angle/median_angle.py b/CPAC/median_angle/median_angle.py index 1433df8ac8..de4fd683cb 100644 --- a/CPAC/median_angle/median_angle.py +++ b/CPAC/median_angle/median_angle.py @@ -1,12 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def median_angle_correct(target_angle_deg, realigned_file): - """ - Performs median angle correction on fMRI data. Median angle correction algorithm - based on [1]_. + """Perform median angle correction on fMRI data. + + Median angle correction algorithm based on [1]_. Parameters ---------- @@ -89,8 +106,7 @@ def writeToFile(data, nii, fname): def calc_median_angle_params(subject): - """ - Calculates median angle parameters of a subject. + """Calculate median angle parameters of a subject. Parameters ---------- @@ -133,8 +149,7 @@ def calc_median_angle_params(subject): def calc_target_angle(mean_bolds, median_angles): """ - Calculates a target angle based on median angle parameters of - the group. + Calculate a target angle based on median angle parameters of the group. 
Parameters ---------- @@ -229,7 +244,7 @@ def create_median_angle_correction(name="median_angle_correction"): ) mac = pe.Node( - util.Function( + Function( input_names=["target_angle_deg", "realigned_file"], output_names=["corrected_file", "angles_file"], function=median_angle_correct, @@ -305,7 +320,7 @@ def create_target_angle(name="target_angle"): ) cmap = pe.MapNode( - util.Function( + Function( input_names=["subject"], output_names=["mean_bold", "median_angle"], function=calc_median_angle_params, @@ -315,7 +330,7 @@ def create_target_angle(name="target_angle"): ) cta = pe.Node( - util.Function( + Function( input_names=["mean_bolds", "median_angles"], output_names=["target_angle"], function=calc_target_angle, diff --git a/CPAC/nuisance/nuisance.py b/CPAC/nuisance/nuisance.py index c547ff6b01..45337a0c23 100644 --- a/CPAC/nuisance/nuisance.py +++ b/CPAC/nuisance/nuisance.py @@ -125,7 +125,7 @@ def form_mask_erosion_prop(erosion_prop): ] eroded_mask = pe.Node( - util.Function( + Function( input_names=[ "roi_mask", "skullstrip_mask", @@ -156,7 +156,7 @@ def form_mask_erosion_prop(erosion_prop): wf.connect(eroded_mask, "output_roi_mask", outputspec, "eroded_mask") if segmentmap: erosion_segmentmap = pe.Node( - util.Function( + Function( input_names=["roi_mask", "erosion_mm", "erosion_prop"], output_names=["eroded_roi_mask"], function=erosion, @@ -1357,7 +1357,7 @@ def create_regressor_workflow( ] cosfilter_node = pe.Node( - util.Function( + Function( input_names=["input_image_path", "timestep"], output_names=["cosfiltered_img"], function=cosine_filter, @@ -1374,7 +1374,7 @@ def create_regressor_workflow( "input_image_path", ) tr_string2float_node = pe.Node( - util.Function( + Function( input_names=["tr"], output_names=["tr_float"], function=TR_string_to_float, @@ -1887,7 +1887,7 @@ def filtering_bold_and_regressors( bandpass_ts.inputs.outputtype = "NIFTI_GZ" tr_string2float_node = pe.Node( - util.Function( + Function( input_names=["tr"], output_names=["tr_float"], function=TR_string_to_float, @@ -2418,7 +2418,8 @@ def nuisance_regressors_generation( opt: dict, space: Literal["T1w", "bold"], ) -> tuple[Workflow, dict]: - """ + """Generate nuisance regressors. + Parameters ---------- wf : ~nipype.pipeline.engine.workflows.Workflow diff --git a/CPAC/nuisance/utils/utils.py b/CPAC/nuisance/utils/utils.py index 92499523a8..db6667dcb3 100644 --- a/CPAC/nuisance/utils/utils.py +++ b/CPAC/nuisance/utils/utils.py @@ -499,7 +499,7 @@ def generate_summarize_tissue_mask_ventricles_masking( # generate inverse transform flags, which depends on the number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, diff --git a/CPAC/randomise/randomise.py b/CPAC/randomise/randomise.py index 8c2351c9f0..b3144685aa 100644 --- a/CPAC/randomise/randomise.py +++ b/CPAC/randomise/randomise.py @@ -15,6 +15,7 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import IFLOGGER @@ -53,7 +54,6 @@ def prep_randomise_workflow( ): from nipype.interfaces import fsl import nipype.interfaces.io as nio - import nipype.interfaces.utility as util wf = pe.Workflow(name="randomise_workflow") wf.base_dir = c.work_dir @@ -74,7 +74,7 @@ def prep_randomise_workflow( randomise.inputs.fcon = fts_file select_tcorrp_files = pe.Node( - util.Function( + Function( input_names=["input_list"], output_names=["out_file"], function=select ), name="select_t_corrp", @@ -83,7 +83,7 @@ def prep_randomise_workflow( wf.connect(randomise, "t_corrected_p_files", select_tcorrp_files, "input_list") select_tstat_files = pe.Node( - util.Function( + Function( input_names=["input_list"], output_names=["out_file"], function=select ), name="select_t_stat", @@ -147,6 +147,10 @@ def run(group_config_path): import os from CPAC.pipeline.cpac_group_runner import load_config_yml + from CPAC.pipeline.cpac_randomise_pipeline import ( + randomise_merged_file, + randomise_merged_mask, + ) group_config_obj = load_config_yml(group_config_path) pipeline_output_folder = group_config_obj.pipeline_dir diff --git a/CPAC/registration/output_func_to_standard.py b/CPAC/registration/output_func_to_standard.py index 6cf172f76d..bafea7d8d0 100644 --- a/CPAC/registration/output_func_to_standard.py +++ b/CPAC/registration/output_func_to_standard.py @@ -374,7 +374,7 @@ def ants_apply_warps_func_mni( itk_imports = ["import os"] change_transform = pe.Node( - util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -534,7 +534,7 @@ def ants_apply_warps_func_mni( # check transform list (if missing any init/rig/affine) and exclude Nonetype check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -546,7 +546,7 @@ def ants_apply_warps_func_mni( # generate inverse transform flags, which depends on the number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, diff --git a/CPAC/registration/registration.py b/CPAC/registration/registration.py index b6cc9892ea..da63e694e4 100644 --- a/CPAC/registration/registration.py +++ b/CPAC/registration/registration.py @@ -39,6 +39,7 @@ seperate_warps_list, single_ants_xfm_to_list, ) +from CPAC.utils.interfaces import Function from CPAC.utils.interfaces.fsl import Merge as fslMerge from CPAC.utils.utils import check_prov_for_motion_tool, check_prov_for_regtool @@ -104,7 +105,7 @@ def apply_transform( wf.connect(inputNode, "reference", apply_warp, "reference_image") interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -118,7 +119,7 @@ def apply_transform( wf.connect(interp_string, "interpolation", apply_warp, "interpolation") ants_xfm_list = pe.Node( - util.Function( + Function( input_names=["transform"], output_names=["transform_list"], function=single_ants_xfm_to_list, @@ -135,7 +136,7 @@ def apply_transform( if int(num_cpus) > 1 and time_series: chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], 
function=chunk_ts, @@ -154,7 +155,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -196,7 +197,7 @@ def apply_transform( ) interp_string = pe.Node( - util.Function( + Function( input_names=["interpolation", "reg_tool"], output_names=["interpolation"], function=interpolation_string, @@ -222,7 +223,7 @@ def apply_transform( if int(num_cpus) > 1 and time_series: chunk_imports = ["import nibabel as nib"] chunk = pe.Node( - util.Function( + Function( input_names=["func_file", "n_chunks", "chunk_size"], output_names=["TR_ranges"], function=chunk_ts, @@ -241,7 +242,7 @@ def apply_transform( split_imports = ["import os", "import subprocess"] split = pe.Node( - util.Function( + Function( input_names=["func_file", "tr_ranges"], output_names=["split_funcs"], function=split_ts_chunks, @@ -761,7 +762,7 @@ def create_register_func_to_anat( if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1067,7 +1068,7 @@ def bbreg_args(bbreg_target): if phase_diff_distcor: conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -1276,7 +1277,7 @@ def create_wf_calculate_ants_warp( """ reg_imports = ["import os", "import subprocess"] calculate_ants_warp = pe.Node( - interface=util.Function( + interface=Function( input_names=[ "moving_brain", "reference_brain", @@ -1302,7 +1303,7 @@ def create_wf_calculate_ants_warp( calculate_ants_warp.interface.num_threads = num_threads select_forward_initial = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1313,7 +1314,7 @@ def create_wf_calculate_ants_warp( select_forward_initial.inputs.selection = "Initial" select_forward_rigid = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1324,7 +1325,7 @@ def create_wf_calculate_ants_warp( select_forward_rigid.inputs.selection = "Rigid" select_forward_affine = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1335,7 +1336,7 @@ def create_wf_calculate_ants_warp( select_forward_affine.inputs.selection = "Affine" select_forward_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1346,7 +1347,7 @@ def create_wf_calculate_ants_warp( select_forward_warp.inputs.selection = "Warp" select_inverse_warp = pe.Node( - util.Function( + Function( input_names=["warp_list", "selection"], output_names=["selected_warp"], function=seperate_warps_list, @@ -1788,7 +1789,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1851,7 +1852,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_invlinear_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], 
output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -1873,7 +1874,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -1935,7 +1936,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -2004,7 +2005,7 @@ def ANTs_registration_connector( # check transform list to exclude Nonetype (missing) init/rig/affine check_all_inv_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -2026,7 +2027,7 @@ def ANTs_registration_connector( # generate inverse transform flags, which depends on the # number of transforms inverse_all_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -2122,7 +2123,7 @@ def bold_to_T1template_xfm_connector( itk_imports = ["import os"] change_transform = pe.Node( - util.Function( + Function( input_names=["input_affine_file"], output_names=["updated_affine_file"], function=change_itk_transform_type, @@ -2964,7 +2965,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedWarp.nii.gz -oo ${WD}/xfms/e1.nii.gz ${WD}/xfms/e2.nii.gz ${WD}/xfms/e3.nii.gz # -mcs: -multicomponent-split, -oo: -output-multiple split_combined_warp = pe.Node( - util.Function( + Function( input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, @@ -2982,7 +2983,7 @@ def overwrite_transform_anat_to_template(wf, cfg, strat_pool, pipe_num, opt=None # c4d -mcs ${WD}/xfms/ANTs_CombinedInvWarp.nii.gz -oo ${WD}/xfms/e1inv.nii.gz ${WD}/xfms/e2inv.nii.gz ${WD}/xfms/e3inv.nii.gz split_combined_inv_warp = pe.Node( - util.Function( + Function( input_names=["input_name", "output_name"], output_names=["output1", "output2", "output3"], function=run_c4d, @@ -3678,7 +3679,7 @@ def apply_phasediff_to_timeseries_separately(wf, cfg, strat_pool, pipe_num, opt= wf.connect(warp_fmap, "out_file", mask_fmap, "in_file") conv_pedir = pe.Node( - interface=util.Function( + interface=Function( input_names=["pedir", "convert"], output_names=["pedir"], function=convert_pedir, @@ -4819,7 +4820,7 @@ def single_step_resample_timeseries_to_T1template( reg_tool = check_prov_for_regtool(xfm_prov) bbr2itk = pe.Node( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4860,7 +4861,7 @@ def single_step_resample_timeseries_to_T1template( ### Loop starts! 
### motionxfm2itk = pe.MapNode( - util.Function( + Function( input_names=["reference_file", "source_file", "transform_file"], output_names=["itk_transform"], function=run_c3d, @@ -4881,7 +4882,7 @@ def single_step_resample_timeseries_to_T1template( wf.connect(node, out, motionxfm2itk, "transform_file") elif motion_correct_tool == "3dvolreg": convert_transform = pe.Node( - util.Function( + Function( input_names=["one_d_filename"], output_names=["transform_directory"], function=one_d_to_mat, diff --git a/CPAC/reho/reho.py b/CPAC/reho/reho.py index 80e6599d10..870d3fa36d 100644 --- a/CPAC/reho/reho.py +++ b/CPAC/reho/reho.py @@ -1,9 +1,26 @@ # coding: utf-8 +# Copyright (C) 2012-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.reho.utils import * +from CPAC.utils.interfaces import Function def create_reho(wf_name): @@ -99,7 +116,7 @@ def create_reho(wf_name): "from CPAC.reho.utils import f_kendall", ] raw_reho_map = pe.Node( - util.Function( + Function( input_names=["in_file", "mask_file", "cluster_size"], output_names=["out_file"], function=compute_reho, diff --git a/CPAC/sca/sca.py b/CPAC/sca/sca.py index 8e714dbd5f..d12aae7de9 100644 --- a/CPAC/sca/sca.py +++ b/CPAC/sca/sca.py @@ -30,11 +30,15 @@ create_spatial_map_dataflow, resample_func_roi, ) +from CPAC.utils.interfaces import Function def create_sca(name_sca="sca"): """ - Map of the correlations of the Region of Interest(Seed in native or MNI space) with the rest of brain voxels. + Create map of the correlations of the Region of Interest with the rest of brain voxels. + + (Seed in native or MNI space) + The map is normalized to contain Z-scores, mapped in standard space and treated with spatial smoothing. Parameters @@ -150,8 +154,8 @@ def create_sca(name_sca="sca"): def create_temporal_reg(wflow_name="temporal_reg", which="SR"): - r""" - Temporal multiple regression workflow + r"""Create temporal multiple regression workflow. + Provides a spatial map of parameter estimates corresponding to each provided timeseries in a timeseries.txt file as regressors. 
@@ -280,9 +284,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): ) check_timeseries = pe.Node( - util.Function( - input_names=["in_file"], output_names=["out_file"], function=check_ts - ), + Function(input_names=["in_file"], output_names=["out_file"], function=check_ts), name="check_timeseries", ) @@ -325,7 +327,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): map_roi_imports = ['import os', 'import numpy as np'] # get roi order and send to output node for raw outputs - get_roi_order = pe.Node(util.Function(input_names=['maps', + get_roi_order = pe.Node(Function(input_names=['maps', 'timeseries'], output_names=['labels', 'maps'], @@ -350,7 +352,7 @@ def create_temporal_reg(wflow_name="temporal_reg", which="SR"): outputNode, 'temp_reg_map_files') # get roi order and send to output node for z-stat outputs - get_roi_order_zstat = pe.Node(util.Function(input_names=['maps', + get_roi_order_zstat = pe.Node(Function(input_names=['maps', 'timeseries'], output_names=['labels', 'maps'], @@ -396,7 +398,7 @@ def SCA_AVG(wf, cfg, strat_pool, pipe_num, opt=None): # same workflow, except to run TSE and send it to the resource # pool so that it will not get sent to SCA resample_functional_roi_for_sca = pe.Node( - util.Function( + Function( input_names=["in_func", "in_roi", "realignment", "identity_matrix"], output_names=["out_func", "out_roi"], function=resample_func_roi, diff --git a/CPAC/scrubbing/scrubbing.py b/CPAC/scrubbing/scrubbing.py index ed85ef1024..e08b816edc 100644 --- a/CPAC/scrubbing/scrubbing.py +++ b/CPAC/scrubbing/scrubbing.py @@ -1,13 +1,29 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe +from CPAC.utils.interfaces import Function def create_scrubbing_preproc(wf_name="scrubbing"): - """ - This workflow essentially takes the list of offending timepoints that are to be removed - and removes it from the motion corrected input image. Also, it removes the information - of discarded time points from the movement parameters file obtained during motion correction. + """Take the list of offending timepoints that are to be removed and remove it from the motion corrected input image. + + Also remove the information of discarded time points from the movement parameters file obtained during motion correction. 
Parameters ---------- @@ -94,7 +110,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): ) craft_scrub_input = pe.Node( - util.Function( + Function( input_names=["scrub_input", "frames_in_1D_file"], output_names=["scrub_input_string"], function=get_indx, @@ -103,7 +119,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): ) scrubbed_movement_parameters = pe.Node( - util.Function( + Function( input_names=["infile_a", "infile_b"], output_names=["out_file"], function=get_mov_parameters, @@ -120,7 +136,7 @@ def create_scrubbing_preproc(wf_name="scrubbing"): # scrubbed_preprocessed.inputs.outputtype = 'NIFTI_GZ' scrubbed_preprocessed = pe.Node( - util.Function( + Function( input_names=["scrub_input"], output_names=["scrubbed_image"], function=scrub_image, @@ -152,9 +168,8 @@ def create_scrubbing_preproc(wf_name="scrubbing"): def get_mov_parameters(infile_a, infile_b): - """ - Method to get the new movement parameters - file after removing the offending time frames + """Get the new movement parameters file after removing the offending time frames. + (i.e., those exceeding FD 0.5mm/0.2mm threshold). Parameters @@ -192,7 +207,7 @@ def get_mov_parameters(infile_a, infile_b): raise Exception(msg) f = open(out_file, "a") - for l in l1: + for l in l1: # noqa: E741 data = l2[int(l.strip())] f.write(data) f.close() @@ -200,9 +215,7 @@ def get_mov_parameters(infile_a, infile_b): def get_indx(scrub_input, frames_in_1D_file): - """ - Method to get the list of time - frames that are to be included. + """Get the list of time frames that are to be included. Parameters ---------- @@ -230,10 +243,10 @@ def get_indx(scrub_input, frames_in_1D_file): def scrub_image(scrub_input): - """ - Method to run 3dcalc in order to scrub the image. This is used instead of - the Nipype interface for 3dcalc because functionality is needed for - specifying an input file with specifically-selected volumes. For example: + """Run 3dcalc in order to scrub the image. + + This is used instead of the Nipype interface for 3dcalc because functionality is + needed for specifying an input file with specifically-selected volumes. For example: input.nii.gz[2,3,4,..98], etc. Parameters diff --git a/CPAC/seg_preproc/seg_preproc.py b/CPAC/seg_preproc/seg_preproc.py index a66990f1e6..f769cf14b3 100644 --- a/CPAC/seg_preproc/seg_preproc.py +++ b/CPAC/seg_preproc/seg_preproc.py @@ -1,3 +1,19 @@ +# Copyright (C) 2012-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . from nipype.interfaces import ants, freesurfer, fsl, utility as util from nipype.interfaces.utility import Function @@ -23,10 +39,10 @@ def process_segment_map(wf_name, use_priors, use_custom_threshold, reg_tool): - """This is a sub workflow used inside segmentation workflow to process - probability maps obtained in segmentation. 
Steps include overlapping - of the prior tissue with probability maps, thresholding and binarizing - it and creating a mask that is used in further analysis. + """Create a sub workflow used inside segmentation workflow to process probability maps obtained in segmentation. + + Steps include overlapping of the prior tissue with probability maps, thresholding + and binarizing it and creating a mask that is used in further analysis. Parameters ---------- @@ -274,7 +290,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): # check transform list to exclude Nonetype (missing) init/rig/affine check_transform = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["checked_transform_list", "list_length"], function=check_transforms, @@ -289,7 +305,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): # generate inverse transform flags, which depends on the # number of transforms inverse_transform_flags = pe.Node( - util.Function( + Function( input_names=["transform_list"], output_names=["inverse_transform_flags"], function=generate_inverse_transform_flags, @@ -356,9 +372,7 @@ def tissue_mask_template_to_t1(wf_name, use_ants): def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_based"): - """ - Generate the subject's cerebral spinal fluids, - white matter and gray matter mask based on provided template, if selected to do so. + """Generate the subject's cerebral spinal fluids, white matter and gray matter mask based on provided template, if selected to do so. Parameters ---------- @@ -417,7 +431,7 @@ def create_seg_preproc_antsJointLabel_method(wf_name="seg_preproc_templated_base ) seg_preproc_antsJointLabel = pe.Node( - util.Function( + Function( input_names=[ "anatomical_brain", "anatomical_brain_mask", @@ -700,7 +714,7 @@ def tissue_seg_fsl_fast(wf, cfg, strat_pool, pipe_num, opt=None): ) get_csf = pe.Node( - util.Function( + Function( input_names=["probability_maps"], output_names=["filename"], function=pick_wm_prob_0, @@ -945,7 +959,7 @@ def tissue_seg_freesurfer(wf, cfg, strat_pool, pipe_num, opt=None): wf.connect(node, out, fs_aseg_to_native, "target_file") fs_aseg_to_nifti = pe.Node( - util.Function( + Function( input_names=["in_file"], output_names=["out_file"], function=mri_convert ), name=f"fs_aseg_to_nifti_{pipe_num}", diff --git a/CPAC/surface/surf_preproc.py b/CPAC/surface/surf_preproc.py index 2229e24b5a..1defe4e2d1 100644 --- a/CPAC/surface/surf_preproc.py +++ b/CPAC/surface/surf_preproc.py @@ -1,10 +1,25 @@ -import os +# Copyright (C) 2021-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. -import nipype.interfaces.utility as util +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . 
+import os from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.pipeline.nodeblock import nodeblock from CPAC.surface.PostFreeSurfer.surf_reho import run_surf_reho +from CPAC.utils.interfaces import Function def run_surface( @@ -1026,7 +1041,7 @@ def run_surface( ) def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): surf = pe.Node( - util.Function( + Function( input_names=[ "post_freesurfer_folder", "freesurfer_folder", @@ -1369,7 +1384,7 @@ def surface_postproc(wf, cfg, strat_pool, pipe_num, opt=None): ) def surface_falff(wf, cfg, strat_pool, pipe_num, opt): falff = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["surf_falff"], function=run_surf_falff, @@ -1394,7 +1409,7 @@ def surface_falff(wf, cfg, strat_pool, pipe_num, opt): ) def surface_alff(wf, cfg, strat_pool, pipe_num, opt): alff = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["surf_alff"], function=run_surf_alff, @@ -1427,7 +1442,7 @@ def surface_alff(wf, cfg, strat_pool, pipe_num, opt): ) def surface_reho(wf, cfg, strat_pool, pipe_num, opt): L_cortex_file = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "structure", "cortex_filename"], output_names=["L_cortex_file"], function=run_get_cortex, @@ -1442,7 +1457,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, L_cortex_file, "dtseries") R_cortex_file = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "structure", "cortex_filename"], output_names=["R_cortex_file"], function=run_get_cortex, @@ -1456,7 +1471,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, R_cortex_file, "dtseries") mean_timeseries = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries"], output_names=["mean_timeseries"], function=run_mean_timeseries, @@ -1468,7 +1483,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, mean_timeseries, "dtseries") L_reho = pe.Node( - util.Function( + Function( input_names=[ "subject", "dtseries", @@ -1499,7 +1514,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): wf.connect(node, out, L_reho, "dtseries") R_reho = pe.Node( - util.Function( + Function( input_names=[ "subject", "dtseries", @@ -1545,7 +1560,7 @@ def surface_reho(wf, cfg, strat_pool, pipe_num, opt): ) def surface_connectivity_matrix(wf, cfg, strat_pool, pipe_num, opt): connectivity_parcellation = pe.Node( - util.Function( + Function( input_names=["subject", "dtseries", "surf_atlaslabel"], output_names=["parcellation_file"], function=run_ciftiparcellate, @@ -1561,7 +1576,7 @@ def surface_connectivity_matrix(wf, cfg, strat_pool, pipe_num, opt): ]["surface_parcellation_template"] correlation_matrix = pe.Node( - util.Function( + Function( input_names=["subject", "ptseries"], output_names=["correlation_matrix"], function=run_cifticorrelation, diff --git a/CPAC/timeseries/timeseries_analysis.py b/CPAC/timeseries/timeseries_analysis.py index 14547bc79b..a56bc33c74 100644 --- a/CPAC/timeseries/timeseries_analysis.py +++ b/CPAC/timeseries/timeseries_analysis.py @@ -15,7 +15,6 @@ # You should have received a copy of the GNU Lesser General Public # License along with C-PAC. If not, see . 
from nipype.interfaces import afni, fsl, utility as util -from nipype.interfaces.utility import Function from CPAC.connectome.connectivity_matrix import ( create_connectome_afni, @@ -29,6 +28,7 @@ create_spatial_map_dataflow, resample_func_roi, ) +from CPAC.utils.interfaces import Function from CPAC.utils.monitoring import FMLOGGER @@ -86,7 +86,7 @@ def get_voxel_timeseries(wf_name: str = "voxel_timeseries") -> pe.Workflow: ) timeseries_voxel = pe.Node( - util.Function( + Function( input_names=["data_file", "template"], output_names=["oneD_file"], function=gen_voxel_timeseries, @@ -241,7 +241,7 @@ def get_roi_timeseries(wf_name: str = "roi_timeseries") -> pe.Workflow: clean_csv_imports = ["import os"] clean_csv = pe.Node( - util.Function( + Function( input_names=["roi_csv"], output_names=["roi_array", "edited_roi_csv"], function=clean_roi_csv, @@ -382,7 +382,7 @@ def get_vertices_timeseries(wf_name="vertices_timeseries"): ) timeseries_surface = pe.Node( - util.Function( + Function( input_names=["rh_surface_file", "lh_surface_file"], output_names=["out_file"], function=gen_vertices_timeseries, diff --git a/CPAC/utils/interfaces/function/seg_preproc.py b/CPAC/utils/interfaces/function/seg_preproc.py index d220781f48..5fe9152b23 100644 --- a/CPAC/utils/interfaces/function/seg_preproc.py +++ b/CPAC/utils/interfaces/function/seg_preproc.py @@ -1,11 +1,26 @@ +# Copyright (C) 2022-2023 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . """Function interfaces for seg_preproc.""" -from nipype.interfaces import utility as util +from CPAC.utils.interfaces import Function def pick_tissue_from_labels_file_interface(input_names=None): - """Function to create a Function interface for - CPAC.seg_preproc.utils.pick_tissue_from_labels_file. + """Create a Function interface for ~CPAC.seg_preproc.utils.pick_tissue_from_labels_file. Parameters ---------- @@ -20,7 +35,7 @@ def pick_tissue_from_labels_file_interface(input_names=None): if input_names is None: input_names = ["multiatlas_Labels", "csf_label", "gm_label", "wm_label"] - return util.Function( + return Function( input_names=input_names, output_names=["csf_mask", "gm_mask", "wm_mask"], function=pick_tissue_from_labels_file, diff --git a/CPAC/utils/tests/test_datasource.py b/CPAC/utils/tests/test_datasource.py index 9842310bb1..be7c2255c2 100644 --- a/CPAC/utils/tests/test_datasource.py +++ b/CPAC/utils/tests/test_datasource.py @@ -1,10 +1,26 @@ +# Copyright (C) 2019-2024 C-PAC Developers + +# This file is part of C-PAC. + +# C-PAC is free software: you can redistribute it and/or modify it under +# the terms of the GNU Lesser General Public License as published by the +# Free Software Foundation, either version 3 of the License, or (at your +# option) any later version. + +# C-PAC is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public +# License for more details. + +# You should have received a copy of the GNU Lesser General Public +# License along with C-PAC. If not, see . import json import pytest -import nipype.interfaces.utility as util from CPAC.pipeline import nipype_pipeline_engine as pe from CPAC.utils.datasource import match_epi_fmaps +from CPAC.utils.interfaces import Function from CPAC.utils.test_resources import setup_test_wf @@ -48,7 +64,7 @@ def test_match_epi_fmaps(): } match_fmaps = pe.Node( - util.Function( + Function( input_names=["fmap_dct", "bold_pedir"], output_names=["opposite_pe_epi", "same_pe_epi"], function=match_epi_fmaps, diff --git a/CPAC/utils/utils.py b/CPAC/utils/utils.py index bfd313a56c..b459262993 100644 --- a/CPAC/utils/utils.py +++ b/CPAC/utils/utils.py @@ -132,7 +132,7 @@ def get_flag_wf(wf_name="get_flag"): input_node = pe.Node(util.IdentityInterface(fields=["in_flag"]), name="inputspec") get_flag = pe.Node( - util.Function(input_names=["in_flag"], function=_get_flag), name="get_flag" + Function(input_names=["in_flag"], function=_get_flag), name="get_flag" ) wf.connect(input_node, "in_flag", get_flag, "in_flag") @@ -322,7 +322,7 @@ def get_zscore(map_node=False, wf_name="z_score"): ) op_string = pe.MapNode( - util.Function( + Function( input_names=["mean", "std_dev"], output_names=["op_string"], function=get_operand_string, @@ -345,7 +345,7 @@ def get_zscore(map_node=False, wf_name="z_score"): ) op_string = pe.Node( - util.Function( + Function( input_names=["mean", "std_dev"], output_names=["op_string"], function=get_operand_string, @@ -400,7 +400,7 @@ def get_fisher_zscore(input_name, map_node=False, wf_name="fisher_z_score"): if map_node: # node to separate out fisher_z_score = pe.MapNode( - util.Function( + Function( input_names=["correlation_file", "timeseries_one_d", "input_name"], output_names=["out_file"], function=compute_fisher_z_score, @@ -410,7 +410,7 @@ def get_fisher_zscore(input_name, map_node=False, wf_name="fisher_z_score"): ) else: fisher_z_score = pe.Node( - util.Function( + Function( input_names=["correlation_file", "timeseries_one_d", "input_name"], output_names=["out_file"], function=compute_fisher_z_score,
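# --- Illustrative sketch, not part of this patch -----------------------------
# The same drop-in Function interface also works inside pe.MapNode, the other
# form used throughout this diff (e.g. the fisher_z_score node above). A
# minimal sketch with a hypothetical per-file helper; only the MapNode /
# iterfield usage and the Function call shape are taken from the patch.
from CPAC.pipeline import nipype_pipeline_engine as pe
from CPAC.utils.interfaces import Function


def _basename(path):
    """Toy per-file helper used only for this sketch."""
    # import inside the function so nipype can re-create it from source
    import os

    return os.path.basename(path)


basenames = pe.MapNode(
    Function(
        input_names=["path"],
        output_names=["name"],
        function=_basename,
    ),
    name="basenames",
    iterfield=["path"],  # one run of _basename per input file
)
basenames.inputs.path = ["/tmp/a.nii.gz", "/tmp/b.nii.gz"]
# basenames.run()  # runs the helper once per listed file
# ------------------------------------------------------------------------------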