From 17adcc8cfa01537b669e3913ea4ce2e91084007f Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Thu, 8 Oct 2020 14:53:55 +0200 Subject: [PATCH 01/51] Remove lower/upper case of PET tracer --- clinica/pipelines/pet_surface/pet_surface_pipeline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/clinica/pipelines/pet_surface/pet_surface_pipeline.py b/clinica/pipelines/pet_surface/pet_surface_pipeline.py index 1c10f2435..5b0a4845a 100644 --- a/clinica/pipelines/pet_surface/pet_surface_pipeline.py +++ b/clinica/pipelines/pet_surface/pet_surface_pipeline.py @@ -157,10 +157,10 @@ def build_input_node_longitudinal(self): check_relative_volume_location_in_world_coordinate_system('T1w-MRI (orig_nu.mgz)', read_parameters_node.inputs.orig_nu, - self.parameters['acq_label'].upper() + ' PET', + self.parameters['acq_label'] + ' PET', read_parameters_node.inputs.pet, self.bids_directory, - self.parameters['acq_label'].lower()) + self.parameters['acq_label']) self.connect([ (read_parameters_node, self.input_node, [('pet', 'pet'), @@ -260,9 +260,9 @@ def build_input_node_cross_sectional(self): raise ClinicaException(error_message) check_relative_volume_location_in_world_coordinate_system('T1w-MRI (orig_nu.mgz)', read_parameters_node.inputs.orig_nu, - self.parameters['acq_label'].upper() + ' PET', read_parameters_node.inputs.pet, + self.parameters['acq_label'] + ' PET', read_parameters_node.inputs.pet, self.bids_directory, - self.parameters['acq_label'].lower()) + self.parameters['acq_label']) self.connect([ (read_parameters_node, self.input_node, [('pet', 'pet'), From 29dd415303f17ecd049d2abc5d3c4d96e4983715 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Thu, 8 Oct 2020 15:50:27 +0200 Subject: [PATCH 02/51] Harmonize ml-prepare-data w.r.t. 
PET tracers --- .../spatial_svm_cli.py | 42 +++++++++++++++---- .../spatial_svm_pipeline.py | 27 +++++------- 2 files changed, 43 insertions(+), 26 deletions(-) diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py index 950f67da1..2b58603c3 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py @@ -35,12 +35,21 @@ def define_options(self): '%sPipeline options if you use inputs from pet-volume pipeline%s' % (Fore.BLUE, Fore.RESET) ) - optional_pet.add_argument("-pt", "--pet_tracer", - default='fdg', - help='PET tracer. Can be fdg or av45 (default: --pet_tracer %(default)s)') - optional_pet.add_argument("-no_pvc", "--no_pvc", - action='store_true', default=False, - help="Force the use of non PVC PET data (by default, PVC PET data are used)") + optional_pet.add_argument("-al", "--acq_label", + type=str, + default=None, + help='Name of the PET tracer label in the acquisition entity ' + '(acq-).') + optional_pet.add_argument("-suvr", "--suvr_reference_region", + choices=['cerebellumPons', 'pons'], + default=None, + help='Intensity normalization using the average PET uptake in reference regions ' + 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' + 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') + optional_pet.add_argument("-pvc", "--use_pvc_data", + action='store_true', + default=False, + help="Use PET data with partial value correction (by default, PET data with no PVC are used)") # Clinica standard arguments (e.g. --n_procs) self.add_clinica_standard_arguments() # Advanced arguments (i.e. 
tricky parameters) @@ -54,15 +63,30 @@ def define_options(self): def run_command(self, args): """Run the pipeline with defined args.""" from networkx import Graph + from colorama import Fore from .spatial_svm_pipeline import SpatialSVM + from clinica.utils.exceptions import ClinicaException from clinica.utils.ux import print_end_pipeline, print_crash_files_and_exit + if args.orig_input_data == 'pet-volume': + if args.acq_label is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-volume pipeline without setting --acq_label flag. " + f"Clinica will now exit.{Fore.RESET}" + ) + if args.suvr_reference_region is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-volume pipeline without setting --suvr_reference_region flag. " + f"Clinica will now exit.{Fore.RESET}" + ) + parameters = { 'group_label': args.group_label, 'orig_input_data': args.orig_input_data, - 'pet_tracer': args.pet_tracer, - 'no_pvc': args.no_pvc, - 'fwhm': args.fwhm, + 'acq_label': args.acq_label, + 'use_pvc_data': args.use_pvc_data, + 'suvr_reference_region': args.suvr_reference_region, + 'fwhm': args.full_width_half_maximum, } pipeline = SpatialSVM( caps_directory=self.absolute_path(args.caps_directory), diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py index e7f12d49b..0d7a46e5a 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py @@ -22,10 +22,6 @@ def check_pipeline_parameters(self): raise KeyError('Missing compulsory orig_input_data key in pipeline parameter.') if 'fwhm' not in self.parameters.keys(): self.parameters['fwhm'] = 4 - if 'pet_tracer' not in self.parameters.keys(): - self.parameters['pet_tracer'] = 'fdg' - if 'no_pvc' not in self.parameters.keys(): - self.parameters['no_pvc'] = False check_group_label(self.parameters['group_label']) @@ -84,20 +80,20 
@@ def build_input_node(self): 'description': 'graymatter tissue segmented in T1w MRI in Ixi549 space', 'needed_pipeline': 't1-volume-tissue-segmentation' } - elif self.parameters['orig_input_data'] is 'pet-volume': - if self.parameters['no_pvc']: + elif self.parameters['orig_input_data'] == 'pet-volume': + if self.parameters['use_pvc_data']: caps_files_information = { 'pattern': os.path.join('pet', 'preprocessing', 'group-' + self.parameters['group_label'], - '*_pet_space-Ixi549Space_suvr-pons_pet.nii.gz'), - 'description': self.parameters['pet_tracer'] + ' PET in Ixi549 space', - 'needed_pipeline': 'pet-volume' + '*_acq-' + self.parameters['acq_label'] + '_pet_space-Ixi549Space_pvc-rbv_suvr-' + self.parameters['suvr_reference_region'] + '_pet.nii.gz'), + 'description': self.parameters['acq_label'] + ' PET partial volume corrected (RBV) in Ixi549 space', + 'needed_pipeline': 'pet-volume with PVC' } else: caps_files_information = { 'pattern': os.path.join('pet', 'preprocessing', 'group-' + self.parameters['group_label'], - '*_pet_space-Ixi549Space_pvc-rbv_suvr-pons_pet.nii.gz'), - 'description': self.parameters['pet_tracer'] + ' PET partial volume corrected (RBV) in Ixi549 space', - 'needed_pipeline': 'pet-volume with PVC' + '*_acq-' + self.parameters['acq_label'] + '*_pet_space-Ixi549Space_suvr-' + self.parameters['suvr_reference_region'] + '_pet.nii.gz'), + 'description': self.parameters['acq_label'] + ' PET in Ixi549 space', + 'needed_pipeline': 'pet-volume' } else: raise ValueError('Image type ' + self.parameters['orig_input_data'] + ' unknown.') @@ -133,14 +129,11 @@ def build_input_node(self): ]) def build_output_node(self): - """Build and connect an output node to the pipeline. - """ + """Build and connect an output node to the pipeline.""" pass def build_core_nodes(self): - """Build and connect the core nodes of the pipeline. 
- """ - + """Build and connect the core nodes of the pipeline.""" import clinica.pipelines.machine_learning_spatial_svm.spatial_svm_utils as utils import nipype.interfaces.utility as nutil import nipype.pipeline.engine as npe From 69eda8c46e8a6da23aa85b0872d0fc9700d4ea44 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Thu, 8 Oct 2020 16:15:06 +0200 Subject: [PATCH 03/51] Use -acq/--acq_label flag instead of -al/--acq_label --- .../pipelines/machine_learning_spatial_svm/spatial_svm_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py index 2b58603c3..3f8eba412 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py @@ -35,7 +35,7 @@ def define_options(self): '%sPipeline options if you use inputs from pet-volume pipeline%s' % (Fore.BLUE, Fore.RESET) ) - optional_pet.add_argument("-al", "--acq_label", + optional_pet.add_argument("-acq", "--acq_label", type=str, default=None, help='Name of the PET tracer label in the acquisition entity ' From 596c5702b67bd183381f3b78a2374cdc8f75017b Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Sun, 18 Oct 2020 11:39:50 +0200 Subject: [PATCH 04/51] Add path option to get_file_from_server --- clinica/utils/inputs.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/clinica/utils/inputs.py b/clinica/utils/inputs.py index aee5bedcd..fab1b1dab 100644 --- a/clinica/utils/inputs.py +++ b/clinica/utils/inputs.py @@ -447,13 +447,16 @@ def fetch_file(remote, dirname=None): return file_path -def get_file_from_server(remote_file): +def get_file_from_server(remote_file, cache_path=None): import os from pathlib import Path from clinica.utils.stream import cprint home = str(Path.home()) - cache_clinica = os.path.join(home, ".cache", "clinica", "data") + if 
cache_path: + cache_clinica = os.path.join(home, ".cache", cache_path) + else: + cache_clinica = os.path.join(home, ".cache", "clinica", "data") if not (os.path.exists(cache_clinica)): os.makedirs(cache_clinica) From b0190a971ca9820b0c9f177680097b4af63866bb Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Mon, 19 Oct 2020 17:08:17 +0200 Subject: [PATCH 05/51] Harmonize PET tracers handling (stats-surface) --- .../statistics_surface_cli.py | 52 ++++++++++++--- .../statistics_surface_pipeline.py | 65 +++++++++++-------- .../statistics_surface_utils.py | 18 +++-- .../test_instantiate_all_pipelines.py | 13 ++-- test/nonregression/test_run_pipelines.py | 14 ++-- 5 files changed, 109 insertions(+), 53 deletions(-) diff --git a/clinica/pipelines/statistics_surface/statistics_surface_cli.py b/clinica/pipelines/statistics_surface/statistics_surface_cli.py index dff6a80f2..7f9c7815f 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_cli.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_cli.py @@ -28,7 +28,7 @@ def define_options(self): clinica_comp.add_argument("orig_input_data", help='Type of surface-based feature: type ' '\'t1-freesurfer\' to use cortical thickness, ' - '\'pet-surface\' to use projected FDG-PET data or ' + '\'pet-surface\' to use projected PET data or ' '\'custom-pipeline\' to use you own data in CAPS directory ' '(see Wiki for details).', choices=['t1-freesurfer', 'pet-surface', 'custom-pipeline']) @@ -55,6 +55,21 @@ def define_options(self): type=int, default=20, help='FWHM for the surface smoothing ' '(default: --full_width_at_half_maximum %(default)s).') + # Optional arguments for inputs from pet-surface pipeline + opt_pet = self._args.add_argument_group( + '%sPipeline options if you use inputs from pet-volume pipeline%s' % + (Fore.BLUE, Fore.RESET) + ) + opt_pet.add_argument("-acq", "--acq_label", + type=str, + default=None, + help='Name of the PET tracer label in the acquisition entity (acq-).') + 
opt_pet.add_argument("-suvr", "--suvr_reference_region", + choices=['cerebellumPons', 'pons'], + default=None, + help='Intensity normalization using the average PET uptake in reference regions ' + 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' + 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') # Optional arguments for custom pipeline opt_custom_input = self._args.add_argument_group( '%sPipeline options if you selected custom-pipeline%s' % (Fore.BLUE, Fore.RESET) @@ -83,33 +98,54 @@ def define_options(self): def run_command(self, args): """Run the pipeline with defined args.""" from networkx import Graph + from colorama import Fore from .statistics_surface_pipeline import StatisticsSurface from .statistics_surface_utils import (get_t1_freesurfer_custom_file, - get_fdg_pet_surface_custom_file) + get_pet_surface_custom_file) from clinica.utils.ux import print_end_pipeline, print_crash_files_and_exit from clinica.utils.exceptions import ClinicaException + # PET-Surface pipeline + if args.orig_input_data == 'pet-surface': + if args.acq_label is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-surface pipeline without setting --acq_label flag. " + f"Clinica will now exit.{Fore.RESET}" + ) + if args.suvr_reference_region is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-surface pipeline without setting --suvr_reference_region flag. 
" + f"Clinica will now exit.{Fore.RESET}" + ) + # FreeSurfer cortical thickness if args.orig_input_data == 't1-freesurfer': args.custom_file = get_t1_freesurfer_custom_file() args.measure_label = 'ct' # PET cortical projection elif args.orig_input_data == 'pet-surface': - args.custom_file = get_fdg_pet_surface_custom_file() - args.measure_label = 'fdg' + args.custom_file = get_pet_surface_custom_file(args.acq_label, args.suvr_reference_region) + args.measure_label = args.acq_label else: if (args.custom_file is None) or (args.measure_label is None): raise ClinicaException('You must set --measure_label and --custom_file flags.') parameters = { - 'orig_input_data': args.orig_input_data, + # Clinica compulsory arguments 'group_label': args.group_label, - 'covariates': args.covariates, - 'contrast': args.contrast, + 'orig_input_data': args.orig_input_data, 'glm_type': args.glm_type, + 'contrast': args.contrast, + # Optional arguments + 'covariates': args.covariates, + 'full_width_at_half_maximum': args.full_width_at_half_maximum, + # Optional arguments for inputs from pet-surface pipeline + 'acq_label': args.acq_label, + 'suvr_reference_region': args.suvr_reference_region, + # Optional arguments for custom pipeline 'custom_file': args.custom_file, 'measure_label': args.measure_label, - 'full_width_at_half_maximum': args.full_width_at_half_maximum, + # Advanced arguments (i.e. 
tricky parameters) 'cluster_threshold': args.cluster_threshold, } pipeline = StatisticsSurface( diff --git a/clinica/pipelines/statistics_surface/statistics_surface_pipeline.py b/clinica/pipelines/statistics_surface/statistics_surface_pipeline.py index 7e0d6484e..1255a25cd 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_pipeline.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_pipeline.py @@ -24,38 +24,49 @@ def check_pipeline_parameters(self): from clinica.utils.exceptions import ClinicaException from clinica.utils.group import check_group_label + # Clinica compulsory parameters + self.parameters.setdefault('group_label', None) + check_group_label(self.parameters['group_label']) + if 'orig_input_data' not in self.parameters.keys(): raise KeyError('Missing compulsory orig_input_data key in pipeline parameter.') - if 'covariates' not in self.parameters.keys(): - self.parameters['covariates'] = None - if 'custom_file' not in self.parameters.keys(): - self.parameters['custom_file'] = get_t1_freesurfer_custom_file() - if 'measure_label' not in self.parameters.keys(): - self.parameters['measure_label'] = 'ct', - if 'full_width_at_half_maximum' not in self.parameters.keys(): - self.parameters['full_width_at_half_maximum'] = 20 - if 'cluster_threshold' not in self.parameters.keys(): - self.parameters['cluster_threshold'] = 0.001, - check_group_label(self.parameters['group_label']) + self.parameters.setdefault('glm_type', None) if self.parameters['glm_type'] not in ['group_comparison', 'correlation']: - raise ClinicaException("The glm_type you specified is wrong: it should be group_comparison or " - "correlation (given value: %s)." % self.parameters['glm_type']) - if self.parameters['full_width_at_half_maximum'] not in [0, 5, 10, 15, 20]: raise ClinicaException( - "FWHM for the surface smoothing you specified is wrong: it should be 0, 5, 10, 15 or 20 " - "(given value: %s)." 
% self.parameters['full_width_at_half_maximum']) + f"The glm_type you specified is wrong: it should be group_comparison or " + f"correlation (given value: {self.parameters['glm_type']})." + ) + + if 'contrast' not in self.parameters.keys(): + raise KeyError('Missing compulsory contrast key in pipeline parameter.') + + # Optional parameters + self.parameters.setdefault('covariates', None) + self.parameters.setdefault('full_width_at_half_maximum', 20) + + # Optional parameters for inputs from pet-surface pipeline + self.parameters.setdefault('acq_label', None) + self.parameters.setdefault('suvr_reference_region', None) + + # Optional parameters for custom pipeline + self.parameters.setdefault('custom_file', get_t1_freesurfer_custom_file()) + self.parameters.setdefault('measure_label', 'ct') + + # Advanced parameters + self.parameters.setdefault('cluster_threshold', 0.001) if self.parameters['cluster_threshold'] < 0 or self.parameters['cluster_threshold'] > 1: - raise ClinicaException("Cluster threshold should be between 0 and 1 " - "(given value: %s)." % self.parameters['cluster_threshold']) + raise ClinicaException( + f"Cluster threshold should be between 0 and 1 " + f"(given value: {self.parameters['cluster_threshold']})." + ) def check_custom_dependencies(self): - """Check dependencies that can not be listed in the `info.json` file. - """ + """Check dependencies that can not be listed in the `info.json` file.""" pass def get_input_fields(self): - """Specify the list of possible inputs of this pipelines. + """Specify the list of possible inputs of this pipeline. Returns: A list of (string) input fields name. @@ -63,7 +74,7 @@ def get_input_fields(self): return [] def get_output_fields(self): - """Specify the list of possible outputs of this pipelines. + """Specify the list of possible outputs of this pipeline. Returns: A list of (string) output fields name. 
@@ -71,8 +82,7 @@ def get_output_fields(self): return ['output_dir'] def build_input_node(self): - """Build and connect an input node to the pipelines. - """ + """Build and connect an input node to the pipeline.""" import os from clinica.utils.inputs import clinica_file_reader from clinica.utils.exceptions import ClinicaException @@ -85,8 +95,11 @@ def build_input_node(self): # using the group_id, Clinica won't allow it. # TODO: Modify this behaviour if os.path.exists(os.path.join(self.caps_directory, 'groups', 'group-' + self.parameters['group_label'])): - error_message = ('Group ID %s already exists, please choose another one or delete the existing folder and ' - 'also the working directory and rerun the pipeline') % self.parameters['group_label'] + error_message = ( + f"Group label {self.parameters['group_label']} already exists, " + f"please choose another one or delete the existing folder and " + f"also the working directory and rerun the pipeline" + ) raise ClinicaException(error_message) # statistics_dir_tsv = os.path.join(input_directory, 'groups', group_id, 'statistics', 'participant.tsv') # # Copy the subjects_visits_tsv to the result folder diff --git a/clinica/pipelines/statistics_surface/statistics_surface_utils.py b/clinica/pipelines/statistics_surface/statistics_surface_utils.py index 8fc396d07..eecc754da 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_utils.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_utils.py @@ -15,14 +15,14 @@ def get_t1_freesurfer_custom_file(): return custom_file -def get_fdg_pet_surface_custom_file(): +def get_pet_surface_custom_file(acq_label, suvr_reference_region): import os custom_file = os.path.join( '@subject', '@session', 'pet', 'surface', - '@subject_@session_task-rest_acq-fdg_pet_space-fsaverage_suvr-pons_pvc-iy_hemi-@hemi_fwhm-@fwhm_projection.mgh' + '@subject_@session_task-rest_acq-' + acq_label + '_pet_space-fsaverage_suvr-' + suvr_reference_region + 
'_pvc-iy_hemi-@hemi_fwhm-@fwhm_projection.mgh' ) return custom_file @@ -169,7 +169,6 @@ def run_matlab(caps_dir, pipeline_parameters (dict): parameters of StatisticsSurface pipeline """ import os - import sys from nipype.interfaces.matlab import MatlabCommand, get_matlab_command import clinica.pipelines as clinica_pipelines from clinica.utils.check_dependency import check_environment_variable @@ -223,6 +222,7 @@ def run_matlab(caps_dir, def create_glm_info_dictionary(tsv_file, pipeline_parameters): """Create dictionary containing the GLM information that will be stored in a JSON file.""" out_dict = { + # Clinica compulsory arguments 'AnalysisType': pipeline_parameters['glm_type'], 'DesignMatrix': covariates_to_design_matrix( pipeline_parameters['contrast'], @@ -230,13 +230,23 @@ def create_glm_info_dictionary(tsv_file, pipeline_parameters): ), 'StringFormatTSV': get_string_format_from_tsv(tsv_file), 'Contrast': pipeline_parameters['contrast'], - 'Covariates': pipeline_parameters['covariates'], 'GroupLabel': pipeline_parameters['group_label'], + # Optional arguments + 'Covariates': pipeline_parameters['covariates'], 'FWHM': pipeline_parameters['full_width_at_half_maximum'], + # Optional arguments for custom pipeline + 'custom_file': pipeline_parameters['custom_file'], + 'measure_label': pipeline_parameters['measure_label'], + # Advanced arguments (i.e. 
tricky parameters) 'ThresholdUncorrectedPvalue': 0.001, 'ThresholdCorrectedPvalue': 0.05, 'ClusterThreshold': pipeline_parameters['cluster_threshold'] } + # Optional arguments for inputs from pet-surface pipeline + if pipeline_parameters['acq_label'] and pipeline_parameters['suvr_reference_region']: + out_dict['acq_label'] = pipeline_parameters['acq_label'] + out_dict['suvr_reference_region'] = pipeline_parameters['suvr_reference_region'] + return out_dict diff --git a/test/instantiation/test_instantiate_all_pipelines.py b/test/instantiation/test_instantiate_all_pipelines.py index 5df77e2e7..49e807dc7 100644 --- a/test/instantiation/test_instantiate_all_pipelines.py +++ b/test/instantiation/test_instantiate_all_pipelines.py @@ -210,16 +210,15 @@ def test_instantiate_StatisticsSurface(): root = dirname(abspath(join(abspath(__file__), pardir))) root = join(root, 'data', 'StatisticsSurface') + parameters = { - 'orig_input_data': 't1-freesurfer', - 'covariates': 'age + sex', - 'contrast': 'group', + # Clinica compulsory parameters 'group_label': 'UnitTest', + 'orig_input_data': 't1-freesurfer', 'glm_type': 'group_comparison', - 'custom_file': '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh', - 'measure_label': 'ct', - 'full_width_at_half_maximum': 20, - 'cluster_threshold': 0.001 + 'contrast': 'group', + # Optional parameters + 'covariates': 'age sex', } pipeline = StatisticsSurface( caps_directory=join(root, 'in', 'caps'), diff --git a/test/nonregression/test_run_pipelines.py b/test/nonregression/test_run_pipelines.py index 655486e39..a44bc0936 100644 --- a/test/nonregression/test_run_pipelines.py +++ b/test/nonregression/test_run_pipelines.py @@ -507,15 +507,13 @@ def test_run_StatisticsSurface(cmdopt): shutil.copytree(join(root, 'in', 'caps'), join(root, 'out', 'caps')) parameters = { - 'orig_input_data': 't1-freesurfer', - 'covariates': 'age sex', - 'contrast': 'group', + # Clinica compulsory 
parameters 'group_label': 'UnitTest', + 'orig_input_data': 't1-freesurfer', 'glm_type': 'group_comparison', - 'custom_file': '@subject/@session/t1/freesurfer_cross_sectional/@subject_@session/surf/@hemi.thickness.fwhm@fwhm.fsaverage.mgh', - 'measure_label': 'ct', - 'full_width_at_half_maximum': 20, - 'cluster_threshold': 0.001 + 'contrast': 'group', + # Optional parameters + 'covariates': 'age sex', } pipeline = StatisticsSurface( caps_directory=join(root, 'out', 'caps'), @@ -524,7 +522,7 @@ def test_run_StatisticsSurface(cmdopt): parameters=parameters ) pipeline.build() - pipeline.run(plugin='MultiProc', plugin_args={'n_procs': 8}, bypass_check=True) + pipeline.run(plugin='MultiProc', plugin_args={'n_procs': 1}, bypass_check=True) # Check files filename = 'group-UnitTest_AD-lt-CN_measure-ct_fwhm-20_correctedPValue.mat' From b3f49009457151e443ae9737549c0e55b1db4129 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 00:13:07 +0200 Subject: [PATCH 06/51] Add pet_volume_normalized_suvr_pet function --- clinica/utils/input_files.py | 42 ++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/clinica/utils/input_files.py b/clinica/utils/input_files.py index 7ede10040..4cb4bc01d 100644 --- a/clinica/utils/input_files.py +++ b/clinica/utils/input_files.py @@ -257,3 +257,45 @@ def bids_pet_nii(acq_label): 'description': 'PET data with ' + acq_label + ' tracer' } return information + +# PET-Volume + + +def pet_volume_normalized_suvr_pet( + acq_label, suvr_reference_region, use_brainmasked_image, use_pvc_data, fwhm=0 +): + if use_brainmasked_image: + mask_key_value = "_mask-brain" + mask_description = "Brain-masked" + else: + mask_key_value = "" + mask_description = "Full" + + if use_pvc_data: + pvc_key_value = "_pvc-rbv" + pvc_description = "using RBV method for PVC" + else: + pvc_key_value = "" + pvc_description = "without PVC" + + if fwhm: + fwhm_key_value = f"_fwhm-{fwhm}mm" + fwhm_description = f"with {fwhm}mm smoothing" + 
else: + fwhm_key_value = f"" + fwhm_description = f"with no smoothing" + + suvr_key_value = f"_suvr-{suvr_reference_region}" + + information = { + "pattern": ( + f"*_acq-{acq_label}_pet" + f"_space-Ixi549Space{pvc_key_value}{suvr_key_value}{mask_key_value}{fwhm_key_value}_pet.nii*" + ), + "description": ( + f"{mask_description} SUVR map (using {suvr_reference_region} region) of {acq_label}-PET " + f"with {pvc_description} and {fwhm_description} in Ixi549Space space" + ), + "needed_pipeline": "pet-volume", + } + return information From 670597fdee736e82b5c585bfec280eaabb018db5 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 00:40:39 +0200 Subject: [PATCH 07/51] Add group_label to pet_volume_normalized_suvr_pet --- clinica/utils/input_files.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/clinica/utils/input_files.py b/clinica/utils/input_files.py index 4cb4bc01d..3ab969788 100644 --- a/clinica/utils/input_files.py +++ b/clinica/utils/input_files.py @@ -261,9 +261,19 @@ def bids_pet_nii(acq_label): # PET-Volume +# coding: utf8 + + def pet_volume_normalized_suvr_pet( - acq_label, suvr_reference_region, use_brainmasked_image, use_pvc_data, fwhm=0 + acq_label, + group_label, + suvr_reference_region, + use_brainmasked_image, + use_pvc_data, + fwhm=0, ): + import os + if use_brainmasked_image: mask_key_value = "_mask-brain" mask_description = "Brain-masked" @@ -288,9 +298,12 @@ def pet_volume_normalized_suvr_pet( suvr_key_value = f"_suvr-{suvr_reference_region}" information = { - "pattern": ( + "pattern": os.path.join( + "pet", + "preprocessing", + f"group-{group_label}", f"*_acq-{acq_label}_pet" - f"_space-Ixi549Space{pvc_key_value}{suvr_key_value}{mask_key_value}{fwhm_key_value}_pet.nii*" + f"_space-Ixi549Space{pvc_key_value}{suvr_key_value}{mask_key_value}{fwhm_key_value}_pet.nii*", ), "description": ( f"{mask_description} SUVR map (using {suvr_reference_region} region) of {acq_label}-PET " From 
6586ecca5eb976e6418b1cc6e2bab719d2f380de Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 14:39:20 +0200 Subject: [PATCH 08/51] Improve pet_volume_normalized_suvr_pet description --- clinica/utils/input_files.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/clinica/utils/input_files.py b/clinica/utils/input_files.py index 3ab969788..dd84f8395 100644 --- a/clinica/utils/input_files.py +++ b/clinica/utils/input_files.py @@ -276,10 +276,10 @@ def pet_volume_normalized_suvr_pet( if use_brainmasked_image: mask_key_value = "_mask-brain" - mask_description = "Brain-masked" + mask_description = "brain-masked" else: mask_key_value = "" - mask_description = "Full" + mask_description = "full" if use_pvc_data: pvc_key_value = "_pvc-rbv" @@ -307,7 +307,7 @@ def pet_volume_normalized_suvr_pet( ), "description": ( f"{mask_description} SUVR map (using {suvr_reference_region} region) of {acq_label}-PET " - f"with {pvc_description} and {fwhm_description} in Ixi549Space space" + f"{pvc_description} and {fwhm_description} in Ixi549Space space based on {group_label} DARTEL template" ), "needed_pipeline": "pet-volume", } From 317a55f64166c4855884aef651f84bae65cf3129 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 14:39:50 +0200 Subject: [PATCH 09/51] Harmonize PET tracers handling (stats-volume) --- .../statistics_volume_cli.py | 58 ++++++++++++-- .../statistics_volume_pipeline.py | 75 +++++++++++++------ .../test_instantiate_all_pipelines.py | 11 +-- test/nonregression/test_run_pipelines.py | 11 +-- 4 files changed, 117 insertions(+), 38 deletions(-) diff --git a/clinica/pipelines/statistics_volume/statistics_volume_cli.py b/clinica/pipelines/statistics_volume/statistics_volume_cli.py index c9b4316ba..f7f12aa43 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_cli.py +++ b/clinica/pipelines/statistics_volume/statistics_volume_cli.py @@ -28,7 +28,7 @@ def define_options(self): 
clinica_comp.add_argument("orig_input_data", help='Type of volume-based feature: type ' '\'t1-volume\' to use gray matter maps, ' - '\'pet-volume\' to use FDG-PET data or ' + '\'pet-volume\' to use PET data or ' '\'custom-pipeline\' to use you own data in CAPS directory ' '(see Wiki for details).', choices=['t1-volume', 'pet-volume', 'custom-pipeline']) @@ -40,12 +40,33 @@ def define_options(self): # Optional arguments (e.g. FWHM) optional = self._args.add_argument_group(PIPELINE_CATEGORIES['OPTIONAL']) - optional.add_argument("-gld", "--group_label_dartel", type=str, default=None, + optional.add_argument("-gld", "--group_label_dartel", type=str, default='*', help='Name of the DARTEL template that Clinica needs to use to grab input file.') optional.add_argument("-fwhm", "--full_width_at_half_maximum", type=int, default=8, help='Full Width at Half Maximum (FWHM) of the smoothing used in your input file ' '(default: --full_width_at_half_maximum %(default)s).') + # Optional arguments for inputs from pet-volume pipeline + optional_pet = self._args.add_argument_group( + '%sPipeline options if you use inputs from pet-volume pipeline%s' % + (Fore.BLUE, Fore.RESET) + ) + optional_pet.add_argument("-acq", "--acq_label", + type=str, + default=None, + help='Name of the PET tracer label in the acquisition entity ' + '(acq-).') + optional_pet.add_argument("-suvr", "--suvr_reference_region", + choices=['cerebellumPons', 'pons'], + default=None, + help='Intensity normalization using the average PET uptake in reference regions ' + 'resulting in a standardized uptake value ratio (SUVR) map. 
It can be ' + 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') + optional_pet.add_argument("-pvc", "--use_pvc_data", + action='store_true', + default=False, + help="Use PET data with partial value correction (by default, PET data with no PVC are used)") + # Optional arguments for custom pipeline opt_custom_input = self._args.add_argument_group( '%sPipeline options if you selected custom-pipeline%s' % (Fore.BLUE, Fore.RESET) @@ -76,23 +97,46 @@ def define_options(self): def run_command(self, args): from networkx import Graph + from colorama import Fore from .statistics_volume_pipeline import StatisticsVolume from clinica.utils.ux import print_end_pipeline, print_crash_files_and_exit from clinica.utils.exceptions import ClinicaException + # PET-Volume pipeline + if args.orig_input_data == 'pet-volume': + if args.acq_label is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-volume pipeline without setting --acq_label flag. " + f"Clinica will now exit.{Fore.RESET}" + ) + if args.suvr_reference_region is None: + raise ClinicaException( + f"{Fore.RED}You selected pet-volume pipeline without setting --suvr_reference_region flag. 
" + f"Clinica will now exit.{Fore.RESET}" + ) + # Custom pipeline if args.orig_input_data == 'custom-pipeline': if (args.custom_file is None) or (args.measure_label is None): raise ClinicaException('You must set --measure_label and --custom_file flags.') parameters = { - 'contrast': args.contrast, - 'orig_input_data': args.orig_input_data, + # Clinica compulsory arguments 'group_label': args.group_label, - 'custom_files': args.custom_files, - 'cluster_threshold': args.cluster_threshold, + 'orig_input_data': args.orig_input_data, + 'contrast': args.contrast, + # Optional arguments 'group_label_dartel': args.group_label_dartel, - 'full_width_at_half_maximum': args.full_width_at_half_maximum + 'full_width_at_half_maximum': args.full_width_at_half_maximum, + # Optional arguments for inputs from pet-volume pipeline + 'acq_label': args.acq_label, + 'use_pvc_data': args.use_pvc_data, + 'suvr_reference_region': args.suvr_reference_region, + # Optional arguments for custom pipeline + 'measure_label': args.measure_label, + 'custom_file': args.custom_file, + # Advanced arguments + 'cluster_threshold': args.cluster_threshold, } pipeline = StatisticsVolume( diff --git a/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py b/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py index b179dfcbe..59b684320 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py +++ b/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py @@ -12,11 +12,33 @@ class StatisticsVolume(cpe.Pipeline): def check_pipeline_parameters(self): """Check pipeline parameters.""" from clinica.utils.exceptions import ClinicaException + from clinica.utils.group import check_group_label + + # Clinica compulsory parameters + self.parameters.setdefault('group_label', None) + check_group_label(self.parameters['group_label']) if 'orig_input_data' not in self.parameters.keys(): raise KeyError('Missing compulsory orig_input_data key in pipeline parameter.') - if 
'group_label_dartel' not in self.parameters.keys(): - self.parameters['group_label_dartel'] = None + + if 'contrast' not in self.parameters.keys(): + raise KeyError('Missing compulsory contrast key in pipeline parameter.') + + # Optional parameters + self.parameters.setdefault('group_label_dartel', '*') + self.parameters.setdefault('full_width_at_half_maximum', 8) + + # Optional parameters for inputs from pet-volume pipeline + self.parameters.setdefault('acq_label', None) + self.parameters.setdefault('use_pvc_data', None) + self.parameters.setdefault('suvr_reference_region', None) + + # Optional parameters for custom pipeline + self.parameters.setdefault('measure_label', None) + self.parameters.setdefault('custom_file', None) + + # Advanced parameters + self.parameters.setdefault('cluster_threshold', 0.001) if self.parameters['cluster_threshold'] < 0 or self.parameters['cluster_threshold'] > 1: raise ClinicaException("Cluster threshold should be between 0 and 1 " @@ -58,38 +80,49 @@ def build_input_node(self): import nipype.pipeline.engine as npe from clinica.utils.exceptions import ClinicaException from clinica.utils.inputs import clinica_file_reader - from clinica.utils.input_files import t1_volume_template_tpm_in_mni + from clinica.utils.input_files import (t1_volume_template_tpm_in_mni, + pet_volume_normalized_suvr_pet) from clinica.utils.stream import cprint from clinica.utils.ux import print_images_to_process, print_begin_image - gic = '*' - if self.parameters['group_label_dartel'] is not None: - gic = self.parameters['group_label_dartel'] - all_errors = [] if self.parameters['orig_input_data'] == 'pet-volume': - self.parameters['measure_label'] = 'fdg' - information_dict = { - 'pattern': '*_pet_space-Ixi549Space_suvr-pons_mask-brain_fwhm-' + str(self.parameters['full_width_at_half_maximum']) + 'mm_pet.nii*', - 'description': 'pons normalized FDG PET image in MNI space (brain masked)', - 'needed_pipeline': 'pet-volume' - } + if not ( + 
self.parameters["acq_label"] + and self.parameters["suvr_reference_region"] + ): + raise ValueError( + f"Missing value(s) in parameters from pet-volume pipeline. Given values:\n" + f"- acq_label: {self.parameters['acq_label']}\n" + f"- suvr_reference_region: {self.parameters['suvr_reference_region']}\n" + ) + + self.parameters['measure_label'] = self.parameters['acq_label'] + information_dict = pet_volume_normalized_suvr_pet( + acq_label=self.parameters["acq_label"], + group_label=self.parameters["group_label_dartel"], + suvr_reference_region=self.parameters["suvr_reference_region"], + use_brainmasked_image=True, + use_pvc_data=self.parameters["use_pvc_data"], + fwhm=self.parameters['full_width_at_half_maximum'] + ) elif self.parameters['orig_input_data'] == 't1-volume': self.parameters['measure_label'] = 'graymatter' - information_dict = t1_volume_template_tpm_in_mni(gic, 0, True) + information_dict = t1_volume_template_tpm_in_mni(self.parameters['group_label_dartel'], 0, True) - else: - if not self.parameters['custom_file']: - raise ClinicaException(Fore.RED + '[Error] You did not specify the --custom_file flag in the command line for the feature type ' - + Fore.Blue + self.parameters['measure_label'] + Fore.RED + '! Clinica can\'t ' - + 'know what file to use in your analysis ! Type: \n\t' + Fore.BLUE + 'clinica run statistics-volume\n' - + Fore.RED + ' to have help on how to use the command line.' 
+ Fore.RESET) + elif self.parameters['orig_input_data'] == 'custom-pipeline': + if self.parameters['custom_file'] is None: + raise ClinicaException( + f"{Fore.RED}Custom pipeline was selected but no 'custom_file' was specified.{Fore.RESET}" + ) # If custom file are grabbed, information of fwhm is irrelevant and should not appear on final filenames self.parameters['full_width_at_half_maximum'] = None information_dict = { 'pattern': self.parameters['custom_file'], 'description': 'custom file provided by user' } + else: + raise ValueError(f"Input data {self.parameters['orig_input_data']} unknown.") try: input_files = clinica_file_reader(self.subjects, @@ -115,7 +148,7 @@ def build_input_node(self): if len(self.subjects): print_images_to_process(self.subjects, self.sessions) cprint('The pipeline will last a few minutes. Images generated by SPM will popup during the pipeline.') - print_begin_image('group-' + self.parameters['group_label']) + print_begin_image(f"group-{self.parameters['group_label']}") self.connect([ (read_parameters_node, self.input_node, [('input_files', 'input_files')]) diff --git a/test/instantiation/test_instantiate_all_pipelines.py b/test/instantiation/test_instantiate_all_pipelines.py index 49e807dc7..e81b35435 100644 --- a/test/instantiation/test_instantiate_all_pipelines.py +++ b/test/instantiation/test_instantiate_all_pipelines.py @@ -415,13 +415,14 @@ def test_instantiate_StatisticsVolume(): # Instantiate pipeline and run() parameters = { + # Clinica compulsory parameters + 'group_label': 'UnitTest', 'orig_input_data': 'pet-volume', 'contrast': 'group', - 'measure_label': 'fdg', - 'group_label': 'UnitTest', - 'cluster_threshold': 0.001, - 'group_label_caps': None, - 'full_width_at_half_maximum': 8 + # Optional arguments for inputs from pet-volume pipeline + 'acq_label': 'FDG', + 'use_pvc_data': False, + 'suvr_reference_region': 'pons', } pipeline = StatisticsVolume( diff --git a/test/nonregression/test_run_pipelines.py 
b/test/nonregression/test_run_pipelines.py index a44bc0936..783695326 100644 --- a/test/nonregression/test_run_pipelines.py +++ b/test/nonregression/test_run_pipelines.py @@ -870,13 +870,14 @@ def test_run_StatisticsVolume(cmdopt): # Instantiate pipeline and run() parameters = { + # Clinica compulsory parameters + 'group_label': 'UnitTest', 'orig_input_data': 'pet-volume', 'contrast': 'group', - 'measure_label': 'fdg', - 'group_label': 'UnitTest', - 'cluster_threshold': 0.001, - 'group_label_caps': None, - 'full_width_at_half_maximum': 8 + # Optional arguments for inputs from pet-volume pipeline + 'acq_label': 'FDG', + 'use_pvc_data': False, + 'suvr_reference_region': 'pons', } pipeline = StatisticsVolume( From ed7bdae113b226fa88a6f4194189b40b750fa297 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 14:45:27 +0200 Subject: [PATCH 10/51] Use f-string for custom-pipeline/pet-* pipeline CLI --- .../pipelines/statistics_surface/statistics_surface_cli.py | 5 ++--- clinica/pipelines/statistics_volume/statistics_volume_cli.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/clinica/pipelines/statistics_surface/statistics_surface_cli.py b/clinica/pipelines/statistics_surface/statistics_surface_cli.py index 7f9c7815f..e8db71c0d 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_cli.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_cli.py @@ -57,8 +57,7 @@ def define_options(self): '(default: --full_width_at_half_maximum %(default)s).') # Optional arguments for inputs from pet-surface pipeline opt_pet = self._args.add_argument_group( - '%sPipeline options if you use inputs from pet-volume pipeline%s' % - (Fore.BLUE, Fore.RESET) + f"{Fore.BLUE}Pipeline options if you use inputs from pet-surface pipeline{Fore.RESET}" ) opt_pet.add_argument("-acq", "--acq_label", type=str, @@ -72,7 +71,7 @@ def define_options(self): 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') # 
Optional arguments for custom pipeline opt_custom_input = self._args.add_argument_group( - '%sPipeline options if you selected custom-pipeline%s' % (Fore.BLUE, Fore.RESET) + f"{Fore.BLUE}Pipeline options if you selected custom-pipeline{Fore.RESET}" ) opt_custom_input.add_argument("-cf", "--custom_file", type=str, default=None, diff --git a/clinica/pipelines/statistics_volume/statistics_volume_cli.py b/clinica/pipelines/statistics_volume/statistics_volume_cli.py index f7f12aa43..7ba83d3bd 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_cli.py +++ b/clinica/pipelines/statistics_volume/statistics_volume_cli.py @@ -48,8 +48,7 @@ def define_options(self): # Optional arguments for inputs from pet-volume pipeline optional_pet = self._args.add_argument_group( - '%sPipeline options if you use inputs from pet-volume pipeline%s' % - (Fore.BLUE, Fore.RESET) + f"{Fore.BLUE}Pipeline options if you use inputs from pet-volume pipeline{Fore.RESET}" ) optional_pet.add_argument("-acq", "--acq_label", type=str, @@ -69,7 +68,7 @@ def define_options(self): # Optional arguments for custom pipeline opt_custom_input = self._args.add_argument_group( - '%sPipeline options if you selected custom-pipeline%s' % (Fore.BLUE, Fore.RESET) + f"{Fore.BLUE}Pipeline options if you selected custom-pipeline{Fore.RESET}" ) opt_custom_input.add_argument("-cf", "--custom_file", type=str, default=None, From c0fbe5bb2c79f54b2920bbd43f72c04c5f25618e Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 15:06:16 +0200 Subject: [PATCH 11/51] Set False as default value for use_pvc_data --- .../pipelines/statistics_volume/statistics_volume_pipeline.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py b/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py index 59b684320..35a986eb5 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py +++ 
b/clinica/pipelines/statistics_volume/statistics_volume_pipeline.py @@ -30,8 +30,8 @@ def check_pipeline_parameters(self): # Optional parameters for inputs from pet-volume pipeline self.parameters.setdefault('acq_label', None) - self.parameters.setdefault('use_pvc_data', None) self.parameters.setdefault('suvr_reference_region', None) + self.parameters.setdefault('use_pvc_data', False) # Optional parameters for custom pipeline self.parameters.setdefault('measure_label', None) @@ -95,6 +95,7 @@ def build_input_node(self): f"Missing value(s) in parameters from pet-volume pipeline. Given values:\n" f"- acq_label: {self.parameters['acq_label']}\n" f"- suvr_reference_region: {self.parameters['suvr_reference_region']}\n" + f"- use_pvc_data: {self.parameters['use_pvc_data']}\n" ) self.parameters['measure_label'] = self.parameters['acq_label'] From 2e83b7b611048cd36e401679ac8121552ac3b0a8 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 15:11:44 +0200 Subject: [PATCH 12/51] Harmonize PET tracers handling (ml-spatial-svm) --- .../spatial_svm_cli.py | 8 ++- .../spatial_svm_pipeline.py | 55 +++++++++++-------- 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py index 3f8eba412..9d87bc693 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py @@ -30,10 +30,9 @@ def define_options(self): 'pet-volume' to use SUVr maps.''', choices=['t1-volume', 'pet-volume'], ) - # Optional arguments + # Optional arguments for inputs from pet-volume pipeline optional_pet = self._args.add_argument_group( - '%sPipeline options if you use inputs from pet-volume pipeline%s' % - (Fore.BLUE, Fore.RESET) + f"{Fore.BLUE}Pipeline options if you use inputs from pet-volume pipeline{Fore.RESET}" ) optional_pet.add_argument("-acq", "--acq_label", 
type=str, @@ -81,11 +80,14 @@ def run_command(self, args): ) parameters = { + # Clinica compulsory arguments 'group_label': args.group_label, 'orig_input_data': args.orig_input_data, + # Optional arguments for inputs from pet-volume pipeline 'acq_label': args.acq_label, 'use_pvc_data': args.use_pvc_data, 'suvr_reference_region': args.suvr_reference_region, + # Advanced arguments 'fwhm': args.full_width_half_maximum, } pipeline = SpatialSVM( diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py index 0d7a46e5a..a35772160 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_pipeline.py @@ -16,14 +16,20 @@ def check_pipeline_parameters(self): """Check pipeline parameters.""" from clinica.utils.group import check_group_label - if 'group_label' not in self.parameters.keys(): - raise KeyError('Missing compulsory group_label key in pipeline parameter.') + # Clinica compulsory parameters + self.parameters.setdefault('group_label', None) + check_group_label(self.parameters['group_label']) + if 'orig_input_data' not in self.parameters.keys(): raise KeyError('Missing compulsory orig_input_data key in pipeline parameter.') - if 'fwhm' not in self.parameters.keys(): - self.parameters['fwhm'] = 4 - check_group_label(self.parameters['group_label']) + # Optional parameters for inputs from pet-volume pipeline + self.parameters.setdefault('acq_label', None) + self.parameters.setdefault('suvr_reference_region', None) + self.parameters.setdefault('use_pvc_data', False) + + # Advanced parameters + self.parameters.setdefault('fwhm', 4) def check_custom_dependencies(self): """Check dependencies that can not be listed in the `info.json` file. 
@@ -49,14 +55,14 @@ def get_output_fields(self): return ['regularized_image'] def build_input_node(self): - """Build and connect an input node to the pipeline. - """ + """Build and connect an input node to the pipeline.""" import os from colorama import Fore import nipype.pipeline.engine as npe import nipype.interfaces.utility as nutil from clinica.utils.inputs import clinica_file_reader, clinica_group_reader - from clinica.utils.input_files import t1_volume_final_group_template + from clinica.utils.input_files import (t1_volume_final_group_template, + pet_volume_normalized_suvr_pet) from clinica.utils.exceptions import ClinicaCAPSError, ClinicaException from clinica.utils.ux import print_groups_in_caps_directory @@ -81,22 +87,25 @@ def build_input_node(self): 'needed_pipeline': 't1-volume-tissue-segmentation' } elif self.parameters['orig_input_data'] == 'pet-volume': - if self.parameters['use_pvc_data']: - caps_files_information = { - 'pattern': os.path.join('pet', 'preprocessing', 'group-' + self.parameters['group_label'], - '*_acq-' + self.parameters['acq_label'] + '_pet_space-Ixi549Space_pvc-rbv_suvr-' + self.parameters['suvr_reference_region'] + '_pet.nii.gz'), - 'description': self.parameters['acq_label'] + ' PET partial volume corrected (RBV) in Ixi549 space', - 'needed_pipeline': 'pet-volume with PVC' - } - else: - caps_files_information = { - 'pattern': os.path.join('pet', 'preprocessing', 'group-' + self.parameters['group_label'], - '*_acq-' + self.parameters['acq_label'] + '*_pet_space-Ixi549Space_suvr-' + self.parameters['suvr_reference_region'] + '_pet.nii.gz'), - 'description': self.parameters['acq_label'] + ' PET in Ixi549 space', - 'needed_pipeline': 'pet-volume' - } + if not ( + self.parameters["acq_label"] + and self.parameters["suvr_reference_region"] + ): + raise ValueError( + f"Missing value(s) in parameters from pet-volume pipeline. 
 Given values:\n" + f"- acq_label: {self.parameters['acq_label']}\n" + f"- suvr_reference_region: {self.parameters['suvr_reference_region']}\n" + f"- use_pvc_data: {self.parameters['use_pvc_data']}\n" + ) + caps_files_information = pet_volume_normalized_suvr_pet( + acq_label=self.parameters["acq_label"], + suvr_reference_region=self.parameters["suvr_reference_region"], + use_brainmasked_image=False, + use_pvc_data=self.parameters["use_pvc_data"], + fwhm=0 + ) else: - raise ValueError('Image type ' + self.parameters['orig_input_data'] + ' unknown.') + raise ValueError(f"Image type {self.parameters['orig_input_data']} unknown.") try: input_image = clinica_file_reader(self.subjects, From 10b45511fa52d55fd4114650b19e5a394143a474 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 15:24:54 +0200 Subject: [PATCH 13/51] Improve readability of get_pet_surface_custom_file --- .../statistics_surface_utils.py | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/clinica/pipelines/statistics_surface/statistics_surface_utils.py b/clinica/pipelines/statistics_surface/statistics_surface_utils.py index eecc754da..5b7f96912 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_utils.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_utils.py @@ -4,13 +4,13 @@ def get_t1_freesurfer_custom_file(): import os custom_file = os.path.join( - '@subject', - '@session', - 't1', - 'freesurfer_cross_sectional', - '@subject_@session', - 'surf', - '@hemi.thickness.fwhm@fwhm.fsaverage.mgh' + "@subject", + "@session", + "t1", + "freesurfer_cross_sectional", + "@subject_@session", + "surf", + "@hemi.thickness.fwhm@fwhm.fsaverage.mgh" ) return custom_file @@ -18,11 +18,12 @@ def get_t1_freesurfer_custom_file(): def get_pet_surface_custom_file(acq_label, suvr_reference_region): import os custom_file = os.path.join( - '@subject', - '@session', - 'pet', - 'surface', - '@subject_@session_task-rest_acq-' + acq_label + 
'_pet_space-fsaverage_suvr-' + suvr_reference_region + '_pvc-iy_hemi-@hemi_fwhm-@fwhm_projection.mgh' + "@subject", + "@session", + "pet", + "surface", + f"@subject_@session_task-rest_acq-{acq_label}_pet" + f"_space-fsaverage_suvr-{suvr_reference_region}_pvc-iy_hemi-@hemi_fwhm-@fwhm_projection.mgh" ) return custom_file From a91fe911084d92f418bd5051e424e65b4c367d4c Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 16:00:04 +0200 Subject: [PATCH 14/51] Document get_file_from_server --- clinica/utils/inputs.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/clinica/utils/inputs.py b/clinica/utils/inputs.py index fab1b1dab..79756cf94 100644 --- a/clinica/utils/inputs.py +++ b/clinica/utils/inputs.py @@ -448,6 +448,19 @@ def fetch_file(remote, dirname=None): def get_file_from_server(remote_file, cache_path=None): + """ + Download file from server + + Args: + remote_file (str): RemoteFileStructure defined in clinica.utils.inputs + cache_path (str): (default: ~/.cache/clinica/data) + + Returns: + Path to downloaded file. + + Note: + This function will be in Clinica. 
+ """ import os from pathlib import Path from clinica.utils.stream import cprint From 9bf26b49447edf488253c0f6626932ce984f6a43 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 16:12:46 +0200 Subject: [PATCH 15/51] Centralize list of SUVR regions --- .../machine_learning_spatial_svm/spatial_svm_cli.py | 4 +++- clinica/pipelines/pet_surface/pet_surface_cli.py | 3 ++- clinica/pipelines/pet_volume/pet_volume_cli.py | 3 ++- .../pipelines/statistics_surface/statistics_surface_cli.py | 5 +++-- .../pipelines/statistics_volume/statistics_volume_cli.py | 5 +++-- clinica/utils/pet.py | 6 ++++++ 6 files changed, 19 insertions(+), 7 deletions(-) diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py index 9d87bc693..e3e86c9ea 100644 --- a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py @@ -18,6 +18,8 @@ def define_options(self): """Define the sub-command arguments.""" from colorama import Fore from clinica.engine.cmdparser import PIPELINE_CATEGORIES + from clinica.utils.pet import LIST_SUVR_REFERENCE_REGIONS + # Clinica compulsory arguments (e.g. BIDS, CAPS, group_label) clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY']) clinica_comp.add_argument("caps_directory", @@ -40,7 +42,7 @@ def define_options(self): help='Name of the PET tracer label in the acquisition entity ' '(acq-).') optional_pet.add_argument("-suvr", "--suvr_reference_region", - choices=['cerebellumPons', 'pons'], + choices=LIST_SUVR_REFERENCE_REGIONS, default=None, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. 
It can be ' diff --git a/clinica/pipelines/pet_surface/pet_surface_cli.py b/clinica/pipelines/pet_surface/pet_surface_cli.py index 19c9df73f..d2f949d59 100644 --- a/clinica/pipelines/pet_surface/pet_surface_cli.py +++ b/clinica/pipelines/pet_surface/pet_surface_cli.py @@ -17,6 +17,7 @@ def define_description(self): def define_options(self): """Define the sub-command arguments.""" from clinica.engine.cmdparser import PIPELINE_CATEGORIES + from clinica.utils.pet import LIST_SUVR_REFERENCE_REGIONS # Clinica compulsory arguments (e.g. BIDS, CAPS, group_label) clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY']) clinica_comp.add_argument("bids_directory", @@ -26,7 +27,7 @@ def define_options(self): clinica_comp.add_argument("acq_label", type=str, help='Name of the PET tracer label in the acquisition entity ' '(acq-).') - clinica_comp.add_argument("suvr_reference_region", choices=['cerebellumPons', 'pons'], + clinica_comp.add_argument("suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') diff --git a/clinica/pipelines/pet_volume/pet_volume_cli.py b/clinica/pipelines/pet_volume/pet_volume_cli.py index 9a93ced25..f81a3e63a 100644 --- a/clinica/pipelines/pet_volume/pet_volume_cli.py +++ b/clinica/pipelines/pet_volume/pet_volume_cli.py @@ -17,6 +17,7 @@ def define_description(self): def define_options(self): """Define the sub-command arguments.""" from clinica.engine.cmdparser import PIPELINE_CATEGORIES + from clinica.utils.pet import LIST_SUVR_REFERENCE_REGIONS # Clinica compulsory arguments (e.g. 
BIDS, CAPS, group_label) clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY']) clinica_comp.add_argument("bids_directory", @@ -28,7 +29,7 @@ def define_options(self): clinica_comp.add_argument("acq_label", type=str, help='Name of the PET tracer label in the acquisition entity ' '(acq-).') - clinica_comp.add_argument("suvr_reference_region", choices=['cerebellumPons', 'pons'], + clinica_comp.add_argument("suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' 'cerebellumPons (used for AV45 tracers) or pons (used for 18F-FDG tracers).') diff --git a/clinica/pipelines/statistics_surface/statistics_surface_cli.py b/clinica/pipelines/statistics_surface/statistics_surface_cli.py index e8db71c0d..6b494ff22 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_cli.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_cli.py @@ -16,8 +16,9 @@ def define_description(self): def define_options(self): """Define the sub-command arguments.""" - from clinica.engine.cmdparser import PIPELINE_CATEGORIES from colorama import Fore + from clinica.engine.cmdparser import PIPELINE_CATEGORIES + from clinica.utils.pet import LIST_SUVR_REFERENCE_REGIONS # Clinica compulsory arguments clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY']) @@ -64,7 +65,7 @@ def define_options(self): default=None, help='Name of the PET tracer label in the acquisition entity (acq-).') opt_pet.add_argument("-suvr", "--suvr_reference_region", - choices=['cerebellumPons', 'pons'], + choices=LIST_SUVR_REFERENCE_REGIONS, default=None, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. 
It can be ' diff --git a/clinica/pipelines/statistics_volume/statistics_volume_cli.py b/clinica/pipelines/statistics_volume/statistics_volume_cli.py index 7ba83d3bd..f672a731d 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_cli.py +++ b/clinica/pipelines/statistics_volume/statistics_volume_cli.py @@ -16,8 +16,9 @@ def define_description(self): def define_options(self): """Define the sub-command arguments.""" - from clinica.engine.cmdparser import PIPELINE_CATEGORIES from colorama import Fore + from clinica.engine.cmdparser import PIPELINE_CATEGORIES + from clinica.utils.pet import LIST_SUVR_REFERENCE_REGIONS # Clinica compulsory arguments clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY']) @@ -56,7 +57,7 @@ def define_options(self): help='Name of the PET tracer label in the acquisition entity ' '(acq-).') optional_pet.add_argument("-suvr", "--suvr_reference_region", - choices=['cerebellumPons', 'pons'], + choices=LIST_SUVR_REFERENCE_REGIONS, default=None, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' diff --git a/clinica/utils/pet.py b/clinica/utils/pet.py index 685c3f0cd..4ecb17996 100644 --- a/clinica/utils/pet.py +++ b/clinica/utils/pet.py @@ -73,6 +73,12 @@ def read_psf_information(pvc_psf_tsv, subject_ids, session_ids, pet_tracer): return iterables_psf +LIST_SUVR_REFERENCE_REGIONS = [ + "pons", + "cerebellumPons", +] + + def get_suvr_mask(suvr_reference_region): """Get path of the SUVR mask from SUVR reference region label. 
From 27ecf51da4fad0dc1f8497a1763398271212abc9 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 17:17:07 +0200 Subject: [PATCH 16/51] Format SUVR utils --- clinica/utils/pet.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/clinica/utils/pet.py b/clinica/utils/pet.py index 4ecb17996..d4ab1c997 100644 --- a/clinica/utils/pet.py +++ b/clinica/utils/pet.py @@ -89,20 +89,21 @@ def get_suvr_mask(suvr_reference_region): Path of the SUVR mask """ import os + suvr_reference_region_to_suvr = { - 'pons': os.path.join( + "pons": os.path.join( os.path.split(os.path.realpath(__file__))[0], - '..', - 'resources', - 'masks', - 'region-pons_eroded-6mm_mask.nii.gz' + "..", + "resources", + "masks", + "region-pons_eroded-6mm_mask.nii.gz", ), - 'cerebellumPons': os.path.join( + "cerebellumPons": os.path.join( os.path.split(os.path.realpath(__file__))[0], - '..', - 'resources', - 'masks', - 'region-cerebellumPons_eroded-6mm_mask.nii.gz' + "..", + "resources", + "masks", + "region-cerebellumPons_eroded-6mm_mask.nii.gz", ), } return suvr_reference_region_to_suvr[suvr_reference_region] From 1b4453217e8f1611ddae2133c0e0f565d8769a3b Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 18:57:16 +0200 Subject: [PATCH 17/51] Add PET_Introduction page --- docs/Pipelines/PET_Introduction.md | 84 ++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 85 insertions(+) create mode 100644 docs/Pipelines/PET_Introduction.md diff --git a/docs/Pipelines/PET_Introduction.md b/docs/Pipelines/PET_Introduction.md new file mode 100644 index 000000000..b3744d4fb --- /dev/null +++ b/docs/Pipelines/PET_Introduction.md @@ -0,0 +1,84 @@ +# Introduction + + +## Partial volume correction (PVC) + +To correct for [partial volume effects](http://www.turkupetcentre.net/petanalysis/image_pve.html), several PVC algorithms exist and are implemented in the [PETPVC toolbox](https://github.com/UCL/PETPVC). 
+ +To perform PVC (compulsory for [`pet-surface` pipeline](../PET_Surface), optional for [`pet-volume` pipeline](../PET_Volume)), you will need to specify in a TSV file the full width at half maximum (FWHM), in millimeters, of the [point spread function (PSF)](https://en.wikipedia.org/wiki/Point_spread_function) associated with your data, in the x, y and z directions. + +For instance, if the FWHM of the PSF associated with your first image is 8 mm along the x axis, 9 mm along the y axis, and 10 mm along z axis, the first row of your TSV file will look like this: + +```text +participant_id session_id acq_label psf_x psf_y psf_z +sub-CLNC01 ses-M00 FDG 8 9 10 +sub-CLNC01 ses-M18 FDG 8 9 10 +sub-CLNC01 ses-M00 AV45 7 6 5 +sub-CLNC02 ses-M00 FDG 8 9 10 +sub-CLNC03 ses-M00 FDG 8 9 10 +``` + +Since PSF information may differ according to the PET tracer, `participant_id`, `session_id`, ` acq_label`, ` psf_x`, `psf_y` and `psf_z` columns are compulsory columns. + + + +## Reference regions for standardized uptake value ratio (SUVR) map + +Clinica `v0.3.8` introduces the possibility for the user to select the reference region for the SUVR map computation. + +Reference regions provided by Clinica come from the Pick atlas in MNI space and currently are: + +- `pons`: 6 mm eroded version of the pons region + +- `cerebellumPons`: 6 mm eroded version of the cerebellum + pons regions + + + +## Tutorial: How to add new SUVR reference region to Clinica? + +If you need to use a reference region not provided by Clinica but still want to use [`pet-surface`](../PET_Surface) or [`pet-volume`](../PET_Volume) pipelines, it is possible to easily extend the list of SUVR regions. 
+ +- You first need to install Clinica following [developer instructions](../../Installation/#install-clinica) ; + +- Once done you will need to modify your `/clinica/utils/pet.py` file in particular the following two elements: + - The label of the SUVR reference region that will be stored in CAPS filename(s): + ```python + LIST_SUVR_REFERENCE_REGIONS = [ + "pons", + "cerebellumPons", + ] + ``` + Simply define a new label that will be your new SUVR reference region. `LIST_SUVR_REFERENCE_REGIONS` is used by all command-line interfaces so you do not need to modify the pipelines' CLI to make this new region appear. + + - The path of the SUVR reference region that you will use: + ```python + def get_suvr_mask(suvr_reference_region): + """Get path of the SUVR mask from SUVR reference region label. + + Args: + suvr_reference_region: Label of the SUVR reference region + + Returns: + Path of the SUVR mask + """ + import os + + suvr_reference_region_to_suvr = { + "pons": os.path.join( + os.path.split(os.path.realpath(__file__))[0], + "..", + "resources", + "masks", + "region-pons_eroded-6mm_mask.nii.gz", + ), + "cerebellumPons": os.path.join( + os.path.split(os.path.realpath(__file__))[0], + "..", + "resources", + "masks", + "region-cerebellumPons_eroded-6mm_mask.nii.gz", + ), + } + return suvr_reference_region_to_suvr[suvr_reference_region] + ``` + In this example, the SUVR reference region associated with the `cerebellumPons` label is located at `/resources/masks/region-cerebellumPons_eroded-6mm_mask.nii.gz`. 
diff --git a/mkdocs.yml b/mkdocs.yml index 349c879c9..394f0c42e 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -82,6 +82,7 @@ nav: - DTI: Pipelines/DWI_DTI.md - Connectome: Pipelines/DWI_Connectome.md - PET: + - Introduction: Pipelines/PET_Introduction.md - Volume processing: Pipelines/PET_Volume.md - Surface processing: Pipelines/PET_Surface.md - Statistics: From 0ff0dce92392c4d38f1612783629b934ae45029a Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 20 Oct 2020 23:28:47 +0200 Subject: [PATCH 18/51] Unify how volume atlases are handled --- clinica/pipelines/machine_learning/input.py | 12 ++++----- .../pet_volume/pet_volume_pipeline.py | 27 +++++++------------ .../t1_volume_parcellation_cli.py | 1 - .../t1_volume_parcellation_pipeline.py | 11 +++----- clinica/utils/atlas.py | 25 +++++++++++++++++ 5 files changed, 45 insertions(+), 31 deletions(-) diff --git a/clinica/pipelines/machine_learning/input.py b/clinica/pipelines/machine_learning/input.py index c13dc67f1..e8a9b0f11 100644 --- a/clinica/pipelines/machine_learning/input.py +++ b/clinica/pipelines/machine_learning/input.py @@ -223,12 +223,12 @@ def get_default_parameters(): class CAPSRegionBasedInput(CAPSInput): def __init__(self, input_params): + from clinica.utils.atlas import VOLUME_ATLASES super().__init__(input_params) - if self._input_params['atlas'] not in ['AAL2', 'Neuromorphometrics', 'AICHA', 'LPBA40', 'Hammers']: - raise Exception("Incorrect atlas name. It must be one of the values 'AAL2', 'Neuromorphometrics', " - "'AICHA', 'LPBA40', 'Hammers' ") + if self._input_params['atlas'] not in VOLUME_ATLASES: + raise ValueError(f"Incorrect atlas name (given value: {self._input_params['atlas']}). 
It must be one of {VOLUME_ATLASES}") def get_images(self): """ @@ -393,12 +393,12 @@ def get_default_parameters(): class CAPSTSVBasedInput(CAPSInput): def __init__(self, input_params): + from clinica.utils.atlas import VOLUME_ATLASES super().__init__(input_params) - if self._input_params['atlas'] not in ['AAL2', 'Neuromorphometrics', 'AICHA', 'LPBA40', 'Hammers']: - raise Exception("Incorrect atlas name. It must be one of the values 'AAL2', 'Neuromorphometrics', " - "'AICHA', 'LPBA40', 'Hammers' ") + if self._input_params['atlas'] not in VOLUME_ATLASES: + raise ValueError(f"Incorrect atlas name (given value: {self._input_params['atlas']}). It must be one of {VOLUME_ATLASES}") def get_images(self): """ diff --git a/clinica/pipelines/pet_volume/pet_volume_pipeline.py b/clinica/pipelines/pet_volume/pet_volume_pipeline.py index d5c26ea1a..5580ce8ae 100644 --- a/clinica/pipelines/pet_volume/pet_volume_pipeline.py +++ b/clinica/pipelines/pet_volume/pet_volume_pipeline.py @@ -19,26 +19,19 @@ class PETVolume(cpe.Pipeline): def check_pipeline_parameters(self): """Check pipeline parameters.""" from clinica.utils.group import check_group_label - default_atlases = ['AAL2', 'LPBA40', 'Neuromorphometrics', 'AICHA', 'Hammers'] + from clinica.utils.atlas import PET_VOLUME_ATLASES - if 'group_label' not in self.parameters.keys(): - raise KeyError('Missing compulsory group_label key in pipeline parameter.') + self.parameters.setdefault("group_label", None) + check_group_label(self.parameters["group_label"]) if 'acq_label' not in self.parameters.keys(): raise KeyError('Missing compulsory acq_label key in pipeline parameter.') - if 'pvc_psf_tsv' not in self.parameters.keys(): - self.parameters['pvc_psf_tsv'] = None - if 'mask_tissues' not in self.parameters.keys(): - self.parameters['mask_tissues'] = [1, 2, 3] - if 'mask_threshold' not in self.parameters.keys(): - self.parameters['mask_threshold'] = 0.3 - if 'pvc_mask_tissues' not in self.parameters.keys(): - 
self.parameters['pvc_mask_tissues'] = [1, 2, 3] - if 'smooth' not in self.parameters.keys(): - self.parameters['smooth'] = [8] - if 'atlases' not in self.parameters.keys(): - self.parameters['atlases'] = default_atlases - - check_group_label(self.parameters['group_label']) + + self.parameters.setdefault("pvc_psf_tsv", None) + self.parameters.setdefault("mask_tissues", [1, 2, 3]) + self.parameters.setdefault("mask_threshold", 0.3) + self.parameters.setdefault("pvc_mask_tissues", [1, 2, 3]) + self.parameters.setdefault("smooth", [8]) + self.parameters.setdefault("atlases", PET_VOLUME_ATLASES) def check_custom_dependencies(self): """Check dependencies that can not be listed in the `info.json` file.""" diff --git a/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_cli.py b/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_cli.py index bcdb0ab4d..78b318b54 100644 --- a/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_cli.py +++ b/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_cli.py @@ -34,7 +34,6 @@ def run_command(self, args): parameters = { 'group_label': args.group_label, - 'atlases': args.atlases, } pipeline = T1VolumeParcellation( caps_directory=self.absolute_path(args.caps_directory), diff --git a/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_pipeline.py b/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_pipeline.py index 49a887b74..c1bb0832d 100644 --- a/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_pipeline.py +++ b/clinica/pipelines/t1_volume_parcellation/t1_volume_parcellation_pipeline.py @@ -16,15 +16,12 @@ def check_custom_dependencies(self): def check_pipeline_parameters(self): """Check pipeline parameters.""" from clinica.utils.group import check_group_label + from clinica.utils.atlas import T1_VOLUME_ATLASES - default_atlases = ['AAL2', 'LPBA40', 'Neuromorphometrics', 'AICHA', 'Hammers'] + self.parameters.setdefault("group_label", None) + 
check_group_label(self.parameters["group_label"]) - if 'group_label' not in self.parameters.keys(): - raise KeyError('Missing compulsory group_label key in pipeline parameter.') - if 'atlases' not in self.parameters.keys(): - self.parameters['atlases'] = default_atlases - - check_group_label(self.parameters['group_label']) + self.parameters.setdefault("atlases", T1_VOLUME_ATLASES) def get_input_fields(self): """Specify the list of possible inputs of this pipeline. diff --git a/clinica/utils/atlas.py b/clinica/utils/atlas.py index af7e4340f..a4dfd58d9 100644 --- a/clinica/utils/atlas.py +++ b/clinica/utils/atlas.py @@ -17,6 +17,31 @@ import abc +T1_VOLUME_ATLASES = [ + "AAL2", + "AICHA", + "Hammers", + "LPBA40", + "Neuromorphometrics", +] + +PET_VOLUME_ATLASES = [ + "AAL2", + "AICHA", + "Hammers", + "LPBA40", + "Neuromorphometrics", +] + +DWI_DTI_ATLASES = [ + "JHUDTI81", + "JHUTract0", + "JHUTract25", +] + +VOLUME_ATLASES = list(set(T1_VOLUME_ATLASES + PET_VOLUME_ATLASES + DWI_DTI_ATLASES)) + + class AtlasAbstract: """ Abstract class for Atlas handling. From 478029dca9b8f722edb942c0adc001faf6f975c1 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Wed, 21 Oct 2020 11:15:18 +0200 Subject: [PATCH 19/51] Update region_based_io.py get_caps_pet_list function was removed. This function is obsolete (it contained the attempt of defining an analysis-series ID abandoned in 2016-17) and is not used in Clinica. It is likely to be replaced by CAPSTSVBasedInput now. 
--- .../machine_learning/region_based_io.py | 65 +++++++------------ 1 file changed, 24 insertions(+), 41 deletions(-) diff --git a/clinica/pipelines/machine_learning/region_based_io.py b/clinica/pipelines/machine_learning/region_based_io.py index a75466751..8043c5b93 100644 --- a/clinica/pipelines/machine_learning/region_based_io.py +++ b/clinica/pipelines/machine_learning/region_based_io.py @@ -4,7 +4,6 @@ import numpy as np import pandas as pd import nibabel as nib -from os.path import join def get_caps_t1_list(input_directory, subjects_visits_tsv, group_label, atlas_id): @@ -19,44 +18,29 @@ def get_caps_t1_list(input_directory, subjects_visits_tsv, group_label, atlas_id Returns: """ - from os.path import join import pandas as pd - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep='\t') - if list(subjects_visits.columns.values) != ['participant_id', 'session_id']: - raise Exception('Subjects and visits file is not in the correct format.') - subjects = list(subjects_visits.participant_id) - sessions = list(subjects_visits.session_id) - image_list = [join(input_directory + '/subjects/' + subjects[i] + '/' - + sessions[i] + '/t1/spm/dartel/group-' + group_label + '/atlas_statistics/' + subjects[i] + '_' - + sessions[i]+'_T1w_space-'+atlas_id+'_map-graymatter_statistics.tsv') - for i in range(len(subjects))] - return image_list - - -def get_caps_pet_list(input_directory, subjects_visits_tsv, group_label, atlas_id): - """ - - Args: - input_directory: - subjects_visits_tsv: - group_label: - atlas_id: - - Returns: - - """ - - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep='\t') - if list(subjects_visits.columns.values) != ['participant_id', 'session_id']: - raise Exception('Subjects and visits file is not in the correct format.') + subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") + if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: + raise Exception("Subjects and visits file is not in 
the correct format.") subjects = list(subjects_visits.participant_id) sessions = list(subjects_visits.session_id) - image_list = [join(input_directory, 'analysis-series-default/subjects/' + subjects[i] + '/' - + sessions[i] + '/pet/atlas_statistics/' + subjects[i] + '_' + sessions[i] - + '_space-' + atlas_id + '_map-fdgstatistic2.tsv') - for i in range(len(subjects))] + image_list = [ + join( + input_directory, + "subjects", + subjects[i], + sessions[i], + "t1", + "spm", + "dartel", + f"group-{group_label}", + "atlas_statistics", + f"{subjects[i]}_{sessions[i]}_T1w_space-{atlas_id}_map-graymatter_statistics.tsv", + ) + for i in range(len(subjects)) + ] return image_list @@ -103,12 +87,14 @@ def features_weights(image_list, dual_coefficients, sv_indices, scaler=None): """ if len(sv_indices) != len(dual_coefficients): - print("Length dual coefficients: " + str(len(dual_coefficients))) - print("Length indices: " + str(len(sv_indices))) - raise ValueError('The number of support vectors indices and the number of coefficients must be the same.') + raise ValueError( + f"The number of support vectors indices and the number of coefficients must be the same.\n" + f"- Number of dual coefficients: {len(dual_coefficients)}\n" + f"- Number of indices:: {len(sv_indices)}\n" + ) if len(image_list) == 0: - raise ValueError('The number of images must be greater than 0.') + raise ValueError("The number of images must be greater than 0.") sv_images = [image_list[i] for i in sv_indices] @@ -134,9 +120,6 @@ def weights_to_nifti(weights, atlas, output_filename): Returns: """ - - from os.path import join, split, realpath - from clinica.utils.atlas import AtlasAbstract atlas_path = None From 86423f9f87094e2217fd8c7b5c81c030839f8fc5 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Wed, 21 Oct 2020 11:34:16 +0200 Subject: [PATCH 20/51] Update voxel_based_io.py --- .../machine_learning/voxel_based_io.py | 60 ++++++++++++------- 1 file changed, 40 insertions(+), 20 deletions(-) diff 
--git a/clinica/pipelines/machine_learning/voxel_based_io.py b/clinica/pipelines/machine_learning/voxel_based_io.py index 5e1703c08..b8d9126d7 100644 --- a/clinica/pipelines/machine_learning/voxel_based_io.py +++ b/clinica/pipelines/machine_learning/voxel_based_io.py @@ -6,37 +6,57 @@ from os.path import join -def get_caps_t1_list(input_directory, subjects_visits_tsv, group_label, fwhm, modulated): +def get_caps_t1_list( + input_directory, subjects_visits_tsv, group_label, fwhm, modulated +): - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep='\t') - if list(subjects_visits.columns.values) != ['participant_id', 'session_id']: - raise Exception('Subjects and visits file is not in the correct format.') + subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") + if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: + raise Exception("Subjects and visits file is not in the correct format.") subjects = list(subjects_visits.participant_id) sessions = list(subjects_visits.session_id) if fwhm == 0: - image_list = [join(input_directory, 'subjects/' + subjects[i] + '/' - + sessions[i] + '/t1/spm/dartel/group-' + group_label + '/' - + subjects[i] + '_' + sessions[i] + '_T1w_segm-graymatter'+'_space-Ixi549Space_modulated-'+modulated+'_probability.nii.gz') for i in range(len(subjects))] + fwhm_key_value = "" else: - image_list = [join(input_directory, 'subjects/' + subjects[i] + '/' - + sessions[i] + '/t1/spm/dartel/group-' + group_label + '/' - + subjects[i] + '_' + sessions[i] + '_T1w_segm-graymatter' + '_space-Ixi549Space_modulated-' + modulated + '_fwhm-'+fwhm+'mm_probability.nii.gz') - for i in range(len(subjects))] - + fwhm_key_value = f"_{fwhm}mm" + + image_list = [ + join( + input_directory, + "subjects", + subjects[i], + sessions[i], + "t1", + "spm", + "dartel", + f"group-{group_label}", + f"{subjects[i]}_{sessions[i]}_T1w_segm-graymatter_space-Ixi549Space_modulated-{modulated}{fwhm_key_value}_probability.nii.gz", 
+ ) + for i in range(len(subjects)) + ] return image_list -def get_caps_pet_list(input_directory, subjects_visits_tsv, group_label, pet_type): - - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep='\t') - if list(subjects_visits.columns.values) != ['participant_id', 'session_id']: - raise Exception('Subjects and visits file is not in the correct format.') +def get_caps_pet_list(input_directory, subjects_visits_tsv, group_label, acq_label): + subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") + if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: + raise Exception("Subjects and visits file is not in the correct format.") subjects = list(subjects_visits.participant_id) sessions = list(subjects_visits.session_id) - image_list = [join(input_directory, 'subjects/' + subjects[i] + '/' - + sessions[i] + '/pet/preprocessing/group-' + group_label + '/' + subjects[i] - + '_' + sessions[i] + '_task-rest_acq-' + pet_type + '_pet_space-Ixi549Space_pet.nii.gz') for i in range(len(subjects))] + image_list = [ + join( + input_directory, + "subjects", + subjects[i], + sessions[i], + "pet", + "preprocessing", + f"group-{group_label}", + f"{subjects[i]}_{sessions[i]}_task-rest_acq-{acq_label}_pet_space-Ixi549Space_pet.nii.gz", + ) + for i in range(len(subjects)) + ] return image_list From 8264d1149a498fb0ff8aba851a01a894a577ede3 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Wed, 21 Oct 2020 12:40:02 +0200 Subject: [PATCH 21/51] Harmonize PET tracers handling (ML modules) --- clinica/pipelines/machine_learning/input.py | 356 +++++++++++------- .../machine_learning/ml_workflows.py | 24 +- 2 files changed, 233 insertions(+), 147 deletions(-) diff --git a/clinica/pipelines/machine_learning/input.py b/clinica/pipelines/machine_learning/input.py index e8a9b0f11..445405083 100644 --- a/clinica/pipelines/machine_learning/input.py +++ b/clinica/pipelines/machine_learning/input.py @@ -35,9 +35,8 @@ def __init__(self, 
input_params): raise Exception('Diagnoses file is not in the correct format.') self._diagnoses = list(diagnoses.diagnosis) - if self._input_params['image_type'] not in ['T1', 'fdg', 'av45', 'pib', 'flute', 'dwi']: - raise Exception("Incorrect image type. It must be one of the values 'T1', 'fdg', 'av45', " - "'pib', 'flute' or 'dwi'") + if self._input_params["image_type"] not in ["T1w", "PET"]: + raise Exception("Incorrect image type. It must be one of the values 'T1w', 'PET'") if self._input_params['precomputed_kernel'] is not None: if type(self._input_params['precomputed_kernel']) == np.ndarray: @@ -57,26 +56,20 @@ def __init__(self, input_params): @abc.abstractmethod def get_images(self): """ - Returns: a list of filenames - """ pass @abc.abstractmethod def get_x(self): """ - Returns: a numpy 2d-array. - """ pass def get_y(self): """ - Returns: a list of integers. Each integer represents a class. - """ if self._y is not None: return self._y @@ -87,9 +80,7 @@ def get_y(self): def get_kernel(self, kernel_function=utils.gram_matrix_linear, recompute_if_exists=False): """ - Returns: a numpy 2d-array. 
- """ if self._kernel is not None and not recompute_if_exists: return self._kernel @@ -124,12 +115,13 @@ def save_weights_as_nifti(self, weights, output_dir): @staticmethod def get_default_parameters(): - parameters_dict = {'caps_directory': None, - 'subjects_visits_tsv': None, - 'diagnoses_tsv': None, - 'group_label': None, - 'image_type': None, - 'precomputed_kernel': None} + parameters_dict = {} + parameters_dict.setdefault('caps_directory', None) + parameters_dict.setdefault('subjects_visits_tsv', None) + parameters_dict.setdefault('diagnoses_tsv', None) + parameters_dict.setdefault('group_label', None) + parameters_dict.setdefault('image_type', None) + parameters_dict.setdefault('precomputed_kernel', None) return parameters_dict @@ -148,51 +140,71 @@ def __init__(self, input_params): def get_images(self): """ - Returns: a list of filenames - """ + from clinica.utils.inputs import clinica_file_reader + from clinica.utils.input_files import pet_volume_normalized_suvr_pet + if self._images is not None: return self._images - if self._input_params['image_type'] == 'T1': - fwhm = '' if self._input_params['fwhm'] == 0 else '_fwhm-%dmm' % int(self._input_params['fwhm']) - - self._images = [path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 't1/spm/dartel/group-' + self._input_params['group_label'], - '%s_%s_T1w_segm-graymatter_space-Ixi549Space_modulated-%s%s_probability.nii.gz' - % (self._subjects[i], self._sessions[i], self._input_params['modulated'], fwhm)) - for i in range(len(self._subjects))] + if self._input_params["image_type"] == "T1w": + if self._input_params['fwhm'] == 0: + fwhm_key_value = "" + else: + fwhm_key_value = f"_{self._input_params['fwhm']}mm" + + self._images = [ + path.join( + self._input_params['caps_directory'], + "subjects", + self._subjects[i], + self._sessions[i], + "t1", + "spm", + "dartel", + f"group-{self._input_params['group_label']}", + f"{self._subjects[i]}_{self._sessions[i]}_T1w" + 
f"_segm-graymatter_space-Ixi549Space_modulated-{self._input_params['modulated']}{fwhm_key_value}_probability.nii.gz", + ) + for i in range(len(self._subjects)) + ] + + for image in self._images: + if not path.exists(image): + raise Exception("File %s doesn't exists." % image) + + elif self._input_params['image_type'] == "PET": + caps_files_information = pet_volume_normalized_suvr_pet( + acq_label=self._input_params["acq_label"], + suvr_reference_region=self._input_params["suvr_reference_region"], + use_brainmasked_image=True, + use_pvc_data=self._input_params["use_pvc_data"], + fwhm=self._input_params['fwhm'] + ) + self._images = clinica_file_reader( + self._subjects, + self._sessions, + self._input_params['caps_directory'], + caps_files_information + ) else: - pvc = '' if self._input_params['pvc'] is None else '_pvc-%s' % self._input_params['pvc'] - fwhm = '' if self._input_params['fwhm'] == 0 else '_fwhm-%dmm' % int(self._input_params['fwhm']) - suvr = 'pons' if self._input_params['image_type'] == 'fdg' else 'cerebellumPons' - - self._images = [path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 'pet/preprocessing/group-' + self._input_params['group_label'], - '%s_%s_task-rest_acq-%s_pet_space-Ixi549Space%s_suvr-%s_mask-brain%s_pet.nii.gz' - % (self._subjects[i], self._sessions[i], self._input_params['image_type'], pvc, - suvr, fwhm)) - for i in range(len(self._subjects))] - - for image in self._images: - if not path.exists(image): - raise Exception("File %s doesn't exists." % image) + raise ValueError( + f"Unknown image type (given value: {self._input_params['image_type']})" + ) return self._images def get_x(self): """ - Returns: a numpy 2d-array. 
- """ if self._x is not None: return self._x - cprint('Loading ' + str(len(self.get_images())) + ' subjects') + cprint(f"Loading {len(self.get_images())} subjects") self._x, self._orig_shape, self._data_mask = vbio.load_data(self._images, mask=self._input_params['mask_zeros']) - cprint('Subjects loaded') + cprint("Subjects loaded") return self._x @@ -209,13 +221,16 @@ def save_weights_as_nifti(self, weights, output_dir): def get_default_parameters(): parameters_dict = super(CAPSVoxelBasedInput, CAPSVoxelBasedInput).get_default_parameters() - - new_parameters = {'fwhm': 0, - 'modulated': "on", - 'pvc': None, - 'mask_zeros': True} - - parameters_dict.update(new_parameters) + # t1-volume / pet-volume + parameters_dict.setdefault("fwhm", 0) + # t1-volume / pet-volume ? + parameters_dict.setdefault("mask_zeros", True) + # t1-volume + parameters_dict.setdefault("modulated", "on") + # pet-volume + parameters_dict.setdefault('acq_label', None) + parameters_dict.setdefault('suvr_reference_region', None) + parameters_dict.setdefault("use_pvc_data", False) return parameters_dict @@ -228,7 +243,10 @@ def __init__(self, input_params): super().__init__(input_params) if self._input_params['atlas'] not in VOLUME_ATLASES: - raise ValueError(f"Incorrect atlas name (given value: {self._input_params['atlas']}). It must be one of {VOLUME_ATLASES}") + raise ValueError( + f"Incorrect atlas name (given value: {self._input_params['atlas']}). 
" + f"It must be one of {VOLUME_ATLASES}" + ) def get_images(self): """ @@ -239,22 +257,50 @@ def get_images(self): if self._images is not None: return self._images - if self._input_params['image_type'] == 'T1': - self._images = [path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 't1/spm/dartel/group-' + self._input_params['group_label'], - 'atlas_statistics/', '%s_%s_T1w_space-%s_map-graymatter_statistics.tsv' - % (self._subjects[i], self._sessions[i], self._input_params['atlas'])) - for i in range(len(self._subjects))] + if self._input_params["image_type"] == "T1w": + self._images = [ + path.join( + self._input_params['caps_directory'], + "subjects", + self._subjects[i], + self._sessions[i], + "t1", + "spm", + "dartel", + f"group-{self._input_params['group_label']}", + "atlas_statistics", + f"{self._subjects[i]}_{self._sessions[i]}_T1w" + f"_space-{self._input_params['atlas']}_map-graymatter_statistics.tsv", + ) + for i in range(len(self._subjects)) + ] + + elif self._input_params["image_type"] == "PET": + if self._input_params["use_pvc_data"]: + pvc_key_value = "_pvc-rbv" + else: + pvc_key_value = "" + + self._images = [ + path.join( + self._input_params['caps_directory'], + "subjects", + self._subjects[i], + self._sessions[i], + "pet", + "preprocessing", + f"group-{self._input_params['group_label']}", + "atlas_statistics", + f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-{self._input_params['acq_label']}_pet" + f"_space-{self._input_params['atlas']}{pvc_key_value}" + f"_suvr-{self._input_params['suvr_reference_region']}_statistics.tsv", + ) + for i in range(len(self._subjects)) + ] else: - pvc = '' if self._input_params['pvc'] is None else '_pvc-%s' % self._input_params['pvc'] - suvr = 'pons' if self._input_params['image_type'] == 'fdg' else 'cerebellumPons' - - self._images = [path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 'pet/preprocessing/group-' + 
self._input_params['group_label'], - 'atlas_statistics', '%s_%s_task-rest_acq-%s_pet_space-%s%s_suvr-%s_statistics.tsv' - % (self._subjects[i], self._sessions[i], self._input_params['image_type'], - self._input_params['atlas'], pvc, suvr)) - for i in range(len(self._subjects))] + raise ValueError( + f"Unknown image type (given value: {self._input_params['image_type']})" + ) for image in self._images: if not path.exists(image): @@ -264,16 +310,14 @@ def get_images(self): def get_x(self): """ - Returns: a numpy 2d-array. - """ if self._x is not None: return self._x - cprint('Loading ' + str(len(self.get_images())) + ' subjects') + cprint(f"Loading {len(self.get_images())} subjects") self._x = rbio.load_data(self._images, self._subjects) - cprint('Subjects loaded') + cprint("Subjects loaded") return self._x @@ -287,71 +331,88 @@ def save_weights_as_nifti(self, weights, output_dir): Returns: """ - output_filename = path.join(output_dir, 'weights.nii.gz') rbio.weights_to_nifti(weights, self._input_params['atlas'], output_filename) @staticmethod def get_default_parameters(): - parameters_dict = super(CAPSRegionBasedInput, CAPSRegionBasedInput).get_default_parameters() - new_parameters = {'atlas': None, - 'pvc': None, - 'mask_zeros': True} - - parameters_dict.update(new_parameters) + # t1-volume / pet-volume + parameters_dict.setdefault("atlas", None) + # t1-volume / pet-volume ? 
+ parameters_dict.setdefault("mask_zeros", True) + # pet-volume + parameters_dict.setdefault('acq_label', None) + parameters_dict.setdefault('suvr_reference_region', None) + parameters_dict.setdefault("use_pvc_data", False) return parameters_dict class CAPSVertexBasedInput(CAPSInput): - def __init__(self, input_params): super().__init__(input_params) def get_images(self): - import os """ returns list of filnames """ + import os if self._images is not None: return self._images - if self._input_params['image_type'] == 'fdg' and self._images is None: + if self._input_params["image_type"] == "PET": self._images = [] - hemi = ['lh', 'rh'] + hemi = ["lh", "rh"] for i in range(len(self._subjects)): - self._images.append([os.path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 'pet', 'surface', self._subjects[i] + '_' + - self._sessions[i] + '_task-rest_acq-fdg_pet_space-fsaverage_' - 'suvr-pons_pvc-iy_hemi-' + h + '_fwhm-' + - str(self._input_params['fwhm']) + '_projection.mgh') for h in hemi]) + self._images.append( + [ + os.path.join( + self._input_params["caps_directory"], + "subjects", + self._subjects[i], + self._sessions[i], + "pet", + "surface", + f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-fdg_pet" + f"_space-fsaverage_suvr-{self._input_params['suvr_reference_region']}" + f"_pvc-iy_hemi-{h}_fwhm-{self._input_params['fwhm']}_projection.mgh", + ) + for h in hemi + ] + ) missing_files = [] - missing_files_string_error = '' + missing_files_string_error = "" for img in self._images: for side in img: if not os.path.exists(side): missing_files.append(side) - missing_files_string_error += side + '\n' + missing_files_string_error += side + "\n" if len(missing_files) > 0: - raise IOError('Could not find the following files : \n' + missing_files_string_error - + '\n' + str(len(missing_files)) + ' files missing') + raise IOError( + f"Could not find the following files:\n" + f"{missing_files_string_error}\n" + 
f"{len(missing_files)} files missing." + ) + else: + raise ValueError( + f"Unknown image type (given value: {self._input_params['image_type']})" + ) return self._images def get_x(self): """ Returns numpy 2D array """ - if self._x is not None: return self._x - cprint('Loading ' + str(len(self.get_images())) + ' subjects') + cprint(f"Loading str({len(self.get_images())} subjects") self._x = vtxbio.load_data(self._images) - cprint(str(len(self._x)) + ' subjects loaded') + cprint(f"{len(self._x)} subjects loaded") return self._x def save_weights_as_datasurface(self, weights, output_dir): @@ -380,12 +441,10 @@ def save_weights_as_nifti(self, weights, output_dir): @staticmethod def get_default_parameters(): - parameters_dict = super(CAPSVertexBasedInput, CAPSVertexBasedInput).get_default_parameters() - new_parameters = {'fwhm': 0} - - parameters_dict.update(new_parameters) + # pet-surface + parameters_dict.setdefault("fwhm", 0) return parameters_dict @@ -398,33 +457,34 @@ def __init__(self, input_params): super().__init__(input_params) if self._input_params['atlas'] not in VOLUME_ATLASES: - raise ValueError(f"Incorrect atlas name (given value: {self._input_params['atlas']}). It must be one of {VOLUME_ATLASES}") + raise ValueError( + f"Incorrect atlas name (given value: {self._input_params['atlas']}). " + f"It must be one of {VOLUME_ATLASES}" + ) def get_images(self): """ - Returns: string - """ - pass def get_x(self): """ - Returns: a numpy 2d-array. 
- """ # if self._x is not None: # return self._x - cprint('Loading TSV subjects') - string = str('group-' + self._input_params['group_label'] + '_T1w_space-' + self._input_params['atlas'] + - '_map-graymatter') + cprint("Loading TSV subjects") - self._x = tbio.load_data(string, self._input_params['caps_directory'], self._subjects, self._sessions, - self._input_params['dataset']) + self._x = tbio.load_data( + f"group-{self._input_params['group_label']}_T1w_space-{self._input_params['atlas']}_map-graymatter", + self._input_params['caps_directory'], + self._subjects, + self._sessions, + self._input_params['dataset'] + ) cprint('Subjects loaded') @@ -451,43 +511,68 @@ def get_default_parameters(): parameters_dict = super(CAPSTSVBasedInput, CAPSTSVBasedInput).get_default_parameters() - new_parameters = {'atlas': None, - 'pvc': None, - 'dataset': None} - - parameters_dict.update(new_parameters) + # ??? + parameters_dict.setdefault("dataset", None) + # pet-volume + parameters_dict.setdefault('acq_label', None) + parameters_dict.setdefault('suvr_reference_region', None) + parameters_dict.setdefault("use_pvc_data", False) return parameters_dict class CAPSVoxelBasedInputREGSVM(CAPSVoxelBasedInput): - def get_images(self): """ - Returns: a list of filenames - """ if self._images is not None: return self._images - if self._input_params['image_type'] == 'T1': - fwhm = '' if self._input_params['fwhm'] == 0 else '_fwhm-%dmm' % int(self._input_params['fwhm']) + if self._input_params["image_type"] == "T1w": + if self._input_params['fwhm'] == 0: + fwhm_key_value = "" + else: + fwhm_key_value = f"_{self._input_params['fwhm']}mm" + + self._images = [ + path.join( + self._input_params["caps_directory"], + f"regul_{self._subjects[i]}_{self._sessions[i]}_T1w" + f"_segm-graymatter_space-Ixi549Space_modulated-{self._input_params['modulated']}{fwhm_key_value}_probability.nii", + ) + for i in range(len(self._subjects)) + ] + elif self._input_params["image_type"] == "PET": + if 
self._input_params['fwhm'] == 0: + fwhm_key_value = "" + else: + fwhm_key_value = f"_{self._input_params['fwhm']}mm" - self._images = [path.join(self._input_params['caps_directory'], - 'regul_%s_%s_T1w_segm-graymatter_space-Ixi549Space_modulated-%s%s_probability.nii' - % (self._subjects[i], self._sessions[i], self._input_params['modulated'], fwhm)) - for i in range(len(self._subjects))] + if self._input_params["use_pvc_data"]: + pvc_key_value = "_pvc-rbv" + else: + pvc_key_value = "" + + self._images = [ + path.join( + self._input_params["caps_directory"], + "subjects", + self._subjects[i], + self._sessions[i], + "pet", + "preprocessing", + "group-" + self._input_params["group_label"], + f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-{self._input_params['acq_label']}_pet" + f"_space-Ixi549Space{pvc_key_value}_suvr-{self._input_params['suvr_reference_region']}" + f"_mask-brain{fwhm_key_value}_pet.nii.gz", + ) + for i in range(len(self._subjects)) + ] else: - pvc = '' if self._input_params['pvc'] is None else '_pvc-%s' % self._input_params['pvc'] - fwhm = '' if self._input_params['fwhm'] == 0 else '_fwhm-%dmm' % int(self._input_params['fwhm']) - suvr = 'pons' if self._input_params['image_type'] == 'fdg' else 'cerebellumPons' - self._images = [path.join(self._input_params['caps_directory'], 'subjects', self._subjects[i], - self._sessions[i], 'pet/preprocessing/group-' + self._input_params['group_label'], - '%s_%s_task-rest_acq-%s_pet_space-Ixi549Space%s_suvr-%s_mask-brain%s_pet.nii.gz' - % (self._subjects[i], self._sessions[i], self._input_params['image_type'], - pvc, suvr, fwhm)) - for i in range(len(self._subjects))] + raise ValueError( + f"Unknown image type (given value: {self._input_params['image_type']})" + ) for image in self._images: if not path.exists(image): @@ -537,7 +622,8 @@ def get_kernel(self, kernel_function=utils.gram_matrix_linear, recompute_if_exis @staticmethod def get_default_parameters(): - parameters_dict = {'data_tsv': None, - 
'columns': None} + parameters_dict = {} + parameters_dict.setdefault("data_tsv", None) + parameters_dict.setdefault("columns", None) return parameters_dict diff --git a/clinica/pipelines/machine_learning/ml_workflows.py b/clinica/pipelines/machine_learning/ml_workflows.py index 33fc198df..4e9b599b8 100644 --- a/clinica/pipelines/machine_learning/ml_workflows.py +++ b/clinica/pipelines/machine_learning/ml_workflows.py @@ -9,7 +9,7 @@ class VoxelBasedKFoldDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", pvc=None, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_folds=10, + modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_folds=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(VoxelBasedKFoldDualSVM, self).__init__(input.CAPSVoxelBasedInput, @@ -22,7 +22,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class VoxelBasedRepKFoldDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", pvc=None, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, + modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, n_folds=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(VoxelBasedRepKFoldDualSVM, self).__init__(input.CAPSVoxelBasedInput, @@ -35,7 +35,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class VoxelBasedRepHoldOutDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", pvc=None, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, + 
modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): @@ -62,7 +62,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class RegionBasedRepHoldOutDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(RegionBasedRepHoldOutDualSVM, self).__init__(input.CAPSRegionBasedInput, @@ -75,7 +75,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class RegionBasedRepHoldOutLogisticRegression(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(RegionBasedRepHoldOutLogisticRegression, self).__init__(input.CAPSRegionBasedInput, @@ -88,7 +88,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class RegionBasedRepHoldOutRandomForest(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, n_estimators_range=(100, 200, 400), max_depth_range=[None], min_samples_split_range=[2], max_features_range=('auto', 0.25, 0.5), splits_indices=None): @@ 
-103,7 +103,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class RegionBasedLearningCurveRepHoldOutDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, pvc=None, precomputed_kernel=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, precomputed_kernel=None, n_threads=15, n_iterations=100, test_size=0.3, n_learning_points=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17)): super(RegionBasedLearningCurveRepHoldOutDualSVM, self).__init__(input.CAPSRegionBasedInput, @@ -116,7 +116,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class VoxelBasedLearningCurveRepHoldOutDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", pvc=None, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, + modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, test_size=0.3, n_learning_points=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17)): @@ -130,7 +130,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class RegionBasedRepKFoldDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, n_folds=10, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, n_folds=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(RegionBasedRepKFoldDualSVM, self).__init__(input.CAPSRegionBasedInput, @@ -143,7 +143,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class 
CAPSTsvRepHoldOutDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, dataset, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): super(CAPSTsvRepHoldOutDualSVM, self).__init__(input.CAPSTSVBasedInput, @@ -155,7 +155,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class CAPSTsvRepHoldOutRandomForest(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, dataset, - output_dir, pvc=None, n_threads=15, n_iterations=100, test_size=0.3, + output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, balanced=True, n_estimators_range=(100, 200, 400), max_depth_range=[None], min_samples_split_range=[2], max_features_range=('auto', 0.25, 0.5), splits_indices=None): @@ -172,7 +172,7 @@ def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_lab class VoxelBasedREGRepKFoldDualSVM(base.MLWorkflow): def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", pvc=None, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, + modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, n_folds=10, test_size=0.1, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): From e18a076a5e762d64088886d8196a1a170867d5b3 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Thu, 22 Oct 2020 14:21:50 +0200 Subject: [PATCH 22/51] Remove get_caps_t1_list/get_caps_pet_list These functions are obsolete and are replaced by CAPSInputs classes in input.py --- 
.../machine_learning/region_based_io.py | 38 ------------- .../machine_learning/voxel_based_io.py | 57 ------------------- 2 files changed, 95 deletions(-) diff --git a/clinica/pipelines/machine_learning/region_based_io.py b/clinica/pipelines/machine_learning/region_based_io.py index 8043c5b93..75a900df8 100644 --- a/clinica/pipelines/machine_learning/region_based_io.py +++ b/clinica/pipelines/machine_learning/region_based_io.py @@ -6,44 +6,6 @@ import nibabel as nib -def get_caps_t1_list(input_directory, subjects_visits_tsv, group_label, atlas_id): - """ - path to arrive to the list of the file with the statistics on atlas_id - Args: - input_directory: - subjects_visits_tsv: - group_label: - atlas_id: - - Returns: - - """ - from os.path import join - import pandas as pd - - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") - if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: - raise Exception("Subjects and visits file is not in the correct format.") - subjects = list(subjects_visits.participant_id) - sessions = list(subjects_visits.session_id) - image_list = [ - join( - input_directory, - "subjects", - subjects[i], - sessions[i], - "t1", - "spm", - "dartel", - f"group-{group_label}", - "atlas_statistics", - f"{subjects[i]}_{sessions[i]}_T1w_space-{atlas_id}_map-graymatter_statistics.tsv", - ) - for i in range(len(subjects)) - ] - return image_list - - def load_data(image_list, subjects): """ diff --git a/clinica/pipelines/machine_learning/voxel_based_io.py b/clinica/pipelines/machine_learning/voxel_based_io.py index b8d9126d7..5e00e59e6 100644 --- a/clinica/pipelines/machine_learning/voxel_based_io.py +++ b/clinica/pipelines/machine_learning/voxel_based_io.py @@ -1,64 +1,7 @@ # coding: utf8 import numpy as np -import pandas as pd import nibabel as nib -from os.path import join - - -def get_caps_t1_list( - input_directory, subjects_visits_tsv, group_label, fwhm, modulated -): - - subjects_visits = 
pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") - if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: - raise Exception("Subjects and visits file is not in the correct format.") - subjects = list(subjects_visits.participant_id) - sessions = list(subjects_visits.session_id) - if fwhm == 0: - fwhm_key_value = "" - else: - fwhm_key_value = f"_{fwhm}mm" - - image_list = [ - join( - input_directory, - "subjects", - subjects[i], - sessions[i], - "t1", - "spm", - "dartel", - f"group-{group_label}", - f"{subjects[i]}_{sessions[i]}_T1w_segm-graymatter_space-Ixi549Space_modulated-{modulated}{fwhm_key_value}_probability.nii.gz", - ) - for i in range(len(subjects)) - ] - return image_list - - -def get_caps_pet_list(input_directory, subjects_visits_tsv, group_label, acq_label): - subjects_visits = pd.io.parsers.read_csv(subjects_visits_tsv, sep="\t") - if list(subjects_visits.columns.values) != ["participant_id", "session_id"]: - raise Exception("Subjects and visits file is not in the correct format.") - subjects = list(subjects_visits.participant_id) - sessions = list(subjects_visits.session_id) - - image_list = [ - join( - input_directory, - "subjects", - subjects[i], - sessions[i], - "pet", - "preprocessing", - f"group-{group_label}", - f"{subjects[i]}_{sessions[i]}_task-rest_acq-{acq_label}_pet_space-Ixi549Space_pet.nii.gz", - ) - for i in range(len(subjects)) - ] - - return image_list def load_data(image_list, mask=True): From 25df67264f973e48640afd3835c781524a16066e Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Thu, 22 Oct 2020 15:38:48 +0200 Subject: [PATCH 23/51] Fix how CAPS files are grabbed --- clinica/pipelines/machine_learning/input.py | 16 ++++++++++------ clinica/utils/input_files.py | 3 --- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/clinica/pipelines/machine_learning/input.py b/clinica/pipelines/machine_learning/input.py index 445405083..f60117eef 100644 --- a/clinica/pipelines/machine_learning/input.py 
+++ b/clinica/pipelines/machine_learning/input.py @@ -36,7 +36,10 @@ def __init__(self, input_params): self._diagnoses = list(diagnoses.diagnosis) if self._input_params["image_type"] not in ["T1w", "PET"]: - raise Exception("Incorrect image type. It must be one of the values 'T1w', 'PET'") + raise ValueError( + f"Incorrect image type. It must be one of the values 'T1w', 'PET' " + f"(given value: {self._input_params['image_type']})" + ) if self._input_params['precomputed_kernel'] is not None: if type(self._input_params['precomputed_kernel']) == np.ndarray: @@ -152,7 +155,7 @@ def get_images(self): if self._input_params['fwhm'] == 0: fwhm_key_value = "" else: - fwhm_key_value = f"_{self._input_params['fwhm']}mm" + fwhm_key_value = f"_fwhm-{self._input_params['fwhm']}mm" self._images = [ path.join( @@ -177,6 +180,7 @@ def get_images(self): elif self._input_params['image_type'] == "PET": caps_files_information = pet_volume_normalized_suvr_pet( acq_label=self._input_params["acq_label"], + group_label=self._input_params["group_label"], suvr_reference_region=self._input_params["suvr_reference_region"], use_brainmasked_image=True, use_pvc_data=self._input_params["use_pvc_data"], @@ -377,7 +381,7 @@ def get_images(self): self._sessions[i], "pet", "surface", - f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-fdg_pet" + f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-{self._input_params['acq_label']}_pet" f"_space-fsaverage_suvr-{self._input_params['suvr_reference_region']}" f"_pvc-iy_hemi-{h}_fwhm-{self._input_params['fwhm']}_projection.mgh", ) @@ -533,7 +537,7 @@ def get_images(self): if self._input_params['fwhm'] == 0: fwhm_key_value = "" else: - fwhm_key_value = f"_{self._input_params['fwhm']}mm" + fwhm_key_value = f"_fwhm-{self._input_params['fwhm']}mm" self._images = [ path.join( @@ -547,7 +551,7 @@ def get_images(self): if self._input_params['fwhm'] == 0: fwhm_key_value = "" else: - fwhm_key_value = f"_{self._input_params['fwhm']}mm" + fwhm_key_value = 
f"_fwhm-{self._input_params['fwhm']}mm" if self._input_params["use_pvc_data"]: pvc_key_value = "_pvc-rbv" @@ -562,7 +566,7 @@ def get_images(self): self._sessions[i], "pet", "preprocessing", - "group-" + self._input_params["group_label"], + f"group-{self._input_params['group_label']}", f"{self._subjects[i]}_{self._sessions[i]}_task-rest_acq-{self._input_params['acq_label']}_pet" f"_space-Ixi549Space{pvc_key_value}_suvr-{self._input_params['suvr_reference_region']}" f"_mask-brain{fwhm_key_value}_pet.nii.gz", diff --git a/clinica/utils/input_files.py b/clinica/utils/input_files.py index dd84f8395..c12ceb8a2 100644 --- a/clinica/utils/input_files.py +++ b/clinica/utils/input_files.py @@ -261,9 +261,6 @@ def bids_pet_nii(acq_label): # PET-Volume -# coding: utf8 - - def pet_volume_normalized_suvr_pet( acq_label, group_label, From 83c758aeceba7c33268d3ea65213cc951693d1b6 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Fri, 23 Oct 2020 10:18:07 +0200 Subject: [PATCH 24/51] Add missing default keys to CAPSVertexBasedInput --- clinica/pipelines/machine_learning/input.py | 4 + .../test_instantiate_all_pipelines.py | 100 ++++++++++++------ 2 files changed, 69 insertions(+), 35 deletions(-) diff --git a/clinica/pipelines/machine_learning/input.py b/clinica/pipelines/machine_learning/input.py index f60117eef..2698210de 100644 --- a/clinica/pipelines/machine_learning/input.py +++ b/clinica/pipelines/machine_learning/input.py @@ -364,6 +364,8 @@ def get_images(self): returns list of filnames """ import os + from clinica.utils.stream import cprint + cprint(f"input_params = {self._input_params}") if self._images is not None: return self._images @@ -449,6 +451,8 @@ def get_default_parameters(): # pet-surface parameters_dict.setdefault("fwhm", 0) + parameters_dict.setdefault('acq_label', None) + parameters_dict.setdefault('suvr_reference_region', None) return parameters_dict diff --git a/test/instantiation/test_instantiate_all_pipelines.py 
b/test/instantiation/test_instantiate_all_pipelines.py index e81b35435..0721c2f0f 100644 --- a/test/instantiation/test_instantiate_all_pipelines.py +++ b/test/instantiation/test_instantiate_all_pipelines.py @@ -273,42 +273,72 @@ def test_instantiate_PETSurfaceCrossSectional(): def test_instantiate_InputsML(): from os.path import dirname, join, abspath, exists - from clinica.pipelines.machine_learning.input import CAPSVoxelBasedInput, CAPSRegionBasedInput, CAPSVertexBasedInput + from clinica.pipelines.machine_learning.input import ( + CAPSVoxelBasedInput, + CAPSRegionBasedInput, + CAPSVertexBasedInput, + ) root = dirname(abspath(join(abspath(__file__), pardir))) - root = join(root, 'data', 'InputsML') - caps_dir = join(root, 'in', 'caps') - tsv = join(root, 'in', 'subjects.tsv') - diagnoses_tsv = join(root, 'in', 'diagnosis.tsv') - group_label = 'allADNIdartel' - image_type = ['T1', 'fdg'] - atlases = ['AAL2', 'Neuromorphometrics', 'AICHA', 'LPBA40', 'Hammers'] - possible_psf = [0, 5, 10, 15, 20, 25] - - voxel_input = [CAPSVoxelBasedInput({'caps_directory': caps_dir, - 'subjects_visits_tsv': tsv, - 'diagnoses_tsv': diagnoses_tsv, - 'group_label': group_label, - 'image_type': im, - 'fwhm': 8}) - for im in image_type] - - region_input = [CAPSRegionBasedInput({'caps_directory': caps_dir, - 'subjects_visits_tsv': tsv, - 'diagnoses_tsv': diagnoses_tsv, - 'group_label': group_label, - 'image_type': im, - 'atlas': at}) - for im in image_type - for at in atlases] - - vertex_input = [CAPSVertexBasedInput({'caps_directory': caps_dir, - 'subjects_visits_tsv': tsv, - 'diagnoses_tsv': diagnoses_tsv, - 'group_label': group_label, - 'image_type': 'fdg', - 'fwhm': fwhm}) - for fwhm in possible_psf] + root = join(root, "data", "InputsML") + caps_dir = join(root, "in", "caps") + tsv = join(root, "in", "subjects.tsv") + diagnoses_tsv = join(root, "in", "diagnosis.tsv") + group_label = "allADNIdartel" + image_type = ["T1w", "PET"] + atlases = ["AAL2", "Neuromorphometrics", "AICHA", 
"LPBA40", "Hammers"] + possible_fwhm = [0, 5, 10, 15, 20, 25] + + voxel_input = [ + CAPSVoxelBasedInput( + { + "caps_directory": caps_dir, + "subjects_visits_tsv": tsv, + "diagnoses_tsv": diagnoses_tsv, + "group_label": group_label, + "image_type": im, + "fwhm": 8, + "acq_label": "fdg", + "suvr_reference_region": "pons", + "use_pvc_data": False, + } + ) + for im in image_type + ] + + region_input = [ + CAPSRegionBasedInput( + { + "caps_directory": caps_dir, + "subjects_visits_tsv": tsv, + "diagnoses_tsv": diagnoses_tsv, + "group_label": group_label, + "image_type": im, + "atlas": at, + "acq_label": "fdg", + "suvr_reference_region": "pons", + "use_pvc_data": False, + } + ) + for im in image_type + for at in atlases + ] + + vertex_input = [ + CAPSVertexBasedInput( + { + "caps_directory": caps_dir, + "subjects_visits_tsv": tsv, + "diagnoses_tsv": diagnoses_tsv, + "group_label": group_label, + "image_type": "PET", + "fwhm": fwhm, + "acq_label": "fdg", + "suvr_reference_region": "pons", + } + ) + for fwhm in possible_fwhm + ] # Check that each file exists for inputs in voxel_input + region_input + vertex_input: @@ -319,7 +349,7 @@ def test_instantiate_InputsML(): assert exists(file[0]) assert exists(file[1]) else: - raise ValueError('An error occured...') + raise ValueError("An error occurred...") def test_instantiate_SpatialSVM(): From d144358bbf5a896e83d1b8cc265f96e96e82ffcb Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Fri, 23 Oct 2020 10:20:37 +0200 Subject: [PATCH 25/51] Harmonize PET tracers handling (ML-Workflows modules) --- .../machine_learning/ml_workflows.py | 562 +++++++++++++----- test/nonregression/test_run_pipelines.py | 97 ++- 2 files changed, 489 insertions(+), 170 deletions(-) diff --git a/clinica/pipelines/machine_learning/ml_workflows.py b/clinica/pipelines/machine_learning/ml_workflows.py index 4e9b599b8..9a3b17a21 100644 --- a/clinica/pipelines/machine_learning/ml_workflows.py +++ b/clinica/pipelines/machine_learning/ml_workflows.py @@ 
-7,194 +7,460 @@ class VoxelBasedKFoldDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_folds=10, - grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(VoxelBasedKFoldDualSVM, self).__init__(input.CAPSVoxelBasedInput, - validation.KFoldCV, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + output_dir, + fwhm=0, + modulated="on", + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + mask_zeros=True, + n_threads=15, + n_folds=10, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(VoxelBasedKFoldDualSVM, self).__init__( + input.CAPSVoxelBasedInput, + validation.KFoldCV, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class VoxelBasedRepKFoldDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, - n_folds=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(VoxelBasedRepKFoldDualSVM, self).__init__(input.CAPSVoxelBasedInput, - validation.RepeatedKFoldCV, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + output_dir, + fwhm=0, + modulated="on", + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + mask_zeros=True, + n_threads=15, + n_iterations=100, + n_folds=10, + grid_search_folds=10, + 
balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(VoxelBasedRepKFoldDualSVM, self).__init__( + input.CAPSVoxelBasedInput, + validation.RepeatedKFoldCV, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class VoxelBasedRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, - test_size=0.3, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), - splits_indices=None): - - super().__init__(input.CAPSVoxelBasedInput, - validation.RepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + output_dir, + fwhm=0, + modulated="on", + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + mask_zeros=True, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super().__init__( + input.CAPSVoxelBasedInput, + validation.RepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class VertexBasedRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, output_dir, image_type='fdg', fwhm=20, - precomputed_kernel=None, n_threads=15, n_iterations=100, test_size=0.3, grid_search_folds=10, - balanced=True, c_range=np.logspace(-10, 2, 1000), splits_indices=None): - - super(VertexBasedRepHoldOutDualSVM, self).__init__(input.CAPSVertexBasedInput, - validation.RepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + output_dir, + image_type="PET", + 
acq_label=None, + suvr_reference_region=None, + fwhm=20, + precomputed_kernel=None, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-10, 2, 1000), + splits_indices=None, + ): + + super(VertexBasedRepHoldOutDualSVM, self).__init__( + input.CAPSVertexBasedInput, + validation.RepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class RegionBasedRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, - grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(RegionBasedRepHoldOutDualSVM, self).__init__(input.CAPSRegionBasedInput, - validation.RepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + output_dir, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(RegionBasedRepHoldOutDualSVM, self).__init__( + input.CAPSRegionBasedInput, + validation.RepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class RegionBasedRepHoldOutLogisticRegression(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, - grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(RegionBasedRepHoldOutLogisticRegression, self).__init__(input.CAPSRegionBasedInput, - validation.RepeatedHoldOut, - algorithm.LogisticReg, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + 
diagnoses_tsv, + group_label, + image_type, + atlas, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(RegionBasedRepHoldOutLogisticRegression, self).__init__( + input.CAPSRegionBasedInput, + validation.RepeatedHoldOut, + algorithm.LogisticReg, + locals(), + output_dir, + ) class RegionBasedRepHoldOutRandomForest(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, - grid_search_folds=10, balanced=True, n_estimators_range=(100, 200, 400), - max_depth_range=[None], min_samples_split_range=[2], - max_features_range=('auto', 0.25, 0.5), splits_indices=None): - - super(RegionBasedRepHoldOutRandomForest, self).__init__(input.CAPSRegionBasedInput, - validation.RepeatedHoldOut, - algorithm.RandomForest, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + n_estimators_range=(100, 200, 400), + max_depth_range=[None], + min_samples_split_range=[2], + max_features_range=("auto", 0.25, 0.5), + splits_indices=None, + ): + + super(RegionBasedRepHoldOutRandomForest, self).__init__( + input.CAPSRegionBasedInput, + validation.RepeatedHoldOut, + algorithm.RandomForest, + locals(), + output_dir, + ) class RegionBasedLearningCurveRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, use_pvc_data=False, precomputed_kernel=None, n_threads=15, n_iterations=100, 
test_size=0.3, - n_learning_points=10, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17)): - - super(RegionBasedLearningCurveRepHoldOutDualSVM, self).__init__(input.CAPSRegionBasedInput, - validation.LearningCurveRepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + n_threads=15, + n_iterations=100, + test_size=0.3, + n_learning_points=10, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + ): + + super(RegionBasedLearningCurveRepHoldOutDualSVM, self).__init__( + input.CAPSRegionBasedInput, + validation.LearningCurveRepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class VoxelBasedLearningCurveRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, - test_size=0.3, n_learning_points=10, grid_search_folds=10, balanced=True, - c_range=np.logspace(-6, 2, 17)): - - super(VoxelBasedLearningCurveRepHoldOutDualSVM, self).__init__(input.CAPSVoxelBasedInput, - validation.LearningCurveRepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + output_dir, + fwhm=0, + modulated="on", + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + mask_zeros=True, + n_threads=15, + n_iterations=100, + test_size=0.3, + n_learning_points=10, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + ): + + super(VoxelBasedLearningCurveRepHoldOutDualSVM, self).__init__( + 
input.CAPSVoxelBasedInput, + validation.LearningCurveRepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class RegionBasedRepKFoldDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, n_folds=10, - grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(RegionBasedRepKFoldDualSVM, self).__init__(input.CAPSRegionBasedInput, - validation.RepeatedKFoldCV, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + n_folds=10, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(RegionBasedRepKFoldDualSVM, self).__init__( + input.CAPSRegionBasedInput, + validation.RepeatedKFoldCV, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class CAPSTsvRepHoldOutDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, dataset, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, - grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), splits_indices=None): - - super(CAPSTsvRepHoldOutDualSVM, self).__init__(input.CAPSTSVBasedInput, - validation.RepeatedHoldOut, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + dataset, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, 
+ c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(CAPSTsvRepHoldOutDualSVM, self).__init__( + input.CAPSTSVBasedInput, + validation.RepeatedHoldOut, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) class CAPSTsvRepHoldOutRandomForest(base.MLWorkflow): - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, atlas, dataset, - output_dir, use_pvc_data=False, n_threads=15, n_iterations=100, test_size=0.3, - grid_search_folds=10, balanced=True, n_estimators_range=(100, 200, 400), - max_depth_range=[None], min_samples_split_range=[2], - max_features_range=('auto', 0.25, 0.5), splits_indices=None): - - super(CAPSTsvRepHoldOutRandomForest, self).__init__(input.CAPSTSVBasedInput, - validation.RepeatedHoldOut, - algorithm.RandomForest, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + atlas, + dataset, + output_dir, + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + n_threads=15, + n_iterations=100, + test_size=0.3, + grid_search_folds=10, + balanced=True, + n_estimators_range=(100, 200, 400), + max_depth_range=[None], + min_samples_split_range=[2], + max_features_range=("auto", 0.25, 0.5), + splits_indices=None, + ): + + super(CAPSTsvRepHoldOutRandomForest, self).__init__( + input.CAPSTSVBasedInput, + validation.RepeatedHoldOut, + algorithm.RandomForest, + locals(), + output_dir, + ) + # SVM reg class VoxelBasedREGRepKFoldDualSVM(base.MLWorkflow): - - def __init__(self, caps_directory, subjects_visits_tsv, diagnoses_tsv, group_label, image_type, output_dir, fwhm=0, - modulated="on", use_pvc_data=False, precomputed_kernel=None, mask_zeros=True, n_threads=15, n_iterations=100, - n_folds=10, - test_size=0.1, grid_search_folds=10, balanced=True, c_range=np.logspace(-6, 2, 17), - splits_indices=None): - - super(VoxelBasedREGRepKFoldDualSVM, self).__init__(input.CAPSTSVBasedInput, - 
validation.RepeatedKFoldCV, - algorithm.DualSVMAlgorithm, - locals(), - output_dir) + def __init__( + self, + caps_directory, + subjects_visits_tsv, + diagnoses_tsv, + group_label, + image_type, + output_dir, + fwhm=0, + modulated="on", + acq_label=None, + suvr_reference_region=None, + use_pvc_data=False, + precomputed_kernel=None, + mask_zeros=True, + n_threads=15, + n_iterations=100, + n_folds=10, + test_size=0.1, + grid_search_folds=10, + balanced=True, + c_range=np.logspace(-6, 2, 17), + splits_indices=None, + ): + + super(VoxelBasedREGRepKFoldDualSVM, self).__init__( + input.CAPSTSVBasedInput, + validation.RepeatedKFoldCV, + algorithm.DualSVMAlgorithm, + locals(), + output_dir, + ) # TSV -class TsvRepHoldOutRandomForest(base.MLWorkflow): - - def __init__(self, data_tsv, columns, output_dir, n_threads=20, n_iterations=250, test_size=0.2, - grid_search_folds=10, balanced=True, n_estimators_range=(100, 200, 400), max_depth_range=[None], - min_samples_split_range=[2], max_features_range=('auto', 0.25, 0.5), splits_indices=None, - inner_cv=False): - super(TsvRepHoldOutRandomForest, self).__init__(input.TsvInput, - validation.RepeatedHoldOut, - algorithm.RandomForest, - locals(), - output_dir) +class TsvRepHoldOutRandomForest(base.MLWorkflow): + def __init__( + self, + data_tsv, + columns, + output_dir, + n_threads=20, + n_iterations=250, + test_size=0.2, + grid_search_folds=10, + balanced=True, + n_estimators_range=(100, 200, 400), + max_depth_range=[None], + min_samples_split_range=[2], + max_features_range=("auto", 0.25, 0.5), + splits_indices=None, + inner_cv=False, + ): + + super(TsvRepHoldOutRandomForest, self).__init__( + input.TsvInput, + validation.RepeatedHoldOut, + algorithm.RandomForest, + locals(), + output_dir, + ) diff --git a/test/nonregression/test_run_pipelines.py b/test/nonregression/test_run_pipelines.py index 783695326..33fac1279 100644 --- a/test/nonregression/test_run_pipelines.py +++ b/test/nonregression/test_run_pipelines.py @@ -645,52 
+645,105 @@ def test_run_PETSurfaceCrossSectional(cmdopt): def test_run_WorkflowsML(cmdopt): - from clinica.pipelines.machine_learning.ml_workflows import (RegionBasedRepHoldOutLogisticRegression, - VertexBasedRepHoldOutDualSVM, - RegionBasedRepHoldOutRandomForest, - VoxelBasedKFoldDualSVM) + from clinica.pipelines.machine_learning.ml_workflows import ( + RegionBasedRepHoldOutLogisticRegression, + VertexBasedRepHoldOutDualSVM, + RegionBasedRepHoldOutRandomForest, + VoxelBasedKFoldDualSVM, + ) from os.path import dirname, join, abspath import shutil import warnings + warnings.filterwarnings("ignore", category=DeprecationWarning) warnings.filterwarnings("ignore", category=UserWarning) warnings.filterwarnings("ignore", category=FutureWarning) root = dirname(abspath(join(abspath(__file__), pardir))) - root = join(root, 'data', 'WorkflowsML') + root = join(root, "data", "WorkflowsML") root_input = dirname(abspath(join(abspath(__file__), pardir))) - root_input = join(root_input, 'data', 'InputsML') + root_input = join(root_input, "data", "InputsML") - caps_dir = join(root_input, 'in', 'caps') - tsv = join(root_input, 'in', 'subjects.tsv') - diagnoses_tsv = join(root_input, 'in', 'diagnosis.tsv') - group_label = 'allADNIdartel' + caps_dir = join(root_input, "in", "caps") + tsv = join(root_input, "in", "subjects.tsv") + diagnoses_tsv = join(root_input, "in", "diagnosis.tsv") + group_label = "allADNIdartel" - output_dir1 = join(root, 'out', 'VertexBasedRepHoldOutDualSVM') + output_dir1 = join(root, "out", "VertexBasedRepHoldOutDualSVM") clean_folder(output_dir1, recreate=True) - wf1 = VertexBasedRepHoldOutDualSVM(caps_dir, tsv, diagnoses_tsv, group_label, output_dir1, image_type='fdg', fwhm=20, - n_threads=8, n_iterations=10, grid_search_folds=3, test_size=0.3) + wf1 = VertexBasedRepHoldOutDualSVM( + caps_directory=caps_dir, + subjects_visits_tsv=tsv, + diagnoses_tsv=diagnoses_tsv, + group_label=group_label, + output_dir=output_dir1, + image_type="PET", + acq_label="fdg", + 
suvr_reference_region="pons", + fwhm=20, + n_threads=2, + n_iterations=10, + grid_search_folds=3, + test_size=0.3, + ) wf1.run() shutil.rmtree(output_dir1) - output_dir2 = join(root, 'out', 'RegionBasedRepHoldOutLogisticRegression') + output_dir2 = join(root, "out", "RegionBasedRepHoldOutLogisticRegression") clean_folder(output_dir2, recreate=True) - wf2 = RegionBasedRepHoldOutLogisticRegression(caps_dir, tsv, diagnoses_tsv, group_label, 'fdg', 'AICHA', output_dir2, - n_threads=8, n_iterations=10, grid_search_folds=3, test_size=0.3) + wf2 = RegionBasedRepHoldOutLogisticRegression( + caps_directory=caps_dir, + subjects_visits_tsv=tsv, + diagnoses_tsv=diagnoses_tsv, + group_label=group_label, + image_type="PET", + atlas="AICHA", + output_dir=output_dir2, + acq_label="fdg", + suvr_reference_region="pons", + use_pvc_data=False, + n_threads=2, + n_iterations=10, + grid_search_folds=3, + test_size=0.3, + ) wf2.run() shutil.rmtree(output_dir2) - output_dir3 = join(root, 'out', 'RegionBasedRepHoldOutRandomForest') + output_dir3 = join(root, "out", "RegionBasedRepHoldOutRandomForest") clean_folder(output_dir3, recreate=True) - wf3 = RegionBasedRepHoldOutRandomForest(caps_dir, tsv, diagnoses_tsv, group_label, 'T1', 'AAL2', output_dir3, - n_threads=8, n_iterations=10, grid_search_folds=3, test_size=0.3) + wf3 = RegionBasedRepHoldOutRandomForest( + caps_directory=caps_dir, + subjects_visits_tsv=tsv, + diagnoses_tsv=diagnoses_tsv, + group_label=group_label, + image_type="T1w", + atlas="AAL2", + output_dir=output_dir3, + n_threads=2, + n_iterations=10, + grid_search_folds=3, + test_size=0.3, + ) wf3.run() shutil.rmtree(output_dir3) - output_dir4 = join(root, 'out', 'VoxelBasedKFoldDualSVM') + output_dir4 = join(root, "out", "VoxelBasedKFoldDualSVM") clean_folder(output_dir4, recreate=True) - wf4 = VoxelBasedKFoldDualSVM(caps_dir, tsv, diagnoses_tsv, group_label, 'fdg', output_dir4, fwhm=8, n_threads=8, - n_folds=5, grid_search_folds=3) + wf4 = VoxelBasedKFoldDualSVM( + 
caps_directory=caps_dir, + subjects_visits_tsv=tsv, + diagnoses_tsv=diagnoses_tsv, + group_label=group_label, + image_type="PET", + output_dir=output_dir4, + acq_label="fdg", + suvr_reference_region="pons", + fwhm=8, + n_threads=2, + n_folds=5, + grid_search_folds=3, + ) wf4.run() shutil.rmtree(output_dir4) From 2ca7ce5aeda8307fb7fac270e2979f46a3d3ff4f Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Fri, 23 Oct 2020 10:34:14 +0200 Subject: [PATCH 26/51] Remove cprint() displaye used for debug --- clinica/pipelines/machine_learning/input.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/clinica/pipelines/machine_learning/input.py b/clinica/pipelines/machine_learning/input.py index 2698210de..a40e4ed64 100644 --- a/clinica/pipelines/machine_learning/input.py +++ b/clinica/pipelines/machine_learning/input.py @@ -364,8 +364,6 @@ def get_images(self): returns list of filnames """ import os - from clinica.utils.stream import cprint - cprint(f"input_params = {self._input_params}") if self._images is not None: return self._images From 7ec3c678ccdd2b8283039b770e0e378a7ed7ee8c Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Fri, 23 Oct 2020 11:18:01 +0200 Subject: [PATCH 27/51] Improve description of --acq_label flag Co-authored-by: Ninon Burgos --- .../pipelines/machine_learning_spatial_svm/spatial_svm_cli.py | 3 +-- clinica/pipelines/pet_surface/pet_surface_cli.py | 3 +-- clinica/pipelines/pet_volume/pet_volume_cli.py | 3 +-- clinica/pipelines/statistics_surface/statistics_surface_cli.py | 2 +- clinica/pipelines/statistics_volume/statistics_volume_cli.py | 3 +-- docs/Pipelines/PET_Surface.md | 3 +++ docs/Pipelines/PET_Volume.md | 2 ++ docs/Pipelines/Stats_Surface.md | 3 +++ docs/Pipelines/Stats_Volume.md | 2 ++ 9 files changed, 15 insertions(+), 9 deletions(-) diff --git a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py index e3e86c9ea..779ccd96c 100644 --- 
a/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py +++ b/clinica/pipelines/machine_learning_spatial_svm/spatial_svm_cli.py @@ -39,8 +39,7 @@ def define_options(self): optional_pet.add_argument("-acq", "--acq_label", type=str, default=None, - help='Name of the PET tracer label in the acquisition entity ' - '(acq-).') + help='Name of the label given to the acquisition, specifying the tracer used (acq-).') optional_pet.add_argument("-suvr", "--suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, default=None, diff --git a/clinica/pipelines/pet_surface/pet_surface_cli.py b/clinica/pipelines/pet_surface/pet_surface_cli.py index d2f949d59..87c40601f 100644 --- a/clinica/pipelines/pet_surface/pet_surface_cli.py +++ b/clinica/pipelines/pet_surface/pet_surface_cli.py @@ -25,8 +25,7 @@ def define_options(self): clinica_comp.add_argument("caps_directory", help='Path to the CAPS directory. (Filled with results from t1-freesurfer pipeline') clinica_comp.add_argument("acq_label", type=str, - help='Name of the PET tracer label in the acquisition entity ' - '(acq-).') + help='Name of the label given to the acquisition, specifying the tracer used (acq-).') clinica_comp.add_argument("suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. 
It can be ' diff --git a/clinica/pipelines/pet_volume/pet_volume_cli.py b/clinica/pipelines/pet_volume/pet_volume_cli.py index f81a3e63a..0651609dc 100644 --- a/clinica/pipelines/pet_volume/pet_volume_cli.py +++ b/clinica/pipelines/pet_volume/pet_volume_cli.py @@ -27,8 +27,7 @@ def define_options(self): clinica_comp.add_argument("group_label", help='User-defined identifier for the provided group of subjects.') clinica_comp.add_argument("acq_label", type=str, - help='Name of the PET tracer label in the acquisition entity ' - '(acq-).') + help='Name of the label given to the acquisition, specifying the tracer used (acq-).') clinica_comp.add_argument("suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, help='Intensity normalization using the average PET uptake in reference regions ' 'resulting in a standardized uptake value ratio (SUVR) map. It can be ' diff --git a/clinica/pipelines/statistics_surface/statistics_surface_cli.py b/clinica/pipelines/statistics_surface/statistics_surface_cli.py index 6b494ff22..70e0156b8 100644 --- a/clinica/pipelines/statistics_surface/statistics_surface_cli.py +++ b/clinica/pipelines/statistics_surface/statistics_surface_cli.py @@ -63,7 +63,7 @@ def define_options(self): opt_pet.add_argument("-acq", "--acq_label", type=str, default=None, - help='Name of the PET tracer label in the acquisition entity (acq-).') + help='Name of the label given to the acquisition, specifying the tracer used (acq-).') opt_pet.add_argument("-suvr", "--suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, default=None, diff --git a/clinica/pipelines/statistics_volume/statistics_volume_cli.py b/clinica/pipelines/statistics_volume/statistics_volume_cli.py index f672a731d..e0861d12d 100644 --- a/clinica/pipelines/statistics_volume/statistics_volume_cli.py +++ b/clinica/pipelines/statistics_volume/statistics_volume_cli.py @@ -54,8 +54,7 @@ def define_options(self): optional_pet.add_argument("-acq", "--acq_label", type=str, default=None, - 
help='Name of the PET tracer label in the acquisition entity ' - '(acq-).') + help='Name of the label given to the acquisition, specifying the tracer used (acq-).') optional_pet.add_argument("-suvr", "--suvr_reference_region", choices=LIST_SUVR_REFERENCE_REGIONS, default=None, diff --git a/docs/Pipelines/PET_Surface.md b/docs/Pipelines/PET_Surface.md index 2fcc1caa4..a6414a9d3 100644 --- a/docs/Pipelines/PET_Surface.md +++ b/docs/Pipelines/PET_Surface.md @@ -31,6 +31,9 @@ where: - `bids_directory` is the input folder containing the dataset in a [BIDS](../../BIDS) hierarchy. - `caps_directory` is the output folder containing the results in a [CAPS](../../CAPS/Introduction) hierarchy. +- `acq_label` is the label given to the acquisition, specifying the tracer used. + + If you want to run the pipeline on a subset of your BIDS dataset, you can use the `-tsv` flag to specify in a TSV file the participants belonging to your subset. Please note that next to each PET file in your BIDS folder, a `json` file must be added to specify the `EffectiveResolutionInPlane` and `EffectiveResolutionAxial` in mm relative to the point spread function (PSF). diff --git a/docs/Pipelines/PET_Volume.md b/docs/Pipelines/PET_Volume.md index 248b1f70b..0c57a0272 100644 --- a/docs/Pipelines/PET_Volume.md +++ b/docs/Pipelines/PET_Volume.md @@ -35,6 +35,8 @@ where: - `caps_directory` acts both as an input folder (where the results of the `t1-volume-*` pipeline are stored) and as the output folder containing the results in a [CAPS](../../CAPS/Introduction) hierarchy. - `group_label` is the ID of the group that is associated to the DARTEL template that you had created when running the `t1-volume-*` pipeline. +- `acq_label` is the label given to the acquisition, specifying the tracer used. + Pipeline options: - `--pet_tracer`: type of PET image to process. Possible values are `fdg` and `av45`. Default value is `fdg`. 
diff --git a/docs/Pipelines/Stats_Surface.md b/docs/Pipelines/Stats_Surface.md index 8a9d93a13..f0963b28a 100644 --- a/docs/Pipelines/Stats_Surface.md +++ b/docs/Pipelines/Stats_Surface.md @@ -41,6 +41,9 @@ where: By default, the pipeline will try to run the analysis using the cortical thickness generated by the `t1-freesurfer` pipeline. Add the `--feature_type pet_fdg_projection` option to run the analyses on PET data generated by the `pet-surface` pipeline. + +- `acq_label` is the label given to the acquisition, specifying the tracer used. + !!! tip Check the [Example](../Stats_Surface/#comparison-analysis) subsection for further clarification. diff --git a/docs/Pipelines/Stats_Volume.md b/docs/Pipelines/Stats_Volume.md index 40a526a3b..f4b8f5665 100644 --- a/docs/Pipelines/Stats_Volume.md +++ b/docs/Pipelines/Stats_Volume.md @@ -33,6 +33,8 @@ Optional parameters: - `--group_id_caps` is used when you have multiple groups in your CAPS and Clinica is not able to determine which one to choose when reading inputs. - `-fwhm` is the full width at half maximum (FWHM) of the smoothing used in your input file (by default 8 (mm), i.e. the default value of the [`t1-volume`](../T1_Volume)) and [`pet-volume`](../PET_Volume) pipelines)). +- `acq_label` is the label given to the acquisition, specifying the tracer used. + ### `statistics-volume-correction` pipeline Once the `statistics-volume` sub-pipeline has finished, you need to open the SPM report (`report1.png` or `report2.png` file). 
This will look like as follows: From ec9699fb6d9129817d1b29469365e2c88f9296c1 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Fri, 23 Oct 2020 16:24:22 +0200 Subject: [PATCH 28/51] Improve PET Introduction page Co-authored-by: Ninon Burgos --- docs/Pipelines/PET_Introduction.md | 36 ++++++++++++++---------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/docs/Pipelines/PET_Introduction.md b/docs/Pipelines/PET_Introduction.md index b3744d4fb..f36bdb57b 100644 --- a/docs/Pipelines/PET_Introduction.md +++ b/docs/Pipelines/PET_Introduction.md @@ -5,42 +5,40 @@ To correct for [partial volume effects](http://www.turkupetcentre.net/petanalysis/image_pve.html), several PVC algorithms exist and are implemented in the [PETPVC toolbox](https://github.com/UCL/PETPVC). -To perform PVC (compulsory for [`pet-surface` pipeline](../PET_Surface), optional for [`pet-volume` pipeline](../PET_Volume)), you will need to specify in a TSV file the full width at half maximum (FWHM), in millimeters, of the [point spread function (PSF)](https://en.wikipedia.org/wiki/Point_spread_function) associated with your data, in the x, y and z directions. +To perform PVC (compulsory for [`pet-surface`](../PET_Surface), optional for [`pet-volume`](../PET_Volume)), you will need to specify in a TSV file the full width at half maximum (FWHM), in millimeters, of the [point spread function (PSF)](https://en.wikipedia.org/wiki/Point_spread_function) associated with your data, in the x, y and z directions. 
-For instance, if the FWHM of the PSF associated with your first image is 8 mm along the x axis, 9 mm along the y axis, and 10 mm along z axis, the first row of your TSV file will look like this: +For instance, if the FWHM of the PSF associated with your first image is 5 mm along the x and y axes, and 6 mm along the z axis, the first row of your TSV file will look like this: -```text +``` participant_id session_id acq_label psf_x psf_y psf_z -sub-CLNC01 ses-M00 FDG 8 9 10 -sub-CLNC01 ses-M18 FDG 8 9 10 -sub-CLNC01 ses-M00 AV45 7 6 5 -sub-CLNC02 ses-M00 FDG 8 9 10 -sub-CLNC03 ses-M00 FDG 8 9 10 +sub-CLNC0001 ses-M00 FDG 5 5 6 +sub-CLNC0001 ses-M00 AV45 4.5 4.5 5 +sub-CLNC0002 ses-M00 FDG 5 5 6 +sub-CLNC0003 ses-M00 FDG 7 7 7 ``` -Since PSF information may differ according to the PET tracer, `participant_id`, `session_id`, ` acq_label`, ` psf_x`, `psf_y` and `psf_z` columns are compulsory columns. +Since the PSF depends on the PET tracer and scanner, the `participant_id`, `session_id`, ` acq_label`, ` psf_x`, `psf_y` and `psf_z` columns are compulsory. +## Reference regions used for intensity normalization -## Reference regions for standardized uptake value ratio (SUVR) map +In neurology, an approach widely used to allow inter- and intra-subject comparison of PET images is to compute standardized uptake value ratio (SUVR) maps. The images are intensity normalized by dividing each voxel of the image by the average uptake in a reference region. This region is chosen according to the tracer and disease studied as it must be unaffected by the disease. Clinica `v0.3.8` introduces the possibility for the user to select the reference region for the SUVR map computation. 
-Reference regions provided by Clinica come from the Pick atlas in MNI space and currently are: +Reference regions provided by Clinica come from the [Pick atlas](https://www.nitrc.org/projects/wfu_pickatlas) in MNI space and currently are: - `pons`: 6 mm eroded version of the pons region - `cerebellumPons`: 6 mm eroded version of the cerebellum + pons regions +## Tutorial: How to add new SUVR reference regions to Clinica? +It is possible to run the [`pet-surface`](../PET_Surface) and [`pet-volume`](../PET_Volume) pipelines using a custom reference region. -## Tutorial: How to add new SUVR reference region to Clinica? - -If you need to use a reference region not provided by Clinica but still want to use [`pet-surface`](../PET_Surface) or [`pet-volume`](../PET_Volume) pipelines, it is possible to easily extend the list of SUVR regions. - -- You first need to install Clinica following [developer instructions](../../Installation/#install-clinica) ; +- Install Clinica following the [developer instructions](../../Installation/#install-clinica) ; -- Once done you will need to modify your `/clinica/utils/pet.py` file in particular the following two elements: +- In the `/clinica/utils/pet.py` file, modify the following two elements: - The label of the SUVR reference region that will be stored in CAPS filename(s): ```python LIST_SUVR_REFERENCE_REGIONS = [ @@ -48,7 +46,7 @@ If you need to use a reference region not provided by Clinica but still want to "cerebellumPons", ] ``` - Simply define a new label that will be your new SUVR reference region. `LIST_SUVR_REFERENCE_REGIONS` is used by all command-line interfaces so you do need to modify the pipelines' CLI to make appear this new region. + Simply define a new label that will be your new SUVR reference region. `LIST_SUVR_REFERENCE_REGIONS` is used by all command-line interfaces so you do not need to modify the pipelines' CLI to make this new region appear. 
- The path of the SUVR reference region that you will use: ```python @@ -81,4 +79,4 @@ If you need to use a reference region not provided by Clinica but still want to } return suvr_reference_region_to_suvr[suvr_reference_region] ``` - In this example, the SUVR reference region associated to `cerebellumPons` label is located at `/resources/masks/region-cerebellumPons_eroded-6mm_mask.nii.gz`. + In this example, the SUVR reference region associated with the `cerebellumPons` label is located at `/resources/masks/region-cerebellumPons_eroded-6mm_mask.nii.gz`. From 39c9a43bb898cd4bac6ee9802f672bcfb579cab4 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 27 Oct 2020 15:06:39 +0100 Subject: [PATCH 29/51] Explain how to add new volume atlas to Clinica --- docs/Atlases.md | 63 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/docs/Atlases.md b/docs/Atlases.md index 0b5591004..bffc3ea26 100644 --- a/docs/Atlases.md +++ b/docs/Atlases.md @@ -29,3 +29,66 @@ These atlases are mainly used when processing T1-weighted images with the [`t1-f !!! tip Easily access the papers cited on this page on [Zotero](https://www.zotero.org/groups/2240070/clinica_aramislab/items/collectionKey/JPGDLCMZ). + + +## Tutorial: How to add new volume atlas to Clinica? + +It is possible to run the [`t1-volume`](../T1_Volume) and [`pet-volume`](../PET_Volume) pipelines using a custom parcellation. + +- Install Clinica following the [developer instructions](../../Installation/#install-clinica) ; + +- In the `/clinica/utils/atlas.py` file, modify the following two elements: + - The label of the volume atlas that will be stored in CAPS filename(s): + ```python + T1_VOLUME_ATLASES = [ + "AAL2", + "AICHA", + "Hammers", + "LPBA40", + "Neuromorphometrics", + ] + ``` + Simply define a new label that will be your new volume. 
`T1_VOLUME_ATLASES` is used by all command-line interfaces using atlases from [`t1-volume`](../T1_Volume) pipeline so you do not need to modify the pipelines' CLI to make this new region appear. The same rationale applies for `PET_VOLUME_ATLASES`. + + - Create a new class inherited from `AtlasAbstract` and fill the three compulsory methods. If we take for instance the AAL2 parcellation: + ```python + class AAL2(AtlasAbstract): + def __init__(self): + AtlasAbstract.__init__(self) + + @staticmethod + def get_name_atlas(): + return "AAL2" + + @staticmethod + def get_atlas_labels(): + from os.path import join, split, realpath + + return join( + split(realpath(__file__))[0], + "..", + "resources", + "atlases", + "atlas-AAL2_dseg.nii.gz", + ) + + @staticmethod + def get_tsv_roi(): + from os.path import join, split, realpath + + return join( + split(realpath(__file__))[0], + "..", + "resources", + "atlases", + "atlas-AAL2_dseg.tsv", + ) + ``` + The string returned by the `get_name_atlas()` method must match the label given in the `{T1|PET}_VOLUME_ATLASES` list. The `get_atlas_labels()` method must return the path to the parcellation in NIfTI format while the `get_tsv_roi()` method must return the path a TSV file. In this example, the TSV and labels files associated with the `AAL2` atlas are located at `/resources/atlases/atlas-AAL2_dseg.{nii.gz|tsv}`. Finally, the TSV file must contain the `roi_value` and `roi_name` columns and looks like: + ``` + roi_value roi_name + 0   Background + 2001  Precentral_L + [...] [...] + 9170  Vermis_10 + ``` From b2bb50ea933035651e55bbd0cea002848a4c70d4 Mon Sep 17 00:00:00 2001 From: Alexandre Routier Date: Tue, 27 Oct 2020 16:40:59 +0100 Subject: [PATCH 30/51] Update PET-Volume page w.r.t. 
tracers harmonization Co-authored-by: Ninon Burgos --- docs/Pipelines/PET_Volume.md | 28 ++++++++++------------------ 1 file changed, 10 insertions(+), 18 deletions(-) diff --git a/docs/Pipelines/PET_Volume.md b/docs/Pipelines/PET_Volume.md index 0c57a0272..e7191a28f 100644 --- a/docs/Pipelines/PET_Volume.md +++ b/docs/Pipelines/PET_Volume.md @@ -27,33 +27,25 @@ You need to have performed the [`t1-volume`](../T1_Volume) pipeline on your T1-w The pipeline can be run with the following command line: ```Text -clinica run pet-volume +clinica run pet-volume ``` where: - `bids_directory` is the input folder containing the dataset in a [BIDS](../../BIDS) hierarchy. - `caps_directory` acts both as an input folder (where the results of the `t1-volume-*` pipeline are stored) and as the output folder containing the results in a [CAPS](../../CAPS/Introduction) hierarchy. -- `group_label` is the ID of the group that is associated to the DARTEL template that you had created when running the `t1-volume-*` pipeline. - -- `acq_label` is the label given to the acquisition, specifying the tracer used. +- `group_label` is the label of the group that is associated to the DARTEL template that you had created when running the [`t1-volume`](../T1_Volume) pipeline. +- `acq_label` is the label given to the acquisition, specifying the tracer used (`acq-`). +- `suvr_reference_region` is the reference region used to perform intensity normalization (i.e. dividing each voxel of the image by the average uptake in this region) resulting in a standardized uptake value ratio (SUVR) map. It can be `cerebellumPons` (used for amyloid tracers) or `pons` (used for FDG). Pipeline options: -- `--pet_tracer`: type of PET image to process. Possible values are `fdg` and `av45`. Default value is `fdg`. -- `--smooth`: a list of integers specifying the different isomorphic full width at half maximum (FWHM) in millimeters to smooth the image. 
Default value is: 0, 8 (both without smoothing and with an isomorphic smoothing of 8 mm) -- `--pvc_fwhm`: TSV file containing the `fwhm_x`, `fwhm_y` and `fwhm_z` of the PSF for each PET image. More explanation below. +- `--smooth`: a list of integers specifying the different isotropic full width at half maximum (FWHM) in millimeters to smooth the image. Default value is: 0, 8 (both without smoothing and with an isotropic smoothing of 8 mm) +- `--pvc_psf_tsv`: TSV file containing the `psf_x`, `psf_y` and `psf_z` of the PSF for each PET image. More explanation are given in [PET Introduction](../PET_Introduction) page. -!!! note "Partial volume correction" - To correct for [partial volume effects](http://www.turkupetcentre.net/petanalysis/image_pve.html), the pipeline uses the [region-based voxel-wise (RBV) correction](http://doc.pmod.com/pneuro/8893.htm) implemented in the [PETPVC toolbox](https://github.com/UCL/PETPVC). - You need to specify in a TSV file the full width at half maximum (FWHM), in millimeters, of the [point spread function (PSF)](https://en.wikipedia.org/wiki/Point_spread_function) associated with your data, in the x, y and z directions. For instance, if the FWHM of the PSF associated with your first image is 8 mm along the x axis, 9 mm along the y axis, and 10 mm along z axis, the first row of your TSV file will look like this: - ``` - participant_id session_id fwhm_x fwhm_y fwhm_z - sub-CLNC0001 ses-M00 8 9 10 - sub-CLNC0002 ses-M00 7 6 5 - sub-CLNC0003 ses-M00 6 6 6 - ``` +!!! info + Since the release of Clinica v0.3.8, the handling of PSF information in the TSV file has changed: `fwhm_x`, `fwhm_y`, `fwhm_z` columns have been replaced by `psf_x`, `psf_y`, `psf_z` and the `acq_label` column has been added. Additionally, the SUVR reference region is now a compulsory argument: it will be easier for you to modify Clinica if you want to add a custom reference region ([PET Introduction](../PET_Introduction) page). 
Choose `cerebellumPons` for amyloid tracers or `pons` for FDG to have the previous behaviour. !!! note The arguments common to all Clinica pipelines are described in [Interacting with clinica](../../InteractingWithClinica). @@ -71,10 +63,10 @@ The main output files are: - `_space-Ixi549Space[_pvc-rbv]_suvr-