Commit

Merge origin/develop into feature/fmriprep_ingress
e-kenneally committed Oct 9, 2023
2 parents 68ee445 + 8ebbd4e commit 28c9b42
Showing 20 changed files with 513 additions and 113 deletions.
@@ -29,4 +29,5 @@ ENV PATH=$PATH:/home/c-pac_user/.local/bin \
PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages

# set user
WORKDIR /home/c-pac_user
# USER c-pac_user
@@ -31,4 +31,5 @@ ENV PATH=$PATH:/home/c-pac_user/.local/bin \
PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages

# set user
WORKDIR /home/c-pac_user
# USER c-pac_user
1 change: 1 addition & 0 deletions .github/Dockerfiles/C-PAC.develop-jammy.Dockerfile
@@ -48,4 +48,5 @@ ENV PATH=$PATH:/home/c-pac_user/.local/bin \
PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages

# set user
WORKDIR /home/c-pac_user
# USER c-pac_user
1 change: 1 addition & 0 deletions .github/Dockerfiles/C-PAC.develop-lite-jammy.Dockerfile
@@ -49,4 +49,5 @@ ENV PATH=$PATH:/home/c-pac_user/.local/bin \
PYTHONPATH=$PYTHONPATH:$PYTHONUSERBASE/lib/python3.10/site-packages

# set user
WORKDIR /home/c-pac_user
# USER c-pac_user
1 change: 1 addition & 0 deletions .github/Dockerfiles/Ubuntu.jammy-non-free.Dockerfile
@@ -56,6 +56,7 @@ RUN groupadd -r c-pac \
graphviz-dev \
locales \
rdfind \
xvfb \
&& ln -snf /usr/share/zoneinfo/$TZ /etc/localtime \
&& echo $TZ > /etc/timezone \
&& sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -24,6 +24,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## Fixed

- Fixed a bug where some connectivity matrices wouldn't generate if anatomical and functional outputs were in different resolutions.
- Handling of `3dECM` outputs for AFNI ≥ 21.1.1.
- Fixed a bug where sparsity thresholds were not being scaled for network centrality.

### Changed

@@ -39,6 +41,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- `click-aliases`
- `dc`
- `semver`

### Removed dependencies

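The sparsity-threshold fix noted in the CHANGELOG above corresponds to the `threshold / 100.0` scaling that appears later in `create_centrality_wf`. A minimal sketch of that percent-to-fraction conversion, written for illustration rather than taken from the C-PAC source:

```python
# Illustrative helper (hypothetical, not C-PAC source): scale a sparsity
# threshold given as a percentage to a fraction for validation, mirroring the
# `threshold / 100.0` step used before check_centrality_params in
# create_centrality_wf.
def scale_sparsity_threshold(threshold_percent: float) -> float:
    """Convert a sparsity percentage (e.g. 0.5 == 0.5%) to a fraction."""
    if not 0 < threshold_percent <= 100:
        raise ValueError(f"Sparsity threshold must be in (0, 100], got {threshold_percent}")
    return threshold_percent / 100.0


print(scale_sparsity_threshold(0.5))  # 0.005 -> keep the strongest 0.5% of connections
```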
1 change: 1 addition & 0 deletions CPAC/info.py
@@ -201,6 +201,7 @@ def get_cpac_gitversion():
"scikit-learn",
"scipy",
"sdcflows",
"semver",
"traits",
"voluptuous>=0.12.0",
"xvfbwrapper"
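The `semver` requirement added above supports version-gated behavior such as the `AFNI_GTE_21_1_1` flag imported in the network-centrality changes below. A hedged sketch of how such a gate could look with the `semver` package; this is an assumption for illustration, not the C-PAC implementation:

```python
# Hypothetical AFNI version gate using the `semver` package (illustration only).
import semver


def afni_gte(installed: str, minimum: str = "21.1.1") -> bool:
    """Return True if the installed AFNI version is at least `minimum`."""
    return semver.VersionInfo.parse(installed) >= semver.VersionInfo.parse(minimum)


print(afni_gte("23.1.10"))  # True
print(afni_gte("20.3.3"))   # False
```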
20 changes: 18 additions & 2 deletions CPAC/network_centrality/__init__.py
@@ -1,3 +1,19 @@
from .utils import convert_pvalue_to_r, merge_lists
# Copyright (C) 2012-2023 C-PAC Developers

__all__ = ['convert_pvalue_to_r', 'merge_lists']
# This file is part of C-PAC.

# C-PAC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.

# C-PAC is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
from .utils import convert_pvalue_to_r, create_merge_node

__all__ = ['convert_pvalue_to_r', 'create_merge_node']
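One of the re-exported helpers, `convert_pvalue_to_r`, turns a significance (p-value) threshold into an equivalent correlation threshold for the centrality workflow below. The sketch that follows illustrates the standard t-distribution relation behind such a conversion; it is an assumption-based illustration, not the C-PAC source:

```python
# Illustrative p-value -> r conversion (hypothetical, not the C-PAC helper).
import math
from scipy import stats


def pvalue_to_r(n_timepoints: int, p_value: float, two_tailed: bool = False) -> float:
    """Return the Pearson r whose significance equals `p_value` for n_timepoints samples."""
    dof = n_timepoints - 2                        # degrees of freedom for Pearson correlation
    prob = p_value / 2 if two_tailed else p_value
    t_crit = stats.t.ppf(1 - prob, dof)           # critical t value at the requested p
    return t_crit / math.sqrt(dof + t_crit ** 2)  # invert t = r * sqrt(dof) / sqrt(1 - r**2)


print(pvalue_to_r(200, 0.001))  # r threshold for p < 0.001 with 200 timepoints
```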
184 changes: 121 additions & 63 deletions CPAC/network_centrality/network_centrality.py
@@ -1,17 +1,48 @@
from nipype.interfaces.afni.preprocess import DegreeCentrality, ECM, LFCD
# Copyright (C) 2015-2023 C-PAC Developers

# This file is part of C-PAC.

# C-PAC is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.

# C-PAC is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with C-PAC. If not, see <https://www.gnu.org/licenses/>.
from pathlib import Path
from typing import Optional, Union
from nipype.interfaces.afni.preprocess import DegreeCentrality, LFCD
from nipype.pipeline.engine import Workflow
from CPAC.network_centrality.utils import ThresholdOptionError
from CPAC.pipeline.schema import valid_options
from CPAC.utils.docs import docstring_parameter
from CPAC.utils.interfaces.afni import AFNI_GTE_21_1_1, ECM
from CPAC.utils.typing import LIST


@docstring_parameter(m_options=valid_options['centrality']['method_options'],
t_options=valid_options['centrality'][
'threshold_options'],
w_options=valid_options['centrality']['weight_options'])
def create_centrality_wf(wf_name, method_option, weight_options,
threshold_option, threshold, num_threads=1,
memory_gb=1.0):
def create_centrality_wf(wf_name: str, method_option: str,
weight_options: LIST[str], threshold_option: str,
threshold: float, num_threads: Optional[int] = 1,
memory_gb: Optional[float] = 1.0,
base_dir: Optional[Union[Path, str]] = None
) -> Workflow:
"""
Function to create the afni-based centrality workflow
Function to create the afni-based centrality workflow.
.. seealso::
* :py:func:`~CPAC.network_centrality.pipeline.connect_centrality_workflow`
* :py:func:`~CPAC.network_centrality.utils.create_merge_node`
* :py:func:`~CPAC.network_centrality.utils.sep_nifti_subbriks`
Parameters
----------
@@ -25,38 +25,66 @@ def create_centrality_wf(wf_name, method_option, weight_options,
one of {t_options}
threshold : float
the threshold value for thresholding the similarity matrix
num_threads : integer (optional); default=1
the number of threads to utilize for centrality computation
memory_gb : float (optional); default=1.0
the amount of memory the centrality calculation will take (GB)
num_threads : integer, optional
the number of threads to utilize for centrality computation; default=1
memory_gb : float,optional
the amount of memory the centrality calculation will take (GB);
default=1.0
base_dir : path or str, optional
the base directory for the workflow; default=None
Returns
-------
centrality_wf : nipype Workflow
the initialized nipype workflow for the afni centrality command
"""
Notes
-----
Workflow Inputs::
inputspec.in_file : string
path to input functional data NIfTI file
inputspec.template : string
path to input mask template NIfTI file
inputspec.threshold : float
threshold value for thresholding the similarity matrix
Workflow Outputs::
outputspec.outfile_list : list of strings
list of paths to output files (binarized and weighted)
""" # pylint: disable=line-too-long
from CPAC.pipeline import nipype_pipeline_engine as pe
import nipype.interfaces.utility as util
import CPAC.network_centrality.utils as utils
from nipype.interfaces import utility as util
from CPAC.network_centrality import utils
from CPAC.utils.interfaces.function import Function

test_thresh = threshold

if threshold_option == 'sparsity':
if threshold_option == 'Sparsity threshold':
test_thresh = threshold / 100.0

method_option, threshold_option = \
utils.check_centrality_params(method_option, threshold_option,
test_thresh)

centrality_wf = pe.Workflow(name=wf_name)
# Eigenvector centrality and AFNI ≥ 21.1.1?
ecm_gte_21_1_01 = ((method_option == 'eigenvector_centrality') and
AFNI_GTE_21_1_1)
out_names = tuple(f'{method_option}_{x}' for x in weight_options)
if base_dir is None:
centrality_wf = pe.Workflow(name=wf_name)
else:
centrality_wf = pe.Workflow(name=wf_name, base_dir=base_dir)

input_node = pe.Node(util.IdentityInterface(fields=['in_file',
'template',
'threshold']),
name='inputspec')

input_node.inputs.threshold = threshold
output_node = pe.Node(util.IdentityInterface(fields=['outfile_list']),
name='outputspec')

# Degree centrality
if method_option == 'degree_centrality':
@@ -68,11 +68,24 @@ def create_centrality_wf(wf_name, method_option, weight_options,

# Eigenvector centrality
elif method_option == 'eigenvector_centrality':
afni_centrality_node = pe.Node(ECM(environ={
'OMP_NUM_THREADS': str(num_threads)
}), name='afni_centrality', mem_gb=memory_gb)
afni_centrality_node.inputs.out_file = \
'eigenvector_centrality_merged.nii.gz'
if ecm_gte_21_1_01:
afni_centrality_node = pe.MapNode(ECM(environ={
'OMP_NUM_THREADS': str(num_threads)
}), name='afni_centrality', mem_gb=memory_gb,
iterfield=['do_binary', 'out_file'])
afni_centrality_node.inputs.out_file = [
f'eigenvector_centrality_{w_option}.nii.gz' for
w_option in weight_options]
afni_centrality_node.inputs.do_binary = [
w_option == 'Binarized' for w_option in weight_options]
centrality_wf.connect(afni_centrality_node, 'out_file',
output_node, 'outfile_list')
else:
afni_centrality_node = pe.Node(ECM(environ={
'OMP_NUM_THREADS': str(num_threads)
}), name='afni_centrality', mem_gb=memory_gb)
afni_centrality_node.inputs.out_file = \
'eigenvector_centrality_merged.nii.gz'
afni_centrality_node.inputs.memory = memory_gb # 3dECM input only

# lFCD
@@ -82,39 +82,48 @@ def create_centrality_wf(wf_name, method_option, weight_options,
}), name='afni_centrality', mem_gb=memory_gb)
afni_centrality_node.inputs.out_file = 'lfcd_merged.nii.gz'

out_names = tuple(f'{method_option}_{x}' for x in weight_options)
if not ecm_gte_21_1_01:
# Need to separate sub-briks except for 3dECM if AFNI > 21.1.01
sep_subbriks_node = \
pe.Node(Function(input_names=['nifti_file', 'out_names'],
output_names=['output_niftis'],
function=utils.sep_nifti_subbriks),
name='sep_nifti_subbriks')
sep_subbriks_node.inputs.out_names = out_names
centrality_wf.connect([(afni_centrality_node, sep_subbriks_node,
[('out_file', 'nifti_file')]),
(sep_subbriks_node, output_node,
[('output_niftis', 'outfile_list')])])

afni_centrality_node.interface.num_threads = num_threads

# Connect input image and mask tempalte
centrality_wf.connect(input_node, 'in_file',
afni_centrality_node, 'in_file')
centrality_wf.connect(input_node, 'template',
afni_centrality_node, 'mask')
# Connect input image and mask template
centrality_wf.connect([(input_node, afni_centrality_node,
[('in_file', 'in_file'),
('template', 'mask')])])

# If we're doing significan thresholding, convert to correlation
# If we're doing significance thresholding, convert to correlation
if threshold_option == 'Significance threshold':
# Check and (possibly) conver threshold
convert_thr_node = pe.Node(
util.Function(input_names=['datafile',
'p_value',
'two_tailed'],
output_names=['rvalue_threshold'],
function=utils.convert_pvalue_to_r),
Function(input_names=['datafile',
'p_value',
'two_tailed'],
output_names=['rvalue_threshold'],
function=utils.convert_pvalue_to_r),
name='convert_threshold')
# Wire workflow to connect in conversion node
centrality_wf.connect(input_node, 'in_file',
convert_thr_node, 'datafile')
centrality_wf.connect(input_node, 'threshold',
convert_thr_node, 'p_value')
centrality_wf.connect(convert_thr_node, 'rvalue_threshold',
afni_centrality_node, 'thresh')
centrality_wf.connect([(input_node, convert_thr_node,
[('in_file', 'datafile'),
('threshold', 'p_value')]),
(convert_thr_node, afni_centrality_node,
[('rvalue_threshold', 'thresh')])])

# Sparsity thresholding
elif threshold_option == 'Sparsity threshold':
# Check to make sure it's not lFCD
if method_option == 'local_functional_connectivity_density':
raise Exception('Sparsity thresholding is not supported for lFCD')
raise ThresholdOptionError(threshold_option, method_option)

# Otherwise, connect threshold to sparsity input
centrality_wf.connect(input_node, 'threshold',
@@ -125,27 +125,4 @@ def create_centrality_wf(wf_name, method_option, weight_options,
centrality_wf.connect(input_node, 'threshold',
afni_centrality_node, 'thresh')

# Need to separate sub-briks
sep_nifti_imports = ["import os", "import nibabel as nib",
"from CPAC.pipeline.schema import valid_options",
"from CPAC.utils.docs import docstring_parameter"]
sep_subbriks_node = \
pe.Node(util.Function(input_names=['nifti_file', 'out_names'],
output_names=['output_niftis'],
function=utils.sep_nifti_subbriks,
imports=sep_nifti_imports),
name='sep_nifti_subbriks')

sep_subbriks_node.inputs.out_names = out_names

centrality_wf.connect(afni_centrality_node, 'out_file',
sep_subbriks_node, 'nifti_file')

output_node = pe.Node(util.IdentityInterface(fields=['outfile_list',
'oned_output']),
name='outputspec')

centrality_wf.connect(sep_subbriks_node, 'output_niftis',
output_node, 'outfile_list')

return centrality_wf
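For orientation, here is a hedged usage sketch of the updated `create_centrality_wf` signature; the argument values and file paths are hypothetical placeholders, and a working C-PAC/AFNI installation is assumed:

```python
# Hypothetical usage sketch; parameter values and paths are placeholders.
from CPAC.network_centrality.network_centrality import create_centrality_wf

cent_wf = create_centrality_wf(
    wf_name='degree_centrality_wf',
    method_option='degree_centrality',
    weight_options=['Binarized', 'Weighted'],
    threshold_option='Significance threshold',  # p-value converted to r inside the workflow
    threshold=0.001,
    num_threads=4,
    memory_gb=4.0,
    base_dir='/tmp/centrality_work',  # the new optional argument
)

# Provide the functional image and mask template, then run.
cent_wf.get_node('inputspec').inputs.in_file = '/data/sub-01_bold_preproc.nii.gz'
cent_wf.get_node('inputspec').inputs.template = '/data/centrality_mask.nii.gz'
cent_wf.run()

# Binarized and weighted centrality maps are collected on outputspec.outfile_list.
```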
(Remaining changed files not shown.)
