Merge pull request #287 from rciric/master
[ENH] Confound enhancement
oesteban authored Feb 15, 2019
2 parents a71c834 + 314c939 commit 9d7b80e
Showing 9 changed files with 1,406 additions and 3 deletions.
5 changes: 4 additions & 1 deletion niworkflows/interfaces/__init__.py
@@ -3,6 +3,7 @@
# vi: set ft=python sts=4 ts=4 sw=4 et:
from __future__ import absolute_import, division, print_function, unicode_literals

from .confounds import ExpandModel, SpikeRegressors
from .masks import BETRPT as BET
from .segmentation import (FASTRPT as FAST)
from .registration import (FLIRTRPT as FLIRT,
@@ -12,4 +13,6 @@
ANTSApplyTransformsRPT as ApplyTransforms,
SimpleBeforeAfterRPT as SimpleBeforeAfter)
from .utils import CopyXForm, CopyHeader, NormalizeMotionParams, SanitizeImage
from .plotting import FMRISummary
from .plotting import (
FMRISummary, CompCorVariancePlot, ConfoundsCorrelationPlot
)
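
Taken together, the changes to __init__.py above expose the new interfaces at the package level. A minimal sketch of the resulting import surface (nothing beyond the names added in this diff is assumed):

    # New names exposed by this commit; their input/output specs are
    # defined in the modules diffed below.
    from niworkflows.interfaces import (
        ExpandModel, SpikeRegressors,                   # new confound interfaces from confounds.py (diff not rendered below)
        CompCorVariancePlot, ConfoundsCorrelationPlot,  # new reportlet plots from plotting.py
    )
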
569 changes: 569 additions & 0 deletions niworkflows/interfaces/confounds.py

Large diffs are not rendered by default.

97 changes: 95 additions & 2 deletions niworkflows/interfaces/plotting.py
@@ -12,7 +12,9 @@
from nipype.interfaces.base import (
File, BaseInterfaceInputSpec, TraitedSpec, SimpleInterface, traits
)
from ..viz.plots import fMRIPlot
from ..viz.plots import (
fMRIPlot, compcor_variance_plot, confounds_correlation_plot
)


class FMRISummaryInputSpec(BaseInterfaceInputSpec):
@@ -34,7 +36,7 @@ class FMRISummaryOutputSpec(TraitedSpec):

class FMRISummary(SimpleInterface):
"""
Copy the x-form matrices from `hdr_file` to `out_file`.
Prepare a fMRI summary plot for the report.
"""
input_spec = FMRISummaryInputSpec
output_spec = FMRISummaryOutputSpec
@@ -70,3 +72,94 @@ def _run_interface(self, runtime):
).plot()
fig.savefig(self._results['out_file'], bbox_inches='tight')
return runtime


class CompCorVariancePlotInputSpec(BaseInterfaceInputSpec):
metadata_files = traits.List(File(exists=True), mandatory=True,
desc='List of files containing component '
'metadata')
metadata_sources = traits.List(traits.Str,
desc='List of names of decompositions '
'(e.g., aCompCor, tCompCor) yielding '
'the arguments in `metadata_files`')
variance_thresholds = traits.Tuple(
traits.Float(0.5), traits.Float(0.7), traits.Float(0.9),
usedefault=True, desc='Levels of explained variance to include in '
'plot')
out_file = traits.Either(None, File, value=None, usedefault=True,
desc='Path to save plot')


class CompCorVariancePlotOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='Path to saved plot')


class CompCorVariancePlot(SimpleInterface):
"""
Plot the number of components necessary to explain the specified levels
of variance in the data.
"""
input_spec = CompCorVariancePlotInputSpec
output_spec = CompCorVariancePlotOutputSpec

def _run_interface(self, runtime):
if self.inputs.out_file is None:
self._results['out_file'] = fname_presuffix(
self.inputs.metadata_files[0],
suffix='_compcor.svg',
use_ext=False,
newpath=runtime.cwd)
else:
self._results['out_file'] = self.inputs.out_file
compcor_variance_plot(
metadata_files=self.inputs.metadata_files,
metadata_sources=self.inputs.metadata_sources,
output_file=self._results['out_file'],
varexp_thresh=self.inputs.variance_thresholds
)
return runtime
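
Based on the input specification above, a minimal usage sketch of the new interface; the metadata filename is hypothetical, and out_file is left at its default (an SVG name derived from the first metadata file, written in the working directory):

    from niworkflows.interfaces.plotting import CompCorVariancePlot

    plot = CompCorVariancePlot(
        metadata_files=['sub-01_desc-aCompCor_metadata.tsv'],  # hypothetical CompCor metadata file
        metadata_sources=['aCompCor'],                         # label for the decomposition above
        variance_thresholds=(0.5, 0.7, 0.9),                   # default explained-variance levels
    )
    result = plot.run()
    print(result.outputs.out_file)                             # path to the saved SVG
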


class ConfoundsCorrelationPlotInputSpec(BaseInterfaceInputSpec):
confounds_file = File(exists=True, mandatory=True,
desc='File containing confound regressors')
out_file = traits.Either(None, File, value=None, usedefault=True,
desc='Path to save plot')
reference_column = traits.Str('global_signal', usedefault=True,
desc='Column in the confound file for '
'which all correlation magnitudes '
'should be ranked and plotted')
max_dim = traits.Int(70, usedefault=True,
desc='Maximum number of regressors to include in '
'plot. Regressors with highest magnitude of '
'correlation with `reference_column` will be '
'selected.')


class ConfoundsCorrelationPlotOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='Path to saved plot')


class ConfoundsCorrelationPlot(SimpleInterface):
"""
Plot the correlation among confound regressors.
"""
input_spec = ConfoundsCorrelationPlotInputSpec
output_spec = ConfoundsCorrelationPlotOutputSpec

def _run_interface(self, runtime):
if self.inputs.out_file is None:
self._results['out_file'] = fname_presuffix(
self.inputs.confounds_file,
suffix='_confoundCorrelation.svg',
use_ext=False,
newpath=runtime.cwd)
else:
self._results['out_file'] = self.inputs.out_file
confounds_correlation_plot(
confounds_file=self.inputs.confounds_file,
output_file=self._results['out_file'],
reference=self.inputs.reference_column,
max_dim=self.inputs.max_dim
)
return runtime
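
Similarly, a minimal sketch of how ConfoundsCorrelationPlot might be run; the confounds filename is hypothetical and the remaining inputs simply restate their defaults:

    from niworkflows.interfaces.plotting import ConfoundsCorrelationPlot

    plot = ConfoundsCorrelationPlot(
        confounds_file='sub-01_task-rest_desc-confounds_regressors.tsv',  # hypothetical confounds TSV
        reference_column='global_signal',  # rank correlation magnitudes against this column
        max_dim=70,                        # keep at most 70 regressors in the plot
    )
    result = plot.run()
    print(result.outputs.out_file)         # defaults to *_confoundCorrelation.svg in the working directory
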
124 changes: 124 additions & 0 deletions niworkflows/interfaces/utils.py
@@ -8,11 +8,14 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import os
import re
import json
import shutil
import numpy as np
import nibabel as nb
import nilearn.image as nli
from textwrap import indent
from collections import OrderedDict

import scipy.ndimage as nd
from nipype import logging
@@ -703,6 +706,127 @@ def _run_interface(self, runtime):
return runtime


class TSV2JSONInputSpec(BaseInterfaceInputSpec):
in_file = File(exists=True, mandatory=True, desc='Input TSV file')
index_column = traits.Str(mandatory=True,
desc='Name of the column in the TSV to be used '
'as the top-level key in the JSON. All '
'remaining columns will be assigned as '
'nested keys.')
out_file = File(desc='Path where the output file is to be saved')
additional_metadata = traits.Either(None, traits.Dict(), usedefault=True,
desc='Any additional metadata that '
'should be applied to all '
'entries in the JSON.')
drop_columns = traits.Either(None, traits.List(), usedefault=True,
desc='List of columns in the TSV to be '
'dropped from the JSON.')
enforce_case = traits.Bool(True, usedefault=True,
desc='Enforce snake case for top-level keys '
'and camel case for nested keys')


class TSV2JSONOutputSpec(TraitedSpec):
out_file = File(exists=True, desc='Output JSON file')


class TSV2JSON(SimpleInterface):
"""Convert metadata from TSV format to JSON format.
"""
input_spec = TSV2JSONInputSpec
output_spec = TSV2JSONOutputSpec

def _run_interface(self, runtime):
if not isdefined(self.inputs.out_file):
out_file = fname_presuffix(
self.inputs.in_file, suffix='.json', newpath=runtime.cwd,
use_ext=False)
else:
out_file = self.inputs.out_file

self._results['out_file'] = _tsv2json(
in_tsv=self.inputs.in_file,
out_json=out_file,
index_column=self.inputs.index_column,
additional_metadata=self.inputs.additional_metadata,
drop_columns=self.inputs.drop_columns,
enforce_case=self.inputs.enforce_case
)
return runtime
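
A minimal sketch of how the new TSV2JSON interface might be used to turn component metadata into a JSON sidecar; the filename, index column, and dropped column are hypothetical:

    from niworkflows.interfaces.utils import TSV2JSON

    t2j = TSV2JSON(
        in_file='sub-01_desc-aCompCor_metadata.tsv',  # hypothetical metadata TSV
        index_column='component',                     # hypothetical column; its values become top-level JSON keys
        drop_columns=['mask'],                        # hypothetical column to exclude from the JSON
        additional_metadata={'Method': 'aCompCor'},   # applied to every entry
        enforce_case=True,                            # snake_case top-level keys, CamelCase nested keys
    )
    result = t2j.run()                                # by default writes <input basename>.json in the working directory
    print(result.outputs.out_file)
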


def _tsv2json(in_tsv, out_json, index_column, additional_metadata=None,
drop_columns=None, enforce_case=True):
"""
Convert metadata from TSV format to JSON format.
Parameters
----------
in_tsv: str
Path to the metadata in TSV format.
out_json: str
Path where the metadata should be saved in JSON format after
conversion.
index_column: str
Name of the column in the TSV to be used as an index (top-level key in
the JSON).
additional_metadata: dict
Any additional metadata that should be applied to all entries in the
JSON.
drop_columns: list
List of columns from the input TSV to be dropped from the JSON.
enforce_case: bool
Indicates whether BIDS case conventions should be followed. Currently,
this means that index fields (column names in the associated data TSV)
use snake case and other fields use camel case.
Returns
-------
str
Path to the metadata saved in JSON format.
"""
import pandas as pd
# Taken from https://dev.to/rrampage/snake-case-to-camel-case-and- ...
# back-using-regular-expressions-and-python-m9j
re_to_camel = r'(.*?)_([a-zA-Z0-9])'
re_to_snake = r'(^.+?|.*?)((?<![_A-Z])[A-Z]|(?<![_0-9])[0-9]+)'

def snake(match):
return '{}_{}'.format(match.group(1).lower(), match.group(2).lower())

def camel(match):
return '{}{}'.format(match.group(1), match.group(2).upper())

# from fmriprep
def less_breakable(a_string):
""" hardens the string to different envs (i.e. case insensitive, no
whitespace, '#' """
return ''.join(a_string.split()).strip('#')

drop_columns = drop_columns or []
additional_metadata = additional_metadata or {}
tsv_data = pd.read_csv(in_tsv, '\t')
for k, v in additional_metadata.items():
tsv_data[k] = v
for col in drop_columns:
tsv_data.drop(labels=col, axis='columns', inplace=True)
tsv_data.set_index(index_column, drop=True, inplace=True)
if enforce_case:
tsv_data.index = [re.sub(re_to_snake, snake,
less_breakable(i), 0).lower()
for i in tsv_data.index]
tsv_data.columns = [re.sub(re_to_camel, camel,
less_breakable(i).title(), 0)
for i in tsv_data.columns]
json_data = tsv_data.to_json(orient='index')
json_data = json.JSONDecoder(
object_pairs_hook=OrderedDict).decode(json_data)

with open(out_json, 'w') as f:
json.dump(json_data, f, indent=4)
return out_json
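
To illustrate what enforce_case does, a small self-contained check of the two regular expressions above; the example strings are arbitrary:

    import re

    re_to_camel = r'(.*?)_([a-zA-Z0-9])'
    re_to_snake = r'(^.+?|.*?)((?<![_A-Z])[A-Z]|(?<![_0-9])[0-9]+)'

    def snake(match):
        return '{}_{}'.format(match.group(1).lower(), match.group(2).lower())

    def camel(match):
        return '{}{}'.format(match.group(1), match.group(2).upper())

    # Index entries are snake-cased and lower-cased ...
    print(re.sub(re_to_snake, snake, 'CumulativeVarianceExplained', 0).lower())
    # -> cumulative_variance_explained
    # ... while remaining fields are title-cased and then camel-cased.
    print(re.sub(re_to_camel, camel, 'cumulative_variance_explained'.title(), 0))
    # -> CumulativeVarianceExplained
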


def _tpm2roi(in_tpm, in_mask, mask_erosion_mm=None, erosion_mm=None,
mask_erosion_prop=None, erosion_prop=None, pthres=0.95,
newpath=None):