[ENH] New ComputeDVARS interface #1606


Merged: 14 commits, Sep 13, 2016
2 changes: 1 addition & 1 deletion .travis.yml
@@ -31,7 +31,7 @@ install:
- conda update --all -y python=$TRAVIS_PYTHON_VERSION
# - if [[ "${INSTALL_DEB_DEPENDECIES}" == "true" && ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]]; then
# conda install -y vtk mayavi; fi
- conda install -y nipype
- conda install -y nipype matplotlib nitime
- pip install python-coveralls coverage doctest-ignore-unicode
- if [ ! -z "$DUECREDIT_ENABLE"]; then pip install duecredit; fi
- rm -r /home/travis/miniconda/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/nipype*
1 change: 1 addition & 0 deletions CHANGES
@@ -1,6 +1,7 @@
Upcoming release 0.13
=====================

* ENH: Add a DVARS calculation interface (https://github.com/nipy/nipype/pull/1606)
* ENH: Convenient load/save of interface inputs (https://github.com/nipy/nipype/pull/1591)
* ENH: Add a Framewise Displacement calculation interface (https://github.com/nipy/nipype/pull/1604)
* FIX: Use builtins open and unicode literals for py3 compatibility (https://github.com/nipy/nipype/pull/1572)
268 changes: 266 additions & 2 deletions nipype/algorithms/confounds.py
@@ -14,16 +14,181 @@
from __future__ import print_function, division, unicode_literals, absolute_import
from builtins import str, zip, range, open

import os
import os.path as op

import nibabel as nb
import numpy as np

from .. import logging
from ..external.due import due, BibTeX
from ..external.due import due, Doi, BibTeX
from ..interfaces.base import (traits, TraitedSpec, BaseInterface,
BaseInterfaceInputSpec, File, isdefined)
IFLOG = logging.getLogger('interface')


class ComputeDVARSInputSpec(BaseInterfaceInputSpec):
in_file = File(exists=True, mandatory=True, desc='functional data, after HMC')
in_mask = File(exists=True, mandatory=True, desc='a brain mask')
remove_zerovariance = traits.Bool(False, usedefault=True,
desc='remove voxels with zero variance')
save_std = traits.Bool(True, usedefault=True,
desc='save standardized DVARS')
save_nstd = traits.Bool(False, usedefault=True,
desc='save non-standardized DVARS')
save_vxstd = traits.Bool(False, usedefault=True,
desc='save voxel-wise standardized DVARS')
save_all = traits.Bool(False, usedefault=True, desc='output all DVARS')

series_tr = traits.Float(desc='repetition time in sec.')
save_plot = traits.Bool(False, usedefault=True, desc='write DVARS plot')
figdpi = traits.Int(100, usedefault=True, desc='output dpi for the plot')
figsize = traits.Tuple(traits.Float(11.7), traits.Float(2.3), usedefault=True,
desc='output figure size')
figformat = traits.Enum('png', 'pdf', 'svg', usedefault=True,
desc='output format for figures')



class ComputeDVARSOutputSpec(TraitedSpec):
out_std = File(exists=True, desc='output text file')
out_nstd = File(exists=True, desc='output text file')
out_vxstd = File(exists=True, desc='output text file')
out_all = File(exists=True, desc='output text file')
avg_std = traits.Float()
avg_nstd = traits.Float()
avg_vxstd = traits.Float()
fig_std = File(exists=True, desc='output DVARS plot')
fig_nstd = File(exists=True, desc='output DVARS plot')
fig_vxstd = File(exists=True, desc='output DVARS plot')


class ComputeDVARS(BaseInterface):
"""
Computes DVARS: the RMS variance, over voxels, of the temporal derivative of the fMRI time series.
"""
input_spec = ComputeDVARSInputSpec
output_spec = ComputeDVARSOutputSpec
references_ = [{
'entry': BibTeX("""\
@techreport{nichols_notes_2013,
address = {Coventry, UK},
title = {Notes on {Creating} a {Standardized} {Version} of {DVARS}},
url = {http://www2.warwick.ac.uk/fac/sci/statistics/staff/academic-\
research/nichols/scripts/fsl/standardizeddvars.pdf},
urldate = {2016-08-16},
institution = {University of Warwick},
author = {Nichols, Thomas},
year = {2013}
}"""),
'tags': ['method']
}, {
'entry': BibTeX("""\
@article{power_spurious_2012,
title = {Spurious but systematic correlations in functional connectivity {MRI} networks \
arise from subject motion},
volume = {59},
doi = {10.1016/j.neuroimage.2011.10.018},
number = {3},
urldate = {2016-08-16},
journal = {NeuroImage},
author = {Power, Jonathan D. and Barnes, Kelly A. and Snyder, Abraham Z. and Schlaggar, \
Bradley L. and Petersen, Steven E.},
year = {2012},
pages = {2142--2154},
}
"""),
'tags': ['method']
}]

def __init__(self, **inputs):
self._results = {}
super(ComputeDVARS, self).__init__(**inputs)

def _gen_fname(self, suffix, ext=None):
fname, in_ext = op.splitext(op.basename(
self.inputs.in_file))

if in_ext == '.gz':
fname, in_ext2 = op.splitext(fname)
in_ext = in_ext2 + in_ext

if ext is None:
ext = in_ext

if ext.startswith('.'):
ext = ext[1:]

return op.abspath('{}_{}.{}'.format(fname, suffix, ext))

def _run_interface(self, runtime):
dvars = compute_dvars(self.inputs.in_file, self.inputs.in_mask,
remove_zerovariance=self.inputs.remove_zerovariance)

self._results['avg_std'] = dvars[0].mean()
self._results['avg_nstd'] = dvars[1].mean()
self._results['avg_vxstd'] = dvars[2].mean()

tr = None
if isdefined(self.inputs.series_tr):
tr = self.inputs.series_tr

if self.inputs.save_std:
out_file = self._gen_fname('dvars_std', ext='tsv')
np.savetxt(out_file, dvars[0], fmt=b'%0.6f')
self._results['out_std'] = out_file

if self.inputs.save_plot:
self._results['fig_std'] = self._gen_fname(
'dvars_std', ext=self.inputs.figformat)
fig = plot_confound(dvars[0], self.inputs.figsize, 'Standardized DVARS',
series_tr=tr)
fig.savefig(self._results['fig_std'], dpi=float(self.inputs.figdpi),
format=self.inputs.figformat,
bbox_inches='tight')
fig.clf()

if self.inputs.save_nstd:
out_file = self._gen_fname('dvars_nstd', ext='tsv')
np.savetxt(out_file, dvars[1], fmt=b'%0.6f')
self._results['out_nstd'] = out_file

if self.inputs.save_plot:
self._results['fig_nstd'] = self._gen_fname(
'dvars_nstd', ext=self.inputs.figformat)
fig = plot_confound(dvars[1], self.inputs.figsize, 'DVARS', series_tr=tr)
fig.savefig(self._results['fig_nstd'], dpi=float(self.inputs.figdpi),
format=self.inputs.figformat,
bbox_inches='tight')
fig.clf()

if self.inputs.save_vxstd:
out_file = self._gen_fname('dvars_vxstd', ext='tsv')
np.savetxt(out_file, dvars[2], fmt=b'%0.6f')
self._results['out_vxstd'] = out_file

if self.inputs.save_plot:
self._results['fig_vxstd'] = self._gen_fname(
'dvars_vxstd', ext=self.inputs.figformat)
fig = plot_confound(dvars[2], self.inputs.figsize, 'Voxelwise std DVARS',
series_tr=tr)
fig.savefig(self._results['fig_vxstd'], dpi=float(self.inputs.figdpi),
format=self.inputs.figformat,
bbox_inches='tight')
fig.clf()

if self.inputs.save_all:
out_file = self._gen_fname('dvars', ext='tsv')
np.savetxt(out_file, np.vstack(dvars).T, fmt=b'%0.8f', delimiter=b'\t',
header='std DVARS\tnon-std DVARS\tvx-wise std DVARS')
self._results['out_all'] = out_file

return runtime

def _list_outputs(self):
return self._results
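
For reference, a minimal usage sketch of the new interface (file names are placeholders; the inputs mirror the spec defined above):

from nipype.algorithms.confounds import ComputeDVARS

dvars = ComputeDVARS()
dvars.inputs.in_file = 'func_hmc.nii.gz'    # motion-corrected 4D series (placeholder path)
dvars.inputs.in_mask = 'brain_mask.nii.gz'  # 3D brain mask (placeholder path)
dvars.inputs.save_all = True                # also write the combined TSV
dvars.inputs.series_tr = 2.0                # repetition time in seconds (only used for plotting)
res = dvars.run()
print(res.outputs.out_all, res.outputs.avg_std)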


class FramewiseDisplacementInputSpec(BaseInterfaceInputSpec):
in_plots = File(exists=True, desc='motion parameters as written by FSL MCFLIRT')
radius = traits.Float(50, usedefault=True,
@@ -90,7 +255,6 @@ def _run_interface(self, runtime):
}
np.savetxt(self.inputs.out_file, fd_res)


if self.inputs.save_plot:
tr = None
if isdefined(self.inputs.series_tr):
@@ -106,16 +270,116 @@ def _run_interface(self, runtime):
format=self.inputs.out_figure[-3:],
bbox_inches='tight')
fig.clf()

return runtime

def _list_outputs(self):
return self._results


def compute_dvars(in_file, in_mask, remove_zerovariance=False):
"""
Compute the :abbr:`DVARS (D referring to temporal
derivative of timecourses, VARS referring to RMS variance over voxels)`
[Power2012]_.

Particularly, the *standardized* :abbr:`DVARS (D referring to temporal
derivative of timecourses, VARS referring to RMS variance over voxels)`
[Nichols2013]_ are computed.

.. [Nichols2013] Nichols T, `Notes on creating a standardized version of
DVARS <http://www2.warwick.ac.uk/fac/sci/statistics/staff/academic-\
research/nichols/scripts/fsl/standardizeddvars.pdf>`_, 2013.

.. note:: Implementation details

Uses the implementation of the `Yule-Walker equations
from nitime
<http://nipy.org/nitime/api/generated/nitime.algorithms.autoregressive.html\
#nitime.algorithms.autoregressive.AR_est_YW>`_
for the :abbr:`AR (auto-regressive)` filtering of the fMRI signal.

:param str in_file: path to the 4D functional dataset, after head-motion correction.
:param str in_mask: path to a 3D brain mask.
:param bool remove_zerovariance: mask out voxels with zero variance across time.
:return: a tuple with the standardized, non-standardized and voxel-wise
    standardized DVARS time series.

"""
import os.path as op
import numpy as np
import nibabel as nb
from nitime.algorithms import AR_est_YW

func = nb.load(in_file).get_data().astype(np.float32)
mask = nb.load(in_mask).get_data().astype(np.uint8)

if len(func.shape) != 4:
raise RuntimeError(
"Input fMRI dataset should be 4-dimensional")

# Robust standard deviation
func_sd = (np.percentile(func, 75, axis=3) -
np.percentile(func, 25, axis=3)) / 1.349
func_sd[mask <= 0] = 0
Contributor: Robust standard deviation sounds useful -- we should consider moving it to another module so everyone can use it.

Contributor (author): Well, I'd say that this is a quick and dirty way of calculating the SD. I am +1 if we had a really robust SD computation algorithm though.

Contributor: got it
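
If the IQR-based estimate were factored out into a shared helper, it could look like the sketch below (the robust_std name and its location are hypothetical; 1.349 is the interquartile range of a standard normal distribution):

import numpy as np

def robust_std(data, axis=None):
    # For normally distributed data the IQR spans about 1.349 standard
    # deviations, so dividing by that constant yields an outlier-resistant
    # estimate of the SD.
    return (np.percentile(data, 75, axis=axis) -
            np.percentile(data, 25, axis=axis)) / 1.349

# Sanity check on synthetic data: prints a value close to 2.0
print(robust_std(np.random.RandomState(0).normal(scale=2.0, size=10000)))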


if remove_zerovariance:
# Remove zero-variance voxels across time axis
mask = zero_variance(func, mask)

idx = np.where(mask > 0)
mfunc = func[idx[0], idx[1], idx[2], :]

# Demean
mfunc -= mfunc.mean(axis=1).astype(np.float32)[..., np.newaxis]
Contributor: I've seen this multiple places too

Contributor (author): I don't know what you mean -- the masking of voxels with zero variance, or something else? Anyway, thanks to this comment I spotted those ugly leftovers in ll. 326-328.

Contributor: I meant "demeaning":

$ grep -rIw "\-.*\.mean" .
./examples/rsfmri_vol_surface_preprocessing.py:        X = (X - np.mean(X, axis=0)) / stdX
./nipype/workflows/dmri/fsl/epi.py:    vsmmag_masked = vsmmag_masked - vsmmag_masked.mean()
./nipype/algorithms/#misc.py#:            signal = signal - signal.mean()
./nipype/algorithms/misc.py:            signal = signal - signal.mean()
./nipype/algorithms/compcor.py:        voxel_timecourses = voxel_timecourses - np.mean(voxel_timecourses,
./nipype/algorithms/compcor.py:        imgseries = imgseries - np.mean(imgseries, axis=1)[:, np.newaxis]
./nipype/algorithms/rapidart.py:        gz = (gz - np.mean(gz)) / np.std(gz)  # normalize the detrended signal
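
A minimal sketch of that demeaning pattern with time on the last axis (the demean helper is illustrative and not part of this PR):

import numpy as np

def demean(timeseries, axis=-1):
    # Subtract each voxel's temporal mean; keepdims preserves the reduced
    # axis so the subtraction broadcasts without an explicit np.newaxis.
    return timeseries - timeseries.mean(axis=axis, keepdims=True)

ts = np.random.rand(100, 50)   # 100 voxels x 50 timepoints (synthetic)
assert np.allclose(demean(ts).mean(axis=-1), 0.0)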


# Compute (non-robust) estimate of lag-1 autocorrelation
ar1 = np.apply_along_axis(AR_est_YW, 1, mfunc, 1)[:, 0]

# Compute (predicted) standard deviation of temporal difference time series
diff_sdhat = np.squeeze(np.sqrt(((1 - ar1) * 2).tolist())) * func_sd[mask > 0].reshape(-1)
diff_sd_mean = diff_sdhat.mean()

# Compute temporal difference time series
func_diff = np.diff(mfunc, axis=1)

# DVARS (no standardization)
dvars_nstd = func_diff.std(axis=0)

# standardization
dvars_stdz = dvars_nstd / diff_sd_mean

# voxelwise standardization
diff_vx_stdz = func_diff / np.array([diff_sdhat] * func_diff.shape[-1]).T
dvars_vx_stdz = diff_vx_stdz.std(axis=0, ddof=1)

return (dvars_stdz, dvars_nstd, dvars_vx_stdz)
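
Outside the interface machinery, the helper above can also be called directly on image files; a minimal sketch (paths are placeholders):

from nipype.algorithms.confounds import compute_dvars

# Returns the (standardized, non-standardized, voxel-wise standardized) series
dvars_std, dvars_nstd, dvars_vxstd = compute_dvars(
    'func_hmc.nii.gz', 'brain_mask.nii.gz', remove_zerovariance=True)
print(dvars_std.mean())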

def zero_variance(func, mask):
"""
Mask out voxels with zero variance across t-axis

:param numpy.ndarray func: input fMRI dataset, after motion correction
:param numpy.ndarray mask: 3D brain mask
:return: the 3D mask of voxels with nonzero variance across :math:`t`.
:rtype: numpy.ndarray

"""
idx = np.where(mask > 0)
func = func[idx[0], idx[1], idx[2], :]
tvariance = func.var(axis=1)
tv_mask = np.zeros_like(tvariance, dtype=np.uint8)
tv_mask[tvariance > 0] = 1

newmask = np.zeros_like(mask, dtype=np.uint8)
newmask[idx] = tv_mask
return newmask
Contributor: I did something very similar to this in compcor as well

Contributor (author): So you mean, mask out all zero-variance pixels? There is this little function to do that now :)

Contributor: is this the right place to put it?

Contributor (author): Where else?


def plot_confound(tseries, figsize, name, units=None,
series_tr=None, normalize=False):
"""
A helper function to plot :abbr:`fMRI (functional MRI)` confounds.

"""
import matplotlib
matplotlib.use('Agg')
57 changes: 57 additions & 0 deletions nipype/algorithms/tests/test_auto_ComputeDVARS.py
@@ -0,0 +1,57 @@
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ...testing import assert_equal
from ..confounds import ComputeDVARS


def test_ComputeDVARS_inputs():
input_map = dict(figdpi=dict(usedefault=True,
),
figformat=dict(usedefault=True,
),
figsize=dict(usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(mandatory=True,
),
in_mask=dict(mandatory=True,
),
remove_zerovariance=dict(usedefault=True,
),
save_all=dict(usedefault=True,
),
save_nstd=dict(usedefault=True,
),
save_plot=dict(usedefault=True,
),
save_std=dict(usedefault=True,
),
save_vxstd=dict(usedefault=True,
),
series_tr=dict(),
)
inputs = ComputeDVARS.input_spec()

for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value


def test_ComputeDVARS_outputs():
output_map = dict(avg_nstd=dict(),
avg_std=dict(),
avg_vxstd=dict(),
fig_nstd=dict(),
fig_std=dict(),
fig_vxstd=dict(),
out_all=dict(),
out_nstd=dict(),
out_std=dict(),
out_vxstd=dict(),
)
outputs = ComputeDVARS.output_spec()

for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value