doc/changes/latest.inc (2 additions, 0 deletions)
@@ -17,6 +17,8 @@ Changelog

- Add support for signals in mV for :func:`mne.io.read_raw_brainvision` by `Clemens Brunner`_

- Allow resampling raw data with :meth:`mne.io.Raw.resample` without preloading data, by `Eric Larson`_

- :func:`mne.viz.plot_dipole_locations` and :meth:`mne.Dipole.plot_locations` gained a ``title`` argument to specify a custom figure title in ``orthoview`` mode by `Richard Höchenberger`_

- Added temporal derivative distribution repair :func:`mne.preprocessing.nirs.temporal_derivative_distribution_repair` by `Robert Luke`_
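The changelog entry above is the user-facing summary of this PR: ``Raw.resample`` no longer requires preloaded data. A minimal usage sketch, assuming a FIF recording is available locally; the file name and target rate below are placeholders, not values taken from this PR:

```python
import mne

# Placeholder path; any locally available FIF recording works the same way.
fname = "sample_audvis_raw.fif"

# preload=False keeps the data on disk; with this change, resample() can be
# called directly, without calling load_data() first.
raw = mne.io.read_raw_fif(fname, preload=False)
raw.resample(100.)

print(raw.info["sfreq"])  # 100.0
print(raw.preload)        # True: the resampled result is held in memory
```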
mne/filter.py (8 additions, 2 deletions)
@@ -1333,6 +1333,11 @@ def _check_filterable(x, kind='filtered'):
return x


def _resamp_ratio_len(up, down, n):
ratio = float(up) / down
return ratio, int(round(ratio * n))


@verbose
def resample(x, up=1., down=1., npad=100, axis=-1, window='boxcar', n_jobs=1,
pad='reflect_limited', verbose=None):
@@ -1388,7 +1393,8 @@ def resample(x, up=1., down=1., npad=100, axis=-1, window='boxcar', n_jobs=1,

# make sure our arithmetic will work
x = _check_filterable(x, 'resampled')
ratio = float(up) / down
ratio, final_len = _resamp_ratio_len(up, down, x.shape[axis])
del up, down
if axis < 0:
axis = x.ndim + axis
orig_last_axis = x.ndim - 1
@@ -1418,7 +1424,6 @@ def resample(x, up=1., down=1., npad=100, axis=-1, window='boxcar', n_jobs=1,
x_flat = x.reshape((-1, x_len))
orig_len = x_len + npads.sum() # length after padding
new_len = int(round(ratio * orig_len)) # length after resampling
final_len = int(round(ratio * x_len))
to_removes = [int(round(ratio * npads[0]))]
to_removes.append(new_len - final_len - to_removes[0])
to_removes = np.array(to_removes)
@@ -1458,6 +1463,7 @@ def resample(x, up=1., down=1., npad=100, axis=-1, window='boxcar', n_jobs=1,
y.shape = orig_shape[:-1] + (y.shape[1],)
if axis != orig_last_axis:
y = y.swapaxes(axis, orig_last_axis)
assert y.shape[axis] == final_len

return y

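The new `_resamp_ratio_len` helper factors out the ratio/output-length rounding so that `resample()` (and, below, `Raw.resample()`) can know the final length from the unpadded input length before any data are touched. A standalone sketch of that rounding logic; the mirrored helper name and the channel count here are illustrative only:

```python
import numpy as np


def resamp_ratio_len(up, down, n):
    """Standalone mirror of the private helper: resampling ratio and rounded output length."""
    ratio = float(up) / down
    return ratio, int(round(ratio * n))


# 1001 input samples resampled by 3/2 -> 1502 output samples.
ratio, n_new = resamp_ratio_len(up=3, down=2, n=1001)
print(ratio, n_new)  # 1.5 1502

# Knowing n_new up front allows preallocating the output and filling it
# segment by segment, which is what the Raw.resample() changes below rely on.
n_channels = 4
out = np.empty((n_channels, n_new))
```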
mne/io/base.py (48 additions, 30 deletions)
@@ -31,7 +31,7 @@

from ..annotations import (_annotations_starts_stops, _write_annotations,
_handle_meas_date)
from ..filter import (FilterMixin, notch_filter, resample,
from ..filter import (FilterMixin, notch_filter, resample, _resamp_ratio_len,
_resample_stim_channels, _check_fun)
from ..parallel import parallel_func
from ..utils import (_check_fname, _check_pandas_installed, sizeof_fmt,
@@ -1015,9 +1015,6 @@ def resample(self, sfreq, npad='auto', window='boxcar', stim_picks=None,
verbose=None): # lgtm
"""Resample all channels.

The Raw object has to have the data loaded e.g. with ``preload=True``
or ``self.load_data()``.

.. warning:: The intended purpose of this function is primarily to
speed up computations (e.g., projection calculation) when
precise timing of events is not required, as downsampling
@@ -1074,9 +1071,12 @@ def resample(self, sfreq, npad='auto', window='boxcar', stim_picks=None,
-----
For some data, it may be more accurate to use ``npad=0`` to reduce
artifacts. This is dataset dependent -- check your data!
"""
_check_preload(self, 'raw.resample')

For optimum performance and to make use of ``n_jobs > 1``, the raw
object has to have the data loaded e.g. with ``preload=True`` or
``self.load_data()``, but this increases memory requirements. The
resulting raw object will have the data loaded into memory.
"""
# When no event object is supplied, some basic detection of dropped
# events is performed to generate a warning. Finding events can fail
# for a variety of reasons, e.g. if no stim channel is present or it is
@@ -1092,37 +1092,55 @@ def resample(self, sfreq, npad='auto', window='boxcar', stim_picks=None,
o_sfreq = float(self.info['sfreq'])

offsets = np.concatenate(([0], np.cumsum(self._raw_lengths)))
new_data = list()

ratio = sfreq / o_sfreq

# set up stim channel processing
if stim_picks is None:
stim_picks = pick_types(self.info, meg=False, ref_meg=False,
stim=True, exclude=[])
stim_picks = np.asanyarray(stim_picks)

for ri in range(len(self._raw_lengths)):
data_chunk = self._data[:, offsets[ri]:offsets[ri + 1]]
new_data.append(resample(data_chunk, sfreq, o_sfreq, npad,
window=window, n_jobs=n_jobs, pad=pad))
new_ntimes = new_data[ri].shape[1]

# In empirical testing, it was faster to resample all channels
# (above) and then replace the stim channels than it was to only
# resample the proper subset of channels and then use np.insert()
# to restore the stims.
if len(stim_picks) > 0:
stim_resampled = _resample_stim_channels(
data_chunk[stim_picks], new_data[ri].shape[1],
data_chunk.shape[1])
new_data[ri][stim_picks] = stim_resampled

self._first_samps[ri] = int(self._first_samps[ri] * ratio)
self._last_samps[ri] = self._first_samps[ri] + new_ntimes - 1
self._raw_lengths[ri] = new_ntimes

self._data = np.concatenate(new_data, axis=1)
kwargs = dict(up=sfreq, down=o_sfreq, npad=npad, window=window,
n_jobs=n_jobs, pad=pad)
ratio, n_news = zip(*(_resamp_ratio_len(sfreq, o_sfreq, old_len)
for old_len in self._raw_lengths))
ratio, n_news = ratio[0], np.array(n_news, int)
new_offsets = np.cumsum([0] + list(n_news))
if self.preload:
new_data = np.empty(
(len(self.ch_names), new_offsets[-1]), self._data.dtype)
for ri, (n_orig, n_new) in enumerate(zip(self._raw_lengths, n_news)):
this_sl = slice(new_offsets[ri], new_offsets[ri + 1])
if self.preload:
data_chunk = self._data[:, offsets[ri]:offsets[ri + 1]]
new_data[:, this_sl] = resample(data_chunk, **kwargs)
# In empirical testing, it was faster to resample all channels
# (above) and then replace the stim channels than it was to
# only resample the proper subset of channels and then use
# np.insert() to restore the stims.
if len(stim_picks) > 0:
new_data[stim_picks, this_sl] = _resample_stim_channels(
data_chunk[stim_picks], n_new, data_chunk.shape[1])
else: # this will not be I/O efficient, but will be mem efficient
for ci in range(len(self.ch_names)):
data_chunk = self.get_data(
ci, offsets[ri], offsets[ri + 1], verbose='error')[0]
if ci == 0 and ri == 0:
new_data = np.empty(
(len(self.ch_names), new_offsets[-1]),
data_chunk.dtype)
if ci in stim_picks:
resamp = _resample_stim_channels(
data_chunk, n_new, data_chunk.shape[-1])[0]
else:
resamp = resample(data_chunk, **kwargs)
new_data[ci, this_sl] = resamp

self._first_samps = (self._first_samps * ratio).astype(int)
self._last_samps = (np.array(self._first_samps) + n_news - 1).tolist()
self._raw_lengths[ri] = list(n_news)
assert np.array_equal(n_news, self._last_samps - self._first_samps + 1)
self._data = new_data
self.preload = True
self.info['sfreq'] = sfreq
lowpass = self.info.get('lowpass')
lowpass = np.inf if lowpass is None else lowpass
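The non-preloaded branch above reads and resamples one channel at a time ("not I/O efficient, but will be mem efficient"). A simplified, self-contained sketch of that pattern follows, using scipy's `resample_poly` as a stand-in for `mne.filter.resample`; the function and variable names are illustrative, not MNE's actual implementation:

```python
import numpy as np
from scipy.signal import resample_poly


def resample_by_channel(get_channel, n_channels, n_in, up, down):
    """Per-channel resampling loop: peak memory is one input channel plus the output."""
    ratio = float(up) / down
    n_out = int(round(ratio * n_in))          # same rounding as _resamp_ratio_len
    out = None
    for ci in range(n_channels):
        chan = get_channel(ci)                # stand-in for reading one channel from disk
        res = resample_poly(chan, up, down)[:n_out]
        if out is None:
            out = np.empty((n_channels, n_out), res.dtype)
        out[ci] = res
    return out


# Toy usage: random data stands in for channels that would be read from disk.
rng = np.random.RandomState(0)
data = rng.randn(3, 1000)
resampled = resample_by_channel(lambda ci: data[ci], n_channels=3, n_in=1000, up=1, down=2)
print(resampled.shape)  # (3, 500)
```

Reading channel by channel touches the file once per channel, which is the I/O cost the in-code comment refers to, but it avoids ever holding the full unresampled recording in memory; the preloaded branch instead resamples whole multi-channel segments at once.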
mne/io/fiff/tests/test_raw_fiff.py (19 additions, 7 deletions)
@@ -965,10 +965,22 @@ def test_crop():


@testing.requires_testing_data
def test_resample(tmpdir):
def test_resample_equiv():
"""Test resample (with I/O and multiple files)."""
raw = read_raw_fif(fif_fname).crop(0, 1)
raw_preload = raw.copy().load_data()
for r in (raw, raw_preload):
r.resample(r.info['sfreq'] / 4.)
assert_allclose(raw._data, raw_preload._data)


@testing.requires_testing_data
@pytest.mark.parametrize('preload', (True, False))
def test_resample(tmpdir, preload):
"""Test resample (with I/O and multiple files)."""
raw = read_raw_fif(fif_fname).crop(0, 3)
raw.load_data()
if preload:
raw.load_data()
raw_resamp = raw.copy()
sfreq = raw.info['sfreq']
# test parallel on upsample
@@ -979,22 +991,22 @@ def test_resample(tmpdir):
preload=True)
assert sfreq == raw_resamp.info['sfreq'] / 2
assert raw.n_times == raw_resamp.n_times // 2
assert raw_resamp._data.shape[1] == raw_resamp.n_times
assert raw._data.shape[0] == raw_resamp._data.shape[0]
assert raw_resamp.get_data().shape[1] == raw_resamp.n_times
assert raw.get_data().shape[0] == raw_resamp._data.shape[0]
# test non-parallel on downsample
raw_resamp.resample(sfreq, n_jobs=1, npad='auto')
assert raw_resamp.info['sfreq'] == sfreq
assert raw._data.shape == raw_resamp._data.shape
assert raw.get_data().shape == raw_resamp._data.shape
assert raw.first_samp == raw_resamp.first_samp
assert raw.last_samp == raw.last_samp
# upsampling then downsampling doubles resampling error, but this still
# works (hooray). Note that the stim channels had to be sub-sampled
# without filtering to be accurately preserved
# note we have to treat MEG and EEG+STIM channels differently (tols)
assert_allclose(raw._data[:306, 200:-200],
assert_allclose(raw.get_data()[:306, 200:-200],
raw_resamp._data[:306, 200:-200],
rtol=1e-2, atol=1e-12)
assert_allclose(raw._data[306:, 200:-200],
assert_allclose(raw.get_data()[306:, 200:-200],
raw_resamp._data[306:, 200:-200],
rtol=1e-2, atol=1e-7)
