Commit: fast conversion
dizcza committed Oct 25, 2020
1 parent eb70d27 commit 080fcb3
Showing 2 changed files with 19 additions and 15 deletions.
elephant/conversion.py: 29 changes (14 additions, 15 deletions)
@@ -20,7 +20,7 @@
 import quantities as pq
 import scipy.sparse as sps
 
-from elephant.utils import is_binary, deprecated_alias
+from elephant.utils import is_binary, deprecated_alias, rescale_magnitude
 
 __all__ = [
     "binarize",
@@ -186,7 +186,8 @@ def _detect_rounding_errors(values, tolerance):
"""
if tolerance is None:
return np.zeros_like(values, dtype=bool)
return 1 - (values % 1) <= tolerance
# same as '1 - (values % 1) <= tolerance' but faster
return 1 - tolerance <= values % 1


def _calc_tstart(n_bins, bin_size, t_stop):
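
A minimal sketch (toy values, not part of this commit) of why the rewritten check is equivalent: both forms flag values whose fractional part lies within tolerance of the next integer, but the new form compares against the scalar 1 - tolerance and so skips one full-array subtraction.

    import numpy as np

    values = np.array([0.4999999999, 2.0, 3.9999999999, 7.25])
    tolerance = 1e-8

    old = 1 - (values % 1) <= tolerance   # allocates an extra array for 1 - (values % 1)
    new = 1 - tolerance <= values % 1     # '1 - tolerance' is a plain scalar
    assert np.array_equal(old, new)       # [False, False, True, False]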
Expand Down Expand Up @@ -901,8 +902,7 @@ def to_array(self, store_array=False):
         if store_array:
             self._store_array()
             return self._mat_u
-        # Matrix on demand
-        else:
+        else:  # Matrix on demand
             return self._sparse_mat_u.toarray()
 
     def _store_array(self):
@@ -972,16 +972,17 @@ def _convert_to_binned(self, spiketrains):
"""
if not self.is_spiketrain:
self._sparse_mat_u = sps.csr_matrix(spiketrains, dtype=int)
self._sparse_mat_u = sps.csr_matrix(spiketrains, dtype=np.int32)
return

row_ids, column_ids = [], []
# data
counts = []

for idx, st in enumerate(spiketrains):
times = (st.times - self.t_start).rescale(self.bin_size.units)
scale = np.array((times / self.bin_size).magnitude)
times = rescale_magnitude(st.times - self.t_start,
units=self.bin_size.units)
scale = times / self.bin_size.item()

# shift spikes that are very close
# to the right edge into the next bin
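
A minimal sketch (toy numbers, helper inlined) of the old versus new scale computation in this loop: the new path converts the spike times to a bare numpy array once and then divides by a plain float, instead of carrying pq.Quantity objects through the arithmetic.

    import numpy as np
    import quantities as pq

    spike_times = np.array([0.5, 1.2, 3.7]) * pq.s
    t_start = 0 * pq.s
    bin_size = 100 * pq.ms

    # old path: Quantity arithmetic all the way down
    times_old = (spike_times - t_start).rescale(bin_size.units)
    scale_old = np.array((times_old / bin_size).magnitude)

    # new path: one scalar conversion factor, then plain numpy operations
    shifted = spike_times - t_start
    times_new = shifted.magnitude * shifted.units.rescale(bin_size.units).item()
    scale_new = times_new / bin_size.item()

    assert np.allclose(scale_old, scale_new)   # both give [ 5., 12., 37.]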
@@ -995,22 +996,20 @@
                               'behaviour.'.format(num_rounding_corrections))
             scale[rounding_error_indices] += .5
 
-            scale = scale.astype(int)
+            scale = scale.astype(np.int32)
 
-            la = np.logical_and(times >= 0 * self.bin_size.units,
-                                times <= (self.t_stop
-                                          - self.t_start).rescale(
-                                    self.bin_size.units))
+            duration = (self.t_stop - self.t_start).rescale(
+                self.bin_size.units).magnitude
+            la = np.logical_and(times >= 0, times <= duration)
             filled_tmp = scale[la]
             filled_tmp = filled_tmp[filled_tmp < self.n_bins]
             f, c = np.unique(filled_tmp, return_counts=True)
             column_ids.extend(f)
             counts.extend(c)
             row_ids.extend([idx] * len(f))
         csr_matrix = sps.csr_matrix((counts, (row_ids, column_ids)),
-                                    shape=(len(spiketrains),
-                                           self.n_bins),
-                                    dtype=int)
+                                    shape=(len(spiketrains), self.n_bins),
+                                    dtype=np.int32)
         self._sparse_mat_u = csr_matrix
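
A toy illustration (two spike trains, five bins; not from the commit) of the (data, (rows, cols)) form used above: each count is placed at its (spike train, bin) position and the rest of the matrix stays as implicit zeros.

    import numpy as np
    import scipy.sparse as sps

    counts = [2, 1, 3]          # spike counts per occupied bin
    row_ids = [0, 0, 1]         # which spike train
    column_ids = [1, 4, 2]      # which time bin
    mat = sps.csr_matrix((counts, (row_ids, column_ids)),
                         shape=(2, 5), dtype=np.int32)
    print(mat.toarray())
    # [[0 2 0 0 1]
    #  [0 0 3 0 0]]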


elephant/utils.py: 5 changes (5 additions, 0 deletions)
@@ -91,3 +91,8 @@ def is_time_quantity(x, allow_none=False):
     if not isinstance(x, pq.Quantity):
         return False
     return x.dimensionality.simplified == pq.Quantity(1, "s").dimensionality
+
+
+def rescale_magnitude(neo_object, units):
+    factor = neo_object.units.rescale(units).item()
+    return neo_object.magnitude * factor
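
A short usage sketch of the new helper (illustrative values): unlike Quantity.rescale, it returns a bare numpy array already expressed in the requested units, which is what the binning loop in conversion.py now works with.

    import numpy as np
    import quantities as pq
    from elephant.utils import rescale_magnitude   # available after this commit

    times = np.array([0.5, 1.2, 3.7]) * pq.s
    mag_ms = rescale_magnitude(times, units=pq.ms)
    print(mag_ms)                                  # [ 500. 1200. 3700.]
    assert not isinstance(mag_ms, pq.Quantity)     # plain ndarray, no units attached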
