Merged (changes from 13 commits)
src/spikeinterface/core/analyzer_extension_core.py (22 changes: 18 additions & 4 deletions)
@@ -437,6 +437,7 @@ def _run(self, verbose=False, **job_kwargs):
         return_in_uV = self.sorting_analyzer.return_in_uV

         return_std = "std" in self.params["operators"]
+        sparsity_mask = None if self.sparsity is None else self.sparsity.mask
         output = estimate_templates_with_accumulator(
             recording,
             some_spikes,
@@ -445,17 +446,30 @@ def _run(self, verbose=False, **job_kwargs):
             self.nafter,
             return_in_uV=return_in_uV,
             return_std=return_std,
+            sparsity_mask=sparsity_mask,
             verbose=verbose,
             **job_kwargs,
         )

         # Output of estimate_templates_with_accumulator is either (templates,) or (templates, stds)
         if return_std:
             templates, stds = output
-            self.data["average"] = templates
-            self.data["std"] = stds
+            data = dict(average=templates, std=stds)
         else:
-            self.data["average"] = output
+            templates = output
+            data = dict(average=templates)

+        if self.sparsity is not None:
+            # make average and std dense again
+            for k, arr in data.items():
+                dense_arr = np.zeros(
+                    (arr.shape[0], arr.shape[1], self.sorting_analyzer.get_num_channels()),
+                    dtype=arr.dtype,
+                )
+                for unit_index, unit_id in enumerate(self.sorting_analyzer.unit_ids):
+                    chan_inds = self.sparsity.unit_id_to_channel_indices[unit_id]
+                    dense_arr[unit_index][:, chan_inds] = arr[unit_index, :, : chan_inds.size]
+                data[k] = dense_arr
+        self.data.update(data)

     def _compute_and_append_from_waveforms(self, operators):
         if not self.sorting_analyzer.has_extension("waveforms"):
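For intuition, the scatter that the new densification loop performs can be sketched standalone (plain numpy, independent of the spikeinterface API; all names below are invented for the example):

import numpy as np

num_units, num_samples, num_channels = 3, 5, 8
rng = np.random.default_rng(0)

# boolean sparsity mask: mask[u, c] is True when channel c is active for unit u
mask = rng.random((num_units, num_channels)) < 0.5
max_active = int(mask.sum(axis=1).max())

# sparse templates: each unit stores only its active channels, left-aligned
sparse_templates = rng.standard_normal((num_units, num_samples, max_active))

# scatter back into a dense (num_units, num_samples, num_channels) array,
# as the loop in the diff does for each data key
dense = np.zeros((num_units, num_samples, num_channels), dtype=sparse_templates.dtype)
for unit_index in range(num_units):
    (chan_inds,) = np.nonzero(mask[unit_index])
    dense[unit_index][:, chan_inds] = sparse_templates[unit_index, :, : chan_inds.size]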
Inline review comment from a maintainer (Member) on the densification loop:

We have the exact same logic in ChannelSparsity.densify_waveforms.

My proposal is to add a densify_templates method to the ChannelSparsity class (it already has a sparsify_templates):

def densify_templates(self, templates_array: np.ndarray) -> np.ndarray:
    # expects a sparse array of shape (num_units, num_samples, max_num_active_channels)
    assert templates_array.shape[0] == self.num_units

    densified_shape = (self.num_units, templates_array.shape[1], self.num_channels)
    dense_templates = np.zeros(shape=densified_shape, dtype=templates_array.dtype)
    for unit_index, unit_id in enumerate(self.unit_ids):
        # densify_waveforms works on a batch of waveforms, so pass the template as a batch of one
        sparse_template = templates_array[unit_index, ...]
        dense_template = self.densify_waveforms(waveforms=sparse_template[np.newaxis, :, :], unit_id=unit_id)
        dense_templates[unit_index, :, :] = dense_template

    return dense_templates
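
For illustration, a possible round-trip with the existing sparsify_templates (hypothetical usage; sparsity stands for a ChannelSparsity instance and dense_templates for a (num_units, num_samples, num_channels) array):

# dense -> sparse -> dense; channels outside each unit's sparsity come back as zeros
sparse_templates = sparsity.sparsify_templates(dense_templates)
dense_again = sparsity.densify_templates(sparse_templates)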

Then the logic in the ComputeTemplates extension could simply become:

if self.sparsity is not None:
    # make average and std dense again
    for k, arr in data.items():
        dense_arr = self.sparsity.densify_templates(arr)
        data[k] = dense_arr
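
Since the loop body is now a single call, it could equally be written as a dict comprehension (purely a stylistic alternative):

data = {key: self.sparsity.densify_templates(arr) for key, arr in data.items()}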

What do you think?
