Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove default values used as expressions in generate.py. #2345

Merged
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 10 additions & 5 deletions src/spikeinterface/core/generate.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from __future__ import annotations
import math
import warnings
import numpy as np
Expand Down Expand Up @@ -465,9 +466,9 @@ def inject_some_duplicate_units(sorting, num=4, max_shift=5, ratio=None, seed=No
return sorting_with_dup


def inject_some_split_units(sorting, split_ids=[], num_split=2, output_ids=False, seed=None):
def inject_some_split_units(sorting, split_ids: list, num_split=2, output_ids=False, seed=None):
""" """
assert len(split_ids) > 0, "you need to provide some ids to split"

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@alejoe91
The error was caused by the fact that np.array(0) behaves as False. I should know better than to play with the truthiness of containers.

However, since in this case you actually require the split_ids to be non-empty, I just made the argument a required one.

unit_ids = sorting.unit_ids
assert unit_ids.dtype.kind == "i"

Expand Down Expand Up @@ -867,8 +868,8 @@ def generate_templates(
seed=None,
dtype="float32",
upsample_factor=None,
unit_params=dict(),
unit_params_range=dict(),
unit_params=None,
unit_params_range=None,
):
"""
Generate some templates from the given channel positions and neuron positions.
Expand Down Expand Up @@ -924,6 +925,9 @@ def generate_templates(
* (num_units, num_samples, num_channels, upsample_factor) if upsample_factor is not None

"""

unit_params = unit_params or dict()
unit_params_range = unit_params_range or dict()
rng = np.random.default_rng(seed=seed)

# neuron location must be 3D
Expand Down Expand Up @@ -1383,7 +1387,7 @@ def generate_ground_truth_recording(
generate_sorting_kwargs=dict(firing_rates=15, refractory_period_ms=4.0),
noise_kwargs=dict(noise_level=5.0, strategy="on_the_fly"),
generate_unit_locations_kwargs=dict(margin_um=10.0, minimum_z=5.0, maximum_z=50.0, minimum_distance=20),
generate_templates_kwargs=dict(),
generate_templates_kwargs=None,
dtype="float32",
seed=None,
):
Expand Down Expand Up @@ -1442,6 +1446,7 @@ def generate_ground_truth_recording(
sorting: Sorting
The generated sorting extractor.
"""
generate_templates_kwargs = generate_templates_kwargs or dict()

# TODO implement upsample_factor in InjectTemplatesRecording and propagate into toy_example

Expand Down
14 changes: 6 additions & 8 deletions src/spikeinterface/curation/tests/test_auto_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,15 @@ def test_get_auto_merge_list():
num_unit_splited = 1
num_split = 2

split_ids = sorting.unit_ids[:num_unit_splited]
sorting_with_split, other_ids = inject_some_split_units(
sorting, split_ids=sorting.unit_ids[:num_unit_splited], num_split=num_split, output_ids=True, seed=42
sorting,
split_ids=split_ids,
num_split=num_split,
output_ids=True,
seed=42,
)

print(sorting_with_split)
print(sorting_with_split.unit_ids)
print(other_ids)

# rec = rec.save()
# sorting_with_split = sorting_with_split.save()
# wf_folder = cache_folder / "wf_auto_merge"
Expand All @@ -42,7 +43,6 @@ def test_get_auto_merge_list():
# we = extract_waveforms(rec, sorting_with_split, mode="folder", folder=wf_folder, n_jobs=1)

we = extract_waveforms(rec, sorting_with_split, mode="memory", folder=None, n_jobs=1)
# print(we)

potential_merges, outs = get_potential_auto_merge(
we,
Expand All @@ -63,8 +63,6 @@ def test_get_auto_merge_list():
firing_contamination_balance=1.5,
extra_outputs=True,
)
# print(potential_merges)
# print(num_unit_splited)

assert len(potential_merges) == num_unit_splited
for true_pair in other_ids.values():
Expand Down