Commit 4763c49

Merge pull request #1461 from NeuralEnsemble/black-formatting

Black formatting

zm711 authored Apr 15, 2024
2 parents e9b57b0 + b84a1d8 commit 4763c49
Showing 10 changed files with 41 additions and 31 deletions.
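All ten files are style-only rewrites produced by the Black formatter: quote normalization, trailing-whitespace removal, blank-line rules, and line wrapping. As a rough illustration of the kind of rewriting involved, a minimal sketch (not part of this commit) using Black's Python API; it assumes `black` is installed, and `black.Mode()` stands in for whatever configuration the project actually uses:

import black

src = "x = {  'a':37,'b':42}\n"
# Black normalizes quotes and spacing in one deterministic pass
print(black.format_str(src, mode=black.Mode()))  # -> x = {"a": 37, "b": 42}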
5 changes: 3 additions & 2 deletions examples/plot_igorio.py
@@ -3,6 +3,7 @@
 ===========================
 """
+
 ###########################################################
 # Import our packages
 import os
@@ -28,7 +29,7 @@
 zip_ref.close()

 ######################################################
-# Once we have our data we can use `get_io` to find an
+# Once we have our data we can use `get_io` to find an
 # io (Igor in this case). Then we read the analogsignals
 # Finally we will make some nice plots
 reader = get_io(filename)
@@ -37,4 +38,4 @@
 plt.xlabel(signal.sampling_period.dimensionality)
 plt.ylabel(signal.dimensionality)

-plt.show()
+plt.show()
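For context, a minimal sketch (not from the commit) of the `get_io` pattern this example is built around; the filename is hypothetical and an installed neo with Igor support is assumed:

import matplotlib.pyplot as plt
from neo.io import get_io

filename = "example.ibw"  # hypothetical Igor file
reader = get_io(filename)  # picks an IO class (IgorIO here) from the extension
block = reader.read_block()
signal = block.segments[0].analogsignals[0]

plt.plot(signal.times.magnitude, signal.magnitude)
plt.xlabel(signal.sampling_period.dimensionality)
plt.ylabel(signal.dimensionality)
plt.show()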
6 changes: 3 additions & 3 deletions examples/plot_imageseq.py
@@ -17,7 +17,7 @@

 ############################################################
 # Now we need to generate some data
-# We will just make a nice box and then we can attach this
+# We will just make a nice box and then we can attach this
 # ImageSequence to a variety of ROIs
 # our ImageSequence will be 50 frames of 100x100 pixel images

@@ -36,9 +36,9 @@
 image_seq = ImageSequence(l, sampling_rate=500 * pq.Hz, spatial_scale="m", units="V")

 result = image_seq.signal_from_region(
-    CircularRegionOfInterest(image_seq,50, 50, 25),
+    CircularRegionOfInterest(image_seq, 50, 50, 25),
     CircularRegionOfInterest(image_seq, 10, 10, 5),
-    PolygonRegionOfInterest(image_seq,(50, 25), (50, 45), (14, 65), (90, 80)),
+    PolygonRegionOfInterest(image_seq, (50, 25), (50, 45), (14, 65), (90, 80)),
 )

 ###############################################################
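A self-contained sketch of the ROI calls reformatted above, with made-up random data standing in for the example's box stimulus:

import numpy as np
import quantities as pq
from neo.core import ImageSequence, CircularRegionOfInterest, PolygonRegionOfInterest

frames = np.random.rand(50, 100, 100)  # 50 frames of 100x100 pixels
image_seq = ImageSequence(frames, sampling_rate=500 * pq.Hz, spatial_scale="m", units="V")

# signal_from_region returns one signal per region, averaged over its pixels
result = image_seq.signal_from_region(
    CircularRegionOfInterest(image_seq, 50, 50, 25),  # center x, center y, radius
    PolygonRegionOfInterest(image_seq, (50, 25), (50, 45), (14, 65), (90, 80)),
)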
5 changes: 3 additions & 2 deletions examples/plot_multi_tetrode_example.py
@@ -2,6 +2,7 @@
 Analyzing and Plotting Data with Neo Structures
 ===============================================
 """
+
 ######################################################
 # First we import some packages. Since we are making simulated
 # data we will import quite a few neo features as well as use
@@ -31,7 +32,7 @@

 ##################################################################################
 # Neo can also have groups. Groups are structures within a block that can cross segments
-# for example we could group a neuron across trials or across probes.
+# for example we could group a neuron across trials or across probes.

 # Create a group for each neuron, annotate each group with the tetrode from which it was recorded
 groups = []
@@ -86,7 +87,7 @@
 # since its data can be treated like numpy arrays
 # it is easy to use standard packages like matplotlib
 # for all your plotting needs
-# We do a classic in neuroscience and show various ways
+# We do a classic in neuroscience and show various ways
 # to plot a PSTH (peristimulus time histogram)

 ###################################################
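A minimal sketch (with made-up spike times) of the cross-segment grouping idea described in the hunk above:

import numpy as np
import quantities as pq
from neo import Block, Segment, Group, SpikeTrain

block = Block(name="tetrode session")
for trial in range(3):
    seg = Segment(name=f"trial {trial}")
    seg.spiketrains.append(SpikeTrain(np.sort(np.random.rand(20)) * pq.s, t_stop=1 * pq.s))
    block.segments.append(seg)

# group the "same neuron" across all trials, annotated with its tetrode
group = Group(objects=[seg.spiketrains[0] for seg in block.segments], name="neuron 0")
group.annotate(tetrode=1)
block.groups.append(group)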
5 changes: 2 additions & 3 deletions examples/plot_read_files_neo_io.py
@@ -19,7 +19,6 @@
 urllib.request.urlretrieve(distantfile, localfile)


-
 ###################################################
 # Now we can create our reader and read some data

@@ -31,7 +30,7 @@

 ######################################################
 # Once we have our blocks we can iterate through each
-# block of data and see the contents of all parts of
+# block of data and see the contents of all parts of
 # that data

 # access to segments
@@ -45,7 +44,7 @@

 #######################################################
 # Let's look at another file type

 # CED Spike2 files
 distantfile = url_repo + "spike2/File_spike2_1.smr"
 localfile = "./File_spike2_1.smr"
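A sketch of the read-and-iterate pattern this example demonstrates, using the Spike2 file downloaded in the snippet above (local path assumed unchanged):

from neo.io import Spike2IO

reader = Spike2IO(filename="./File_spike2_1.smr")
blocks = reader.read(lazy=False)  # a list of Block objects

for block in blocks:
    for seg in block.segments:
        print(f"segment: {seg}")
        print(f"  analog signals: {seg.analogsignals}")
        print(f"  spike trains: {seg.spiketrains}")
        print(f"  events: {seg.events}")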
11 changes: 6 additions & 5 deletions examples/plot_read_files_neo_rawio.py
@@ -4,6 +4,7 @@
 compare with read_files_neo_io.py
 """
+
 ###########################################################
 # First we import a RawIO from neo.rawio
 # For this example we will use PlexonRawIO
@@ -42,9 +43,9 @@
 ###############################################################
 # Read signal chunks
 # This is how we read raw data. We choose indices that we want or
-# we can use None to mean look at all channels. We also need to
+# we can use None to mean look at all channels. We also need to
 # specify the block of data (block_index) as well as the segment
-# (seg_index). Then we give the index start and stop. Since we
+# (seg_index). Then we give the index start and stop. Since we
 # often think in time: to go from time to index would just require
 # the sample rate (so index = time * sampling_rate)

@@ -82,7 +83,7 @@

 # Count units and spikes per unit
 nb_unit = reader.spike_channels_count()
-print(f"nb_unit: {nb_unit}\n") # nb_unit stands for number of units
+print(f"nb_unit: {nb_unit}\n")  # nb_unit stands for number of units
 print("spike_channel_index nb_spike")
 for spike_channel_index in range(nb_unit):
     nb_spike = reader.spike_count(block_index=0, seg_index=0, spike_channel_index=spike_channel_index)
@@ -99,7 +100,7 @@

 #######################################################################
 # Some file formats can also give waveform information. We are lucky
-# again our file has waveform data!! Waveforms are a 3d dataset of
+# again our file has waveform data!! Waveforms are a 3d dataset of
 # (nb_spike, nb_channel, nb_sample)

 # Read spike waveforms
@@ -134,7 +135,7 @@

 nb_event_channel = reader.event_channels_count()
 print(f"nb_event_channel: {nb_event_channel}")
-# now iterate through the channels
+# now iterate through the channels
 for chan_index in range(nb_event_channel):
     nb_event = reader.event_count(block_index=0, seg_index=0, event_channel_index=chan_index)
     print(f"chan_index: {chan_index} nb_event: {nb_event}\n")
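A sketch of the RawIO chunk-reading calls that appear in this file, with the time-to-index conversion made explicit; the filename and the single stream index are assumptions:

from neo.rawio import PlexonRawIO

reader = PlexonRawIO(filename="File_plexon_3.plx")  # hypothetical local file
reader.parse_header()  # RawIOs must parse the header before any reading

sampling_rate = reader.get_signal_sampling_rate(stream_index=0)
i_start = 0
i_stop = int(0.5 * sampling_rate)  # index = time * sampling_rate, here 0.5 s

raw_chunk = reader.get_analogsignal_chunk(
    block_index=0, seg_index=0, i_start=i_start, i_stop=i_stop,
    stream_index=0, channel_indexes=None,  # None means all channels in the stream
)
float_chunk = reader.rescale_signal_raw_to_float(raw_chunk, stream_index=0)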
22 changes: 12 additions & 10 deletions examples/plot_read_proxy_with_lazy_load.py
@@ -39,7 +39,7 @@

 ############################################################
 # as always we can view some interesting information about the
-# metadata and structure of a file just by printing the reader and
+# metadata and structure of a file just by printing the reader and
 # its header
 print(reader)
 print(f"Header information: {reader.header}")
@@ -49,31 +49,33 @@
 # Now let's make a function that we want to apply to
 # look at lazy vs eager uses of the API

+
 def apply_my_fancy_average(sig_list):
     """basic average along triggers and then channels
     here we go back to numpy with magnitude
     to be able to use np.stack.
     Because neo uses quantities to keep track of units
-    we can always get just the magnitude of an array
+    we can always get just the magnitude of an array
     with `.magnitude`
     """
     sig_list = [s.magnitude for s in sig_list]
     sigs = np.stack(sig_list, axis=0)
     return np.mean(np.mean(sigs, axis=0), axis=1)

+
 #################################################
 # Let's set our limits for both cases. We will
-# use quantities to include time dimensions.
+# use quantities to include time dimensions.

-lim_start = -20 * pq.ms # 20 milliseconds before
-lim_end = +20 * pq.ms # 20 milliseconds after
+lim_start = -20 * pq.ms  # 20 milliseconds before
+lim_end = +20 * pq.ms  # 20 milliseconds after

 ##################################################
 # We start with eager (where `lazy=False`). Everything
 # is loaded into memory. We will read a segment of data.
-# This includes analog signal data and events data
-# (final contents of a segment are dependent on the
+# This includes analog signal data and events data
+# (final contents of a segment are dependent on the
 # underlying IO being used)


@@ -86,7 +88,7 @@ def apply_my_fancy_average(sig_list):
     anasig_chunk = anasig.time_slice(t0, t1)
     all_sig_chunks.append(anasig_chunk)

-# After pulling all data into memory and then iterating through triggers
+# After pulling all data into memory and then iterating through triggers
 # we end by doing our average
 m1 = apply_my_fancy_average(all_sig_chunks)

@@ -111,7 +113,7 @@ def apply_my_fancy_average(sig_list):
 m2 = apply_my_fancy_average(all_sig_chunks)

 ##########################################################
-# We see that either way the result is the same, but
+# We see that either way the result is the same, but
 # we do not exhaust our RAM/memory
 print(f"Eagerly loading data and averaging: {m1}")
 print(f"Lazy loading data and averaging: {m2}")
6 changes: 3 additions & 3 deletions neo/rawio/axonrawio.py
@@ -481,12 +481,12 @@ def parse_axon_soup(filename):
         # brittle. pyABF believes that looking for the \x00\x00 is more
         # robust. We find these values, replace mu->u, then split into
         # a set of strings
-        indexed_string = big_string[big_string.rfind(b'\x00\x00'):]
+        indexed_string = big_string[big_string.rfind(b"\x00\x00") :]
         # replace mu -> u for easy display
-        indexed_string = indexed_string.replace(b'\xb5', b'\x75')
+        indexed_string = indexed_string.replace(b"\xb5", b"\x75")
         # we need to remove one of the \x00 to have the indices be
         # the correct order
-        indexed_string = indexed_string.split(b'\x00')[1:]
+        indexed_string = indexed_string.split(b"\x00")[1:]
         strings = indexed_string

         # ADC sections
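A toy, self-contained illustration (not from the commit) of the byte-string handling in parse_axon_soup, matching the comments above; the input bytes are invented:

big_string = b"header junk\x00\x00\x03mV\x00\x02\xb5V\x00"

# jump to the last run of two null bytes
indexed_string = big_string[big_string.rfind(b"\x00\x00") :]
# replace mu (0xb5) with "u" for easy display
indexed_string = indexed_string.replace(b"\xb5", b"\x75")
# split on single nulls; dropping the first chunk removes one leading \x00
strings = indexed_string.split(b"\x00")[1:]
print(strings)  # [b'', b'\x03mV', b'\x02uV', b'']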
4 changes: 3 additions & 1 deletion neo/rawio/spikegadgetsrawio.py
@@ -177,7 +177,9 @@ def _parse_header(self):
         chan_ind = 0
         self.is_scaleable = "spikeScalingToUv" in sconf[0].attrib
         if not self.is_scaleable:
-            self.logger.warning("Unable to read channel gain scaling (to uV) from .rec header. Data has no physical units!")
+            self.logger.warning(
+                "Unable to read channel gain scaling (to uV) from .rec header. Data has no physical units!"
+            )

         for trode in sconf:
             if "spikeScalingToUv" in trode.attrib:
7 changes: 5 additions & 2 deletions neo/test/iotest/test_get_io.py
@@ -3,8 +3,10 @@
 import platform
 from neo.io import get_io, list_candidate_ios, NixIO
 import pytest
+
 try:
     import nixio
+
     HAVE_NIX = True
 except:
     HAVE_NIX = False
@@ -35,7 +37,8 @@ def test_list_candidate_ios_filename_stub():

     assert NixIO in ios

-@pytest.mark.skipif(not HAVE_NIX or platform.system()=='Windows', reason='Need nixio in order to return NixIO class')
+
+@pytest.mark.skipif(not HAVE_NIX or platform.system() == "Windows", reason="Need nixio in order to return NixIO class")
 def test_get_io_non_existant_file_writable_io():
     # use nixio for testing with writable io
     non_existant_file = Path("non_existant_file.nix")
@@ -45,4 +48,4 @@ def test_get_io_non_existant_file_writable_io():
     assert isinstance(io, NixIO)

     # cleanup
-    non_existant_file.unlink(missing_ok=True) # cleanup will fail on Windows so need to skip
+    non_existant_file.unlink(missing_ok=True)  # cleanup will fail on Windows so need to skip
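A sketch of what the reformatted test checks: for a filename that does not exist yet, get_io falls back to a writable IO class such as NixIO. This assumes nixio is installed and a non-Windows platform, mirroring the skipif above; the filename here is hypothetical:

from pathlib import Path
from neo.io import get_io, NixIO

target = Path("scratch_file.nix")  # does not exist yet
io = get_io(target)
assert isinstance(io, NixIO)
io.close()
target.unlink(missing_ok=True)  # cleanup, as in the test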
1 change: 1 addition & 0 deletions neo/test/rawiotest/tools.py
@@ -17,6 +17,7 @@ def can_use_network():
         return False
     try:
         import datalad
+
         HAVE_DATALAD = True
     except:
         HAVE_DATALAD = False
