
merge
alejoe91 committed Mar 23, 2023
2 parents 207e1e8 + efc8080 commit 78ef8ea
Showing 7 changed files with 162 additions and 14 deletions.
110 changes: 110 additions & 0 deletions .github/workflows/caches_cron_job.yml
@@ -0,0 +1,110 @@
name: Create caches for ephy_testing_data and conda env

on:
  workflow_dispatch:  # Workflow can be triggered manually via the GH Actions web interface
  push:  # When something is pushed into master, this checks whether caches need to be re-created
    branches:
      - master
  schedule:
    - cron: "0 12 * * *"  # Daily at noon UTC

jobs:

  create-conda-env-cache-if-missing:
    name: Caching conda env
    runs-on: "ubuntu-latest"
    strategy:
      fail-fast: true
    defaults:
      # by default run in bash mode (required for conda usage)
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v3

      - name: Get current year-month
        id: date
        run: |
          echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT

      - name: Get current dependencies hash
        id: dependencies
        run: |
          echo "hash=${{hashFiles('**/pyproject.toml', '**/environment_testing.yml')}}" >> $GITHUB_OUTPUT

      - uses: actions/cache@v3
        # the cache for python packages is reset:
        # * every month
        # * when package dependencies change
        id: cache-conda-env
        with:
          path: /usr/share/miniconda/envs/neo-test-env
          key: ${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-${{ steps.date.outputs.date }}

      - name: Cache found?
        run: echo "Cache-hit == ${{steps.cache-conda-env.outputs.cache-hit == 'true'}}"

      # activate environment if not restored from cache
      - uses: conda-incubator/setup-miniconda@v2
        if: steps.cache-conda-env.outputs.cache-hit != 'true'
        with:
          activate-environment: neo-test-env
          python-version: 3.9

      - name: Create the conda environment to be cached
        if: steps.cache-conda-env.outputs.cache-hit != 'true'
        # create the conda env, configure git and install pip, neo and test dependencies from master
        # for PRs that change dependencies, this environment will be updated in the test workflow
        run: |
          conda env update neo-test-env --file environment_testing.yml
          git config --global user.email "neo_ci@fake_mail.com"
          git config --global user.name "neo CI"
          python -m pip install -U pip  # Official recommended way
          pip install --upgrade -e .
          pip install .[test]

  create-data-cache-if-missing:
    name: Caching data env
    runs-on: "ubuntu-latest"
    steps:
      - name: Get current hash (SHA) of the ephy_testing_data repo
        id: ephy_testing_data
        run: |
          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v3
        # Loading cache of ephys_testing_dataset
        id: cache-datasets
        with:
          path: ~/ephy_testing_data
          key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}

      - name: Cache found?
        run: echo "Cache-hit == ${{steps.cache-datasets.outputs.cache-hit == 'true'}}"

      - name: Installing datalad and git-annex
        if: steps.cache-datasets.outputs.cache-hit != 'true'
        run: |
          git config --global user.email "neo_ci@fake_mail.com"
          git config --global user.name "neo CI"
          python -m pip install -U pip  # Official recommended way
          pip install datalad-installer
          datalad-installer --sudo ok git-annex --method datalad/packages
          pip install datalad
          git config --global filter.annex.process "git-annex filter-process"  # recommended for efficiency

      - name: Download dataset
        if: steps.cache-datasets.outputs.cache-hit != 'true'
        # Download the repository and also fetch the data
        run: |
          cd ~
          datalad install --recursive --get-data https://gin.g-node.org/NeuralEnsemble/ephy_testing_data

      - name: Show size of the cache to assert data is downloaded
        run: |
          cd ~
          pwd
          du -hs ephy_testing_data
          cd ephy_testing_data
          pwd
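
For reference, the primary key assembled above concatenates the runner OS, the dependency-files hash, and the current year-month, so the conda env cache is rebuilt whenever the dependencies change and, at the latest, at every month boundary. An illustrative resulting key on ubuntu-latest (hash value shortened and made up):

    Linux-conda-env-a1b2c3d4e5f6-2023-03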
7 changes: 7 additions & 0 deletions .github/workflows/core-test.yml
@@ -4,11 +4,18 @@ on:
   pull_request:
     branches: [master]
     types: [synchronize, opened, reopened, ready_for_review]
+    paths:
+      - 'neo/core/**'
+      - 'pyproject.toml'
 
   # run checks on any change of master, including merge of PRs
   push:
     branches: [master]
 
+concurrency:  # Cancel previous workflows on the same pull request
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   multi-os-python-numpy:
     runs-on: ${{ matrix.os }}
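
The new concurrency block scopes cancellation to workflow-plus-ref: for pull requests, github.ref resolves to refs/pull/<PR number>/merge, so a new push to the same PR cancels the previous in-progress run, while runs on master (refs/heads/master) are unaffected by PR activity. An illustrative group value (workflow name and PR number hypothetical):

    Core test-refs/pull/1234/merge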
42 changes: 32 additions & 10 deletions .github/workflows/io-test.yml
@@ -9,6 +9,9 @@ on:
   push:
     branches: [master]
 
+concurrency:  # Cancel previous workflows on the same pull request
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
 
 jobs:
   build-and-test:
@@ -35,34 +38,44 @@ jobs:
 
       - name: Get ephy_testing_data current head hash
         # the key depends on the last commit of the repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
-        id: ephy_testing_data_hash
+        id: ephy_testing_data
         run: |
-          echo "latest_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
+          echo "dataset_hash=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
-      - uses: actions/cache@v3
+      - uses: actions/cache/restore@v3
         # Loading cache of ephys_testing_dataset
         id: cache-datasets
         with:
           path: ~/ephy_testing_data
-          key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data_hash.outputs.latest_hash }}
+          key: ${{ runner.os }}-datasets-${{ steps.ephy_testing_data.outputs.dataset_hash }}
+          restore-keys: ${{ runner.os }}-datasets-
 
       - uses: conda-incubator/setup-miniconda@v2
         with:
           activate-environment: neo-test-env
           python-version: ${{ matrix.python-version }}
+          clean-patched-environment-file: false
 
-      - uses: actions/cache@v3
+      - name: Get current dependencies hash
+        id: dependencies
+        run: |
+          echo "hash=${{hashFiles('**/pyproject.toml', '**/environment_testing.yml')}}" >> $GITHUB_OUTPUT
+      - uses: actions/cache/restore@v3
         # the cache for python packages is reset:
         # * every month
-        # * when requirements/requirements_testing change
+        # * when package dependencies change
         id: cache-conda-env
         with:
           path: /usr/share/miniconda/envs/neo-test-env
-          key: ${{ runner.os }}-conda-env-${{ hashFiles('**/pyproject.toml') }}-${{ steps.date.outputs.date }}
+          key: ${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-${{ steps.date.outputs.date }}
+          # restore-keys match any key that starts with the restore-key
+          restore-keys: |
+            ${{ runner.os }}-conda-env-${{ steps.dependencies.outputs.hash }}-
+            ${{ runner.os }}-conda-env-
       - name: Install testing dependencies
-        # testing environment is only installed if no cache was found
+        # testing environment is only created from yml if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
         if: steps.cache-conda-env.outputs.cache-hit != 'true'
         run: |
           conda env update neo-test-env --file environment_testing.yml
@@ -72,11 +85,20 @@ jobs:
           git config --global user.email "neo_ci@fake_mail.com"
           git config --global user.name "neo CI"
-      - name: Install neo
+      - name: Install neo including dependencies
+        # installation with dependencies is only required if no cache was found
+        # restore-key hits should result in `cache-hit` == 'false'
         if: steps.cache-conda-env.outputs.cache-hit != 'true'
         run: |
           pip install --upgrade -e .
           pip install .[test]
+      - name: Install neo without dependencies
+        # only installing the neo version to test, as dependencies should already be in the cached conda env
+        if: steps.cache-conda-env.outputs.cache-hit == 'true'
+        run: |
+          pip install --no-dependencies -e .
       - name: Test with pytest
         run: |
           # only neo.rawio and neo.io
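
Worth spelling out for the restore logic above: actions/cache/restore sets cache-hit to 'true' only on an exact primary-key match; a fallback match via restore-keys leaves it 'false', which is exactly what the step comments rely on. The two install steps therefore branch on one condition; a sketch of the equivalent shell logic (the CACHE_HIT variable is illustrative, standing in for steps.cache-conda-env.outputs.cache-hit):

    if [ "$CACHE_HIT" != "true" ]; then
        # no exact match: refresh the (possibly stale, restore-key-matched) env
        conda env update neo-test-env --file environment_testing.yml
        pip install --upgrade -e .
        pip install .[test]
    else
        # exact match: dependencies already live in the cached conda env
        pip install --no-dependencies -e .
    fi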
2 changes: 1 addition & 1 deletion neo/io/nixio.py
@@ -630,7 +630,7 @@ def write_block(self, block, use_obj_names=False):
             metadata["neo_name"] = neoname
         nixblock.definition = block.description
         if block.rec_datetime:
-            nix_rec_dt = int(block.rec_datetime.strftime("%s"))
+            nix_rec_dt = int(block.rec_datetime.timestamp())
             nixblock.force_created_at(nix_rec_dt)
         if block.file_datetime:
             fdt, annotype = dt_to_nix(block.file_datetime)
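
The one-line change above deserves a note: "%s" is not among Python's portable strftime() format codes; it is a glibc extension that is unavailable on Windows (where it typically raises ValueError) and formats via the local timezone while ignoring an aware datetime's tzinfo, whereas datetime.timestamp() is portable and timezone-correct. A minimal illustration:

    from datetime import datetime, timezone

    dt = datetime(2023, 3, 23, 12, 0, tzinfo=timezone.utc)

    # portable and timezone-aware: prints 1679572800.0
    print(dt.timestamp())

    # dt.strftime("%s"), by contrast, is glibc-only and would ignore
    # the tzinfo above, yielding a shifted epoch on non-UTC machines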
2 changes: 1 addition & 1 deletion neo/rawio/neuralynxrawio/ncssections.py
@@ -438,7 +438,7 @@ def build_for_ncs_file(ncsMemMap, nlxHdr):
 
         # digital lynx style with fractional frequency and micros per samp determined from
         # block times
-        elif acqType == "DIGITALLYNX" or acqType == "DIGITALLYNXSX" or acqType == 'CHEETAH64':
+        elif acqType == "DIGITALLYNX" or acqType == "DIGITALLYNXSX" or acqType == 'CHEETAH64' or acqType == 'RAWDATAFILE':
             nomFreq = nlxHdr['sampling_rate']
             nb = NcsSectionsFactory._buildForMaxGap(ncsMemMap, nomFreq)
 
4 changes: 4 additions & 0 deletions neo/rawio/neuralynxrawio/nlxheader.py
@@ -293,6 +293,10 @@ def type_of_recording(self):
         # Cheetah64
         elif self['HardwareSubSystemType'] == 'Cheetah64':
             return 'CHEETAH64'
 
+        # RawDataFile
+        elif self['HardwareSubSystemType'] == 'RawDataFile':
+            return 'RAWDATAFILE'
+
         else:
             return 'UNKNOWN'
9 changes: 7 additions & 2 deletions neo/rawio/spikeglxrawio.py
@@ -40,6 +40,11 @@
     imDatPrb_type=1 (NP 1.0)
     imDatPrb_type=21 (NP 2.0, single multiplexed shank)
     imDatPrb_type=24 (NP 2.0, 4-shank)
+    imDatPrb_type=1030 (NP 1.0-NHP 45mm SOI90 - NHP long 90um wide, staggered contacts)
+    imDatPrb_type=1031 (NP 1.0-NHP 45mm SOI125 - NHP long 125um wide, staggered contacts)
+    imDatPrb_type=1032 (NP 1.0-NHP 45mm SOI115 / 125 linear - NHP long 125um wide, linear contacts)
+    imDatPrb_type=1022 (NP 1.0-NHP 25mm - NHP medium)
+    imDatPrb_type=1015 (NP 1.0-NHP 10mm - NHP short)
 
 Author : Samuel Garcia
 Some functions are copied from Graham Findlay
@@ -380,7 +385,7 @@ def extract_stream_info(meta_file, meta):
         # metad['imroTbl'] contains two gains per channel (AP and LF),
         # except for the last fake channel
         per_channel_gain = np.ones(num_chan, dtype='float64')
-        if 'imDatPrb_type' not in meta or meta['imDatPrb_type'] == '0':
+        if 'imDatPrb_type' not in meta or meta['imDatPrb_type'] == '0' or meta['imDatPrb_type'] in ('1015', '1022', '1030', '1031', '1032'):
             # This works with the NP 1.0 case across different metadata versions
             # https://github.com/billkarsh/SpikeGLX/blob/gh-pages/Support/Metadata_3A.md#imec
             # https://github.com/billkarsh/SpikeGLX/blob/gh-pages/Support/Metadata_3B1.md#imec
@@ -404,7 +409,7 @@ def extract_stream_info(meta_file, meta):
             channel_gains = gain_factor * per_channel_gain * 1e6
         else:
             raise NotImplementedError('This meta file version of spikeglx'
-                                      'is not implemented')
+                                      ' is not implemented')
     else:
         stream_kind = ''
         stream_name = device
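
As an aside, with five NHP probe types joining '0' in the NP 1.0 gain-handling branch, the growing or-chain could be collapsed into a membership test; a sketch (the tuple name is illustrative, behavior unchanged because a missing imDatPrb_type is treated as type '0'):

    # probe types sharing the NP 1.0 gain handling (illustrative refactor)
    NP1_LIKE_PROBE_TYPES = ('0', '1015', '1022', '1030', '1031', '1032')

    if meta.get('imDatPrb_type', '0') in NP1_LIKE_PROBE_TYPES:
        ...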
