diff --git a/Processed-Data/README.md b/Processed-Data/README.md
new file mode 100644
index 0000000..52add43
--- /dev/null
+++ b/Processed-Data/README.md
@@ -0,0 +1 @@
+All processed data should go here.
diff --git a/Raw-Data/README.md b/Raw-Data/README.md
new file mode 100644
index 0000000..d09a365
--- /dev/null
+++ b/Raw-Data/README.md
@@ -0,0 +1 @@
+Raw data goes here. Each folder should correspond to an animal.
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 0000000..4bc6467
--- /dev/null
+++ b/environment.yml
@@ -0,0 +1,17 @@
+name: Roumis_2018
+channels:
+- conda-forge
+- defaults
+- edeno
+dependencies:
+- "python >= 3.5"
+- loren_frank_data_processing
+- ripple_detection
+- replay_classification
+- spectral_connectivity
+- jupyter
+- setuptools
+- "pytest >= 2.7.1"
+- pytest-cov
+- coveralls
+- netcdf4
diff --git a/notebooks/README.md b/notebooks/README.md
new file mode 100644
index 0000000..e69de29
diff --git a/scripts/queue_cluster_jobs.py b/scripts/queue_cluster_jobs.py
new file mode 100644
index 0000000..7cc9657
--- /dev/null
+++ b/scripts/queue_cluster_jobs.py
@@ -0,0 +1,78 @@
+'''Script for executing run_by_epoch on the cluster
+'''
+from argparse import ArgumentParser
+from os import getcwd, makedirs, environ
+from os.path import join
+from subprocess import run
+from sys import exit
+
+from loren_frank_data_processing import make_epochs_dataframe
+from src.parameters import ANIMALS
+
+
+def get_command_line_arguments():
+    parser = ArgumentParser()
+    parser.add_argument('--Animal', type=str, help='Short name of animal')
+    parser.add_argument('--Day', type=int, help='Day of recording session')
+    parser.add_argument('--Epoch', type=int,
+                        help='Epoch number of recording session')
+    return parser.parse_args()
+
+
+def queue_job(python_cmd, directives=None, log_file='log.log',
+              job_name='job'):
+    queue_cmd = (
+        'qsub {directives} -j y -o {log_file} -N {job_name}').format(
+            directives=directives,
+            log_file=log_file,
+            job_name=job_name)
+    cmd_line_script = ' | '.join([
+        'echo python {python_cmd}'.format(python_cmd=python_cmd),
+        queue_cmd])
+    run(cmd_line_script, shell=True)
+
+
+def main():
+    # Set the maximum number of threads for OpenBLAS, Numba, and OpenMP.
+    NUM_THREADS = 16
+    environ['OPENBLAS_NUM_THREADS'] = str(NUM_THREADS)
+    environ['NUMBA_NUM_THREADS'] = str(NUM_THREADS)
+    environ['OMP_NUM_THREADS'] = str(NUM_THREADS)
+    log_directory = join(getcwd(), 'logs')
+    makedirs(log_directory, exist_ok=True)
+
+    python_function = 'run_by_epoch.py'
+    directives = ' '.join(
+        ['-l h_rt=1:00:00', '-pe omp {0}'.format(NUM_THREADS),
+         '-P braincom', '-notify', '-l mem_total=125G',
+         '-v OPENBLAS_NUM_THREADS', '-v NUMBA_NUM_THREADS',
+         '-v OMP_NUM_THREADS'])
+
+    args = get_command_line_arguments()
+    if args.Animal is None and args.Day is None and args.Epoch is None:
+        epoch_info = make_epochs_dataframe(ANIMALS)
+        epoch_keys = epoch_info[(epoch_info.type == 'run') & (
+            epoch_info.environment != 'lin')].index
+    else:
+        epoch_keys = [(args.Animal, args.Day, args.Epoch)]
+
+    for (animal, day, epoch_ind) in epoch_keys:
+        print('Animal: {0}, Day: {1}, Epoch: {2}'.format(
+            animal, day, epoch_ind))
+        log_file = '{animal}_{day:02d}_{epoch:02d}.log'.format(
+            animal=animal, day=day, epoch=epoch_ind)
+        job_name = (
+            '{function_name}_{animal}_{day:02d}_{epoch:02d}').format(
+                animal=animal, day=day, epoch=epoch_ind,
+                function_name=python_function.replace('.py', ''))
+        python_cmd = '{python_function} {animal} {day} {epoch}'.format(
+            python_function=python_function, animal=animal, day=day,
+            epoch=epoch_ind)
+        queue_job(python_cmd,
+                  directives=directives,
+                  log_file=join(log_directory, log_file),
+                  job_name=job_name)
+
+
+if __name__ == '__main__':
+    exit(main())
diff --git a/scripts/run_by_epoch.py b/scripts/run_by_epoch.py
new file mode 100644
index 0000000..26b4714
--- /dev/null
+++ b/scripts/run_by_epoch.py
@@ -0,0 +1,60 @@
+from argparse import ArgumentParser
+from logging import DEBUG, INFO, Formatter, StreamHandler, getLogger
+from signal import SIGUSR1, SIGUSR2, signal
+from subprocess import PIPE, run
+from sys import exit, stdout
+
+
+def get_command_line_arguments():
+    parser = ArgumentParser()
+    parser.add_argument('Animal', type=str, help='Short name of animal')
+    parser.add_argument('Day', type=int, help='Day of recording session')
+    parser.add_argument('Epoch', type=int,
+                        help='Epoch number of recording session')
+    parser.add_argument(
+        '-d', '--debug',
+        help='More verbose output for debugging',
+        action='store_const',
+        dest='log_level',
+        const=DEBUG,
+        default=INFO,
+    )
+    return parser.parse_args()
+
+
+def get_logger():
+    formatter = Formatter(
+        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+    handler = StreamHandler(stream=stdout)
+    handler.setFormatter(formatter)
+    logger = getLogger()
+    logger.addHandler(handler)
+    return logger
+
+
+def main():
+    args = get_command_line_arguments()
+    logger = get_logger()
+    logger.setLevel(args.log_level)
+
+    def _signal_handler(signal_code, frame):
+        logger.error('***Process killed with signal {signal}***'.format(
+            signal=signal_code))
+        exit()
+
+    for code in [SIGUSR1, SIGUSR2]:
+        signal(code, _signal_handler)
+
+    epoch_key = (args.Animal, args.Day, args.Epoch)
+    logger.info(
+        'Processing epoch: Animal {0}, Day {1}, Epoch #{2}...'.format(
+            *epoch_key))
+    git_hash = run(['git', 'rev-parse', 'HEAD'],
+                   stdout=PIPE, universal_newlines=True).stdout
+    logger.info('Git Hash: {git_hash}'.format(git_hash=git_hash.rstrip()))
+
+    logger.info('Finished Processing')
+
+
+if __name__ == '__main__':
+    exit(main())
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..b7e4789
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[aliases]
+test=pytest
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..5b4158f
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+
+from setuptools import find_packages, setup
+
+INSTALL_REQUIRES = ['loren_frank_data_processing', 'ripple_detection',
+                    'spectral_connectivity', 'replay_classification']
+TESTS_REQUIRE = ['pytest >= 2.7.1']
+
+setup(
+    name='roumis_2018',
+    version='0.1.0.dev0',
+    license='GPL-3.0',
+    description='Analysis of MEC and CA1 connectivity',
+    author='Demetris Roumis, Eric Denovellis',
+    author_email='demetris.roumis@ucsf.edu, edeno@bu.edu',
+    url='https://github.com/edeno/Roumis_2018',
+    packages=find_packages(),
+    install_requires=INSTALL_REQUIRES,
+    tests_require=TESTS_REQUIRE,
+)
diff --git a/src/analysis.py b/src/analysis.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/parameters.py b/src/parameters.py
new file mode 100644
index 0000000..cfbcd92
--- /dev/null
+++ b/src/parameters.py
@@ -0,0 +1,12 @@
+from collections import namedtuple
+from os.path import join, abspath, dirname, pardir
+
+# LFP sampling frequency
+SAMPLING_FREQUENCY = 1500
+
+# Data directories and definitions
+ROOT_DIR = join(abspath(dirname(__file__)), pardir)
+RAW_DATA_DIR = join(ROOT_DIR, 'Raw-Data')
+PROCESSED_DATA_DIR = join(ROOT_DIR, 'Processed-Data')
+
+Animal = namedtuple('Animal', ('directory', 'short_name'))
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 0000000..9fb7920
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1 @@
+Tests go here.
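
Note: scripts/queue_cluster_jobs.py imports ANIMALS from src.parameters, but
src/parameters.py as added here only defines the Animal namedtuple. A minimal
sketch of the missing constant, appended to the end of src/parameters.py (which
already provides join, RAW_DATA_DIR, and Animal), assuming the
loren_frank_data_processing convention of a dict keyed by animal short name;
the short name 'bon' and its directory below are placeholders, not part of this
change:

    ANIMALS = {
        # Hypothetical entry: map each animal's short name to an Animal tuple
        # pointing at that animal's folder under Raw-Data/.
        'bon': Animal(directory=join(RAW_DATA_DIR, 'bon'), short_name='bon'),
    }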