Skip to content

Commit

Permalink
Add setup and environment
Browse files Browse the repository at this point in the history
  • Loading branch information
edeno committed Nov 22, 2017
1 parent 77fa8cb commit 16e0bfe
Show file tree
Hide file tree
Showing 11 changed files with 192 additions and 0 deletions.
1 change: 1 addition & 0 deletions Processed-Data/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
All processed data should go here.
1 change: 1 addition & 0 deletions Raw-Data/README.md (note: file is misspelled "READEME.md" in the commit and should be renamed)
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Raw Data goes here. Each folder should correspond to an animal.
17 changes: 17 additions & 0 deletions environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
name: Roumis_2018
channels:
- conda-forge
- defaults
- edeno
dependencies:
- "python >= 3.5"
- loren_frank_data_processing
- ripple_detection
- replay_classification
- spectral_connectivity
- jupyter
- setuptools
- "pytest >= 2.7.1"
- pytest-cov
- coveralls
- netcdf4
Empty file added notebooks/README.md
Empty file.
78 changes: 78 additions & 0 deletions scripts/queue_cluster_jobs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
'''Script for executing run_by_epoch on the cluster
'''
from argparse import ArgumentParser
from os import getcwd, makedirs, environ
from os.path import join
from subprocess import run
from sys import exit

from loren_frank_data_processing import make_epochs_dataframe
from src.parameters import ANIMALS


def get_command_line_arguments():
    """Parse the optional animal/day/epoch selectors from the command line.

    All three options default to ``None``; `main` interprets the
    all-``None`` case as "queue every matching epoch".
    """
    parser = ArgumentParser()
    options = [('--Animal', str, 'Short name of animal'),
               ('--Day', int, 'Day of recording session'),
               ('--Epoch', int, 'Epoch number of recording session')]
    for flag, option_type, description in options:
        parser.add_argument(flag, type=option_type, help=description)
    return parser.parse_args()


def queue_job(python_cmd, directives=None, log_file='log.log',
              job_name='job'):
    '''Submit a python command to the SGE scheduler via `qsub`.

    Parameters
    ----------
    python_cmd : str
        Arguments passed to the `python` interpreter (script name plus
        its arguments).
    directives : str or None, optional
        Scheduler directives (resource requests, environment exports)
        inserted verbatim into the `qsub` command line. `None` means no
        extra directives.
    log_file : str, optional
        Path of the combined stdout/stderr log (`-j y` merges streams).
    job_name : str, optional
        Name shown by the scheduler for this job.

    '''
    # BUG FIX: the previous version formatted `None` straight into the
    # command, producing a literal "None" token that `qsub` rejects.
    if directives is None:
        directives = ''
    queue_cmd = (
        'qsub {directives} -j y -o {log_file} -N {job_name}').format(
            directives=directives,
            log_file=log_file,
            job_name=job_name)
    # `qsub` reads the job script from stdin, so pipe an `echo` of the
    # python invocation into it. NOTE(review): arguments are interpolated
    # into a shell string (shell=True) -- callers must pass trusted
    # values only.
    cmd_line_script = ' | '.join([
        'echo python {python_cmd}'.format(python_cmd=python_cmd),
        queue_cmd])
    run(cmd_line_script, shell=True)


def main():
    """Queue one cluster job per epoch to be processed."""
    # Cap the thread count of each numerical library so a job does not
    # oversubscribe the cores it requests from the scheduler below.
    n_threads = 16
    for variable in ('OPENBLAS_NUM_THREADS', 'NUMBA_NUM_THREADS',
                     'OMP_NUM_THREADS'):
        environ[variable] = str(n_threads)

    log_directory = join(getcwd(), 'logs')
    makedirs(log_directory, exist_ok=True)

    python_function = 'run_by_epoch.py'
    directives = ' '.join(
        ['-l h_rt=1:00:00', '-pe omp {0}'.format(n_threads),
         '-P braincom', '-notify', '-l mem_total=125G',
         '-v OPENBLAS_NUM_THREADS', '-v NUMBA_NUM_THREADS',
         '-v OMP_NUM_THREADS'])

    args = get_command_line_arguments()
    if args.Animal is None and args.Day is None and args.Epoch is None:
        # No epoch given on the command line: queue every running epoch
        # that is not on the linear track.
        epoch_info = make_epochs_dataframe(ANIMALS)
        is_target_epoch = (epoch_info.type == 'run') & (
            epoch_info.environment != 'lin')
        epoch_keys = epoch_info[is_target_epoch].index
    else:
        epoch_keys = [(args.Animal, args.Day, args.Epoch)]

    for animal, day, epoch_ind in epoch_keys:
        print('Animal: {0}, Day: {1}, Epoch: {2}'.format(
            animal, day, epoch_ind))
        epoch_tag = '{animal}_{day:02d}_{epoch:02d}'.format(
            animal=animal, day=day, epoch=epoch_ind)
        queue_job(
            '{python_function} {animal} {day} {epoch}'.format(
                python_function=python_function, animal=animal, day=day,
                epoch=epoch_ind),
            directives=directives,
            log_file=join(log_directory, epoch_tag + '.log'),
            job_name='{0}_{1}'.format(
                python_function.replace('.py', ''), epoch_tag))


# Run as a script; propagate main()'s return value as the exit status.
if __name__ == '__main__':
    exit(main())
60 changes: 60 additions & 0 deletions scripts/run_by_epoch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
from argparse import ArgumentParser
from logging import DEBUG, INFO, Formatter, StreamHandler, getLogger
from signal import SIGUSR1, SIGUSR2, signal
from subprocess import PIPE, run
from sys import exit, stdout


def get_command_line_arguments():
    """Parse the positional epoch key and the logging verbosity flag."""
    parser = ArgumentParser()
    for name, positional_type, description in [
            ('Animal', str, 'Short name of animal'),
            ('Day', int, 'Day of recording session'),
            ('Epoch', int, 'Epoch number of recording session')]:
        parser.add_argument(name, type=positional_type, help=description)
    # `--debug` raises the log level from the INFO default to DEBUG.
    parser.add_argument(
        '-d', '--debug',
        help='More verbose output for debugging',
        action='store_const',
        dest='log_level',
        const=DEBUG,
        default=INFO,
    )
    return parser.parse_args()


def get_logger():
    '''Return the root logger with a timestamped stdout handler attached.

    Attachment is idempotent: calling this more than once no longer adds
    duplicate handlers (which made every record print once per call).

    Returns
    -------
    logger : logging.Logger
        The root logger.

    '''
    logger = getLogger()
    # BUG FIX: only attach if no handler is already streaming to stdout;
    # previously each call appended a fresh StreamHandler.
    already_attached = any(
        getattr(handler, 'stream', None) is stdout
        for handler in logger.handlers)
    if not already_attached:
        formatter = Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        handler = StreamHandler(stream=stdout)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger


def main():
    """Log the epoch being processed and the current git revision.

    Installs SIGUSR1/SIGUSR2 handlers so a scheduler-initiated kill is
    recorded in the log before the process exits.
    """
    args = get_command_line_arguments()
    logger = get_logger()
    logger.setLevel(args.log_level)

    def _handle_kill_signal(signal_code, frame):
        # Record the termination signal before exiting.
        logger.error('***Process killed with signal {signal}***'.format(
            signal=signal_code))
        exit()

    for kill_signal in (SIGUSR1, SIGUSR2):
        signal(kill_signal, _handle_kill_signal)

    logger.info(
        'Processing epoch: Animal {0}, Day {1}, Epoch #{2}...'.format(
            args.Animal, args.Day, args.Epoch))
    # Record the exact code revision that produced this run's output.
    git_hash = run(['git', 'rev-parse', 'HEAD'],
                   stdout=PIPE, universal_newlines=True).stdout
    logger.info('Git Hash: {git_hash}'.format(git_hash=git_hash.rstrip()))

    logger.info('Finished Processing')


# Run as a script; propagate main()'s return value as the exit status.
if __name__ == '__main__':
    exit(main())
2 changes: 2 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[aliases]
test=pytest
20 changes: 20 additions & 0 deletions setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
#!/usr/bin/env python3
"""Package installation script for the Roumis_2018 analysis code."""

from setuptools import find_packages, setup

# Runtime dependencies (see environment.yml for the conda equivalents).
INSTALL_REQUIRES = ['loren_frank_data_processing', 'ripple_detection',
                    'spectral_connectivity', 'replay_classification']
# Dependencies needed only to run the test suite.
TESTS_REQUIRE = ['pytest >= 2.7.1']

setup(name='roumis_2018',
      version='0.1.0.dev0',
      license='GPL-3.0',
      description='Analysis of MEC and CA1 connectivity',
      author='Demetris Roumis, Eric Denovellis',
      author_email='demetris.roumis@ucsf.edu, edeno@bu.edu',
      url='https://github.com/edeno/Roumis_2018',
      packages=find_packages(),
      install_requires=INSTALL_REQUIRES,
      tests_require=TESTS_REQUIRE)
Empty file added src/analysis.py
Empty file.
12 changes: 12 additions & 0 deletions src/parameters.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""Shared constants and data-directory locations for the analysis."""
from collections import namedtuple
from os.path import abspath, dirname, join, pardir

# LFP sampling frequency (Hz)
SAMPLING_FREQUENCY = 1500

# Data directories and definitions. Paths are resolved relative to this
# file so scripts work regardless of the current working directory.
ROOT_DIR = join(abspath(dirname(__file__)), pardir)
RAW_DATA_DIR = join(ROOT_DIR, 'Raw-Data')
PROCESSED_DATA_DIR = join(ROOT_DIR, 'Processed-Data')

# BUG FIX: the field names were previously passed as a set literal, whose
# iteration order depends on string hash randomization and can differ
# between interpreter runs -- use a list so positional construction
# `Animal(directory, short_name)` is deterministic.
Animal = namedtuple('Animal', ['directory', 'short_name'])
1 change: 1 addition & 0 deletions tests/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Tests go here.

0 comments on commit 16e0bfe

Please sign in to comment.