-
Notifications
You must be signed in to change notification settings - Fork 7
/
slurm_resample.sh
53 lines (48 loc) · 1.26 KB
/
slurm_resample.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
#!/bin/bash
## number of nodes
#SBATCH -N 1
## number of cpus per task
#SBATCH -c 2
## walltime
#SBATCH --time 40:00:00
## memory per job
#SBATCH --mem 32gb
## licenses (per filesystem limit)
#SBATCH --licenses vida:25
## gres (per node bandwidth limit)
#SBATCH --gres bandwidth:200
## job log path
#SBATCH -o %x.%j.out

# Slurm batch wrapper: logs job metadata, activates the 'pgc' conda env in
# the submit directory, then runs the Python invocation supplied through the
# exported variable "p1", e.g.:
#   sbatch --export=ALL,p1="resample.py --arg value" slurm_resample.sh
# NOTE(review): several logged vars (SLURM_ACCOUNT, SBATCH_TIMELIMIT) are only
# set in some site configurations and may print empty — confirm on the cluster.

echo ________________________________________
echo
echo "SLURM Job Log"
echo "Start time: $(date)"
echo
echo "Job name: $SLURM_JOB_NAME"
echo "Job ID: $SLURM_JOBID"
echo "Submitted by user: $USER"
echo "User effective group ID: $(id -ng)"
echo
echo "SLURM account used: $SLURM_ACCOUNT"
echo "Hostname of submission: $SLURM_SUBMIT_HOST"
echo "Submitted to cluster: $SLURM_CLUSTER_NAME"
echo "Submitted to node: $SLURMD_NODENAME"
echo "Cores on node: $SLURM_CPUS_ON_NODE"
echo "Requested cores per task: $SLURM_CPUS_PER_TASK"
echo "Requested cores per job: $SLURM_NTASKS"
echo "Requested walltime: $SBATCH_TIMELIMIT"
echo "Nodes assigned to job: $SLURM_JOB_NODELIST"
echo "Running node index: $SLURM_NODEID"
echo
echo "Running on hostname: $HOSTNAME"
echo "Parent PID: $PPID"
echo "Process PID: $$"
echo
echo "Working directory: $SLURM_SUBMIT_DIR"
echo ________________________________________________________
echo

# Fix: the original unquoted, unchecked cd could silently fail (e.g. path
# with spaces, or dir missing on the compute node) and run the payload from
# the wrong directory.
cd "$SLURM_SUBMIT_DIR" || { echo "ERROR: cannot cd to '$SLURM_SUBMIT_DIR'" >&2; exit 1; }

# init env; abort rather than run python in the wrong environment if the
# conda env is unavailable on this node.
source ~/.bashrc
conda activate pgc || { echo "ERROR: 'conda activate pgc' failed" >&2; exit 1; }

# Log then run the payload. $p1 is intentionally unquoted so a value like
# "script.py --flag" word-splits into the script name plus its arguments
# (shellcheck disable=SC2086 — word-splitting is the calling convention here).
echo "$p1"
python $p1