@@ -42,6 +42,32 @@ site.stampede {
42
42
app.ALL { executable: "*" } # All tasks to be found from commandline
43
43
}
44
44
45
# Instructions for Stampede, with XSEDE tutorial reservation
# 1. If you are running on the stampede login nodes set jobManager: "local:slurm"
site.rstampede {
    execution {
        type      : "coaster"                  # Use coasters to run on remote sites
        URL       : "stampede.tacc.xsede.org"  # Stampede login nodes login[1..4].stampede.tacc.utexas.edu
        jobManager: "ssh-cl:slurm"             # Use ssh-cl to connect, slurm is the Local resource manager
        options {
            maxJobs         : 1                # Max jobs submitted to LRM
            nodeGranularity : 1                # Nodes per job
            maxNodesPerJob  : 1                # Nodes per job
            tasksPerNode    : 16               # Tasks per Node
            jobQueue        : "development"    # Select queue from (development, normal, large)
            maxJobTime      : "00:25:00"       # Time requested per job
            jobOptions.slurm {
                "reservation" : "mpierce_27"   # XSEDE tutorial reservation passed to slurm
            }
        }
    }
    staging             : "local"              # Stage files from "local" system
    workDirectory       : "/tmp/"${env.USER}"/swiftwork"  # Location for intermediate files
    maxParallelTasks    : 101                  # Maximum number of parallel tasks
    initialParallelTasks: 100                  # Maximum number of tasks at start
    app.ALL { executable: "*" }                # All tasks to be found from commandline
}

45
71
# Instructions for Blacklight
46
72
# 1. If you are running on the blacklight login nodes, set jobManager: "local:pbs"
47
73
# 2. If you are running Set userHomeOverride : "/lustre/blacklight2/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
@@ -70,6 +96,34 @@ site.blacklight {
70
96
app.ALL { executable: "*" } # All tasks to be found from commandline
71
97
}
72
98
99
# Instructions for Blacklight with XSEDE reserved queues
# 1. If you are running on the blacklight login nodes, set jobManager: "local:pbs"
# 2. If you are running remotely, set userHomeOverride : "/lustre/blacklight2/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
# 3. Set workDirectory : "/tmp/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
site.rblacklight {
    execution {
        type      : "coaster"                  # Use coasters to run on remote sites
        URL       : "blacklight.psc.xsede.org" # Blacklight login URL
        jobManager: "ssh-cl:pbs"               # Use ssh-cl to connect, pbs is the Local Resource manager (LRM)
        options {
            maxJobs         : 1                # Max jobs submitted to LRM
            nodeGranularity : 1                # Nodes per job
            maxNodesPerJob  : 1                # Nodes per job
            tasksPerNode    : 16               # Tasks per Node
            maxJobTime      : "00:25:00"       # Time requested per job
            jobQueue        : "res_1"          # XSEDE reserved queue name
            jobOptions {
                ppn : "16"                     # Virtual processors per node per Job
            }
        }
    }
    staging             : "local"              # Stage files from "local" system to Blacklight
    workDirectory       : "/tmp/"${env.USER}"/swiftwork"  # Location for intermediate files
    maxParallelTasks    : 101                  # Maximum number of parallel tasks
    initialParallelTasks: 100                  # Maximum number of tasks at start
    app.ALL { executable: "*" }                # All tasks to be found from commandline
}

73
127
# Instructions for Gordon
74
128
# 1. Do *NOT* run on the Gordon login nodes. There are memory limits which prevent swift from running
75
129
# properly on these machines.
0 commit comments