
Commit c596f28

Adding rstampede and rblacklight config
1 parent a487100 commit c596f28

File tree

1 file changed: +54 −0 lines changed


swift.conf

Lines changed: 54 additions & 0 deletions
@@ -42,6 +42,32 @@ site.stampede {
     app.ALL { executable: "*" }                # All tasks to be found from commandline
 }
 
+# Instructions for Stampede, with XSEDE tutorial reservation
+# 1. If you are running on the stampede login nodes, set jobManager: "local:slurm"
+site.rstampede {
+    execution {
+        type      : "coaster"                  # Use coasters to run on remote sites
+        URL       : "stampede.tacc.xsede.org"  # Stampede login nodes login[1..4].stampede.tacc.utexas.edu
+        jobManager: "ssh-cl:slurm"             # Use ssh-cl to connect; slurm is the local resource manager (LRM)
+        options {
+            maxJobs         : 1                # Max jobs submitted to the LRM
+            nodeGranularity : 1                # Nodes per job (allocation granularity)
+            maxNodesPerJob  : 1                # Max nodes per job
+            tasksPerNode    : 16               # Tasks per node
+            jobQueue        : "development"    # Select queue from (development, normal, large)
+            maxJobTime      : "00:25:00"       # Time requested per job
+            jobOptions.slurm {
+                "reservation" : "mpierce_27"   # Scheduler option passed through to SLURM
+            }
+        }
+    }
+    staging             : "local"              # Stage files from the "local" system
+    workDirectory       : "/tmp/"${env.USER}"/swiftwork"  # Location for intermediate files
+    maxParallelTasks    : 101                  # Maximum number of parallel tasks
+    initialParallelTasks: 100                  # Maximum number of tasks at start
+    app.ALL { executable: "*" }                # All tasks to be found from commandline
+}
+
 # Instructions for Blacklight
 # 1. If you are running on the blacklight login nodes, set jobManager: "local:pbs"
 # 2. If you are running remotely, set userHomeOverride : "/lustre/blacklight2/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
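
Note: per instruction 1 in the hunk above, runs launched directly from a Stampede login node only need the connection settings changed; the rest of the site block carries over. A minimal sketch of that variant (my reading of the comment, not part of this commit; "localhost" is an assumption, since no ssh hop is involved):

    site.rstampede {
        execution {
            type      : "coaster"      # coasters still manage the workers
            URL       : "localhost"    # assumption: replaces the remote login URL
            jobManager: "local:slurm"  # per instruction 1: submit to SLURM directly, no ssh-cl
            # options block unchanged from site.rstampede above
        }
        # staging, workDirectory, task limits and app.ALL unchanged
    }

The jobOptions.slurm block passes raw scheduler options through, so "reservation" : "mpierce_27" should surface as a --reservation request in the generated SLURM submission (my understanding of coaster job options, not verified here).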
@@ -70,6 +96,34 @@ site.blacklight {
     app.ALL { executable: "*" }                # All tasks to be found from commandline
 }
 
+# Instructions for Blacklight with XSEDE reserved queues
+# 1. If you are running on the blacklight login nodes, set jobManager: "local:pbs"
+# 2. If you are running remotely, set userHomeOverride : "/lustre/blacklight2/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
+# 3. Set workDirectory : "/tmp/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"
+site.rblacklight {
+    execution {
+        type      : "coaster"                  # Use coasters to run on remote sites
+        URL       : "blacklight.psc.xsede.org" # Blacklight login URL
+        jobManager: "ssh-cl:pbs"               # Use ssh-cl to connect; pbs is the local resource manager (LRM)
+        options {
+            maxJobs         : 1                # Max jobs submitted to the LRM
+            nodeGranularity : 1                # Nodes per job (allocation granularity)
+            maxNodesPerJob  : 1                # Max nodes per job
+            tasksPerNode    : 16               # Tasks per node
+            maxJobTime      : "00:25:00"       # Time requested per job
+            jobQueue        : "res_1"          # XSEDE tutorial reserved queue
+            jobOptions {
+                ppn : "16"                     # Virtual processors per node per job
+            }
+        }
+    }
+    staging             : "local"              # Stage files from the "local" system to Blacklight
+    workDirectory       : "/tmp/"${env.USER}"/swiftwork"  # Location for intermediate files
+    maxParallelTasks    : 101                  # Maximum number of parallel tasks
+    initialParallelTasks: 100                  # Maximum number of tasks at start
+    app.ALL { executable: "*" }                # All tasks to be found from commandline
+}
+
 # Instructions for Gordon
 # 1. Do *NOT* run on the Gordon login nodes. There are memory limits which prevent swift from running
 #    properly on these machines.
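
Note: the same adaptation applies to rblacklight, following its instructions 1-3. A sketch (again an assumption, not part of this commit; where exactly userHomeOverride belongs should be checked against the existing site.blacklight block):

    site.rblacklight {
        execution {
            type      : "coaster"
            URL       : "localhost"   # assumption: no ssh hop from a login node
            jobManager: "local:pbs"   # per instruction 1: submit to PBS directly
            # options block unchanged from site.rblacklight above
        }
        userHomeOverride: "/lustre/blacklight2/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"  # instruction 2; placement assumed
        workDirectory   : "/tmp/YOUR_USERNAME_ON_BLACKLIGHT/swiftwork"                 # instruction 3
        # staging, task limits and app.ALL unchanged
    }

With either site defined, it should be selectable by name on the command line, e.g. swift -site rblacklight script.swift (assuming the -site flag used elsewhere in this tutorial; script.swift is a placeholder).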
