forked from adoptium/ci-jenkins-pipelines
-
Notifications
You must be signed in to change notification settings - Fork 0
/
build_pipeline_generator.groovy
306 lines (262 loc) · 16.7 KB
/
build_pipeline_generator.groovy
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
import java.nio.file.NoSuchFileException
import groovy.json.JsonSlurper
import groovy.json.JsonOutput
/*
File used as a Jenkinsfile to generate the nightly and weekly pipeline jobs
*/
/*
 * Top-level generator. Runs on a 'worker' agent and, for every non-retired JDK
 * version reported by the Adopt API (8 .. tip_version+1), generates two Job DSL
 * jobs: "openjdkNN-pipeline" (nightly) and "weekly-openjdkNN-pipeline" (weekly).
 * Every path/config value is resolved in priority order:
 *   job parameter -> user's DEFAULTS_JSON -> Adopt's ADOPT_DEFAULTS_JSON fallback.
 * Whenever an Adopt fallback is used, the workspace is temporarily switched to
 * Adopt's repository and then switched back — the checkoutUser/checkoutAdopt
 * ordering throughout this script is therefore significant.
 */
node('worker') {
try {
// Pull in Adopt defaults — the canonical fallback for every config value below
String ADOPT_DEFAULTS_FILE_URL = 'https://raw.githubusercontent.com/adoptium/ci-jenkins-pipelines/master/pipelines/defaults.json'
def getAdopt = new URL(ADOPT_DEFAULTS_FILE_URL).openConnection()
Map<String, ?> ADOPT_DEFAULTS_JSON = new JsonSlurper().parseText(getAdopt.getInputStream().getText()) as Map
if (!ADOPT_DEFAULTS_JSON || !Map.isInstance(ADOPT_DEFAULTS_JSON)) {
throw new Exception("[ERROR] No ADOPT_DEFAULTS_JSON found at ${ADOPT_DEFAULTS_FILE_URL} or it is not a valid JSON object. Please ensure this path is correct and leads to a JSON or Map object file. NOTE: Since this adopt's defaults and unlikely to change location, this is likely a network or GitHub issue.")
}
// Pull in User defaults — if DEFAULTS_URL is not supplied, the user defaults ARE Adopt's defaults
String DEFAULTS_FILE_URL = (params.DEFAULTS_URL) ?: ADOPT_DEFAULTS_FILE_URL
def getUser = new URL(DEFAULTS_FILE_URL).openConnection()
Map<String, ?> DEFAULTS_JSON = new JsonSlurper().parseText(getUser.getInputStream().getText()) as Map
if (!DEFAULTS_JSON || !Map.isInstance(DEFAULTS_JSON)) {
throw new Exception("[ERROR] No DEFAULTS_JSON found at ${DEFAULTS_FILE_URL} or it is not a valid JSON object. Please ensure this path is correct and leads to a JSON or Map object file.")
}
// Shared state used by the checkout closures below; populated inside timestamps {}
Map remoteConfigs = [:]
def repoBranch = null
/*
Changes dir to Adopt's repo. Use closures as functions aren't accepted inside node blocks
*/
def checkoutAdoptPipelines = { ->
checkout([$class: 'GitSCM',
branches: [ [ name: ADOPT_DEFAULTS_JSON['repository']['pipeline_branch'] ] ],
userRemoteConfigs: [ [ url: ADOPT_DEFAULTS_JSON['repository']['pipeline_url'] ] ]
])
}
/*
Changes dir to the user's repo. Use closures as functions aren't accepted inside node blocks
*/
def checkoutUserPipelines = { ->
checkout([$class: 'GitSCM',
branches: [ [ name: repoBranch ] ],
userRemoteConfigs: [ remoteConfigs ]
])
}
timestamps {
// JDK feature versions that are no longer built; skipped in the generation loop
def retiredVersions = [9, 10, 12, 13, 14, 15, 16, 18, 19, 20]
def generatedPipelines = []
// Load git url and branch and gitBranch. These determine where we will be pulling user configs from.
def repoUri = (params.REPOSITORY_URL) ?: DEFAULTS_JSON['repository']['pipeline_url']
repoBranch = (params.REPOSITORY_BRANCH) ?: DEFAULTS_JSON['repository']['pipeline_branch']
// Load credentials to be used in checking out. This is in case we are checking out a URL that is not Adopts and they don't have their ssh key on the machine.
def checkoutCreds = (params.CHECKOUT_CREDENTIALS) ?: ''
remoteConfigs = [ url: repoUri ]
if (checkoutCreds != '') {
// NOTE: This currently does not work with user credentials due to https://issues.jenkins.io/browse/JENKINS-60349
remoteConfigs.put('credentials', "${checkoutCreds}")
} else {
println "[WARNING] CHECKOUT_CREDENTIALS not specified! Checkout to $repoUri may fail if you do not have your ssh key on this machine."
}
// Checkout into user repository
checkoutUserPipelines()
// Load the shared Jenkins helper library at the ref pinned in the defaults JSON
String helperRef = DEFAULTS_JSON['repository']['helper_ref']
library(identifier: "openjdk-jenkins-helper@${helperRef}")
// Load jobRoot. This is where the openjdkxx-pipeline jobs will be created.
def jobRoot = (params.JOB_ROOT) ?: DEFAULTS_JSON['jenkinsDetails']['rootDirectory']
/*
Load scriptFolderPath. This is the folder where the openjdk_pipeline.groovy code is located compared to the repository root.
These are the top level pipeline jobs.
*/
def scriptFolderPath = (params.SCRIPT_FOLDER_PATH) ?: DEFAULTS_JSON['scriptDirectories']['upstream']
if (!fileExists(scriptFolderPath)) {
// Path missing in the user's repo: temporarily switch to Adopt's repo to validate/borrow the path, then switch back
println "[WARNING] ${scriptFolderPath} does not exist in your chosen repository. Updating it to use Adopt's instead"
checkoutAdoptPipelines()
scriptFolderPath = ADOPT_DEFAULTS_JSON['scriptDirectories']['upstream']
println "[SUCCESS] The path is now ${scriptFolderPath} relative to ${ADOPT_DEFAULTS_JSON['repository']['pipeline_url']}"
checkoutUserPipelines()
}
/*
Load nightlyFolderPath. This is the folder where the configurations/jdkxx_pipeline_config.groovy code is located compared to the repository root.
These define what the default set of nightlies will be.
*/
def nightlyFolderPath = (params.NIGHTLY_FOLDER_PATH) ?: DEFAULTS_JSON['configDirectories']['nightly']
if (!fileExists(nightlyFolderPath)) {
println "[WARNING] ${nightlyFolderPath} does not exist in your chosen repository. Updating it to use Adopt's instead"
checkoutAdoptPipelines()
nightlyFolderPath = ADOPT_DEFAULTS_JSON['configDirectories']['nightly']
println "[SUCCESS] The path is now ${nightlyFolderPath} relative to ${ADOPT_DEFAULTS_JSON['repository']['pipeline_url']}"
checkoutUserPipelines()
}
/*
Load jobTemplatePath. This is where the pipeline_job_template.groovy code is located compared to the repository root.
This actually sets up the pipeline job using the parameters above.
*/
def jobTemplatePath = (params.JOB_TEMPLATE_PATH) ?: DEFAULTS_JSON['templateDirectories']['upstream']
if (!fileExists(jobTemplatePath)) {
println "[WARNING] ${jobTemplatePath} does not exist in your chosen repository. Updating it to use Adopt's instead"
checkoutAdoptPipelines()
jobTemplatePath = ADOPT_DEFAULTS_JSON['templateDirectories']['upstream']
println "[SUCCESS] The path is now ${jobTemplatePath} relative to ${ADOPT_DEFAULTS_JSON['repository']['pipeline_url']}"
checkoutUserPipelines()
}
// Load enablePipelineSchedule. This determines whether we will be generating the pipelines with a schedule (defined in jdkxx.groovy) or not.
Boolean enablePipelineSchedule = false
if (params.ENABLE_PIPELINE_SCHEDULE) {
enablePipelineSchedule = true
}
// Load useAdoptShellScripts. This determines whether we will checkout to adopt's repository before running make-adopt-build-farm.sh or if we use the user's bash scripts.
Boolean useAdoptShellScripts = false
if (params.USE_ADOPT_SHELL_SCRIPTS) {
useAdoptShellScripts = true
}
println '[INFO] Running generator script with the following configuration:'
println "REPOSITORY_URL = $repoUri"
println "REPOSITORY_BRANCH = $repoBranch"
println "JOB_ROOT = $jobRoot"
println "SCRIPT_FOLDER_PATH = $scriptFolderPath"
println "NIGHTLY_FOLDER_PATH = $nightlyFolderPath"
println "JOB_TEMPLATE_PATH = $jobTemplatePath"
println "ENABLE_PIPELINE_SCHEDULE = $enablePipelineSchedule"
println "USE_ADOPT_SHELL_SCRIPTS = $useAdoptShellScripts"
// Collect available JDK versions to check for generation (tip_version + 1 just in case it is out of date on a release day)
def JobHelper = library(identifier: "openjdk-jenkins-helper@${helperRef}").JobHelper
println 'Querying Adopt Api for the JDK-Head number (tip_version)...'
def response = JobHelper.getAvailableReleases(this)
int headVersion = (int) response[('tip_version')]
(8..headVersion + 1).each({ javaVersion ->
if (retiredVersions.contains(javaVersion)) {
println "[INFO] $javaVersion is a retired version that isn't currently built. Skipping generation..."
return
}
// Base config handed to the Job DSL template; overridden per-version below
def config = [
TEST : false,
GIT_URL : repoUri,
BRANCH : repoBranch,
BUILD_FOLDER : jobRoot,
CHECKOUT_CREDENTIALS: checkoutCreds,
JAVA_VERSION : javaVersion,
JOB_NAME : "openjdk${javaVersion}-pipeline",
SCRIPT : "${scriptFolderPath}/openjdk_pipeline.groovy",
disableJob : false,
// Placeholder "never" schedule; replaced with the target's real schedule when
// ENABLE_PIPELINE_SCHEDULE is set.
// NOTE(review): 31st Feb does not exist, but with both day-of-month and day-of-week
// restricted, Vixie-style cron fires when EITHER matches (i.e. Sundays in February).
// Confirm Jenkins' cron treats this as never-firing as intended.
pipelineSchedule : '0 0 31 2 0', // 31st Feb, so will never run,
adoptScripts : false,
releaseType : 'Nightly'
]
// Load the per-version target config: try jdkNN.groovy, then jdkNNu.groovy,
// then the same two names in Adopt's repo before giving up on this version.
def target
try {
target = load "${WORKSPACE}/${nightlyFolderPath}/jdk${javaVersion}.groovy"
} catch (NoSuchFileException e) {
try {
println "[WARNING] jdk${javaVersion}.groovy does not exist, chances are we want a jdk${javaVersion}u.groovy file. Trying ${WORKSPACE}/${nightlyFolderPath}/jdk${javaVersion}u.groovy"
target = load "${WORKSPACE}/${nightlyFolderPath}/jdk${javaVersion}u.groovy"
} catch (NoSuchFileException e2) {
println "[WARNING] jdk${javaVersion}u.groovy does not exist, chances are we are generating from a repository that isn't Adopt's. Pulling Adopt's nightlies in..."
checkoutAdoptPipelines()
try {
target = load "${WORKSPACE}/${ADOPT_DEFAULTS_JSON['configDirectories']['nightly']}/jdk${javaVersion}.groovy"
} catch (NoSuchFileException e3) {
try {
target = load "${WORKSPACE}/${ADOPT_DEFAULTS_JSON['configDirectories']['nightly']}/jdk${javaVersion}u.groovy"
} catch (NoSuchFileException e4) {
println "[WARNING] No config found for JDK${javaVersion} in the User's or Adopt's repository. Skipping generation..."
// break and move to next element in the loop
// groovylint-disable-next-line
return
}
}
checkoutUserPipelines()
}
}
println "[INFO] JDK${javaVersion}: loaded target configuration:"
println JsonOutput.prettyPrint(JsonOutput.toJson(target))
config.put('targetConfigurations', target.targetConfigurations)
// hack as jenkins groovy does not seem to allow us to check if disableJob exists
try {
config.put('disableJob', target.disableJob)
} catch (Exception ex) {
config.put('disableJob', false)
}
// Only honour the target's nightly schedule when scheduling is enabled;
// missing property falls back to the never-run placeholder.
if (enablePipelineSchedule.toBoolean()) {
try {
config.put('pipelineSchedule', target.triggerSchedule_nightly)
} catch (Exception ex) {
config.put('pipelineSchedule', '0 0 31 2 0')
}
}
if (useAdoptShellScripts.toBoolean()) {
config.put('adoptScripts', true)
}
config.put('enableReproducibleCompare', DEFAULTS_JSON['testDetails']['enableReproducibleCompare'] as Boolean)
config.put('enableTests', DEFAULTS_JSON['testDetails']['enableTests'] as Boolean)
config.put('enableTestDynamicParallel', DEFAULTS_JSON['testDetails']['enableTestDynamicParallel'] as Boolean)
println "[INFO] JDK${javaVersion}: nightly pipelineSchedule = ${config.pipelineSchedule}"
config.put('defaultsJson', DEFAULTS_JSON)
config.put('adoptDefaultsJson', ADOPT_DEFAULTS_JSON)
println "[INFO] FINAL CONFIG FOR NIGHTLY JDK${javaVersion}"
println JsonOutput.prettyPrint(JsonOutput.toJson(config))
// Create the nightly job, using adopt's template if the user's one fails
try {
jobDsl targets: jobTemplatePath, ignoreExisting: false, additionalParameters: config
} catch (Exception e) {
println "${e}\n[WARNING] Something went wrong when creating the job dsl. It may be because we are trying to pull the template inside a user repository. Using Adopt's template instead..."
checkoutAdoptPipelines()
jobDsl targets: ADOPT_DEFAULTS_JSON['templateDirectories']['upstream'], ignoreExisting: false, additionalParameters: config
checkoutUserPipelines()
}
generatedPipelines.add(config['JOB_NAME'])
// Create weekly release pipeline — reuses the same config map with the
// job-specific keys (JOB_NAME, SCRIPT, PIPELINE, schedule, releaseType) overwritten.
config.JOB_NAME = "weekly-openjdk${javaVersion}-pipeline"
config.SCRIPT = (params.WEEKLY_SCRIPT_PATH) ?: DEFAULTS_JSON['scriptDirectories']['weekly']
if (!fileExists(config.SCRIPT)) {
println "[WARNING] ${config.SCRIPT} does not exist in your chosen repository. Updating it to use Adopt's instead"
checkoutAdoptPipelines()
config.SCRIPT = ADOPT_DEFAULTS_JSON['scriptDirectories']['weekly']
println "[SUCCESS] The path is now ${config.SCRIPT} relative to ${ADOPT_DEFAULTS_JSON['repository']['pipeline_url']}"
checkoutUserPipelines()
}
// PIPELINE names the nightly job the weekly pipeline will trigger
config.PIPELINE = "openjdk${javaVersion}-pipeline"
config.weekly_release_scmReferences = target.weekly_release_scmReferences
// Load weeklyTemplatePath. This is where the weekly_release_pipeline_job_template.groovy code is located compared to the repository root. This actually sets up the weekly pipeline job using the parameters above.
def weeklyTemplatePath = (params.WEEKLY_TEMPLATE_PATH) ?: DEFAULTS_JSON['templateDirectories']['weekly']
if (enablePipelineSchedule.toBoolean()) {
try {
config.put('pipelineSchedule', target.triggerSchedule_weekly)
} catch (Exception ex) {
config.put('pipelineSchedule', '0 0 31 2 0')
}
}
config.releaseType = "Weekly"
println "[INFO] CREATING JDK${javaVersion} WEEKLY RELEASE PIPELINE WITH NEW CONFIG VALUES:"
println "JOB_NAME = ${config.JOB_NAME}"
println "SCRIPT = ${config.SCRIPT}"
println "PIPELINE = ${config.PIPELINE}"
println "releaseType = ${config.releaseType}"
println "weekly_release_scmReferences = ${config.weekly_release_scmReferences}"
// Create the weekly job, falling back to Adopt's template on failure (mirrors nightly handling)
try {
jobDsl targets: weeklyTemplatePath, ignoreExisting: false, additionalParameters: config
} catch (Exception e) {
println "${e}\n[WARNING] Something went wrong when creating the weekly job dsl. It may be because we are trying to pull the template inside a user repository. Using Adopt's template instead..."
checkoutAdoptPipelines()
jobDsl targets: ADOPT_DEFAULTS_JSON['templateDirectories']['weekly'], ignoreExisting: false, additionalParameters: config
checkoutUserPipelines()
}
generatedPipelines.add(config['JOB_NAME'])
// config.load() loads into the current groovy binding, and returns "this", so we need to reset variables before next load of target
// (otherwise a version whose jdkNN.groovy omits a property would silently inherit the previous version's value)
target.targetConfigurations = {}
target.triggerSchedule_nightly = '0 0 31 2 0'
target.triggerSchedule_weekly = '0 0 31 2 0'
target.weekly_release_scmReferences = {}
target.disableJob = false
})
// Fail if nothing was generated
if (generatedPipelines == []) {
throw new Exception('[ERROR] NO PIPELINES WERE GENERATED!')
} else {
println "[SUCCESS] THE FOLLOWING PIPELINES WERE GENERATED IN THE ${jobRoot} FOLDER"
println generatedPipelines
}
}
} finally {
// Always clean up, even on failure (doesn't delete the created jobs)
println '[INFO] Cleaning up...'
cleanWs deleteDirs: true
}
}