Commit ff061d2

Merge branch 'master' into sync_isPointsOnly
2 parents: 646e677 + 934d6b3

1,997 files changed: 45,192 additions, 20,638 deletions


.ci/Jenkinsfile_coverage

Lines changed: 112 additions & 0 deletions
@@ -0,0 +1,112 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance

stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
  timeout(time: 180, unit: 'MINUTES') {
    timestamps {
      ansiColor('xterm') {
        catchError {
          withEnv([
            'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
          ]) {
            parallel([
              'kibana-intake-agent': {
                withEnv([
                  'NODE_ENV=test' // Needed for jest tests only
                ]) {
                  kibanaPipeline.legacyJobRunner('kibana-intake')()
                }
              },
              'x-pack-intake-agent': {
                withEnv([
                  'NODE_ENV=test' // Needed for jest tests only
                ]) {
                  kibanaPipeline.legacyJobRunner('x-pack-intake')()
                }
              },
              'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
                'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
                'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
                'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
                'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
                'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
                'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
                'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
                'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
                'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
                'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
                'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
                'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
              ]),
              'kibana-xpack-agent-1': kibanaPipeline.withWorkers('kibana-xpack-tests-1', { kibanaPipeline.buildXpack() }, [
                'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
                'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
              ]),
              'kibana-xpack-agent-2': kibanaPipeline.withWorkers('kibana-xpack-tests-2', { kibanaPipeline.buildXpack() }, [
                'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
                'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
              ]),

              'kibana-xpack-agent-3': kibanaPipeline.withWorkers('kibana-xpack-tests-3', { kibanaPipeline.buildXpack() }, [
                'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
                'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
                'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
                'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
                'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
                'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
              ]),
            ])
            kibanaPipeline.jobRunner('tests-l', false) {
              kibanaPipeline.downloadCoverageArtifacts()
              kibanaPipeline.bash(
                '''
                  # bootstrap from x-pack folder
                  source src/dev/ci_setup/setup_env.sh
                  cd x-pack
                  yarn kbn bootstrap --prefer-offline
                  cd ..
                  # extract archives
                  mkdir -p /tmp/extracted_coverage
                  echo extracting intakes
                  tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
                  tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
                  echo extracting kibana-oss-tests
                  tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
                  echo extracting kibana-xpack-tests
                  for i in {1..3}; do
                    tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests-${i}/kibana-coverage.tar.gz -C /tmp/extracted_coverage
                  done
                  # replace path in json files to have valid html report
                  pwd=$(pwd)
                  du -sh /tmp/extracted_coverage/target/kibana-coverage/
                  echo replacing path in json files
                  for i in {1..9}; do
                    sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
                  done
                  wait
                  # merge oss & x-pack reports
                  echo merging coverage reports
                  yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
                  yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
                  echo copy mocha reports
                  mkdir -p target/kibana-coverage/mocha-combined
                  cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
                ''',
                "run `yarn kbn bootstrap && merge coverage`"
              )
              sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
              kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
              sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
              kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
              sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
              kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
            }
          }
        }
        kibanaPipeline.sendMail()
      }
    }
  }
}
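The sed loop in the merge step above rewrites absolute source paths recorded by the CI workers so the combined nyc HTML report can resolve files in the local checkout. A minimal Groovy sketch of the same idea, illustrative only and not part of the commit (`rewriteCoveragePaths` is a hypothetical helper name):

// Hypothetical helper, equivalent in spirit to:
//   sed -i "s|/dev/shm/workspace/kibana|$pwd|g" <coverage json>
// Coverage JSON produced on the workers records paths under /dev/shm/workspace/kibana;
// pointing those paths at the current checkout lets `nyc report` find the sources when
// it renders the combined HTML report.
def rewriteCoveragePaths(File coverageJson, String workspace) {
  coverageJson.text = coverageJson.text.replace('/dev/shm/workspace/kibana', workspace)
}

// Example usage (paths assumed, mirroring the script above):
// new File('/tmp/extracted_coverage/target/kibana-coverage/functional').eachFileMatch(~/.*\.json/) {
//   rewriteCoveragePaths(it, System.getProperty('user.dir'))
// }
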

.ci/Jenkinsfile_flaky

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWor
   def numberOfWorkers = Math.min(numberOfExecutions, maxWorkerProcesses)

   for(def i = 1; i <= numberOfWorkers; i++) {
-    def workerExecutions = numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0)
+    def workerExecutions = floor(numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0))

     workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber ->
       for(def j = 0; j < workerExecutions; j++) {
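For context on this one-line change: Groovy's `/` on integers can yield a fractional BigDecimal (e.g. `10 / 4 == 2.5`), so without flooring, `workerExecutions` could be fractional and the `j < workerExecutions` loop would run an extra iteration on some workers. A small illustrative sketch, not part of the commit (`distribute` is a hypothetical name), of how executions are spread:

// Illustrative only: per-worker execution counts using the floored formula from the diff.
def distribute(int numberOfExecutions, int numberOfWorkers) {
  (1..numberOfWorkers).collect { i ->
    def share = numberOfExecutions / numberOfWorkers                    // BigDecimal, possibly fractional
    def extra = (i <= numberOfExecutions % numberOfWorkers) ? 1 : 0     // spread the remainder over the first workers
    Math.floor((share + extra) as double) as int
  }
}

assert distribute(10, 4) == [3, 3, 2, 2]   // sums back to 10
assert distribute(25, 12).sum() == 25
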
Lines changed: 162 additions & 0 deletions
@@ -0,0 +1,162 @@
#!/bin/groovy

// This job effectively has two SCM configurations:
// one for kibana, used to check out this Jenkinsfile (which means it's the job's main SCM configuration), as well as kick-off the downstream verification job
// one for elasticsearch, used to check out the elasticsearch source before building it

// There are two parameters that drive which branch is checked out for each of these, but they will typically be the same
// 'branch_specifier' is for kibana / the job itself
// ES_BRANCH is for elasticsearch

library 'kibana-pipeline-library'
kibanaLibrary.load()

def ES_BRANCH = params.ES_BRANCH

if (!ES_BRANCH) {
  error "Parameter 'ES_BRANCH' must be specified."
}

currentBuild.displayName += " - ${ES_BRANCH}"
currentBuild.description = "ES: ${ES_BRANCH}<br />Kibana: ${params.branch_specifier}"

def PROMOTE_WITHOUT_VERIFY = !!params.PROMOTE_WITHOUT_VERIFICATION

timeout(time: 120, unit: 'MINUTES') {
  timestamps {
    ansiColor('xterm') {
      node('linux && immutable') {
        catchError {
          def VERSION
          def SNAPSHOT_ID
          def DESTINATION

          def scmVars = checkoutEs(ES_BRANCH)
          def GIT_COMMIT = scmVars.GIT_COMMIT
          def GIT_COMMIT_SHORT = sh(script: "git rev-parse --short ${GIT_COMMIT}", returnStdout: true).trim()

          buildArchives('to-archive')

          dir('to-archive') {
            def now = new Date()
            def date = now.format("yyyyMMdd-HHmmss")

            def filesRaw = sh(script: "ls -1", returnStdout: true).trim()
            def files = filesRaw
              .split("\n")
              .collect { filename ->
                // Filename examples
                // elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
                // elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz
                def parts = filename.replace("elasticsearch-oss", "oss").split("-")

                VERSION = VERSION ?: parts[1]
                SNAPSHOT_ID = SNAPSHOT_ID ?: "${date}_${GIT_COMMIT_SHORT}"
                DESTINATION = DESTINATION ?: "${VERSION}/archives/${SNAPSHOT_ID}"

                return [
                  filename: filename,
                  checksum: filename + '.sha512',
                  url: "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${DESTINATION}/${filename}".toString(),
                  version: parts[1],
                  platform: parts[3],
                  architecture: parts[4].split('\\.')[0],
                  license: parts[0] == 'oss' ? 'oss' : 'default',
                ]
              }

            sh 'find * -exec bash -c "shasum -a 512 {} > {}.sha512" \\;'

            def manifest = [
              bucket: "kibana-ci-es-snapshots-daily/${DESTINATION}".toString(),
              branch: ES_BRANCH,
              sha: GIT_COMMIT,
              sha_short: GIT_COMMIT_SHORT,
              version: VERSION,
              generated: now.format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC")),
              archives: files,
            ]
            def manifestJson = toJSON(manifest).toString()
            writeFile file: 'manifest.json', text: manifestJson

            upload(DESTINATION, '*.*')

            sh "cp manifest.json manifest-latest.json"
            upload(VERSION, 'manifest-latest.json')
          }

          if (PROMOTE_WITHOUT_VERIFY) {
            esSnapshots.promote(VERSION, SNAPSHOT_ID)

            emailext(
              to: 'build-kibana@elastic.co',
              subject: "ES snapshot promoted without verification: ${params.ES_BRANCH}",
              body: '${SCRIPT,template="groovy-html.template"}',
              mimeType: 'text/html',
            )
          } else {
            build(
              propagate: false,
              wait: false,
              job: 'elasticsearch+snapshots+verify',
              parameters: [
                string(name: 'branch_specifier', value: branch_specifier),
                string(name: 'SNAPSHOT_VERSION', value: VERSION),
                string(name: 'SNAPSHOT_ID', value: SNAPSHOT_ID),
              ]
            )
          }
        }

        kibanaPipeline.sendMail()
      }
    }
  }
}

def checkoutEs(branch) {
  retryWithDelay(8, 15) {
    return checkout([
      $class: 'GitSCM',
      branches: [[name: branch]],
      doGenerateSubmoduleConfigurations: false,
      extensions: [],
      submoduleCfg: [],
      userRemoteConfigs: [[
        credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba',
        url: 'git@github.com:elastic/elasticsearch',
      ]],
    ])
  }
}

def upload(destination, pattern) {
  return googleStorageUpload(
    credentialsId: 'kibana-ci-gcs-plugin',
    bucket: "gs://kibana-ci-es-snapshots-daily/${destination}",
    pattern: pattern,
    sharedPublicly: false,
    showInline: false,
  )
}

def buildArchives(destination) {
  def props = readProperties file: '.ci/java-versions.properties'
  withEnv([
    // Select the correct JDK for this branch
    "PATH=/var/lib/jenkins/.java/${props.ES_BUILD_JAVA}/bin:${env.PATH}",

    // These Jenkins env vars trigger some automation in the elasticsearch repo that we don't want
    "BUILD_NUMBER=",
    "JENKINS_URL=",
    "BUILD_URL=",
    "JOB_NAME=",
    "NODE_NAME=",
  ]) {
    sh """
      ./gradlew -p distribution/archives assemble --parallel
      mkdir -p ${destination}
      find distribution/archives -type f \\( -name 'elasticsearch-*-*-*-*.tar.gz' -o -name 'elasticsearch-*-*-*-*.zip' \\) -not -path *no-jdk* -exec cp {} ${destination} \\;
    """
  }
}
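As a worked example of the archive-filename parsing in the `collect` block above (illustrative only; `describe` is a hypothetical helper that mirrors the logic): "elasticsearch-oss" is first collapsed to "oss" so the `-` split yields the same positions for both licensed and OSS artifacts.

// Illustrative only — mirrors the parsing done in the collect block of this Jenkinsfile.
def describe(String filename) {
  def parts = filename.replace('elasticsearch-oss', 'oss').split('-')
  [
    version: parts[1],                       // e.g. 8.0.0
    platform: parts[3],                      // e.g. linux
    architecture: parts[4].split('\\.')[0],  // e.g. x86_64
    license: parts[0] == 'oss' ? 'oss' : 'default',
  ]
}

assert describe('elasticsearch-oss-8.0.0-SNAPSHOT-linux-x86_64.tar.gz') ==
  [version: '8.0.0', platform: 'linux', architecture: 'x86_64', license: 'oss']
assert describe('elasticsearch-8.0.0-SNAPSHOT-linux-x86_64.tar.gz') ==
  [version: '8.0.0', platform: 'linux', architecture: 'x86_64', license: 'default']
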
Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
#!/bin/groovy

if (!params.branches_yaml) {
  error "'branches_yaml' parameter must be specified"
}

def branches = readYaml text: params.branches_yaml

branches.each { branch ->
  build(
    propagate: false,
    wait: false,
    job: 'elasticsearch+snapshots+build',
    parameters: [
      string(name: 'branch_specifier', value: branch),
      string(name: 'ES_BRANCH', value: branch),
    ]
  )
}
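For reference, `branches_yaml` is expected to be a plain YAML list of branch names, with one downstream `elasticsearch+snapshots+build` run triggered per entry. An illustrative sketch only (the branch names are example values, not prescribed by the commit):

// Example values only — any YAML list of branch names works here.
def branches = readYaml text: '''
- master
- "7.x"
'''
assert branches == ['master', '7.x']   // each entry triggers one elasticsearch+snapshots+build
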
Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load()

def SNAPSHOT_VERSION = params.SNAPSHOT_VERSION
def SNAPSHOT_ID = params.SNAPSHOT_ID

if (!SNAPSHOT_VERSION) {
  error "Parameter SNAPSHOT_VERSION must be specified"
}

if (!SNAPSHOT_ID) {
  error "Parameter SNAPSHOT_ID must be specified"
}

currentBuild.displayName += " - ${SNAPSHOT_VERSION}"
currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch_specifier}"

def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"

timeout(time: 120, unit: 'MINUTES') {
  timestamps {
    ansiColor('xterm') {
      catchError {
        withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
          parallel([
            // TODO we just need to run integration tests from intake?
            'kibana-intake-agent': kibanaPipeline.legacyJobRunner('kibana-intake'),
            'x-pack-intake-agent': kibanaPipeline.legacyJobRunner('x-pack-intake'),
            'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
              'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
              'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
              'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
              'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
              'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
              'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
              'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
              'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
              'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
              'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
              'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
              'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
            ]),
            'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
              'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
              'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
              'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
              'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
              'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
              'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
              'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
              'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
              'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
              'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
            ]),
          ])
        }

        promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
      }

      kibanaPipeline.sendMail()
    }
  }
}

def promoteSnapshot(snapshotVersion, snapshotId) {
  node('linux && immutable') {
    esSnapshots.promote(snapshotVersion, snapshotId)
  }
}
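Note how promotion is gated in this file: `promoteSnapshot()` sits after the `parallel` block but still inside `catchError`, so a failing CI group makes `parallel` throw, `catchError` records the failure and skips the rest of its body, and the promote call is never reached, while `kibanaPipeline.sendMail()` outside `catchError` still runs. A minimal sketch of that control flow, illustrative only and not part of the commit:

// Illustrative only: standard catchError/parallel behavior, analogous to the structure above.
catchError {
  parallel(['a': { error 'boom' }, 'b': { echo 'ok' }])   // 'a' fails, so parallel throws after both finish
  echo 'only reached when every branch succeeded'         // analogous to promoteSnapshot(...)
}
echo 'always reached'                                     // analogous to kibanaPipeline.sendMail()
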
