
Commit 3fc9a80

Merge branch 'main' into feat/custom-product-versions-trino

2 parents 8491513 + 3431707
File tree

5 files changed: +159 -7 lines

- CHANGELOG.md
- opa/Dockerfile
- spark-k8s/Dockerfile
- spark-k8s/stackable/jmx/config.yaml
- spark-k8s/versions.py

CHANGELOG.md

Lines changed: 2 additions & 2 deletions
@@ -83,6 +83,7 @@ All notable changes to this project will be documented in this file.
 - zookeeper: backport ZOOKEEPER-4846, ZOOKEEPER-4921, ZOOKEEPER-4925 into Zookeeper 3.9.3 ([#1150]).
 - testing-tools: Update base image ([#1165]).
 - trino: Enable custom versions ([#1168]).
+- opa: Enable custom versions ([#1170]).

 ### Fixed

@@ -124,7 +125,6 @@ All notable changes to this project will be documented in this file.
 - nifi: Remove `2.2.0` ([#1114]).
 - kafka: Remove `3.7.1` and `3.8.0` ([#1117]).
 - spark-connect-client: Remove `3.5.5` ([#1142]).
-- spark-k8s: Remove the JMX exporter jar ([#1157]).

 [nifi-iceberg-bundle]: https://github.com/stackabletech/nifi-iceberg-bundle
 [#1025]: https://github.com/stackabletech/docker-images/pull/1025
@@ -186,10 +186,10 @@ All notable changes to this project will be documented in this file.
 [#1151]: https://github.com/stackabletech/docker-images/pull/1151
 [#1152]: https://github.com/stackabletech/docker-images/pull/1152
 [#1156]: https://github.com/stackabletech/docker-images/pull/1156
-[#1157]: https://github.com/stackabletech/docker-images/pull/1157
 [#1163]: https://github.com/stackabletech/docker-images/pull/1163
 [#1165]: https://github.com/stackabletech/docker-images/pull/1165
 [#1168]: https://github.com/stackabletech/docker-images/pull/1168
+[#1170]: https://github.com/stackabletech/docker-images/pull/1170

 ## [25.3.0] - 2025-03-21

opa/Dockerfile

Lines changed: 11 additions & 5 deletions
@@ -77,23 +77,29 @@ go install github.com/CycloneDX/cyclonedx-gomod/cmd/cyclonedx-gomod@v1.7.0

 cd "$(/stackable/patchable --images-repo-root=src checkout opa ${PRODUCT})"

+ORIGINAL_VERSION=${PRODUCT}
+NEW_VERSION="${PRODUCT}-stackable${RELEASE}"
+
+sed -i 's/var Version = "'${ORIGINAL_VERSION}'"/var Version = "'${NEW_VERSION}'"/g' v1/version/version.go
+
 # Create snapshot of the source code including custom patches
-tar -czf /stackable/opa-${PRODUCT}-src.tar.gz .
+tar -czf /stackable/opa-${NEW_VERSION}-src.tar.gz .

 # Unfortunately, we need to create a dummy Git repository to allow cyclonedx-gomod to determine the version of OPA
 rm .git
 git init
 git config user.email "fake.commiter@stackable.tech"
 git config user.name "Fake commiter"
 git commit --allow-empty --message "Fake commit, so that we can create a tag"
-git tag "v${PRODUCT}"
+git tag "v${NEW_VERSION}"
 go build -o opa -buildmode=exe
 # move artifact to /stackable/*/ to copy in final image
-~/go/bin/cyclonedx-gomod app -json -output-version 1.5 -output /stackable/opa/"opa_${PRODUCT}.cdx.json" -packages -files
+~/go/bin/cyclonedx-gomod app -json -output-version 1.5 -output /stackable/opa/"opa_${NEW_VERSION}.cdx.json" -packages -files
+sed -i "s/${NEW_VERSION}/${ORIGINAL_VERSION}/g" /stackable/opa/"opa_${NEW_VERSION}.cdx.json"
 # move artifact to /stackable/* to copy in final image
 mv opa /stackable/opa/
 # set correct groups
-chmod -R g=u /stackable/opa /stackable/opa-${PRODUCT}-src.tar.gz
+chmod -R g=u /stackable/opa /stackable/opa-${NEW_VERSION}-src.tar.gz
 EOF

 FROM stackable/image/vector
@@ -113,7 +119,7 @@ LABEL name="Open Policy Agent" \
 COPY --chown=${STACKABLE_USER_UID}:0 opa/licenses /licenses

 COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa /stackable/opa
-COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa-${PRODUCT}-src.tar.gz /stackable/opa-${PRODUCT}-src.tar.gz
+COPY --from=opa-builder --chown=${STACKABLE_USER_UID}:0 /stackable/opa-${PRODUCT}-stackable${RELEASE}-src.tar.gz /stackable/opa-${PRODUCT}-stackable${RELEASE}-src.tar.gz
 COPY --from=multilog-builder --chown=${STACKABLE_USER_UID}:0 /daemontools/admin/daemontools/command/multilog /stackable/multilog

 RUN <<EOF
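
Net effect of the builder-stage changes above: the checked-out OPA source is patched so the binary reports the Stackable-suffixed version, the source snapshot, Git tag and SBOM are produced under that suffixed name, and the SBOM contents are then rewritten back to the upstream version. Below is a minimal Python sketch of the two substitutions, using illustrative values in place of the PRODUCT and RELEASE build args (it is not part of the build):

# Illustrative only: the version and release values are placeholders, not taken from this commit.
original_version = "1.4.2"                              # ${PRODUCT}
new_version = f"{original_version}-stackable0.0.0-dev"  # ${PRODUCT}-stackable${RELEASE}

# Mirrors the sed on v1/version/version.go
version_go_line = f'var Version = "{original_version}"'
patched_line = version_go_line.replace(
    f'var Version = "{original_version}"', f'var Version = "{new_version}"'
)
assert patched_line == 'var Version = "1.4.2-stackable0.0.0-dev"'

# Mirrors the later sed that reverts the suffixed version inside the generated SBOM
sbom_fragment = f'{{"name": "opa", "version": "{new_version}"}}'
reverted = sbom_fragment.replace(new_version, original_version)
assert reverted == '{"name": "opa", "version": "1.4.2"}'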

spark-k8s/Dockerfile

Lines changed: 12 additions & 0 deletions
@@ -131,6 +131,7 @@ ARG AZURE_KEYVAULT_CORE
 ARG JACKSON_DATAFORMAT_XML
 ARG STAX2_API
 ARG WOODSTOX_CORE
+ARG JMX_EXPORTER
 ARG TARGETARCH
 ARG TINI
 ARG STACKABLE_USER_UID
@@ -205,6 +206,8 @@ RUN cp /stackable/spark-${PRODUCT}/connector/connect/server/target/spark-connect
   && cp /stackable/spark-${PRODUCT}/connector/connect/common/target/spark-connect-common_*-${PRODUCT}.jar . \
   && cp /stackable/spark-${PRODUCT}/connector/connect/client/jvm/target/spark-connect-client-jvm_2.12-${PRODUCT}.jar .

+COPY spark-k8s/stackable/jmx /stackable/jmx
+
 WORKDIR /stackable/spark-${PRODUCT}/dist/extra-jars

 RUN <<EOF
@@ -221,8 +224,14 @@ curl --fail "https://repo.stackable.tech/repository/packages/tini/tini-${TINI}-$
   -o /usr/bin/tini
 chmod +x /usr/bin/tini

+# JMX Exporter
+curl --fail "https://repo.stackable.tech/repository/packages/jmx-exporter/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" \
+  -o "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar"
+ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
+
 chmod -R g=u /stackable/spark-${PRODUCT}/dist
 chmod -R g=u /stackable/spark-${PRODUCT}/assembly/target/bom.json
+chmod -R g=u /stackable/jmx
 EOF

 # TODO: java-base installs the Adoptium dnf repo and the Termurin jre which is not needed here.
@@ -232,6 +241,7 @@ FROM stackable/image/java-base AS final
 ARG PRODUCT
 ARG PYTHON
 ARG RELEASE
+ARG JMX_EXPORTER
 ARG HBASE_CONNECTOR
 ARG STACKABLE_USER_UID

@@ -257,6 +267,7 @@ COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRO
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder /stackable/spark-${PRODUCT}-src.tar.gz /stackable
 COPY --chown=${STACKABLE_USER_UID}:0 --from=hbase-connectors-builder /stackable/hbase-connector-${HBASE_CONNECTOR}-src.tar.gz /stackable
 COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
 COPY --from=spark-builder /usr/bin/tini /usr/bin/tini

 COPY --chown=${STACKABLE_USER_UID}:0 spark-k8s/stackable/run-spark.sh /stackable/run-spark.sh
@@ -290,6 +301,7 @@ chown -h ${STACKABLE_USER_UID}:0 /stackable/spark/examples/jars/spark-examples.j

 # fix permissions
 chmod g=u /stackable/spark
+chmod g=u /stackable/jmx
 chmod g=u /stackable/run-spark.sh
 EOF

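For orientation, the additions above reintroduce the JMX Prometheus java agent into the Spark image: the agent jar is downloaded at the version pinned in spark-k8s/versions.py and is exposed both under its versioned name and via an unversioned symlink, so downstream configuration (typically a -javaagent JVM option) can point at a stable path without hard-coding the exporter version. A small illustrative Python sketch of how those paths relate; this is not part of the build, the real work happens in the RUN step above:

# Illustrative sketch only.
jmx_exporter = "1.3.0"  # pinned in spark-k8s/versions.py, passed in as ARG JMX_EXPORTER

download_url = (
    "https://repo.stackable.tech/repository/packages/jmx-exporter/"
    f"jmx_prometheus_javaagent-{jmx_exporter}.jar"
)
versioned_jar = f"/stackable/jmx/jmx_prometheus_javaagent-{jmx_exporter}.jar"
stable_link = "/stackable/jmx/jmx_prometheus_javaagent.jar"  # symlink to versioned_jar

print(download_url)
print(f"{stable_link} -> {versioned_jar}")
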
spark-k8s/stackable/jmx/config.yaml

Lines changed: 132 additions & 0 deletions
@@ -0,0 +1,132 @@
+---
+rules:
+
+# These come from the master
+# Example: master.aliveWorkers
+- pattern: "metrics<name=master\\.(.*), type=counters><>Value"
+  name: spark_master_$1
+
+# These come from the worker
+# Example: worker.coresFree
+- pattern: "metrics<name=worker\\.(.*), type=counters><>Value"
+  name: spark_worker_$1
+
+# These come from the application driver
+# Example: app-20160809000059-0000.driver.DAGScheduler.stage.failedStages
+- pattern: "metrics<name=(.*)\\.driver\\.(DAGScheduler|BlockManager|jvm)\\.(.*), type=gauges><>Value"
+  name: spark_driver_$2_$3
+  type: GAUGE
+  labels:
+    app_id: "$1"
+
+# These come from the application driver
+# Emulate timers for DAGScheduler like messagePRocessingTime
+- pattern: "metrics<name=(.*)\\.driver\\.DAGScheduler\\.(.*), type=counters><>Count"
+  name: spark_driver_DAGScheduler_$2_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+
+- pattern: "metrics<name=(.*)\\.driver\\.HiveExternalCatalog\\.(.*), type=counters><>Count"
+  name: spark_driver_HiveExternalCatalog_$2_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+
+# These come from the application driver
+# Emulate histograms for CodeGenerator
+- pattern: "metrics<name=(.*)\\.driver\\.CodeGenerator\\.(.*), type=counters><>Count"
+  name: spark_driver_CodeGenerator_$2_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+
+# These come from the application driver
+# Emulate timer (keep only count attribute) plus counters for LiveListenerBus
+- pattern: "metrics<name=(.*)\\.driver\\.LiveListenerBus\\.(.*), type=counters><>Count"
+  name: spark_driver_LiveListenerBus_$2_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+
+# Get Gauge type metrics for LiveListenerBus
+- pattern: "metrics<name=(.*)\\.driver\\.LiveListenerBus\\.(.*), type=gauges><>Value"
+  name: spark_driver_LiveListenerBus_$2
+  type: GAUGE
+  labels:
+    app_id: "$1"
+
+# These come from the application driver if it's a streaming application
+# Example: app-20160809000059-0000.driver.com.example.ClassName.StreamingMetrics.streaming.lastCompletedBatch_schedulingDelay
+- pattern: "metrics<name=(.*)\\.driver\\.(.*)\\.StreamingMetrics\\.streaming\\.(.*), type=gauges><>Value"
+  name: spark_driver_streaming_$3
+  labels:
+    app_id: "$1"
+    app_name: "$2"
+
+# These come from the application driver if it's a structured streaming application
+# Example: app-20160809000059-0000.driver.spark.streaming.QueryName.inputRate-total
+- pattern: "metrics<name=(.*)\\.driver\\.spark\\.streaming\\.(.*)\\.(.*), type=gauges><>Value"
+  name: spark_driver_structured_streaming_$3
+  labels:
+    app_id: "$1"
+    query_name: "$2"
+
+# These come from the application executors
+# Examples:
+#  app-20160809000059-0000.0.executor.threadpool.activeTasks (value)
+#  app-20160809000059-0000.0.executor.JvmGCtime (counter)
+
+# filesystem metrics are declared as gauge metrics, but are actually counters
+- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.filesystem\\.(.*), type=gauges><>Value"
+  name: spark_executor_filesystem_$3_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.(.*), type=gauges><>Value"
+  name: spark_executor_$3
+  type: GAUGE
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+- pattern: "metrics<name=(.*)\\.(.*)\\.executor\\.(.*), type=counters><>Count"
+  name: spark_executor_$3_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+- pattern: "metrics<name=(.*)\\.(.*)\\.ExecutorMetrics\\.(.*), type=gauges><>Value"
+  name: spark_executor_$3
+  type: GAUGE
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+# These come from the application executors
+# Example: app-20160809000059-0000.0.jvm.threadpool.activeTasks
+- pattern: "metrics<name=(.*)\\.([0-9]+)\\.(jvm|NettyBlockTransfer)\\.(.*), type=gauges><>Value"
+  name: spark_executor_$3_$4
+  type: GAUGE
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+- pattern: "metrics<name=(.*)\\.([0-9]+)\\.HiveExternalCatalog\\.(.*), type=counters><>Count"
+  name: spark_executor_HiveExternalCatalog_$3_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+    executor_id: "$2"
+
+# These come from the application driver
+# Emulate histograms for CodeGenerator
+- pattern: "metrics<name=(.*)\\.([0-9]+)\\.CodeGenerator\\.(.*), type=counters><>Count"
+  name: spark_executor_CodeGenerator_$3_total
+  type: COUNTER
+  labels:
+    app_id: "$1"
+    executor_id: "$2"

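To make the rule set above more concrete, here is a small, self-contained Python check of what the third rule's regex captures for the example metric mentioned in the comments (app-20160809000059-0000.driver.DAGScheduler.stage.failedStages). The input string format approximates what the JMX exporter matches against, and the exporter additionally sanitizes characters such as dots in the final metric name, so treat this purely as an illustration of the capture groups:

import re

raw = (
    "metrics<name=app-20160809000059-0000.driver.DAGScheduler.stage.failedStages,"
    " type=gauges><>Value"
)

# Same regex as the rule mapping driver gauges to spark_driver_$2_$3
pattern = re.compile(
    r"metrics<name=(.*)\.driver\.(DAGScheduler|BlockManager|jvm)\.(.*), type=gauges><>Value"
)
m = pattern.match(raw)

name = f"spark_driver_{m.group(2)}_{m.group(3)}"  # name: spark_driver_$2_$3
labels = {"app_id": m.group(1)}                   # labels: app_id: "$1"
print(name, labels)
# spark_driver_DAGScheduler_stage.failedStages {'app_id': 'app-20160809000059-0000'}
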
spark-k8s/versions.py

Lines changed: 2 additions & 0 deletions
@@ -13,6 +13,7 @@
         "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
         "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
         "vector": "0.47.0",
+        "jmx_exporter": "1.3.0",
         "tini": "0.19.0",
         "hbase_connector": "1.0.1",
     },
@@ -30,6 +31,7 @@
         "stax2_api": "4.2.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
         "woodstox_core": "6.5.1", # https://mvnrepository.com/artifact/com.fasterxml.jackson.dataformat/jackson-dataformat-xml/2.15.2
         "vector": "0.47.0",
+        "jmx_exporter": "1.3.0",
         "tini": "0.19.0",
         "hbase_connector": "1.0.1",
     },
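
Finally, the version pin added to both Spark entries above is what feeds the new ARG JMX_EXPORTER in spark-k8s/Dockerfile. The mapping from snake_case keys here to upper-case build args appears to be the repository convention, but the tooling that performs it is not part of this diff, so the sketch below is an assumption-laden illustration only:

# Hypothetical illustration of the versions.py key -> Docker build-arg convention
# (jmx_exporter -> JMX_EXPORTER); not the repository's actual build tooling.
spark_versions = {
    "vector": "0.47.0",
    "jmx_exporter": "1.3.0",  # added in this commit, same pin in both Spark entries
    "tini": "0.19.0",
    "hbase_connector": "1.0.1",
}

build_args = {key.upper(): value for key, value in spark_versions.items()}
assert build_args["JMX_EXPORTER"] == "1.3.0"  # consumed as ARG JMX_EXPORTER in spark-k8s/Dockerfile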
