
Commit 11080c9

Make building alternate language binding images opt-in rather than default
1 parent 8887b5a · commit 11080c9

File tree

1 file changed: +13, -21 lines

bin/docker-image-tool.sh

Lines changed: 13 additions & 21 deletions
@@ -104,8 +104,8 @@ function build {
     base_img=$(image_ref spark)
   )
   local BASEDOCKERFILE=${BASEDOCKERFILE:-"$IMG_PATH/spark/Dockerfile"}
-  local PYDOCKERFILE=${PYDOCKERFILE:-"$IMG_PATH/spark/bindings/python/Dockerfile"}
-  local RDOCKERFILE=${RDOCKERFILE:-"$IMG_PATH/spark/bindings/R/Dockerfile"}
+  local PYDOCKERFILE=${PYDOCKERFILE:-false}
+  local RDOCKERFILE=${RDOCKERFILE:-false}
 
   docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
     -t $(image_ref spark) \
@@ -114,30 +114,22 @@ function build {
     error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
   fi
 
-  if [ "${PYDOCKERFILE}" != "skip" ]; then
+  if [ "${PYDOCKERFILE}" != "false" ]; then
     docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
       -t $(image_ref spark-py) \
      -f "$PYDOCKERFILE" .
    if [ $? -ne 0 ]; then
      error "Failed to build PySpark Docker image, please refer to Docker build output for details."
    fi
-  else
-    echo "Skipped building PySpark docker image."
  fi
 
-  if [ "${RDOCKERFILE}" != "skip" ]; then
-    if [ -d "${SPARK_HOME}/R/lib" ]; then
-      docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-        -t $(image_ref spark-r) \
-        -f "$RDOCKERFILE" .
-      if [ $? -ne 0 ]; then
-        error "Failed to build SparkR Docker image, please refer to Docker build output for details."
-      fi
-    else
-      echo "SparkR artifacts not found. Skipped building SparkR docker image."
+  if [ "${RDOCKERFILE}" != "false" ]; then
+    docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
+      -t $(image_ref spark-r) \
+      -f "$RDOCKERFILE" .
+    if [ $? -ne 0 ]; then
+      error "Failed to build SparkR Docker image, please refer to Docker build output for details."
     fi
-  else
-    echo "Skipped building SparkR docker image."
   fi
 }
 
@@ -159,10 +151,10 @@ Commands:
 
 Options:
   -f file   Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
-  -p file   Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
-            Specify 'skip' to skip building PySpark docker image.
-  -R file   Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
-            Specify 'skip' to skip building SparkR docker image.
+  -p file   (Optional) Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
+            Skips building PySpark docker image if not specified.
+  -R file   (Optional) Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
+            Skips building SparkR docker image if not specified.
   -r repo   Repository address.
   -t tag    Tag to apply to the built image, or to identify the image to be pushed.
   -m        Use minikube's Docker daemon.
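
For reference, a usage sketch of bin/docker-image-tool.sh after this change; the repository address, tag, and Dockerfile paths below are illustrative placeholders, not taken from the commit:

# Default: build only the Spark JVM image; PySpark and SparkR images are
# no longer built unless explicitly requested.
./bin/docker-image-tool.sh -r docker.io/myrepo -t my-tag build

# Opt in to the binding images by pointing -p / -R at their Dockerfiles.
# (Paths here mirror the defaults removed by this commit and may differ
# depending on how Spark was packaged.)
./bin/docker-image-tool.sh -r docker.io/myrepo -t my-tag \
  -p ./kubernetes/dockerfiles/spark/bindings/python/Dockerfile \
  -R ./kubernetes/dockerfiles/spark/bindings/R/Dockerfile \
  build

This keeps the common JVM-only build fast by default, with the -p and -R flags serving as the single opt-in switch for the Python and R binding images.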

0 commit comments
