@@ -104,8 +104,8 @@ function build {
     base_img=$(image_ref spark)
   )
   local BASEDOCKERFILE=${BASEDOCKERFILE:-"$IMG_PATH/spark/Dockerfile"}
-  local PYDOCKERFILE=${PYDOCKERFILE:-"$IMG_PATH/spark/bindings/python/Dockerfile"}
-  local RDOCKERFILE=${RDOCKERFILE:-"$IMG_PATH/spark/bindings/R/Dockerfile"}
+  local PYDOCKERFILE=${PYDOCKERFILE:-false}
+  local RDOCKERFILE=${RDOCKERFILE:-false}
 
   docker build $NOCACHEARG "${BUILD_ARGS[@]}" \
     -t $(image_ref spark) \
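The new defaults lean on shell parameter expansion: ${VAR:-word} yields $VAR when it is set and non-empty, and the literal word otherwise, so -p/-R now default to the sentinel string "false" instead of a Dockerfile path. A minimal sketch of that idiom under the same variable names (the echo lines stand in for the real docker build calls):

  unset PYDOCKERFILE                      # caller did not pass -p
  PYDOCKERFILE=${PYDOCKERFILE:-false}     # expands to the sentinel "false"
  if [ "${PYDOCKERFILE}" != "false" ]; then
    echo "would build the PySpark image from ${PYDOCKERFILE}"
  else
    echo "PySpark image build skipped"
  fi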
@@ -114,30 +114,22 @@ function build {
     error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
   fi
 
-  if [ "${PYDOCKERFILE}" != "skip" ]; then
+  if [ "${PYDOCKERFILE}" != "false" ]; then
     docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
       -t $(image_ref spark-py) \
       -f "$PYDOCKERFILE" .
     if [ $? -ne 0 ]; then
       error "Failed to build PySpark Docker image, please refer to Docker build output for details."
     fi
-  else
-    echo "Skipped building PySpark docker image."
   fi
 
-  if [ "${RDOCKERFILE}" != "skip" ]; then
-    if [ -d "${SPARK_HOME}/R/lib" ]; then
-      docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-        -t $(image_ref spark-r) \
-        -f "$RDOCKERFILE" .
-      if [ $? -ne 0 ]; then
-        error "Failed to build SparkR Docker image, please refer to Docker build output for details."
-      fi
-    else
-      echo "SparkR artifacts not found. Skipped building SparkR docker image."
+  if [ "${RDOCKERFILE}" != "false" ]; then
+    docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
+      -t $(image_ref spark-r) \
+      -f "$RDOCKERFILE" .
+    if [ $? -ne 0 ]; then
+      error "Failed to build SparkR Docker image, please refer to Docker build output for details."
     fi
-  else
-    echo "Skipped building SparkR docker image."
   fi
 }
 
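Note that the rewritten SparkR branch also drops the old [ -d "${SPARK_HOME}/R/lib" ] guard: the image is now built whenever a Dockerfile is passed via -R, whether or not the R artifacts exist. A caller who wants the old safety net back could gate the flag themselves; a sketch, reusing the removed condition and the previous default path:

  # Pass -R only when the SparkR artifacts are actually present.
  R_ARGS=""
  if [ -d "${SPARK_HOME}/R/lib" ]; then
    R_ARGS="-R $IMG_PATH/spark/bindings/R/Dockerfile"
  fi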
@@ -159,10 +151,10 @@ Commands:
 
 Options:
   -f file               Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
-  -p file               Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
-                        Specify 'skip' to skip building PySpark docker image.
-  -R file               Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
-                        Specify 'skip' to skip building SparkR docker image.
+  -p file               (Optional) Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
+                        Skips building PySpark docker image if not specified.
+  -R file               (Optional) Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
+                        Skips building SparkR docker image if not specified.
   -r repo               Repository address.
   -t tag                Tag to apply to the built image, or to identify the image to be pushed.
   -m                    Use minikube's Docker daemon.
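For reference, hypothetical invocations under the new flags, assuming this script is invoked as Spark's bin/docker-image-tool.sh (the repository, tag, and Dockerfile path below are examples, not values from this change):

  # Builds only the Spark JVM image; the PySpark and SparkR images are skipped.
  ./bin/docker-image-tool.sh -r docker.io/myrepo -t my-tag build

  # Additionally builds the PySpark image from an explicitly supplied Dockerfile.
  ./bin/docker-image-tool.sh -r docker.io/myrepo -t my-tag \
    -p kubernetes/dockerfiles/spark/bindings/python/Dockerfile build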