Skip to content

Commit c131a7a

Browse files
committed
Run from Spark locally and build the image if no image tag or tgz is specified.
1 parent af7e236 commit c131a7a

File tree

1 file changed

+20
-10
lines changed

1 file changed

+20
-10
lines changed

resource-managers/kubernetes/integration-tests/scripts/setup-integration-test-env.sh

Lines changed: 20 additions & 10 deletions
Original file line number · Diff line number · Diff line change
@@ -16,6 +16,7 @@
1616
# See the License for the specific language governing permissions and
1717
# limitations under the License.
1818
#
19+
set -ex
1920
TEST_ROOT_DIR=$(git rev-parse --show-toplevel)
2021
UNPACKED_SPARK_TGZ="$TEST_ROOT_DIR/target/spark-dist-unpacked"
2122
IMAGE_TAG_OUTPUT_FILE="$TEST_ROOT_DIR/target/image-tag.txt"
@@ -58,50 +59,59 @@ while (( "$#" )); do
5859
shift
5960
done
6061

61-
rm -rf $UNPACKED_SPARK_TGZ
62+
rm -rf "$UNPACKED_SPARK_TGZ"
6263
if [[ $SPARK_TGZ == "N/A" && $IMAGE_TAG == "N/A" ]];
6364
then
64-
echo "Must specify a Spark tarball to build Docker images against with --spark-tgz OR image with --image-tag." && exit 1;
65-
else
65+
# If there is no spark image tag to test with and no src dir, build from current
66+
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
67+
SPARK_INPUT_DIR="$(cd "$SCRIPT_DIR/"../../../../ >/dev/null 2>&1 && pwd )"
68+
DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/resource-managers/kubernetes/docker/src/main/dockerfiles/spark"
69+
elif [[ $IMAGE_TAG == "N/A" ]];
70+
then
71+
# If there is a test src tarball and no image tag we will want to build from that
6672
mkdir -p $UNPACKED_SPARK_TGZ
6773
tar -xzvf $SPARK_TGZ --strip-components=1 -C $UNPACKED_SPARK_TGZ;
74+
SPARK_INPUT_DIR="$UNPACKED_SPARK_TGZ"
75+
DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/kubernetes/dockerfiles/spark"
6876
fi
6977

78+
79+
# If there is a specific Spark image skip building and extraction/copy
7080
if [[ $IMAGE_TAG == "N/A" ]];
7181
then
7282
IMAGE_TAG=$(uuidgen);
73-
cd $UNPACKED_SPARK_TGZ
83+
cd $SPARK_INPUT_DIR
7484

7585
# Build PySpark image
76-
LANGUAGE_BINDING_BUILD_ARGS="-p $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/python/Dockerfile"
86+
LANGUAGE_BINDING_BUILD_ARGS="-p $DOCKER_FILE_BASE_PATH/bindings/python/Dockerfile"
7787

7888
# Build SparkR image
79-
LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $UNPACKED_SPARK_TGZ/kubernetes/dockerfiles/spark/bindings/R/Dockerfile"
89+
LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $DOCKER_FILE_BASE_PATH/bindings/R/Dockerfile"
8090

8191
case $DEPLOY_MODE in
8292
cloud)
8393
# Build images
84-
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
94+
$SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
8595

8696
# Push images appropriately
8797
if [[ $IMAGE_REPO == gcr.io* ]] ;
8898
then
8999
gcloud docker -- push $IMAGE_REPO/spark:$IMAGE_TAG
90100
else
91-
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push
101+
$SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG push
92102
fi
93103
;;
94104

95105
docker-for-desktop)
96106
# Only need to build as this will place it in our local Docker repo which is all
97107
# we need for Docker for Desktop to work so no need to also push
98-
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
108+
$SPARK_INPUT_DIR/bin/docker-image-tool.sh -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
99109
;;
100110

101111
minikube)
102112
# Only need to build and if we do this with the -m option for minikube we will
103113
# build the images directly using the minikube Docker daemon so no need to push
104-
$UNPACKED_SPARK_TGZ/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
114+
$SPARK_INPUT_DIR/bin/docker-image-tool.sh -m -r $IMAGE_REPO -t $IMAGE_TAG $LANGUAGE_BINDING_BUILD_ARGS build
105115
;;
106116
*)
107117
echo "Unrecognized deploy mode $DEPLOY_MODE" && exit 1

0 commit comments

Comments (0)