@@ -15,10 +15,11 @@ USER root
 # Spark dependencies
 # Default values can be overridden at build time
 # (ARGS are in lower case to distinguish them from ENV)
-ARG spark_version="3.2.1"
-ARG hadoop_version="3.2"
-ARG spark_checksum="145ADACF189FECF05FBA3A69841D2804DD66546B11D14FC181AC49D89F3CB5E4FECD9B25F56F0AF767155419CD430838FB651992AEB37D3A6F91E7E009D1F9AE"
-ARG openjdk_version="11"
+ARG spark_version="3.3.0"
+ARG hadoop_version="3"
+ARG scala_version="2.13"
+ARG spark_checksum="4c09dac70e22bf1d5b7b2cabc1dd92aba13237f52a5b682c67982266fc7a0f5e0f964edff9bc76adbd8cb444eb1a00fdc59516147f99e4e2ce068420ff4881f0"
+ARG openjdk_version="17"

 ENV APACHE_SPARK_VERSION="${spark_version}" \
     HADOOP_VERSION="${hadoop_version}"
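
The ARG defaults above can be overridden without editing the Dockerfile, as the comment notes. A minimal sketch of such a build, assuming the tag `my-pyspark-notebook` (a placeholder, not part of this repo):

    docker build -t my-pyspark-notebook \
      --build-arg spark_version="3.3.0" \
      --build-arg hadoop_version="3" \
      --build-arg scala_version="2.13" \
      --build-arg openjdk_version="17" \
      .

Overriding any of the version ARGs changes which tarball is downloaded, so `spark_checksum` has to be overridden in the same command to match.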
@@ -31,10 +32,10 @@ RUN apt-get update --yes && \

 # Spark installation
 WORKDIR /tmp
-RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" && \
-    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
-    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
-    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
+RUN wget -q "https://archive.apache.org/dist/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" && \
+    echo "${spark_checksum} *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" | sha512sum -c - && \
+    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
+    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}.tgz"

 WORKDIR /usr/local

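The `echo "${spark_checksum} *<file>" | sha512sum -c -` line pins the exact tarball, so every version bump needs a new digest. A minimal sketch of computing one locally, assuming the 3.3.0/Hadoop 3/Scala 2.13 artifact used above (the Apache archive also serves a `.tgz.sha512` file next to each tarball for cross-checking):

    SPARK_TGZ="spark-3.3.0-bin-hadoop3-scala2.13.tgz"
    wget -q "https://archive.apache.org/dist/spark/spark-3.3.0/${SPARK_TGZ}"
    # Paste the printed digest into the spark_checksum ARG
    sha512sum "${SPARK_TGZ}"
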
@@ -43,7 +44,7 @@ ENV SPARK_HOME=/usr/local/spark
 ENV SPARK_OPTS="--driver-java-options=-Xms1024M --driver-java-options=-Xmx4096M --driver-java-options=-Dlog4j.logLevel=info" \
     PATH="${PATH}:${SPARK_HOME}/bin"

-RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark && \
+RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}-scala${scala_version}" spark && \
     # Add a link in the before_notebook hook in order to source automatically PYTHONPATH
     mkdir -p /usr/local/bin/before-notebook.d && \
     ln -s "${SPARK_HOME}/sbin/spark-config.sh" /usr/local/bin/before-notebook.d/spark-config.sh
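
After a build, a quick smoke test can confirm the symlink, `SPARK_HOME`, and `PATH` wiring; `my-pyspark-notebook` is again a placeholder tag, and the expected banner is an assumption based on the pinned versions:

    # Should report Spark 3.3.0 built with Scala 2.13
    docker run --rm my-pyspark-notebook spark-submit --version

Keeping `SPARK_HOME=/usr/local/spark` pointed at a symlink means only the `ln -s` target changes on a version bump; everything downstream of the ENV stays stable.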