Commit aab1e09

[SPARK-32434][CORE] Support Scala 2.13 in AbstractCommandBuilder and load-spark-env scripts
### What changes were proposed in this pull request?

This PR aims to support Scala 2.13 in `AbstractCommandBuilder.java` and the `load-spark-env` scripts.

### Why are the changes needed?

Currently, only Scala 2.12 is supported, and the following fails:

```
$ dev/change-scala-version.sh 2.13
$ build/mvn test -pl core --am -Pscala-2.13 -DwildcardSuites=none -Dtest=org.apache.spark.launcher.SparkLauncherSuite
...
[ERROR] Failures:
[ERROR]   SparkLauncherSuite.testChildProcLauncher:123 expected:<0> but was:<1>
[ERROR]   SparkLauncherSuite.testSparkLauncherGetError:274
[ERROR] Tests run: 6, Failures: 2, Errors: 0, Skipped: 0
```

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

This should be tested manually with the above command.

```
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary for Spark Project Parent POM 3.1.0-SNAPSHOT:
[INFO]
[INFO] Spark Project Parent POM ........................... SUCCESS [  2.186 s]
[INFO] Spark Project Tags ................................. SUCCESS [  4.400 s]
[INFO] Spark Project Local DB ............................. SUCCESS [  1.744 s]
[INFO] Spark Project Networking ........................... SUCCESS [  2.233 s]
[INFO] Spark Project Shuffle Streaming Service ............ SUCCESS [  1.527 s]
[INFO] Spark Project Unsafe ............................... SUCCESS [  5.564 s]
[INFO] Spark Project Launcher ............................. SUCCESS [  1.946 s]
[INFO] Spark Project Core ................................. SUCCESS [01:21 min]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD SUCCESS
[INFO] ------------------------------------------------------------------------
[INFO] Total time:  01:41 min
[INFO] Finished at: 2020-07-24T20:04:34-07:00
[INFO] ------------------------------------------------------------------------
```

Closes apache#29227 from dongjoon-hyun/SPARK-32434.

Authored-by: Dongjoon Hyun <dongjoon@apache.org>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
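One practical consequence of the detection logic restored below (visible in all three diffs) is that a user-set `SPARK_SCALA_VERSION` always wins: both scripts and `AbstractCommandBuilder.getScalaVersion()` consult the environment variable before probing any build directories. A minimal usage sketch, not part of the patch, assuming a source build under `$SPARK_HOME`:

```sh
# Pin the Scala version explicitly so the directory-based auto-detection never runs.
export SPARK_SCALA_VERSION=2.13
"${SPARK_HOME}/bin/spark-shell"
```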
1 parent f642234 commit aab1e09

3 files changed: +50 −57 lines changed


bin/load-spark-env.cmd

Lines changed: 20 additions & 22 deletions

```diff
@@ -37,26 +37,24 @@ if [%SPARK_ENV_LOADED%] == [] (
 
 rem Setting SPARK_SCALA_VERSION if not already set.
 
-rem TODO: revisit for Scala 2.13 support
-set SPARK_SCALA_VERSION=2.12
-rem if [%SPARK_SCALA_VERSION%] == [] (
-rem   set SCALA_VERSION_1=2.12
-rem   set SCALA_VERSION_2=2.11
-rem
-rem   set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
-rem   set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
-rem   set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
-rem   if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
-rem     echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
-rem     echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
-rem     echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
-rem     echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
-rem     exit 1
-rem   )
-rem   if exist %ASSEMBLY_DIR1% (
-rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
-rem   ) else (
-rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
-rem   )
-rem )
+if [%SPARK_SCALA_VERSION%] == [] (
+  set SCALA_VERSION_1=2.13
+  set SCALA_VERSION_2=2.12
+
+  set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
+  set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
+  set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
+  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+    echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
+    echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
+    echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
+    echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
+    exit 1
+  )
+  if exist %ASSEMBLY_DIR1% (
+    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
+  ) else (
+    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
+  )
+)
 exit /b 0
```

bin/load-spark-env.sh

Lines changed: 20 additions & 22 deletions

```diff
@@ -43,25 +43,23 @@ fi
 
 # Setting SPARK_SCALA_VERSION if not already set.
 
-# TODO: revisit for Scala 2.13 support
-export SPARK_SCALA_VERSION=2.12
-#if [ -z "$SPARK_SCALA_VERSION" ]; then
-#  SCALA_VERSION_1=2.12
-#  SCALA_VERSION_2=2.11
-#
-#  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
-#  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
-#  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
-#  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
-#    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
-#    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
-#    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
-#    exit 1
-#  fi
-#
-#  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
-#    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
-#  else
-#    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
-#  fi
-#fi
+if [ -z "$SPARK_SCALA_VERSION" ]; then
+  SCALA_VERSION_1=2.13
+  SCALA_VERSION_2=2.12
+
+  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
+  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
+  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
+  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
+    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
+    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
+    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
+    exit 1
+  fi
+
+  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
+  else
+    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
+  fi
+fi
```
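This mirrors the Windows logic above: prefer 2.13 when only that assembly directory exists, fall back to 2.12 otherwise, and fail fast when both are present. A hypothetical way to exercise the failure path, where the `mkdir` merely simulates a dual build:

```sh
# Simulate builds for both Scala versions, then load the env without an override.
mkdir -p "${SPARK_HOME}/assembly/target/scala-2.12" "${SPARK_HOME}/assembly/target/scala-2.13"
unset SPARK_SCALA_VERSION
bash "${SPARK_HOME}/bin/load-spark-env.sh"   # prints the "multiple Scala versions" message and exits 1
```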

launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java

Lines changed: 10 additions & 13 deletions

```diff
@@ -235,20 +235,17 @@ String getScalaVersion() {
       return scala;
     }
     String sparkHome = getSparkHome();
-    // TODO: revisit for Scala 2.13 support
     File scala212 = new File(sparkHome, "launcher/target/scala-2.12");
-    // File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
-    // checkState(!scala212.isDirectory() || !scala211.isDirectory(),
-    //   "Presence of build for multiple Scala versions detected.\n" +
-    //   "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
-    // if (scala212.isDirectory()) {
-    //   return "2.12";
-    // } else {
-    //   checkState(scala211.isDirectory(), "Cannot find any build directories.");
-    //   return "2.11";
-    // }
-    checkState(scala212.isDirectory(), "Cannot find any build directories.");
-    return "2.12";
+    File scala213 = new File(sparkHome, "launcher/target/scala-2.13");
+    checkState(!scala212.isDirectory() || !scala213.isDirectory(),
+      "Presence of build for multiple Scala versions detected.\n" +
+      "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
+    if (scala213.isDirectory()) {
+      return "2.13";
+    } else {
+      checkState(scala212.isDirectory(), "Cannot find any build directories.");
+      return "2.12";
+    }
   }
 
   String getSparkHome() {
```
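Note that `getScalaVersion()` follows the same precedence as the scripts: the `return scala;` context line at the top of the hunk is the early return for a user-set `SPARK_SCALA_VERSION`, so the `checkState` guard against dual builds only fires during auto-detection. The probe also looks under `launcher/target` rather than `assembly/target`, presumably because this code runs inside the launcher module and checks its own build output.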
