
Commit 94f8964

Revert "[SPARK-32434][CORE] Support Scala 2.13 in AbstractCommandBuilder and load-spark-env scripts"
This reverts commit aab1e09.
1 parent 4309f56 commit 94f8964

3 files changed: 57 additions & 50 deletions

bin/load-spark-env.cmd

Lines changed: 22 additions & 20 deletions
@@ -37,24 +37,26 @@ if [%SPARK_ENV_LOADED%] == [] (
 
 rem Setting SPARK_SCALA_VERSION if not already set.
 
-if [%SPARK_SCALA_VERSION%] == [] (
-  set SCALA_VERSION_1=2.13
-  set SCALA_VERSION_2=2.12
-
-  set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
-  set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
-  set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
-  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
-    echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
-    echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
-    echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
-    echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
-    exit 1
-  )
-  if exist %ASSEMBLY_DIR1% (
-    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
-  ) else (
-    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
-  )
-)
+rem TODO: revisit for Scala 2.13 support
+set SPARK_SCALA_VERSION=2.12
+rem if [%SPARK_SCALA_VERSION%] == [] (
+rem   set SCALA_VERSION_1=2.12
+rem   set SCALA_VERSION_2=2.11
+rem
+rem   set ASSEMBLY_DIR1=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%
+rem   set ASSEMBLY_DIR2=%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%
+rem   set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
+rem   if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+rem     echo "Presence of build for multiple Scala versions detected (%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%)."
+rem     echo "Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in %SPARK_ENV_CMD%."
+rem     echo "Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd."
+rem     echo "Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd."
+rem     exit 1
+rem   )
+rem   if exist %ASSEMBLY_DIR1% (
+rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
+rem   ) else (
+rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
+rem   )
+rem )
 exit /b 0

bin/load-spark-env.sh

Lines changed: 22 additions & 20 deletions
@@ -43,23 +43,25 @@ fi
 
 # Setting SPARK_SCALA_VERSION if not already set.
 
-if [ -z "$SPARK_SCALA_VERSION" ]; then
-  SCALA_VERSION_1=2.13
-  SCALA_VERSION_2=2.12
-
-  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
-  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
-  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
-  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
-    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
-    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
-    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
-    exit 1
-  fi
-
-  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
-    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
-  else
-    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
-  fi
-fi
+# TODO: revisit for Scala 2.13 support
+export SPARK_SCALA_VERSION=2.12
+#if [ -z "$SPARK_SCALA_VERSION" ]; then
+#  SCALA_VERSION_1=2.12
+#  SCALA_VERSION_2=2.11
+#
+#  ASSEMBLY_DIR_1="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_1}"
+#  ASSEMBLY_DIR_2="${SPARK_HOME}/assembly/target/scala-${SCALA_VERSION_2}"
+#  ENV_VARIABLE_DOC="https://spark.apache.org/docs/latest/configuration.html#environment-variables"
+#  if [[ -d "$ASSEMBLY_DIR_1" && -d "$ASSEMBLY_DIR_2" ]]; then
+#    echo "Presence of build for multiple Scala versions detected ($ASSEMBLY_DIR_1 and $ASSEMBLY_DIR_2)." 1>&2
+#    echo "Remove one of them or, export SPARK_SCALA_VERSION=$SCALA_VERSION_1 in ${SPARK_ENV_SH}." 1>&2
+#    echo "Visit ${ENV_VARIABLE_DOC} for more details about setting environment variables in spark-env.sh." 1>&2
+#    exit 1
+#  fi
+#
+#  if [[ -d "$ASSEMBLY_DIR_1" ]]; then
+#    export SPARK_SCALA_VERSION=${SCALA_VERSION_1}
+#  else
+#    export SPARK_SCALA_VERSION=${SCALA_VERSION_2}
+#  fi
+#fi

launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java

Lines changed: 13 additions & 10 deletions
@@ -235,17 +235,20 @@ String getScalaVersion() {
       return scala;
     }
     String sparkHome = getSparkHome();
+    // TODO: revisit for Scala 2.13 support
     File scala212 = new File(sparkHome, "launcher/target/scala-2.12");
-    File scala213 = new File(sparkHome, "launcher/target/scala-2.13");
-    checkState(!scala212.isDirectory() || !scala213.isDirectory(),
-      "Presence of build for multiple Scala versions detected.\n" +
-      "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
-    if (scala213.isDirectory()) {
-      return "2.13";
-    } else {
-      checkState(scala212.isDirectory(), "Cannot find any build directories.");
-      return "2.12";
-    }
+    // File scala211 = new File(sparkHome, "launcher/target/scala-2.11");
+    // checkState(!scala212.isDirectory() || !scala211.isDirectory(),
+    //   "Presence of build for multiple Scala versions detected.\n" +
+    //   "Either clean one of them or set SPARK_SCALA_VERSION in your environment.");
+    // if (scala212.isDirectory()) {
+    //   return "2.12";
+    // } else {
+    //   checkState(scala211.isDirectory(), "Cannot find any build directories.");
+    //   return "2.11";
+    // }
+    checkState(scala212.isDirectory(), "Cannot find any build directories.");
+    return "2.12";
   }
 
   String getSparkHome() {
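
With this revert, the launcher resolves the Scala version in two steps: an explicit SPARK_SCALA_VERSION environment variable wins, and otherwise only the hard-coded scala-2.12 build directory is accepted. The following is a minimal standalone sketch of that post-revert resolution order, not the AbstractCommandBuilder code itself; the class name, the main method, and the IllegalStateException used in place of the launcher's checkState helper are illustrative assumptions.

import java.io.File;

// Hypothetical illustration class (not part of Spark) mirroring the
// post-revert behavior of AbstractCommandBuilder.getScalaVersion().
public class ScalaVersionSketch {

  static String getScalaVersion(String sparkHome) {
    // 1. An explicit SPARK_SCALA_VERSION always wins.
    String scala = System.getenv("SPARK_SCALA_VERSION");
    if (scala != null) {
      return scala;
    }
    // 2. Otherwise only the Scala 2.12 build directory is considered;
    //    the 2.11/2.13 probing is commented out by this revert.
    File scala212 = new File(sparkHome, "launcher/target/scala-2.12");
    if (!scala212.isDirectory()) {
      // Stand-in for the launcher's checkState(...) helper.
      throw new IllegalStateException("Cannot find any build directories.");
    }
    return "2.12";
  }

  public static void main(String[] args) {
    String sparkHome = args.length > 0 ? args[0] : ".";
    System.out.println("Scala version resolves to " + getScalaVersion(sparkHome));
  }
}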
