Skip to content

Commit aae5897

Browse files
author
Marcelo Vanzin
committed
Use launcher classes instead of jars in non-release mode.
This makes it easier for people who are used to building Spark with "sbt assembly" instead of "sbt package" to use the new launcher without having to modify their workflow.
1 parent e584fc3 commit aae5897

File tree

4 files changed

+40
-54
lines changed

4 files changed

+40
-54
lines changed

bin/spark-class

Lines changed: 24 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -40,33 +40,38 @@ else
4040
fi
4141
fi
4242

43+
# Look for the launcher. In non-release mode, add the compiled classes directly to the classpath
44+
# instead of looking for a jar file, since it's very common for people using sbt to use the
45+
# "assembly" target instead of "package".
4346
SPARK_LAUNCHER_CP=
4447
if [ -f $SPARK_HOME/RELEASE ]; then
4548
LAUNCHER_DIR="$SPARK_HOME/lib"
46-
else
47-
LAUNCHER_DIR="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION"
48-
if [ -n "$SPARK_PREPEND_CLASSES" ]; then
49-
SPARK_LAUNCHER_CP="$LAUNCHER_DIR/classes:"
49+
num_jars="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" | wc -l)"
50+
if [ "$num_jars" -eq "0" -a -z "$SPARK_LAUNCHER_CP" ]; then
51+
echo "Failed to find Spark launcher in $LAUNCHER_DIR." 1>&2
52+
echo "You need to build Spark before running this program." 1>&2
53+
exit 1
5054
fi
51-
fi
5255

53-
num_jars="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" | wc -l)"
54-
if [ "$num_jars" -eq "0" ]; then
55-
echo "Failed to find Spark launcher in $LAUNCHER_DIR." 1>&2
56-
echo "You need to build Spark before running this program." 1>&2
57-
exit 1
58-
fi
56+
LAUNCHER_JARS="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" || true)"
57+
if [ "$num_jars" -gt "1" ]; then
58+
echo "Found multiple Spark launcher jars in $LAUNCHER_DIR:" 1>&2
59+
echo "$LAUNCHER_JARS" 1>&2
60+
echo "Please remove all but one jar." 1>&2
61+
exit 1
62+
fi
5963

60-
LAUNCHER_JARS="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$")"
61-
if [ "$num_jars" -gt "1" ]; then
62-
echo "Found multiple Spark launcher jars in $LAUNCHER_DIR:" 1>&2
63-
echo "$LAUNCHER_JARS" 1>&2
64-
echo "Please remove all but one jar." 1>&2
65-
exit 1
64+
SPARK_LAUNCHER_CP="${LAUNCHER_DIR}/${LAUNCHER_JARS}"
65+
else
66+
LAUNCHER_DIR="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION"
67+
if [ ! -d "$LAUNCHER_DIR/classes" ]; then
68+
echo "Failed to find Spark launcher classes in $LAUNCHER_DIR." 1>&2
69+
echo "You need to build Spark before running this program." 1>&2
70+
exit 1
71+
fi
72+
SPARK_LAUNCHER_CP="$LAUNCHER_DIR/classes"
6673
fi
6774

68-
SPARK_LAUNCHER_CP="${SPARK_LAUNCHER_CP}${LAUNCHER_DIR}/${LAUNCHER_JARS}"
69-
7075
# The launcher library will print arguments separated by a NULL character. Read that in a while
7176
# loop, populating an array that will be used to exec the final command.
7277
CMD=()

bin/spark-class2.cmd

Lines changed: 15 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -29,31 +29,30 @@ if "x%1"=="x" (
2929
exit /b 1
3030
)
3131

32-
set LAUNCHER_JAR=0
33-
if exist "%SPARK_HOME%\RELEASE" goto find_release_launcher
32+
set LAUNCHER_CP=0
33+
if exist %SPARK_HOME%\RELEASE goto find_release_launcher
3434

35-
rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use
36-
rem Scala so it doesn't really matter which one is picked up.
37-
for %%d in (%SPARK_HOME%\launcher\target\scala-2.10\spark-launcher*.jar) do (
38-
set LAUNCHER_JAR=%%d
39-
set SPARK_SCALA_VERSION=2.10
35+
rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
36+
rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
37+
rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
38+
rem the "assembly" target instead of "package".
39+
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
40+
if exist %LAUNCHER_CLASSES% (
41+
set LAUNCHER_CP=%LAUNCHER_CLASSES%
4042
)
41-
for %%d in (%SPARK_HOME%\launcher\target\scala-2.11\spark-launcher*.jar) do (
42-
set LAUNCHER_JAR=%%d
43-
set SPARK_SCALA_VERSION=2.11
44-
)
45-
if not "x%SPARK_PREPEND_CLASSES"=="x" (
46-
set LAUNCHER_JAR=%SPARK_HOME%\launcher\target\scala-%SPARK_SCALA_VERSION%\classes;%LAUNCHER_JAR%
43+
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
44+
if exist %LAUNCHER_CLASSES% (
45+
set LAUNCHER_CP=%LAUNCHER_CLASSES%
4746
)
4847
goto check_launcher
4948

5049
:find_release_launcher
5150
for %%d in (%SPARK_HOME%\lib\spark-launcher*.jar) do (
52-
set LAUNCHER_JAR=%%d
51+
set LAUNCHER_CP=%%d
5352
)
5453

5554
:check_launcher
56-
if "%LAUNCHER_JAR%"=="0" (
55+
if "%LAUNCHER_CP%"=="0" (
5756
echo Failed to find Spark launcher JAR.
5857
echo You need to build Spark before running this program.
5958
exit /b 1
@@ -63,7 +62,7 @@ rem Figure out where java is.
6362
set RUNNER=java
6463
if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
6564

66-
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_JAR% org.apache.spark.launcher.Main %*"') do (
65+
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_CP% org.apache.spark.launcher.Main %*"') do (
6766
set SPARK_CMD=%%i
6867
)
6968
%SPARK_CMD%

launcher/pom.xml

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -79,23 +79,5 @@
7979
<build>
8080
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
8181
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
82-
<plugins>
83-
<plugin>
84-
<groupId>org.apache.maven.plugins</groupId>
85-
<artifactId>maven-jar-plugin</artifactId>
86-
<configuration>
87-
<outputDirectory>target/scala-${scala.binary.version}</outputDirectory>
88-
</configuration>
89-
</plugin>
90-
<plugin>
91-
<groupId>org.scalatest</groupId>
92-
<artifactId>scalatest-maven-plugin</artifactId>
93-
<configuration>
94-
<systemProperties>
95-
<spark.test.home>${project.basedir}/..</spark.test.home>
96-
</systemProperties>
97-
</configuration>
98-
</plugin>
99-
</plugins>
10082
</build>
10183
</project>

pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1131,7 +1131,7 @@
11311131
</environmentVariables>
11321132
<systemProperties>
11331133
<java.awt.headless>true</java.awt.headless>
1134-
<spark.test.home>${session.executionRootDirectory}</spark.test.home>
1134+
<spark.test.home>${spark.test.home}</spark.test.home>
11351135
<spark.testing>1</spark.testing>
11361136
<spark.ui.enabled>false</spark.ui.enabled>
11371137
<spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>

0 commit comments

Comments
 (0)