Skip to content

Commit 2f4e8b4

Browse files
author
Marcelo Vanzin
committed
Changes needed to make this work with SPARK-4048.
Also some minor tweaks for the maven build.
1 parent 799fc20 commit 2f4e8b4

File tree

3 files changed: +28 −2 lines changed

launcher/pom.xml

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,11 @@
       <artifactId>log4j</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
@@ -62,12 +67,26 @@
       <artifactId>slf4j-log4j12</artifactId>
       <scope>test</scope>
     </dependency>
+
+    <!-- Not needed by the test code, but referenced by SparkSubmit which is used by the tests. -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>

   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
     <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <outputDirectory>target/scala-${scala.binary.version}</outputDirectory>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest-maven-plugin</artifactId>

launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,7 @@ protected List<String> buildClassPath(String appClassPath) throws IOException {

     addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
     addToClassPath(cp, getenv("YARN_CONF_DIR"));
+    addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH"));
     return cp;
   }

@@ -511,7 +512,7 @@ public boolean accept(File file) {
   }

   private String getenv(String key) {
-    return (env != null) ? env.get(key) : System.getenv(key);
+    return first(env != null ? env.get(key) : null, System.getenv(key));
   }

   /**

pom.xml

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1122,13 +1122,19 @@
             </includes>
             <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
             <argLine>-Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine>
+            <environmentVariables>
+              <!--
+                Setting SPARK_DIST_CLASSPATH is a simple way to make sure any child processes
+                launched by the tests have access to the correct test-time classpath.
+              -->
+              <SPARK_DIST_CLASSPATH>${test_classpath}</SPARK_DIST_CLASSPATH>
+            </environmentVariables>
             <systemProperties>
               <java.awt.headless>true</java.awt.headless>
               <spark.test.home>${session.executionRootDirectory}</spark.test.home>
               <spark.testing>1</spark.testing>
               <spark.ui.enabled>false</spark.ui.enabled>
               <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
-              <spark.executor.extraClassPath>${test_classpath}</spark.executor.extraClassPath>
               <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
             </systemProperties>
           </configuration>

0 commit comments

Comments (0)