Skip to content

Commit a3b9693

Browse files
committed
Minor changes
1 parent 7dd6298 commit a3b9693

File tree

3 files changed

+8
-6
lines changed

3 files changed

+8
-6
lines changed

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -388,7 +388,7 @@ private[spark] trait ClientBase extends Logging {
388388

389389
logDebug("===============================================================================")
390390
logDebug("Yarn AM launch context:")
391-
logDebug(s" user class: ${args.userClass}")
391+
logDebug(s" user class: ${Option(args.userClass).getOrElse("N/A")}")
392392
logDebug(" env:")
393393
launchEnv.foreach { case (k, v) => logDebug(s" $k -> $v") }
394394
logDebug(" resources:")

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala

Lines changed: 7 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.deploy.yarn
1919

20-
import java.io.File
2120
import java.net.URI
2221

2322
import scala.collection.JavaConversions._
@@ -125,9 +124,9 @@ trait ExecutorRunnableUtil extends Logging {
125124
localResources: HashMap[String, LocalResource],
126125
timestamp: String,
127126
size: String,
128-
vis: String) = {
127+
vis: String): Unit = {
129128
val uri = new URI(file)
130-
val amJarRsrc = Records.newRecord(classOf[LocalResource]).asInstanceOf[LocalResource]
129+
val amJarRsrc = Records.newRecord(classOf[LocalResource])
131130
amJarRsrc.setType(rtype)
132131
amJarRsrc.setVisibility(LocalResourceVisibility.valueOf(vis))
133132
amJarRsrc.setResource(ConverterUtils.getYarnUrlFromURI(uri))
@@ -171,7 +170,11 @@ trait ExecutorRunnableUtil extends Logging {
171170
val extraCp = sparkConf.getOption("spark.executor.extraClassPath")
172171
ClientBase.populateClasspath(null, yarnConf, sparkConf, env, extraCp)
173172

174-
sparkConf.getExecutorEnv.foreach { case (key, value) => env(key) = value }
173+
sparkConf.getExecutorEnv.foreach { case (key, value) =>
174+
// This assumes each executor environment variable set here is a path
175+
// This is kept for backward compatibility and consistency with hadoop
176+
YarnSparkHadoopUtil.addPathToEnvironment(env, key, value)
177+
}
175178

176179
// Keep this for backwards compatibility but users should move to the config
177180
sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>

yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -38,7 +38,6 @@ private[spark] class YarnClientSchedulerBackend(
3838
private var appId: ApplicationId = null
3939
private var stopping: Boolean = false
4040
private var totalExpectedExecutors = 0
41-
private def isStopping(): Boolean = stopping
4241

4342
/**
4443
* Create a Yarn client to submit an application to the ResourceManager.

0 commit comments

Comments (0)