@@ -322,14 +322,16 @@ private[spark] class Executor(
       // Fetch missing dependencies
       for ((name, timestamp) <- newFiles if currentFiles.getOrElse(name, -1L) < timestamp) {
         logInfo("Fetching " + name + " with timestamp " + timestamp)
+        // Fetch file with useCache mode, close cache for local mode.
         Utils.fetchFile(name, new File(SparkFiles.getRootDirectory), conf,
-          env.securityManager, hadoopConf, timestamp, useCache = true)
+          env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
         currentFiles(name) = timestamp
       }
       for ((name, timestamp) <- newJars if currentJars.getOrElse(name, -1L) < timestamp) {
         logInfo("Fetching " + name + " with timestamp " + timestamp)
+        // Fetch file with useCache mode, close cache for local mode.
         Utils.fetchFile(name, new File(SparkFiles.getRootDirectory), conf,
-          env.securityManager, hadoopConf, timestamp, useCache = true)
+          env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
         currentJars(name) = timestamp
         // Add it to our class loader
         val localName = name.split("/").last
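
Note: the sketch below is a hypothetical, self-contained illustration of the useCache = !isLocal decision introduced in this diff. FetchCacheSketch and its fetch helper are not Spark APIs; only isLocal and the useCache flag correspond to names in the change. The assumption is that in local mode the fetched files already sit on the same machine as the driver, so the shared executor fetch cache adds no benefit.

// Minimal sketch (not Spark's Utils.fetchFile): shows the caching toggle only.
object FetchCacheSketch {
  def fetch(name: String, isLocal: Boolean): Unit = {
    val useCache = !isLocal // mirrors the change in this diff
    if (useCache) {
      println(s"fetching $name via the shared executor cache")
    } else {
      println(s"fetching $name directly; cache disabled in local mode")
    }
  }

  def main(args: Array[String]): Unit = {
    fetch("userLib.jar", isLocal = false) // cluster executor: cache on
    fetch("userLib.jar", isLocal = true)  // local mode: cache off
  }
}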