
Commit

Fix a silly mistake
sunchao committed Oct 9, 2020
1 parent f582b17 commit e10e59f
Showing 1 changed file with 4 additions and 3 deletions.
core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala: 7 changes (4 additions & 3 deletions)
@@ -65,8 +65,8 @@ private[spark] object HadoopFSUtils extends Logging {
       ignoreLocality: Boolean,
       parallelismThreshold: Int,
       parallelismMax: Int): Seq[(Path, Seq[FileStatus])] = {
-    parallelListLeafFilesInternal(sc, paths, hadoopConf, filter, true, ignoreMissingFiles,
-      ignoreLocality, parallelismThreshold, parallelismMax)
+    parallelListLeafFilesInternal(sc, paths, hadoopConf, filter, isRootLevel = true,
+      ignoreMissingFiles, ignoreLocality, parallelismThreshold, parallelismMax)
   }
 
   private def parallelListLeafFilesInternal(
@@ -212,11 +212,12 @@ private[spark] object HadoopFSUtils extends Logging {
     val (dirs, topLevelFiles) = statuses.partition(_.isDirectory)
     val nestedFiles: Seq[FileStatus] = contextOpt match {
       case Some(context) if dirs.size > parallelismThreshold =>
-        parallelListLeafFiles(
+        parallelListLeafFilesInternal(
           context,
           dirs.map(_.getPath),
           hadoopConf = hadoopConf,
           filter = filter,
+          isRootLevel = false,
           ignoreMissingFiles = ignoreMissingFiles,
           ignoreLocality = ignoreLocality,
           parallelismThreshold = parallelismThreshold,

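For context: the mistake fixed here is that the recursive call inside parallelListLeafFilesInternal went through the public wrapper parallelListLeafFiles, which pins isRootLevel = true, so nested directories were treated as root-level paths. The fix recurses through the internal method directly and passes isRootLevel = false. Below is a minimal, self-contained sketch of that wrapper/worker pattern. It is not the Spark code: the names are hypothetical and java.io.File stands in for the Hadoop FileSystem API.

import java.io.File

// Sketch of the wrapper/worker pattern shown in the diff (simplified, hypothetical names).
object ListLeafFilesSketch {

  // Public entry point: callers always start at the root level.
  def listLeafFiles(paths: Seq[File]): Seq[File] =
    listLeafFilesInternal(paths, isRootLevel = true)

  // Internal worker: recursive calls must flip isRootLevel to false.
  // Recursing through the public wrapper instead (the bug this commit fixes)
  // would silently reset the flag to true for every nested directory.
  // In this sketch the flag is only illustrative; Spark's version uses it to
  // treat root-level paths specially.
  private def listLeafFilesInternal(paths: Seq[File], isRootLevel: Boolean): Seq[File] =
    paths.flatMap { path =>
      val children = Option(path.listFiles()).map(_.toSeq).getOrElse(Seq.empty)
      val (dirs, files) = children.partition(_.isDirectory)
      // Leaf files at this level, plus leaf files of all subdirectories.
      files ++ listLeafFilesInternal(dirs, isRootLevel = false)
    }

  def main(args: Array[String]): Unit =
    listLeafFiles(Seq(new File("."))).take(5).foreach(f => println(f.getPath))
}

Because Spark's flag distinguishes root-level paths from nested ones, recursing through the wrapper quietly discards that distinction, making this a subtle correctness bug rather than a cosmetic one.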