Skip to content

Commit 20aa7c6

Browse files
committed
Fixed more line length issues.
1 parent 29aa099 commit 20aa7c6

File tree

2 files changed

+4
-2
lines changed

2 files changed

+4
-2
lines changed

streaming/src/main/scala/org/apache/spark/streaming/rdd/WriteAheadLogBackedBlockRDD.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@ class WriteAheadLogBackedBlockRDD[T: ClassTag](
116116
override def getPreferredLocations(split: Partition): Seq[String] = {
117117
val partition = split.asInstanceOf[WriteAheadLogBackedBlockRDDPartition]
118118
val blockLocations = getBlockIdLocations().get(partition.blockId)
119-
lazy val segmentLocations = HdfsUtils.getBlockLocations(
119+
lazy val segmentLocations = HdfsUtils.getFileSegmentLocations(
120120
partition.segment.path, partition.segment.offset, partition.segment.length, hadoopConfig)
121121
blockLocations.orElse(segmentLocations).getOrElse(Seq.empty)
122122
}

streaming/src/main/scala/org/apache/spark/streaming/util/HdfsUtils.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,9 @@ private[streaming] object HdfsUtils {
5252
}
5353
}
5454

55-
def getBlockLocations(path: String, offset: Long, length: Long, conf: Configuration): Option[Seq[String]] = {
55+
/** Get the locations of the HDFS blocks containing the given file segment. */
56+
def getFileSegmentLocations(
57+
path: String, offset: Long, length: Long, conf: Configuration): Option[Seq[String]] = {
5658
val dfsPath = new Path(path)
5759
val dfs = getFileSystemForPath(dfsPath, conf)
5860
val fileStatus = dfs.getFileStatus(dfsPath)

0 commit comments

Comments (0)