Commit 0924a6b

rename 'doWork' into 'getIterator'
1 parent: 07f32c2

1 file changed

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 4 additions & 4 deletions
@@ -1016,24 +1016,24 @@ private[spark] class BlockManager(
       serializer: Serializer = defaultSerializer): Iterator[Any] = {
     bytes.rewind()
 
-    def doWork() = {
+    def getIterator = {
       val stream = wrapForCompression(blockId, new ByteBufferInputStream(bytes, true))
       serializer.newInstance().deserializeStream(stream).asIterator
     }
 
     if (blockId.isShuffle) {
       // Reducer may need to read many local shuffle blocks and will wrap them into Iterators
-      // at the beginning. The wrapping will cost some memory(compression instance
+      // at the beginning. The wrapping will cost some memory (compression instance
       // initialization, etc.). Reducer read shuffle blocks one by one so we could do the
       // wrapping lazily to save memory.
       class LazyProxyIterator(f: => Iterator[Any]) extends Iterator[Any] {
         lazy val proxy = f
         override def hasNext: Boolean = proxy.hasNext
         override def next(): Any = proxy.next()
       }
-      new LazyProxyIterator(doWork())
+      new LazyProxyIterator(getIterator)
     } else {
-      doWork()
+      getIterator
     }
   }
 
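The lazy wrapping in this diff defers all of getIterator's setup cost (stream creation, compression and serializer initialization) until the first element is actually requested. Below is a minimal standalone sketch of the same pattern, assuming a hypothetical expensiveIterator in place of getIterator; the proxy is written generically here, whereas the Spark version uses Iterator[Any].

// Standalone sketch of the lazy-wrapping pattern from the diff (not Spark code).
// `expensiveIterator` is a hypothetical stand-in for getIterator: constructing it
// pays the setup cost that the real code wants to postpone.
object LazyProxyDemo {

  // Same shape as the LazyProxyIterator above: the by-name parameter `f` is not
  // evaluated at construction; the lazy val forces it exactly once, on first access.
  class LazyProxyIterator[A](f: => Iterator[A]) extends Iterator[A] {
    lazy val proxy = f
    override def hasNext: Boolean = proxy.hasNext
    override def next(): A = proxy.next()
  }

  def expensiveIterator(): Iterator[Int] = {
    println("setup cost paid")          // runs once, and only if the iterator is consumed
    Iterator(1, 2, 3)
  }

  def main(args: Array[String]): Unit = {
    val it = new LazyProxyIterator(expensiveIterator())
    println("proxy created, nothing initialized yet")
    println(it.next())                  // first access triggers the setup, then yields 1
    println(it.next())                  // proxy is already built; yields 2
  }
}

The key design choice is the by-name parameter `f: => Iterator[A]`: a reducer that wraps many shuffle blocks up front only pays each block's wrapping cost when it actually reaches that block.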