core/src/main/scala/org/apache/spark/storage/BlockManager.scala (1 file changed, +4 -4 lines)

```diff
@@ -1016,24 +1016,24 @@ private[spark] class BlockManager(
       serializer: Serializer = defaultSerializer): Iterator[Any] = {
     bytes.rewind()
 
-    def doWork() = {
+    def getIterator = {
       val stream = wrapForCompression(blockId, new ByteBufferInputStream(bytes, true))
       serializer.newInstance().deserializeStream(stream).asIterator
     }
 
     if (blockId.isShuffle) {
       // Reducer may need to read many local shuffle blocks and will wrap them into Iterators
-      // at the beginning. The wrapping will cost some memory(compression instance
+      // at the beginning. The wrapping will cost some memory (compression instance
       // initialization, etc.). Reducer read shuffle blocks one by one so we could do the
       // wrapping lazily to save memory.
       class LazyProxyIterator(f: => Iterator[Any]) extends Iterator[Any] {
         lazy val proxy = f
         override def hasNext: Boolean = proxy.hasNext
         override def next(): Any = proxy.next()
       }
-      new LazyProxyIterator(doWork())
+      new LazyProxyIterator(getIterator)
     } else {
-      doWork()
+      getIterator
     }
   }
 
```
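The mechanism this change relies on is untouched by the diff: `LazyProxyIterator` takes a by-name parameter (`f: => Iterator[Any]`), so its argument is not evaluated at construction time, and `lazy val proxy = f` evaluates it exactly once, on the first `hasNext`/`next` call. That is also why the parameterless `getIterator` can be passed without parentheses. Below is a minimal, self-contained sketch of that pattern, assuming nothing from Spark; `LazyProxyDemo` and `buildIterator` are illustrative names, with a `println` standing in for the compression-stream and deserializer setup:

```scala
// A minimal sketch of the by-name + lazy val deferral pattern used in the diff.
// Names here (LazyProxyDemo, buildIterator) are illustrative, not Spark's.
object LazyProxyDemo {
  class LazyProxyIterator[A](f: => Iterator[A]) extends Iterator[A] {
    lazy val proxy = f  // f is evaluated once, on first access to proxy
    override def hasNext: Boolean = proxy.hasNext
    override def next(): A = proxy.next()
  }

  def main(args: Array[String]): Unit = {
    // Stands in for the expensive stream/deserializer setup in the patched method.
    def buildIterator: Iterator[Int] = {
      println("expensive setup runs now")
      Iterator(1, 2, 3)
    }

    val it = new LazyProxyIterator(buildIterator)  // nothing evaluated yet
    println("proxy constructed")
    println(it.next())  // triggers the setup, then yields 1
  }
}
```

The rename itself follows Scala convention: `doWork()` described an action and carried an empty parameter list, while `getIterator` is declared without parentheses and names the value it produces, matching how it reads at both call sites.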