Skip to content
This repository was archived by the owner on May 9, 2024. It is now read-only.

Commit 208b7a5

Browse files
committed
Small code style changes
1 parent b70c945 commit 208b7a5

File tree

2 files changed: 3 additions, 3 deletions

core/src/main/scala/org/apache/spark/shuffle/hash/HashShuffleReader.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ private[spark] class HashShuffleReader[K, C](

   /** Read the combined key-values for this reduce task */
   override def read(): Iterator[Product2[K, C]] = {
     val blockStreams = BlockStoreShuffleFetcher.fetchBlockStreams(
-      handle.shuffleId, startPartition, context)
+      handle.shuffleId, startPartition, context)

     // Wrap the streams for compression based on configuration
     val wrappedStreams = blockStreams.map { case (blockId, inputStream) =>

core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import java.io.InputStream
 import java.util.concurrent.LinkedBlockingQueue

 import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.{ArrayBuffer, HashSet}
 import scala.util.{Failure, Try}

 import org.apache.spark.network.buffer.ManagedBuffer
@@ -78,7 +78,7 @@ final class ShuffleBlockFetcherIterator(
   private[this] val localBlocks = new ArrayBuffer[BlockId]()

   /** Remote blocks to fetch, excluding zero-sized blocks. */
-  private[this] val remoteBlocks = new mutable.HashSet[BlockId]()
+  private[this] val remoteBlocks = new HashSet[BlockId]()

   /**
    * A queue to hold our results. This turns the asynchronous model provided by

0 commit comments

Comments
 (0)