
Commit 8f7540e

rename
1 parent 5c1f201 commit 8f7540e

7 files changed: +7 -7 lines changed


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 1 addition & 1 deletion
@@ -683,7 +683,7 @@ private[spark] object SparkConf extends Logging {
       AlternateConfig("spark.akka.frameSize", "1.6")),
     "spark.yarn.jars" -> Seq(
       AlternateConfig("spark.yarn.jar", "2.0")),
-    NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM.key -> Seq(
+    MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM.key -> Seq(
       AlternateConfig("spark.reducer.maxReqSizeShuffleToMem", "2.3"),
       AlternateConfig("spark.maxRemoteBlockSizeFetchToMem", "3.0")),
     LISTENER_BUS_EVENT_QUEUE_CAPACITY.key -> Seq(
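
This alternatives map is what keeps the old keys usable after the rename: a lookup of the canonical key falls back through the entry's AlternateConfigs. A minimal sketch of that fallback, assuming a bare SparkConf (the setup is illustrative, not part of this commit):

    import org.apache.spark.SparkConf

    val conf = new SparkConf()
    // Set a deprecated key (marked "3.0" in the AlternateConfig above).
    conf.set("spark.maxRemoteBlockSizeFetchToMem", "200m")
    // Reading the canonical key falls back to the deprecated one
    // (and logs a deprecation warning along the way).
    assert(conf.get("spark.network.maxRemoteBlockSizeFetchToMem") == "200m")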

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 1 addition & 1 deletion
@@ -894,7 +894,7 @@ package object config {
       .checkValue(_ > 0, "The max no. of blocks in flight cannot be non-positive.")
       .createWithDefault(Int.MaxValue)
 
-  private[spark] val NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM =
+  private[spark] val MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM =
     ConfigBuilder("spark.network.maxRemoteBlockSizeFetchToMem")
       .doc("Remote block will be fetched to disk when size of the block is above this threshold " +
         "in bytes. This is to avoid a giant request takes too much memory. Note this " +

core/src/main/scala/org/apache/spark/network/netty/NettyBlockTransferService.scala

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@ private[spark] class NettyBlockTransferService(
     // Everything else is encoded using our binary protocol.
     val metadata = JavaUtils.bufferToArray(serializer.newInstance().serialize((level, classTag)))
 
-    val asStream = blockData.size() > conf.get(config.NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)
+    val asStream = blockData.size() > conf.get(config.MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)
     val callback = new RpcResponseCallback {
       override def onSuccess(response: ByteBuffer): Unit = {
         logTrace(s"Successfully uploaded block $blockId${if (asStream) " as stream" else ""}")
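
The flag computed at this call site is a plain threshold comparison: blocks larger than the configured limit are replicated as a stream rather than materialized in one buffer. A worked example with hypothetical numbers (neither value is a default from this commit):

    val fetchToMemThreshold = 200L * 1024 * 1024    // suppose the config is set to 200m
    val blockSize = 512L * 1024 * 1024              // a 512 MiB block being replicated
    val asStream = blockSize > fetchToMemThreshold  // true: upload as a stream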

core/src/main/scala/org/apache/spark/shuffle/BlockStoreShuffleReader.scala

Lines changed: 1 addition & 1 deletion
@@ -76,7 +76,7 @@ private[spark] class BlockStoreShuffleReader[K, C](
       SparkEnv.get.conf.get(config.REDUCER_MAX_SIZE_IN_FLIGHT) * 1024 * 1024,
       SparkEnv.get.conf.get(config.REDUCER_MAX_REQS_IN_FLIGHT),
       SparkEnv.get.conf.get(config.REDUCER_MAX_BLOCKS_IN_FLIGHT_PER_ADDRESS),
-      SparkEnv.get.conf.get(config.NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM),
+      SparkEnv.get.conf.get(config.MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM),
       SparkEnv.get.conf.get(config.SHUFFLE_DETECT_CORRUPT),
       SparkEnv.get.conf.get(config.SHUFFLE_DETECT_CORRUPT_MEMORY),
       readMetrics,

core/src/main/scala/org/apache/spark/storage/BlockManager.scala

Lines changed: 1 addition & 1 deletion
@@ -246,7 +246,7 @@ private[spark] class BlockManager(
   // Exposed for test
   private[storage] val remoteBlockTempFileManager =
     new BlockManager.RemoteBlockDownloadFileManager(this)
-  private val maxRemoteBlockToMem = conf.get(config.NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)
+  private val maxRemoteBlockToMem = conf.get(config.MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM)
 
   var hostLocalDirManager: Option[HostLocalDirManager] = None

core/src/test/scala/org/apache/spark/DistributedSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -165,7 +165,7 @@ class DistributedSuite extends SparkFunSuite with Matchers with LocalSparkContex
     // also try with block replication as a stream
     val uploadStreamConf = new SparkConf()
     uploadStreamConf.setAll(conf.getAll)
-    uploadStreamConf.set(config.NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM, 1L)
+    uploadStreamConf.set(config.MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM, 1L)
     test(s"$testName (with replication as stream)") {
       testCaching(uploadStreamConf, storageLevel)
     }
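
Setting the threshold to 1L makes virtually every block exceed the fetch-to-memory limit, so these replication tests are forced down the stream path without needing oversized payloads; the BlockManagerSuite hunk below applies the same idea with a 1000-byte threshold to push large blocks to disk.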

core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -1658,7 +1658,7 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
   }
 
   test("fetch remote block to local disk if block size is larger than threshold") {
-    conf.set(NETWORK_MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM, 1000L)
+    conf.set(MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM, 1000L)
 
     val mockBlockManagerMaster = mock(classOf[BlockManagerMaster])
     val mockBlockTransferService = new MockBlockTransferService(0)
