diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index acd628abd7008..b5d1c7ed69c8f 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -229,17 +229,19 @@ private[spark] class BlockManager(
     ThreadUtils.newDaemonCachedThreadPool("block-manager-future", 128))
 
   // Actual storage of where blocks are kept
-  private[spark] val memoryStore =
-    new MemoryStore(conf, blockInfoManager, serializerManager, memoryManager, this)
+  private[spark] lazy val memoryStore = {
+    val store = new MemoryStore(conf, blockInfoManager, serializerManager, memoryManager, this)
+    memoryManager.setMemoryStore(store)
+    store
+  }
   private[spark] val diskStore = new DiskStore(conf, diskBlockManager, securityManager)
-  memoryManager.setMemoryStore(memoryStore)
 
   // Note: depending on the memory manager, `maxMemory` may actually vary over time.
   // However, since we use this only for reporting and logging, what we actually want here is
   // the absolute maximum value that `maxMemory` can ever possibly reach. We may need
   // to revisit whether reporting this value as the "max" is intuitive to the user.
-  private val maxOnHeapMemory = memoryManager.maxOnHeapStorageMemory
-  private val maxOffHeapMemory = memoryManager.maxOffHeapStorageMemory
+  private lazy val maxOnHeapMemory = memoryManager.maxOnHeapStorageMemory
+  private lazy val maxOffHeapMemory = memoryManager.maxOffHeapStorageMemory
 
   private[spark] val externalShuffleServicePort =
     StorageUtils.externalShuffleServicePort(conf)