@@ -19,6 +19,7 @@ package org.apache.spark.util.collection

 import org.apache.spark.SparkEnv
 import org.apache.spark.internal.Logging
+import org.apache.spark.internal.config._
 import org.apache.spark.memory.{MemoryConsumer, MemoryMode, TaskMemoryManager}

 /**
@@ -41,7 +42,7 @@ private[spark] abstract class Spillable[C](taskMemoryManager: TaskMemoryManager)
   protected def forceSpill(): Boolean

   // Number of elements read from input since last spill
-  protected def elementsRead: Long = _elementsRead
+  protected def elementsRead: Int = _elementsRead

   // Called by subclasses every time a record is read
   // It's used for checking spilling frequency
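
Context for the `Long` → `Int` change: `elementsRead` only counts records read since the last spill, and it is compared against `numElementsForceSpillThreshold` (itself narrowed to `Int` in the next hunk), so the narrower type is safe. A minimal sketch of how `Spillable.maybeSpill` consults these fields, paraphrased from the surrounding class rather than quoted from it (`maybeSpill` is not touched by this diff):

```scala
// Paraphrased sketch of Spillable.maybeSpill (unchanged by this diff):
// shows how the Int counter and the force-spill threshold interact.
protected def maybeSpill(collection: C, currentMemory: Long): Boolean = {
  var shouldSpill = false
  // Probe the memory manager only every 32 records to keep the check cheap
  if (elementsRead % 32 == 0 && currentMemory >= myMemoryThreshold) {
    val amountToRequest = 2 * currentMemory - myMemoryThreshold
    val granted = acquireMemory(amountToRequest)
    myMemoryThreshold += granted
    shouldSpill = currentMemory >= myMemoryThreshold
  }
  // Force a spill once the counter exceeds the threshold, regardless of memory
  shouldSpill = shouldSpill || _elementsRead > numElementsForceSpillThreshold
  if (shouldSpill) {
    spill(collection)
    _elementsRead = 0 // the counter resets after every spill, so Int is ample
    myMemoryThreshold = initialMemoryThreshold
  }
  shouldSpill
}
```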
@@ -54,15 +55,15 @@ private[spark] abstract class Spillable[C](taskMemoryManager: TaskMemoryManager)

   // Force this collection to spill when there are this many elements in memory
   // For testing only
-  private[this] val numElementsForceSpillThreshold: Long =
-    SparkEnv.get.conf.getLong("spark.shuffle.spill.numElementsForceSpillThreshold", Long.MaxValue)
+  private[this] val numElementsForceSpillThreshold: Int =
+    SparkEnv.get.conf.get(SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD)

   // Threshold for this collection's size in bytes before we start tracking its memory usage
   // To avoid a large number of small spills, initialize this to a value orders of magnitude > 0
   @volatile private[this] var myMemoryThreshold = initialMemoryThreshold

   // Number of elements read from input since last spill
-  private[this] var _elementsRead = 0L
+  private[this] var _elementsRead = 0

   // Number of bytes spilled in total
   @volatile private[this] var _memoryBytesSpilled = 0L
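
The replacement for the raw `getLong` call is a typed `ConfigEntry` resolved through the added `org.apache.spark.internal.config._` import, so `conf.get(...)` already returns an `Int` and the key string lives in one place. A sketch of how the entry is presumably declared in that package, assuming an `Int.MaxValue` default to preserve the old "never force a spill" behavior (the `.doc` text is illustrative):

```scala
// Hypothetical sketch of the entry in org.apache.spark.internal.config;
// the key and Int type follow from this diff, the default and doc are assumed.
private[spark] val SHUFFLE_SPILL_NUM_ELEMENTS_FORCE_SPILL_THRESHOLD =
  ConfigBuilder("spark.shuffle.spill.numElementsForceSpillThreshold")
    .internal()
    .doc("Maximum number of in-memory elements before the collection is forced to spill.")
    .intConf
    .createWithDefault(Int.MaxValue)
```

Centralizing the default in the `ConfigEntry` also explains why the explicit `Long.MaxValue` fallback disappears from the call site.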