@@ -48,7 +48,7 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
     bus.metrics.metricRegistry.counter(s"queue.$SHARED_QUEUE.numDroppedEvents").getCount
   }
 
-  private def queueSize(bus: LiveListenerBus): Int = {
+  private def sharedQueueSize(bus: LiveListenerBus): Int = {
     bus.metrics.metricRegistry.getGauges().get(s"queue.$SHARED_QUEUE.size").getValue()
       .asInstanceOf[Int]
   }
@@ -73,12 +73,11 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
     val conf = new SparkConf()
     val counter = new BasicJobCounter
     val bus = new LiveListenerBus(conf)
-    bus.addToSharedQueue(counter)
 
     // Metrics are initially empty.
     assert(bus.metrics.numEventsPosted.getCount === 0)
     assert(numDroppedEvents(bus) === 0)
-    assert(queueSize(bus) === 0)
+    assert(bus.queuedEvents.size === 0)
     assert(eventProcessingTimeCount(bus) === 0)
 
     // Post five events:
@@ -87,17 +86,23 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
     // Five messages should be marked as received and queued, but no messages should be posted to
     // listeners yet because the the listener bus hasn't been started.
     assert(bus.metrics.numEventsPosted.getCount === 5)
-    assert(queueSize(bus) === 5)
+    assert(bus.queuedEvents.size === 5)
+
+    // Add the counter to the bus after messages have been queued for later delivery.
+    bus.addToSharedQueue(counter)
     assert(counter.count === 0)
 
     // Starting listener bus should flush all buffered events
     bus.start(mockSparkContext, mockMetricsSystem)
     Mockito.verify(mockMetricsSystem).registerSource(bus.metrics)
     bus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
     assert(counter.count === 5)
-    assert(queueSize(bus) === 0)
+    assert(sharedQueueSize(bus) === 0)
     assert(eventProcessingTimeCount(bus) === 5)
 
+    // After the bus is started, there should be no more queued events.
+    assert(bus.queuedEvents === null)
+
     // After listener bus has stopped, posting events should not increment counter
     bus.stop()
     (1 to 5).foreach { _ => bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded)) }
@@ -188,18 +193,18 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
     // Post a message to the listener bus and wait for processing to begin:
     bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
     listenerStarted.acquire()
-    assert(queueSize(bus) === 0)
+    assert(sharedQueueSize(bus) === 0)
     assert(numDroppedEvents(bus) === 0)
 
     // If we post an additional message then it should remain in the queue because the listener is
     // busy processing the first event:
     bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
-    assert(queueSize(bus) === 1)
+    assert(sharedQueueSize(bus) === 1)
     assert(numDroppedEvents(bus) === 0)
 
     // The queue is now full, so any additional events posted to the listener will be dropped:
     bus.post(SparkListenerJobEnd(0, jobCompletionTime, JobSucceeded))
-    assert(queueSize(bus) === 1)
+    assert(sharedQueueSize(bus) === 1)
     assert(numDroppedEvents(bus) === 1)
 
     // Allow the the remaining events to be processed so we can stop the listener bus:
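
For reference, a minimal sketch of the buffer-until-start behaviour these assertions exercise. This is not Spark's LiveListenerBus implementation: the class, method, and listener types below (ToyListenerBus, addListener, post, start) are illustrative assumptions; only the idea that queuedEvents buffers pre-start events and becomes null once the bus starts mirrors the test above.

// Illustrative only: events posted before start() are buffered in queuedEvents,
// replayed to every registered listener on start(), and the buffer is then dropped.
import scala.collection.mutable

class ToyListenerBus[E] {
  private val listeners = mutable.Buffer.empty[E => Unit]

  // Mirrors the queuedEvents field asserted on in the test: non-null only before start.
  var queuedEvents: mutable.Buffer[E] = mutable.Buffer.empty

  private var started = false

  def addListener(listener: E => Unit): Unit = synchronized {
    listeners += listener
  }

  def post(event: E): Unit = synchronized {
    if (started) listeners.foreach(_.apply(event)) // deliver immediately once started
    else queuedEvents += event                     // queue for later delivery
  }

  def start(): Unit = synchronized {
    started = true
    queuedEvents.foreach(e => listeners.foreach(_.apply(e))) // flush buffered events
    queuedEvents = null // corresponds to assert(bus.queuedEvents === null) above
  }
}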