Commit 7331b27

[SPARK-26329][CORE] Fix a test post-rebase.
1 parent: 99addf1

File tree

1 file changed: 12 additions, 6 deletions

core/src/test/scala/org/apache/spark/scheduler/dynalloc/ExecutorMonitorSuite.scala

Lines changed: 12 additions & 6 deletions
@@ -291,15 +291,18 @@ class ExecutorMonitorSuite extends SparkFunSuite {
 
     // First a failed task, to make sure it does not count.
     monitor.onTaskStart(SparkListenerTaskStart(1, 0, taskInfo("1", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", TaskResultLost, taskInfo("1", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", TaskResultLost, taskInfo("1", 1),
+      new ExecutorMetrics, null))
     assert(monitor.timedOutExecutors(idleDeadline) === Seq("1"))
 
     monitor.onTaskStart(SparkListenerTaskStart(1, 0, taskInfo("1", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1),
+      new ExecutorMetrics, null))
     assert(monitor.timedOutExecutors(idleDeadline).isEmpty)
 
     monitor.onTaskStart(SparkListenerTaskStart(3, 0, taskInfo("1", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(3, 0, "foo", Success, taskInfo("1", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(3, 0, "foo", Success, taskInfo("1", 1),
+      new ExecutorMetrics, null))
     assert(monitor.timedOutExecutors(idleDeadline).isEmpty)
 
     // Finish the jobs, now the executor should be idle, but with the shuffle timeout, since the

@@ -351,11 +354,13 @@ class ExecutorMonitorSuite extends SparkFunSuite {
     monitor.onJobStart(SparkListenerJobStart(2, clock.getTimeMillis(), Seq(stage3, stage4)))
 
     monitor.onTaskStart(SparkListenerTaskStart(1, 0, taskInfo("1", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1),
+      new ExecutorMetrics, null))
     assert(monitor.timedOutExecutors(idleDeadline) === Seq("2"))
 
     monitor.onTaskStart(SparkListenerTaskStart(3, 0, taskInfo("2", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(3, 0, "foo", Success, taskInfo("2", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(3, 0, "foo", Success, taskInfo("2", 1),
+      new ExecutorMetrics, null))
     assert(monitor.timedOutExecutors(idleDeadline).isEmpty)
 
     monitor.onJobEnd(SparkListenerJobEnd(1, clock.getTimeMillis(), JobSucceeded))

@@ -385,7 +390,8 @@ class ExecutorMonitorSuite extends SparkFunSuite {
     clock.advance(1000L)
     monitor.onExecutorAdded(SparkListenerExecutorAdded(clock.getTimeMillis(), "1", null))
     monitor.onTaskStart(SparkListenerTaskStart(1, 0, taskInfo("1", 1)))
-    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1), null))
+    monitor.onTaskEnd(SparkListenerTaskEnd(1, 0, "foo", Success, taskInfo("1", 1),
+      new ExecutorMetrics, null))
     monitor.onJobEnd(SparkListenerJobEnd(1, clock.getTimeMillis(), JobSucceeded))
 
     assert(monitor.timedOutExecutors(idleDeadline).isEmpty)
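
Why the test needed fixing: after the rebase, SparkListenerTaskEnd carries the task's ExecutorMetrics ahead of its (nullable) TaskMetrics argument, so every onTaskEnd call in the suite now passes new ExecutorMetrics before the trailing null. Below is a rough sketch of the event shape the updated calls target; it is paraphrased for context, is not part of this commit, and the exact field names are an assumption that may differ between Spark versions.

    // Sketch only: the listener event the updated test calls construct,
    // assuming an executor-metrics parameter sits between TaskInfo and TaskMetrics.
    case class SparkListenerTaskEnd(
        stageId: Int,
        stageAttemptId: Int,
        taskType: String,
        reason: TaskEndReason,
        taskInfo: TaskInfo,
        taskExecutorMetrics: ExecutorMetrics,  // supplied as new ExecutorMetrics in the suite
        taskMetrics: TaskMetrics)              // still passed as null in these tests
      extends SparkListenerEvent

Passing a fresh, empty ExecutorMetrics is enough here: the assertions only exercise ExecutorMonitor's idle-timeout bookkeeping and never inspect the metrics themselves.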
