Skip to content

[SPARK-13344] [TEST] Fix harmless accumulator not found exceptions #11222

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,14 @@ import org.apache.spark.serializer.JavaSerializer
class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContext {
import AccumulatorParam._

/**
 * After each test, purge the global accumulator registry so entries created
 * by this suite cannot leak into, or be observed by, subsequent tests.
 * The registry is cleared first; the superclass hook always runs afterwards.
 */
override def afterEach(): Unit = {
  try Accumulators.clear()
  finally super.afterEach()
}

implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =
new AccumulableParam[mutable.Set[A], A] {
def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]) : mutable.Set[A] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,14 @@ class InternalAccumulatorSuite extends SparkFunSuite with LocalSparkContext {
import InternalAccumulator._
import AccumulatorParam._

/**
 * Per-test teardown: wipe the global accumulator registry so state from one
 * test cannot bleed into the next. Clearing happens before the superclass
 * teardown, which is guaranteed to run even if clearing throws.
 */
override def afterEach(): Unit = {
  try Accumulators.clear()
  finally super.afterEach()
}

test("get param") {
assert(getParam(EXECUTOR_DESERIALIZE_TIME) === LongAccumulatorParam)
assert(getParam(EXECUTOR_RUN_TIME) === LongAccumulatorParam)
Expand Down
18 changes: 14 additions & 4 deletions core/src/test/scala/org/apache/spark/SparkFunSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,26 @@
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}

/**
* Base abstract class for all unit tests in Spark for handling common functionality.
*/
private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
private[spark] abstract class SparkFunSuite
extends FunSuite
with BeforeAndAfterAll
with Logging {
// scalastyle:on

/**
 * Suite-level teardown: clear the accumulator registry once the whole suite
 * has finished, so suites that use accumulators without a SparkContext do
 * not leak map entries into later suites. The superclass hook runs in the
 * finally block so it executes regardless of whether clearing fails.
 */
protected override def afterAll(): Unit = {
  try Accumulators.clear()
  finally super.afterAll()
}

/**
* Log the suite name and the test name before and after each test.
*
Expand All @@ -42,8 +54,6 @@ private[spark] abstract class SparkFunSuite extends FunSuite with Logging {
test()
} finally {
logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
// Avoid leaking map entries in tests that use accumulators without SparkContext
Accumulators.clear()
}
}

Expand Down