Skip to content

Commit 0faac2a

Browse files
committed
Fix tests.
1 parent aeed617 commit 0faac2a

File tree

1 file changed: 16 additions (+16), 19 deletions (-19)

sql/core/src/test/scala/org/apache/spark/sql/execution/ExternalAppendOnlyUnsafeRowArraySuite.scala

Lines changed: 16 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -27,32 +27,29 @@ import org.apache.spark.sql.catalyst.expressions.UnsafeRow
2727

2828
class ExternalAppendOnlyUnsafeRowArraySuite extends SparkFunSuite with LocalSparkContext {
2929
private val random = new java.util.Random()
30-
private var taskContext: TaskContext = _
31-
32-
override def afterAll(): Unit = try {
33-
TaskContext.unset()
34-
} finally {
35-
super.afterAll()
36-
}
3730

3831
private def withExternalArray(inMemoryThreshold: Int, spillThreshold: Int)
3932
(f: ExternalAppendOnlyUnsafeRowArray => Unit): Unit = {
4033
sc = new SparkContext("local", "test", new SparkConf(false))
4134

42-
taskContext = MemoryTestingUtils.fakeTaskContext(SparkEnv.get)
35+
val taskContext = MemoryTestingUtils.fakeTaskContext(SparkEnv.get)
4336
TaskContext.setTaskContext(taskContext)
4437

45-
val array = new ExternalAppendOnlyUnsafeRowArray(
46-
taskContext.taskMemoryManager(),
47-
SparkEnv.get.blockManager,
48-
SparkEnv.get.serializerManager,
49-
taskContext,
50-
1024,
51-
SparkEnv.get.memoryManager.pageSizeBytes,
52-
inMemoryThreshold,
53-
spillThreshold)
54-
try f(array) finally {
55-
array.clear()
38+
try {
39+
val array = new ExternalAppendOnlyUnsafeRowArray(
40+
taskContext.taskMemoryManager(),
41+
SparkEnv.get.blockManager,
42+
SparkEnv.get.serializerManager,
43+
taskContext,
44+
1024,
45+
SparkEnv.get.memoryManager.pageSizeBytes,
46+
inMemoryThreshold,
47+
spillThreshold)
48+
try f(array) finally {
49+
array.clear()
50+
}
51+
} finally {
52+
TaskContext.unset()
5653
}
5754
}
5855

Commit comments: 0