
Commit 57ae3b1

Fix order dependent test
1 parent b3d0f64 commit 57ae3b1

File tree: 1 file changed, +14 −3 lines


sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala

Lines changed: 14 additions & 3 deletions
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.hive
 
+import org.scalatest.BeforeAndAfterAll
+
 import scala.reflect.ClassTag
 
 
@@ -26,7 +28,16 @@ import org.apache.spark.sql.execution.{BroadcastHashJoin, ShuffledHashJoin}
 import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.hive.test.TestHive._
 
-class StatisticsSuite extends QueryTest {
+class StatisticsSuite extends QueryTest with BeforeAndAfterAll {
+
+  override def beforeAll() = {
+    // HACK: Cached tables do not currently preserve statistics...
+    TestHive.cacheTables = false
+  }
+
+  override def afterAll() = {
+    TestHive.cacheTables = true
+  }
 
   test("parse analyze commands") {
     def assertAnalyzeCommand(analyzeCommand: String, c: Class[_]) {
@@ -126,7 +137,7 @@ class StatisticsSuite extends QueryTest
     val sizes = rdd.queryExecution.analyzed.collect { case mr: MetastoreRelation =>
       mr.statistics.sizeInBytes
     }
-    assert(sizes.size === 1)
+    assert(sizes.size === 1, s"Size wrong for:\n ${rdd.queryExecution}")
     assert(sizes(0).equals(BigInt(5812)),
       s"expected exact size 5812 for test table 'src', got: ${sizes(0)}")
   }
@@ -147,7 +158,7 @@ class StatisticsSuite extends QueryTest
       case r if ct.runtimeClass.isAssignableFrom(r.getClass) => r.statistics.sizeInBytes
     }
     assert(sizes.size === 2 && sizes(0) <= autoBroadcastJoinThreshold,
-      s"query should contain two relations, each of which has size smaller than autoConvertSize")
+      s"query should contain two relations, each of which has size smaller than autoConvertSize instead ${rdd.queryExecution}")
 
     // Using `sparkPlan` because for relevant patterns in HashJoin to be
     // matched, other strategies need to be applied.
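Why this fixes the order dependence: the suite's size assertions rely on TestHive.cacheTables being off (per the in-diff comment, cached tables do not preserve statistics), so the commit forces the flag off for the whole suite with ScalaTest's BeforeAndAfterAll mixin and restores it afterwards, instead of depending on whatever state earlier suites left behind. Below is a minimal, self-contained sketch of that pattern; it assumes ScalaTest 3.x (AnyFunSuite), and FakeTestContext / CacheFlagSuite are hypothetical names used only for illustration, not part of this commit.

import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical stand-in for shared, mutable test state such as TestHive.cacheTables.
object FakeTestContext {
  var cacheTables: Boolean = true
}

class CacheFlagSuite extends AnyFunSuite with BeforeAndAfterAll {

  // Runs once before the first test in this suite: force the flag into the
  // state the assertions rely on, regardless of which suites ran earlier.
  override def beforeAll(): Unit = {
    FakeTestContext.cacheTables = false
  }

  // Runs once after the last test: restore the default so this suite does
  // not leak state into suites that run later.
  override def afterAll(): Unit = {
    FakeTestContext.cacheTables = true
  }

  test("flag is off while this suite runs") {
    assert(!FakeTestContext.cacheTables)
  }
}

Keeping the save/restore in beforeAll/afterAll (rather than inside each test) is what makes the suite insensitive to both test ordering and suite ordering.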
