@@ -25,34 +25,30 @@ import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.api.java.JavaSchemaRDD
 import org.apache.spark.sql.execution.ExplainCommand
 import org.apache.spark.sql.hive.test.TestHive
-import org.apache.spark.sql.test.TestSQLContext

 // Implicits
 import scala.collection.JavaConversions._

 class JavaHiveQLSuite extends FunSuite {
-  lazy val javaCtx = new JavaSparkContext(TestSQLContext.sparkContext)
+  lazy val javaCtx = new JavaSparkContext(TestHive.sparkContext)

   // There is a little trickery here to avoid instantiating two HiveContexts in the same JVM
   lazy val javaHiveCtx = new JavaHiveContext(javaCtx) {
     override val sqlContext = TestHive
   }

-  ignore("SELECT * FROM src") {
+  test("SELECT * FROM src") {
     assert(
       javaHiveCtx.sql("SELECT * FROM src").collect().map(_.getInt(0)) ===
         TestHive.sql("SELECT * FROM src").collect().map(_.getInt(0)).toSeq)
   }

-  private val explainCommandClassName =
-    classOf[ExplainCommand].getSimpleName.stripSuffix("$")
-
   def isExplanation(result: JavaSchemaRDD) = {
     val explanation = result.collect().map(_.getString(0))
-    explanation.size > 1 && explanation.head.startsWith(explainCommandClassName)
+    explanation.size > 1 && explanation.head.startsWith("== Physical Plan ==")
   }

-  ignore("Query Hive native command execution result") {
+  test("Query Hive native command execution result") {
     val tableName = "test_native_commands"

     assertResult(0) {
@@ -63,23 +59,18 @@ class JavaHiveQLSuite extends FunSuite {
       javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)").count()
     }

-    javaHiveCtx.sql("SHOW TABLES").registerTempTable("show_tables")
-
     assert(
       javaHiveCtx
-        .sql("SELECT result FROM show_tables")
+        .sql("SHOW TABLES")
         .collect()
         .map(_.getString(0))
         .contains(tableName))

-    assertResult(Array(Array("key", "int", "None"), Array("value", "string", "None"))) {
-      javaHiveCtx.sql(s"DESCRIBE $tableName").registerTempTable("describe_table")
-
-
+    assertResult(Array(Array("key", "int"), Array("value", "string"))) {
       javaHiveCtx
-        .sql("SELECT result FROM describe_table")
+        .sql(s"describe $tableName")
         .collect()
-        .map(_.getString(0).split("\t").map(_.trim))
+        .map(row => Array(row.get(0).asInstanceOf[String], row.get(1).asInstanceOf[String]))
         .toArray
     }

@@ -89,7 +80,7 @@ class JavaHiveQLSuite extends FunSuite {
     TestHive.reset()
   }

-  ignore("Exactly once semantics for DDL and command statements") {
+  test("Exactly once semantics for DDL and command statements") {
     val tableName = "test_exactly_once"
     val q0 = javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)")
