
Commit 73f442b

Adds Date support for HiveThriftServer2 (Hive 0.12.0)
1 parent b9e1c2e commit 73f442b
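With this change, a DATE value produced by Spark SQL (e.g. via CAST(... AS date)) can be returned to JDBC clients through HiveThriftServer2. A minimal client-side sketch of what the new test exercises follows; it is not part of this commit, and the server address, port, and table name src are assumptions:

import java.sql.{Date, DriverManager}

// Hypothetical standalone client. Assumes a HiveThriftServer2 already listening on
// localhost:10000 and an existing table `src` to select from.
object DateQueryExample {
  def main(args: Array[String]): Unit = {
    Class.forName("org.apache.hive.jdbc.HiveDriver")
    val connection = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")
    val statement = connection.createStatement()
    try {
      val rs = statement.executeQuery("SELECT CAST('2011-01-01' AS date) FROM src LIMIT 1")
      if (rs.next()) {
        // With the DateType case added in Shim12.scala, the column arrives as a DATE
        // and can be read as java.sql.Date on the client.
        val d: Date = rs.getDate(1)
        println(d)
      }
    } finally {
      statement.close()
      connection.close()
    }
  }
}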

File tree

2 files changed: 40 additions, 22 deletions


sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala

Lines changed: 35 additions & 21 deletions
@@ -19,7 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.io.File
 import java.net.ServerSocket
-import java.sql.{DriverManager, Statement}
+import java.sql.{Date, DriverManager, Statement}
 import java.util.concurrent.TimeoutException
 
 import scala.collection.mutable.ArrayBuffer
@@ -51,6 +51,15 @@ import org.apache.spark.sql.hive.HiveShim
 class HiveThriftServer2Suite extends FunSuite with Logging {
   Class.forName(classOf[HiveDriver].getCanonicalName)
 
+  object TestData {
+    def getTestDataFilePath(name: String) = {
+      Thread.currentThread().getContextClassLoader.getResource(s"data/files/$name")
+    }
+
+    val smallKv = getTestDataFilePath("small_kv.txt")
+    val smallKvWithNull = getTestDataFilePath("small_kv_with_null.txt")
+  }
+
   def randomListeningPort = {
     // Let the system to choose a random available port to avoid collision with other parallel
     // builds.
@@ -145,12 +154,8 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
       }
     }
 
-    val env = Seq(
-      // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
-      "SPARK_TESTING" -> "0",
-      // Prevents loading classes out of the assembly jar. Otherwise Utils.sparkVersion can't read
-      // proper version information from the jar manifest.
-      "SPARK_PREPEND_CLASSES" -> "")
+    // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
+    val env = Seq("SPARK_TESTING" -> "0")
 
     Process(command, None, env: _*).run(ProcessLogger(
       captureThriftServerOutput("stdout"),
@@ -194,13 +199,10 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("Test JDBC query execution") {
     withJdbcStatement() { statement =>
-      val dataFilePath =
-        Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
-
       val queries =
         s"""SET spark.sql.shuffle.partitions=3;
            |CREATE TABLE test(key INT, val STRING);
-           |LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE test;
+           |LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test;
            |CACHE TABLE test;
          """.stripMargin.split(";").map(_.trim).filter(_.nonEmpty)
 
@@ -216,14 +218,10 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("SPARK-3004 regression: result set containing NULL") {
     withJdbcStatement() { statement =>
-      val dataFilePath =
-        Thread.currentThread().getContextClassLoader.getResource(
-          "data/files/small_kv_with_null.txt")
-
       val queries = Seq(
         "DROP TABLE IF EXISTS test_null",
         "CREATE TABLE test_null(key INT, val STRING)",
-        s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE test_null")
+        s"LOAD DATA LOCAL INPATH '${TestData.smallKvWithNull}' OVERWRITE INTO TABLE test_null")
 
       queries.foreach(statement.execute)
 
@@ -270,24 +268,40 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("SPARK-4292 regression: result set iterator issue") {
     withJdbcStatement() { statement =>
-      val dataFilePath =
-        Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
-
       val queries = Seq(
         "DROP TABLE IF EXISTS test_4292",
         "CREATE TABLE test_4292(key INT, val STRING)",
-        s"LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE test_4292")
+        s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test_4292")
 
       queries.foreach(statement.execute)
 
       val resultSet = statement.executeQuery("SELECT key FROM test_4292")
 
       Seq(238, 86, 311, 27, 165).foreach { key =>
         resultSet.next()
-        assert(resultSet.getInt(1) == key)
+        assert(resultSet.getInt(1) === key)
       }
 
       statement.executeQuery("DROP TABLE IF EXISTS test_4292")
     }
   }
+
+  test("SPARK-4309 regression: Date type support") {
+    withJdbcStatement() { statement =>
+      val queries = Seq(
+        "DROP TABLE IF EXISTS test_date",
+        "CREATE TABLE test_date(key INT, value STRING)",
+        s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test_date")
+
+      queries.foreach(statement.execute)
+
+      val resultSet = statement.executeQuery(
+        "SELECT CAST('2011-01-01' as date) FROM test_date LIMIT 1")
+
+      assertResult(Date.valueOf("2011-01-01")) {
+        resultSet.next()
+        resultSet.getDate(1)
+      }
+    }
+  }
 }
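The tests above rely on a withJdbcStatement helper defined elsewhere in this suite, which also launches and tears down the Thrift server process before handing a Statement to the test body. A rough, hypothetical sketch of that loan pattern (the JDBC URL, default port, and signature are assumptions, not the suite's actual code):

import java.sql.{DriverManager, Statement}

// Hypothetical sketch only: opens a connection to an already-running server,
// lends the Statement to the test body, and always cleans up afterwards.
def withJdbcStatement(port: Int = 10000)(f: Statement => Unit): Unit = {
  val connection = DriverManager.getConnection(s"jdbc:hive2://localhost:$port/default")
  val statement = connection.createStatement()
  try {
    f(statement)
  } finally {
    statement.close()
    connection.close()
  }
}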

sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala

Lines changed: 5 additions & 1 deletion
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.hive.thriftserver
 
-import java.sql.Timestamp
+import java.sql.{Date, Timestamp}
 import java.util.{ArrayList => JArrayList, Map => JMap}
 
 import scala.collection.JavaConversions._
@@ -131,6 +131,8 @@ private[hive] class SparkExecuteStatementOperation(
           to.addColumnValue(ColumnValue.byteValue(from.getByte(ordinal)))
         case ShortType =>
           to.addColumnValue(ColumnValue.shortValue(from.getShort(ordinal)))
+        case DateType =>
+          to.addColumnValue(ColumnValue.dateValue(from(ordinal).asInstanceOf[Date]))
         case TimestampType =>
           to.addColumnValue(
             ColumnValue.timestampValue(from.get(ordinal).asInstanceOf[Timestamp]))
@@ -163,6 +165,8 @@ private[hive] class SparkExecuteStatementOperation(
           to.addColumnValue(ColumnValue.byteValue(null))
         case ShortType =>
           to.addColumnValue(ColumnValue.shortValue(null))
+        case DateType =>
+          to.addColumnValue(ColumnValue.dateValue(null))
         case TimestampType =>
           to.addColumnValue(ColumnValue.timestampValue(null))
         case BinaryType | _: ArrayType | _: StructType | _: MapType =>
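Read together, the two DateType cases added above cover the non-null and NULL paths of the row-to-ColumnValue conversion in SparkExecuteStatementOperation. A minimal sketch of that combined logic; the helper name is hypothetical and the imports assume Hive 0.12's org.apache.hive.service.cli API and Spark's catalyst Row:

import java.sql.Date
import org.apache.hive.service.cli.{ColumnValue, Row => HiveRow}
import org.apache.spark.sql.catalyst.expressions.{Row => SparkRow}

// Hypothetical helper mirroring the two new cases: a non-null Spark SQL DateType
// value travels as java.sql.Date and is wrapped via ColumnValue.dateValue; SQL NULL
// maps to ColumnValue.dateValue(null).
def addDateColumnValue(from: SparkRow, to: HiveRow, ordinal: Int): Unit = {
  if (from.isNullAt(ordinal)) {
    to.addColumnValue(ColumnValue.dateValue(null))
  } else {
    to.addColumnValue(ColumnValue.dateValue(from(ordinal).asInstanceOf[Date]))
  }
}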
