
Commit f876dea

Author: Anselme Vignon (committed)
Commit message: starting to write tests
1 parent dbceaa3 commit f876dea

File tree

1 file changed: +17 -1 lines changed


sql/hive/src/test/scala/org/apache/spark/sql/parquet/parquetSuites.scala

Lines changed: 17 additions & 1 deletion
@@ -31,7 +31,7 @@ import org.apache.spark.sql.hive.test.TestHive._
 case class ParquetData(intField: Int, stringField: String)
 // The data that also includes the partitioning key
 case class ParquetDataWithKey(p: Int, intField: Int, stringField: String)
-
+case class ParquetDataWithKeyAndComplexTypes(p: Int, intField: Int, stringField: String)
 
 /**
  * A suite to test the automatic conversion of metastore tables with parquet data to use the
@@ -69,6 +69,22 @@ class ParquetMetastoreSuite extends ParquetTest {
         location '${partitionedTableDirWithKey.getCanonicalPath}'
       """)
 
+    sql(s"""
+      create external table partitioned_parquet_with_key_and_complextypes
+      (
+        intField INT,
+        structField STRUCT<intStructField INT, stringStructField STRING>,
+        arrayField ARRAY<INT>,
+        stringField STRING
+      )
+      PARTITIONED BY (p int)
+      ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
+      STORED AS
+        INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
+        OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
+      location '${partitionedTableDirWithKey.getCanonicalPath}'
+    """)
+
     sql(s"""
       create external table normal_parquet
       (

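The new external table declares complex columns (a STRUCT and an ARRAY) over the same partitioned Parquet directory, while the ParquetDataWithKeyAndComplexTypes case class added in the first hunk does not yet carry matching fields. A minimal sketch of how those columns could be mirrored on the Scala side, assuming a hypothetical nested type (StructContainer) that is not part of this commit:

```scala
// Hypothetical sketch only: case classes mirroring the DDL above.
// STRUCT<intStructField INT, stringStructField STRING> maps to a nested case class,
// ARRAY<INT> maps to a Seq[Int], and the partition column p stays a top-level Int.
case class StructContainer(intStructField: Int, stringStructField: String)

case class ParquetDataWithKeyAndComplexTypes(
    p: Int,
    intField: Int,
    structField: StructContainer,
    arrayField: Seq[Int],
    stringField: String)
```

A test in ParquetMetastoreSuite could then read the table back through the sql(...) helper imported from TestHive (visible in the hunk header), for example sql("SELECT p, intField, arrayField FROM partitioned_parquet_with_key_and_complextypes"); no such query is included in this commit.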