File tree: 1 file changed (+17 −1 lines)
sql/hive/src/test/scala/org/apache/spark/sql/parquet (ParquetMetastoreSuite)
@@ -31,7 +31,7 @@ import org.apache.spark.sql.hive.test.TestHive._
31
31
case class ParquetData(intField: Int, stringField: String)
32
32
// The data that also includes the partitioning key
33
33
case class ParquetDataWithKey(p: Int, intField: Int, stringField: String)
34
-
34
+ case class ParquetDataWithKeyAndComplexTypes(p: Int, intField: Int, stringField: String)
35
35
36
36
/**
37
37
* A suite to test the automatic conversion of metastore tables with parquet data to use the
@@ -69,6 +69,22 @@ class ParquetMetastoreSuite extends ParquetTest {
69
69
location '${partitionedTableDirWithKey.getCanonicalPath}'
70
70
""")
71
71
72
+ sql(s"""
73
+ create external table partitioned_parquet_with_key_and_complextypes
74
+ (
75
+ intField INT,
76
+ structField STRUCT<intStructField INT, stringStructField STRING>,
77
+ arrayField ARRAY<INT>,
78
+ stringField STRING
79
+ )
80
+ PARTITIONED BY (p int)
81
+ ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
82
+ STORED AS
83
+ INPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
84
+ OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
85
+ location '${partitionedTableDirWithKey.getCanonicalPath}'
86
+ """)
87
+
72
88
sql(s """
73
89
create external table normal_parquet
74
90
(
You can’t perform that action at this time.
0 commit comments