Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,17 @@ object BackendSettings extends BackendSettingsApi {
// Collect unsupported types.
val unsupportedDataTypes = fields.map(_.dataType).collect {
case _: ByteType => "ByteType"
case _: ArrayType => "ArrayType"
case mapType: MapType if mapType.keyType.isInstanceOf[StructType] => "StructType as Key in MapType"
// Parquet scan of nested map with struct as key type is not supported in Velox.
// Parquet scan of nested array with struct/array as element type is not supported in Velox.
case arrayType: ArrayType if arrayType.elementType.isInstanceOf[StructType] =>
"StructType as element type in ArrayType"
case arrayType: ArrayType if arrayType.elementType.isInstanceOf[ArrayType] =>
"ArrayType as element type in ArrayType"
// Parquet scan of nested map with struct as key type,
// or array type as value type is not supported in Velox.
case mapType: MapType if mapType.keyType.isInstanceOf[StructType] =>
"StructType as Key type in MapType"
case mapType: MapType if mapType.valueType.isInstanceOf[ArrayType] =>
"ArrayType as Value type in MapType"
}
for (unsupportedDataType <- unsupportedDataTypes) {
// scalastyle:off println
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -289,9 +289,16 @@ class VeloxDataTypeValidationSuite extends WholeStageTransformerSuite {
}

test("Array type") {
// Validation: BatchScan.
runQueryAndCompare("select array from type1") {
checkOperatorMatch[BatchScanExecTransformer]}

// Validation: BatchScan Project Aggregate Expand Sort Limit
runQueryAndCompare("select int, array from type1 " +
" group by grouping sets(int, array) sort by array, int limit 1") { _ => }
" group by grouping sets(int, array) sort by array, int limit 1") { df => {
val executedPlan = getExecutedPlan(df)
assert(executedPlan.exists(plan => plan.isInstanceOf[BatchScanExecTransformer]))
}}

// Validation: BroadcastHashJoin, Filter, Project
super.sparkConf.set("spark.sql.autoBroadcastJoinThreshold", "10M")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -637,6 +637,11 @@ class VeloxTestSettings extends BackendTestSettings {
// Struct reader does not support implicit schema pruning.
.exclude("vectorized reader: missing some struct fields")
.exclude("vectorized reader: missing all struct fields")
// TODO: Unsupported Array schema in Parquet.
.exclude("vectorized reader: optional array with required elements")
.exclude("vectorized reader: required array with required elements")
.exclude("vectorized reader: required array with optional elements")
.exclude("vectorized reader: required array with legacy format")
enableSuite[GlutenParquetV1PartitionDiscoverySuite]
// Timezone is not supported yet.
.exclude("Resolve type conflicts - decimals, dates and timestamps in partition column")
Expand Down