Commit d749f06

feat: ArrayType literal
1 parent a65389e commit d749f06

5 files changed: +12 −12 lines changed


spark/src/main/scala/org/apache/comet/DataTypeSupport.scala

Lines changed: 5 additions & 0 deletions
@@ -73,4 +73,9 @@ object DataTypeSupport {
   val ARRAY_ELEMENT = "array element"
   val MAP_KEY = "map key"
   val MAP_VALUE = "map value"
+
+  def isComplexType(dt: DataType): Boolean = dt match {
+    case _: StructType | _: ArrayType | _: MapType => true
+    case _ => false
+  }
 }
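This hunk promotes isComplexType from a per-file helper into a shared utility on the DataTypeSupport companion object, so CometScanRule, QueryPlanSerde, and the test suites can reuse it. A minimal usage sketch, not part of the commit, assuming only that the Comet spark module and Spark SQL types are on the classpath:

import org.apache.spark.sql.types._
import org.apache.comet.DataTypeSupport.isComplexType

// Primitive types are not complex.
isComplexType(IntegerType)                                      // false
// Arrays, maps, and structs are complex, regardless of what they contain.
isComplexType(ArrayType(IntegerType))                           // true
isComplexType(MapType(StringType, LongType))                    // true
isComplexType(StructType(Seq(StructField("a", IntegerType))))   // true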

spark/src/main/scala/org/apache/comet/rules/CometScanRule.scala

Lines changed: 1 addition & 5 deletions
@@ -37,6 +37,7 @@ import org.apache.spark.sql.types._
 import org.apache.comet.{CometConf, CometSparkSessionExtensions, DataTypeSupport}
 import org.apache.comet.CometConf._
 import org.apache.comet.CometSparkSessionExtensions.{isCometLoaded, isCometScanEnabled, withInfo, withInfos}
+import org.apache.comet.DataTypeSupport.isComplexType
 import org.apache.comet.parquet.{CometParquetScan, SupportsComet}

 /**
@@ -277,11 +278,6 @@ case class CometScanRule(session: SparkSession) extends Rule[SparkPlan] {
     val partitionSchemaSupported =
       typeChecker.isSchemaSupported(partitionSchema, fallbackReasons)

-    def isComplexType(dt: DataType): Boolean = dt match {
-      case _: StructType | _: ArrayType | _: MapType => true
-      case _ => false
-    }
-
     def hasMapsContainingStructs(dataType: DataType): Boolean = {
       dataType match {
         case s: StructType => s.exists(field => hasMapsContainingStructs(field.dataType))

spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala

Lines changed: 4 additions & 1 deletion
@@ -52,6 +52,7 @@ import com.google.protobuf.ByteString

 import org.apache.comet.CometConf
 import org.apache.comet.CometSparkSessionExtensions.{isCometScan, withInfo}
+import org.apache.comet.DataTypeSupport.isComplexType
 import org.apache.comet.expressions._
 import org.apache.comet.objectstore.NativeConfig
 import org.apache.comet.serde.ExprOuterClass.{AggExpr, DataType => ProtoDataType, Expr, ScalarFunc}
@@ -846,9 +847,11 @@ object QueryPlanSerde extends Logging with CometExprShim {
             allowComplex = value == null ||
               // Nested literal support for native reader
               // can be tracked https://github.com/apache/datafusion-comet/issues/1937
+              // now supports only Array of primitive
               (Seq(CometConf.SCAN_NATIVE_ICEBERG_COMPAT, CometConf.SCAN_NATIVE_DATAFUSION)
                 .contains(CometConf.COMET_NATIVE_SCAN_IMPL.get()) && dataType
-                .isInstanceOf[ArrayType])) =>
+                .isInstanceOf[ArrayType]) && !isComplexType(
+                dataType.asInstanceOf[ArrayType].elementType)) =>
           val exprBuilder = ExprOuterClass.Literal.newBuilder()

           if (value == null) {
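The serde change tightens the literal check: a non-null ArrayType literal is only serialized for the native scan implementations when its element type is itself non-complex, so arrays of structs, maps, or nested arrays still fall back to Spark. A minimal sketch of that guard under those assumptions; arrayLiteralSupported is a hypothetical helper written only to illustrate the condition, and isComplexType is inlined here so the example is self-contained (the commit imports it from DataTypeSupport):

import org.apache.spark.sql.types._

// Same predicate as DataTypeSupport.isComplexType, copied for a standalone example.
def isComplexType(dt: DataType): Boolean = dt match {
  case _: StructType | _: ArrayType | _: MapType => true
  case _ => false
}

// Mirrors the condition added in this hunk: the literal must be an array,
// and its element type must not itself be complex.
def arrayLiteralSupported(dataType: DataType): Boolean = dataType match {
  case ArrayType(elementType, _) => !isComplexType(elementType)
  case _ => false
}

arrayLiteralSupported(ArrayType(IntegerType))                                     // true: array of primitives
arrayLiteralSupported(ArrayType(ArrayType(LongType)))                             // false: nested array element
arrayLiteralSupported(ArrayType(StructType(Seq(StructField("a", IntegerType)))))  // false: struct element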

spark/src/test/scala/org/apache/comet/CometFuzzTestSuite.scala

Lines changed: 1 addition & 0 deletions
@@ -39,6 +39,7 @@ import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType
 import org.apache.spark.sql.types._

 import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
+import org.apache.comet.DataTypeSupport.isComplexType
 import org.apache.comet.testing.{DataGenOptions, ParquetGenerator}

 class CometFuzzTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {

spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala

Lines changed: 1 addition & 6 deletions
@@ -44,7 +44,7 @@ import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.internal._
 import org.apache.spark.sql.test._
-import org.apache.spark.sql.types.{ArrayType, DataType, DecimalType, MapType, StructType}
+import org.apache.spark.sql.types.{DecimalType, StructType}

 import org.apache.comet._
 import org.apache.comet.shims.ShimCometSparkSessionExtensions
@@ -1128,9 +1128,4 @@ abstract class CometTestBase
     usingDataSourceExec(conf) &&
       !CometConf.COMET_SCAN_ALLOW_INCOMPATIBLE.get(conf)
   }
-
-  def isComplexType(dt: DataType): Boolean = dt match {
-    case _: StructType | _: ArrayType | _: MapType => true
-    case _ => false
-  }
 }
