
Commit e5b74b0

Support flink engine under the select statement; the results can be read in a stream.
1 parent a23b16a commit e5b74b0

File tree

2 files changed: +7 -1 lines changed


externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala

Lines changed: 6 additions & 1 deletion
@@ -142,7 +142,12 @@ object SchemaHelper {
         .contains(dt.getClass.getSimpleName) => Some(dt.defaultSize)
     case dt @ (BooleanType | _: NumericType | DateType | TimestampType |
         CalendarIntervalType | NullType) =>
-      Some(dt.defaultSize)
+      // decimal type
+      if (dt.isInstanceOf[DecimalType]) {
+        Some(dt.asInstanceOf[DecimalType].precision)
+      } else {
+        Some(dt.defaultSize)
+      }
     case StructType(fields) =>
       val sizeArr = fields.map(f => getColumnSize(f.dataType))
       if (sizeArr.contains(None)) {
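
For readers less familiar with the pattern, here is a minimal standalone sketch of the rule this hunk implements, written with a dedicated DecimalType case instead of the isInstanceOf/asInstanceOf check. It is only an illustration against Spark SQL's public types API (columnSizeOf is a hypothetical name), not the committed code:

import org.apache.spark.sql.types._

// Sketch only: DECIMAL columns report their precision as COLUMN_SIZE,
// while other fixed-width types keep defaultSize. DecimalType extends
// NumericType, so the decimal case must be matched before the generic
// numeric case or it would never be reached.
def columnSizeOf(dt: DataType): Option[Int] = dt match {
  case d: DecimalType => Some(d.precision) // e.g. DecimalType(10, 2) reports 10
  case BooleanType | _: NumericType | DateType | TimestampType | NullType =>
    Some(dt.defaultSize)
  case _ => None // variable-length and nested types are handled by other cases
}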

externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala

Lines changed: 1 addition & 0 deletions
@@ -154,6 +154,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
       val colSize = rowSet.getInt(COLUMN_SIZE)
       schema(pos).dataType match {
         case StringType | BinaryType | _: ArrayType | _: MapType => assert(colSize === 0)
+        case d: DecimalType => assert(colSize === d.precision)
         case StructType(fields) if fields.length == 1 => assert(colSize === 0)
         case o => assert(colSize === o.defaultSize)
       }
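
As a purely illustrative note (hypothetical values, not taken from the suite): with this change, a column typed DECIMAL(18, 4) is expected to report its precision, 18, as COLUMN_SIZE in the GetColumns result, rather than DecimalType's defaultSize:

import org.apache.spark.sql.types.DecimalType

// Hypothetical column type used only to illustrate what the new test case asserts.
val d = DecimalType(18, 4)
assert(d.precision == 18) // the value the suite now expects as COLUMN_SIZE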
