Commit cc40bc9

SachinJanani authored and ymahajan committed
[SNAP-860] Removed hardcoding of size of Array used for storing DecimalType (#38)
Created a JIRA for the -Pspark precheckin: SNAP-914 (https://jira.snappydata.io/browse/SNAP-914)
1 parent 8998a02 commit cc40bc9

3 files changed: +5, -5 lines changed


sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala

Lines changed: 2 additions & 2 deletions
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
  * A Decimal that must have fixed precision (the maximum number of digits) and scale (the number
  * of digits on right side of dot).
  *
- * The precision can be up to 38, scale can also be up to 38 (less or equal to precision).
+ * The precision can be up to 127, scale can also be up to 127 (less or equal to precision).
  *
  * The default precision and scale is (10, 0).
  *
@@ -48,7 +48,7 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
   }

   if (precision > DecimalType.MAX_PRECISION) {
-    throw new AnalysisException(s"DecimalType can only support precision up to 38")
+    throw new AnalysisException(s"DecimalType can only support precision up to ${DecimalType.MAX_PRECISION}")
   }

   // default constructor for Java
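
For context, a minimal sketch of what the changed check means at runtime: constructing a DecimalType whose precision exceeds DecimalType.MAX_PRECISION throws an AnalysisException, and after this change the message reports the configured limit instead of a hardcoded 38. The snippet assumes a build of this fork where MAX_PRECISION has been raised; against stock Apache Spark the same code runs but with the limit still at 38.

import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.types.DecimalType

object DecimalPrecisionCheck {
  def main(args: Array[String]): Unit = {
    // At the limit: a valid type.
    println(DecimalType(DecimalType.MAX_PRECISION, 0))

    // One past the limit: rejected, with the bound in the message now taken
    // from MAX_PRECISION rather than a literal 38.
    try {
      DecimalType(DecimalType.MAX_PRECISION + 1, 0)
    } catch {
      case e: AnalysisException => println(e.getMessage)
    }
  }
}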

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonInferSchema.scala

Lines changed: 2 additions & 2 deletions
@@ -279,8 +279,8 @@ private[sql] object JsonInferSchema {
       case (t1: DecimalType, t2: DecimalType) =>
         val scale = math.max(t1.scale, t2.scale)
         val range = math.max(t1.precision - t1.scale, t2.precision - t2.scale)
-        if (range + scale > 38) {
-          // DecimalType can't support precision > 38
+        if (range + scale > DecimalType.MAX_PRECISION) {
+          // DecimalType can't support precision > DecimalType.MAX_PRECISION
           DoubleType
         } else {
           DecimalType(range + scale, scale)
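
The JsonInferSchema change touches the widening rule used when two inferred decimal types must be merged: keep the larger scale and the larger count of integer digits, and fall back to DoubleType only if the combined precision would exceed the limit. Below is a self-contained sketch of just that rule; it is an illustration, not the actual Spark method, which handles many more type pairs.

import org.apache.spark.sql.types.{DataType, DecimalType, DoubleType}

object DecimalWideningSketch {
  // Merge two inferred decimals the way the patched code does: widest scale,
  // widest integer-digit range, DoubleType when the result would not fit.
  def mergeDecimals(t1: DecimalType, t2: DecimalType): DataType = {
    val scale = math.max(t1.scale, t2.scale)
    val range = math.max(t1.precision - t1.scale, t2.precision - t2.scale)
    if (range + scale > DecimalType.MAX_PRECISION) DoubleType
    else DecimalType(range + scale, scale)
  }

  def main(args: Array[String]): Unit = {
    println(mergeDecimals(DecimalType(10, 2), DecimalType(12, 4)))
    // -> DecimalType(12,4): 8 integer digits plus scale 4
    println(mergeDecimals(DecimalType(38, 0), DecimalType(20, 10)))
    // -> DoubleType when MAX_PRECISION is 38; DecimalType(48,10) if the limit is higher
  }
}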

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaConverter.scala

Lines changed: 1 addition & 1 deletion
@@ -594,7 +594,7 @@ private[sql] object ParquetSchemaConverter {
   }

   // Returns the minimum number of bytes needed to store a decimal with a given `precision`.
-  val minBytesForPrecision = Array.tabulate[Int](39)(computeMinBytesForPrecision)
+  val minBytesForPrecision = Array.tabulate[Int](DecimalType.MAX_PRECISION + 1)(computeMinBytesForPrecision)

   // Max precision of a decimal value stored in `numBytes` bytes
   def maxPrecisionForBytes(numBytes: Int): Int = {
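
ParquetSchemaConverter keeps a precomputed table of how many bytes are needed to hold a decimal's unscaled value at each precision; the fix sizes that table from DecimalType.MAX_PRECISION + 1 rather than the literal 39, so it cannot be indexed past its end if the limit is raised. Below is a standalone sketch of the computation, re-derived for illustration (not the Spark source itself) and using a hypothetical MAX_PRECISION of 127 taken from the updated doc comment.

object MinBytesForPrecisionSketch {
  // Hypothetical raised limit, per the updated doc comment in this commit.
  val MAX_PRECISION = 127

  // Smallest byte count n such that a signed two's-complement value of n bytes
  // (up to 2^(8n - 1) - 1) can hold the largest unscaled value at the given
  // precision (10^precision - 1). Same idea as computeMinBytesForPrecision.
  def minBytesForPrecision(precision: Int): Int = {
    var numBytes = 1
    while (math.pow(2.0, 8 * numBytes - 1) < math.pow(10.0, precision)) numBytes += 1
    numBytes
  }

  // Before the fix the table had exactly 39 entries (precisions 0..38), so any
  // precision above 38 would index past the end. Sizing it from the constant
  // keeps the table in step with whatever MAX_PRECISION is.
  val minBytes = Array.tabulate[Int](MAX_PRECISION + 1)(minBytesForPrecision)

  def main(args: Array[String]): Unit = {
    println(minBytes(9))    // 4 bytes
    println(minBytes(18))   // 8 bytes
    println(minBytes(38))   // 16 bytes (the stock Spark limit)
    println(minBytes(127))  // 53 bytes, valid only because the table covers 0..MAX_PRECISION
  }
}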
