Skip to content

Commit 6e22bff

Browse files
itholic and dongjoon-hyun
authored and committed
[SPARK-42305][SQL] Integrate _LEGACY_ERROR_TEMP_1229 into DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION
### What changes were proposed in this pull request? This PR proposes to integrate `_LEGACY_ERROR_TEMP_1229` into `DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION`. **_LEGACY_ERROR_TEMP_1229** ```json "_LEGACY_ERROR_TEMP_1229" : { "message" : [ "<decimalType> can only support precision up to <precision>." ] }, ``` **DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION** ```json "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : { "message" : [ "Decimal precision <precision> exceeds max precision <maxPrecision>." ], "sqlState" : "22003" }, ``` ### Why are the changes needed? We should assign proper name to _LEGACY_ERROR_TEMP_* ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"` Closes apache#39875 from itholic/LEGACY_1229. Authored-by: itholic <haejoon.lee@databricks.com> Signed-off-by: Max Gekk <max.gekk@gmail.com> (cherry picked from commit f8e06c1) Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent 8ec5b6e commit 6e22bff

File tree

8 files changed

+34
-42
lines changed

8 files changed

+34
-42
lines changed

core/src/main/resources/error/error-classes.json

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3100,11 +3100,6 @@
31003100
"Decimal scale (<scale>) cannot be greater than precision (<precision>)."
31013101
]
31023102
},
3103-
"_LEGACY_ERROR_TEMP_1229" : {
3104-
"message" : [
3105-
"<decimalType> can only support precision up to <precision>."
3106-
]
3107-
},
31083103
"_LEGACY_ERROR_TEMP_1231" : {
31093104
"message" : [
31103105
"<key> is not a valid partition column in table <tblName>."

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,9 +29,8 @@ import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
2929
import org.apache.commons.codec.DecoderException
3030
import org.apache.commons.codec.binary.Hex
3131

32-
import org.apache.spark.SparkException
32+
import org.apache.spark.{SparkArithmeticException, SparkException}
3333
import org.apache.spark.internal.Logging
34-
import org.apache.spark.sql.AnalysisException
3534
import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
3635
import org.apache.spark.sql.catalyst.analysis._
3736
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat}
@@ -2604,7 +2603,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
26042603
try {
26052604
Literal(BigDecimal(raw).underlying())
26062605
} catch {
2607-
case e: AnalysisException =>
2606+
case e: SparkArithmeticException =>
26082607
throw new ParseException(
26092608
errorClass = "_LEGACY_ERROR_TEMP_0061",
26102609
messageParameters = Map("msg" -> e.getMessage),

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2253,14 +2253,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
22532253
"precision" -> precision.toString))
22542254
}
22552255

2256-
def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: Int): Throwable = {
2257-
new AnalysisException(
2258-
errorClass = "_LEGACY_ERROR_TEMP_1229",
2259-
messageParameters = Map(
2260-
"decimalType" -> decimalType,
2261-
"precision" -> precision.toString))
2262-
}
2263-
22642256
def negativeScaleNotAllowedError(scale: Int): Throwable = {
22652257
SparkException.internalError(s"Negative scale is not allowed: ${scale.toString}." +
22662258
s" Set the config ${toSQLConf(LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key)}" +

sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import scala.reflect.runtime.universe.typeTag
2525
import org.apache.spark.annotation.Stable
2626
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
2727
import org.apache.spark.sql.catalyst.types.{PhysicalDataType, PhysicalDecimalType}
28-
import org.apache.spark.sql.errors.QueryCompilationErrors
28+
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
2929
import org.apache.spark.sql.internal.SQLConf
3030

3131
/**
@@ -51,8 +51,8 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
5151
}
5252

5353
if (precision > DecimalType.MAX_PRECISION) {
54-
throw QueryCompilationErrors.decimalOnlySupportPrecisionUptoError(
55-
DecimalType.simpleString, DecimalType.MAX_PRECISION)
54+
throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
55+
precision, DecimalType.MAX_PRECISION)
5656
}
5757

5858
// default constructor for Java

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -807,7 +807,8 @@ class ExpressionParserSuite extends AnalysisTest {
807807
checkError(
808808
exception = parseException("1.20E-38BD"),
809809
errorClass = "_LEGACY_ERROR_TEMP_0061",
810-
parameters = Map("msg" -> "decimal can only support precision up to 38."),
810+
parameters = Map("msg" ->
811+
"[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38."),
811812
context = ExpectedContext(
812813
fragment = "1.20E-38BD",
813814
start = 0,

sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -172,12 +172,13 @@ select 1234567890123456789012345678901234567890
172172
-- !query schema
173173
struct<>
174174
-- !query output
175-
org.apache.spark.sql.catalyst.parser.ParseException
175+
org.apache.spark.SparkArithmeticException
176176
{
177-
"errorClass" : "_LEGACY_ERROR_TEMP_1229",
177+
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
178+
"sqlState" : "22003",
178179
"messageParameters" : {
179-
"decimalType" : "decimal",
180-
"precision" : "38"
180+
"maxPrecision" : "38",
181+
"precision" : "40"
181182
}
182183
}
183184

@@ -187,12 +188,13 @@ select 1234567890123456789012345678901234567890.0
187188
-- !query schema
188189
struct<>
189190
-- !query output
190-
org.apache.spark.sql.catalyst.parser.ParseException
191+
org.apache.spark.SparkArithmeticException
191192
{
192-
"errorClass" : "_LEGACY_ERROR_TEMP_1229",
193+
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
194+
"sqlState" : "22003",
193195
"messageParameters" : {
194-
"decimalType" : "decimal",
195-
"precision" : "38"
196+
"maxPrecision" : "38",
197+
"precision" : "41"
196198
}
197199
}
198200

@@ -477,7 +479,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
477479
{
478480
"errorClass" : "_LEGACY_ERROR_TEMP_0061",
479481
"messageParameters" : {
480-
"msg" : "decimal can only support precision up to 38."
482+
"msg" : "[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38."
481483
},
482484
"queryContext" : [ {
483485
"objectType" : "",

sql/core/src/test/resources/sql-tests/results/literals.sql.out

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -172,12 +172,13 @@ select 1234567890123456789012345678901234567890
172172
-- !query schema
173173
struct<>
174174
-- !query output
175-
org.apache.spark.sql.catalyst.parser.ParseException
175+
org.apache.spark.SparkArithmeticException
176176
{
177-
"errorClass" : "_LEGACY_ERROR_TEMP_1229",
177+
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
178+
"sqlState" : "22003",
178179
"messageParameters" : {
179-
"decimalType" : "decimal",
180-
"precision" : "38"
180+
"maxPrecision" : "38",
181+
"precision" : "40"
181182
}
182183
}
183184

@@ -187,12 +188,13 @@ select 1234567890123456789012345678901234567890.0
187188
-- !query schema
188189
struct<>
189190
-- !query output
190-
org.apache.spark.sql.catalyst.parser.ParseException
191+
org.apache.spark.SparkArithmeticException
191192
{
192-
"errorClass" : "_LEGACY_ERROR_TEMP_1229",
193+
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
194+
"sqlState" : "22003",
193195
"messageParameters" : {
194-
"decimalType" : "decimal",
195-
"precision" : "38"
196+
"maxPrecision" : "38",
197+
"precision" : "41"
196198
}
197199
}
198200

@@ -477,7 +479,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
477479
{
478480
"errorClass" : "_LEGACY_ERROR_TEMP_0061",
479481
"messageParameters" : {
480-
"msg" : "decimal can only support precision up to 38."
482+
"msg" : "[DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION] Decimal precision 40 exceeds max precision 38."
481483
},
482484
"queryContext" : [ {
483485
"objectType" : "",

sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3580,12 +3580,13 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476
35803580
-- !query schema
35813581
struct<>
35823582
-- !query output
3583-
org.apache.spark.sql.catalyst.parser.ParseException
3583+
org.apache.spark.SparkArithmeticException
35843584
{
3585-
"errorClass" : "_LEGACY_ERROR_TEMP_1229",
3585+
"errorClass" : "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
3586+
"sqlState" : "22003",
35863587
"messageParameters" : {
3587-
"decimalType" : "decimal",
3588-
"precision" : "38"
3588+
"maxPrecision" : "38",
3589+
"precision" : "39"
35893590
}
35903591
}
35913592

0 commit comments

Comments (0)