Skip to content

Commit 3c74aed

Browse files
gengliangwang authored and MaxGekk committed
[SPARK-39229][SQL] Separate query contexts from error-classes.json
### What changes were proposed in this pull request? Separate query contexts for runtime errors from error-classes.json. ### Why are the changes needed? The message in JSON should only contain parameters explicitly thrown. It is more elegant to separate query contexts from error-classes.json. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Existing UT Closes #36604 from gengliangwang/refactorErrorClass. Authored-by: Gengliang Wang <gengliang@apache.org> Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent 0a99060 commit 3c74aed

File tree

6 files changed

+53
-33
lines changed

6 files changed

+53
-33
lines changed

core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ public SparkOutOfMemoryError(OutOfMemoryError e) {
3939
}
4040

4141
public SparkOutOfMemoryError(String errorClass, String[] messageParameters) {
42-
super(SparkThrowableHelper.getMessage(errorClass, messageParameters));
42+
super(SparkThrowableHelper.getMessage(errorClass, messageParameters, ""));
4343
this.errorClass = errorClass;
4444
this.messageParameters = messageParameters;
4545
}

core/src/main/resources/error/error-classes.json

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@
44
"sqlState" : "42000"
55
},
66
"ARITHMETIC_OVERFLOW" : {
7-
"message" : [ "<message>.<alternative> If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error.<context>" ],
7+
"message" : [ "<message>.<alternative> If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error." ],
88
"sqlState" : "22003"
99
},
1010
"CANNOT_CAST_DATATYPE" : {
1111
"message" : [ "Cannot cast <sourceType> to <targetType>." ],
1212
"sqlState" : "22005"
1313
},
1414
"CANNOT_CHANGE_DECIMAL_PRECISION" : {
15-
"message" : [ "<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to \"false\" to bypass this error.<details>" ],
15+
"message" : [ "<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to \"false\" to bypass this error." ],
1616
"sqlState" : "22005"
1717
},
1818
"CANNOT_PARSE_DECIMAL" : {
@@ -23,7 +23,7 @@
2323
"message" : [ "Cannot up cast <value> from <sourceType> to <targetType>.\n<details>" ]
2424
},
2525
"CAST_INVALID_INPUT" : {
26-
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> because it is malformed. To return NULL instead, use `try_cast`. If necessary set <config> to \"false\" to bypass this error.<details>" ],
26+
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> because it is malformed. To return NULL instead, use `try_cast`. If necessary set <config> to \"false\" to bypass this error." ],
2727
"sqlState" : "42000"
2828
},
2929
"CAST_OVERFLOW" : {
@@ -38,7 +38,7 @@
3838
"sqlState" : "22008"
3939
},
4040
"DIVIDE_BY_ZERO" : {
41-
"message" : [ "Division by zero. To return NULL instead, use `try_divide`. If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error.<details>" ],
41+
"message" : [ "Division by zero. To return NULL instead, use `try_divide`. If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error." ],
4242
"sqlState" : "22012"
4343
},
4444
"DUPLICATE_KEY" : {
@@ -138,7 +138,7 @@
138138
"sqlState" : "42000"
139139
},
140140
"MAP_KEY_DOES_NOT_EXIST" : {
141-
"message" : [ "Key <keyValue> does not exist. To return NULL instead, use `try_element_at`. If necessary set <config> to \"false\" to bypass this error.<details>" ]
141+
"message" : [ "Key <keyValue> does not exist. To return NULL instead, use `try_element_at`. If necessary set <config> to \"false\" to bypass this error." ]
142142
},
143143
"MISSING_COLUMN" : {
144144
"message" : [ "Column '<columnName>' does not exist. Did you mean one of the following? [<proposal>]" ],

core/src/main/scala/org/apache/spark/ErrorInfo.scala

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,10 @@ private[spark] object SparkThrowableHelper {
7171
mapper.readValue(errorClassesUrl, new TypeReference[SortedMap[String, ErrorInfo]]() {})
7272
}
7373

74-
def getMessage(errorClass: String, messageParameters: Array[String]): String = {
74+
def getMessage(
75+
errorClass: String,
76+
messageParameters: Array[String],
77+
queryContext: String = ""): String = {
7578
val errorInfo = errorClassToInfoMap.getOrElse(errorClass,
7679
throw new IllegalArgumentException(s"Cannot find error class '$errorClass'"))
7780
if (errorInfo.subClass.isDefined) {
@@ -82,11 +85,11 @@ private[spark] object SparkThrowableHelper {
8285
val subMessageParameters = messageParameters.tail
8386
"[" + errorClass + "." + subErrorClass + "] " + String.format((errorInfo.messageFormat +
8487
errorSubInfo.messageFormat).replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
85-
subMessageParameters: _*)
88+
subMessageParameters: _*) + queryContext
8689
} else {
8790
"[" + errorClass + "] " + String.format(
8891
errorInfo.messageFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
89-
messageParameters: _*)
92+
messageParameters: _*) + queryContext
9093
}
9194
}
9295

core/src/main/scala/org/apache/spark/SparkException.scala

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -84,8 +84,12 @@ private[spark] class SparkUpgradeException(
8484
/**
8585
* Arithmetic exception thrown from Spark with an error class.
8686
*/
87-
private[spark] class SparkArithmeticException(errorClass: String, messageParameters: Array[String])
88-
extends ArithmeticException(SparkThrowableHelper.getMessage(errorClass, messageParameters))
87+
private[spark] class SparkArithmeticException(
88+
errorClass: String,
89+
messageParameters: Array[String],
90+
queryContext: String = "")
91+
extends ArithmeticException(
92+
SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
8993
with SparkThrowable {
9094

9195
override def getErrorClass: String = errorClass
@@ -132,9 +136,13 @@ private[spark] class SparkConcurrentModificationException(
132136
/**
133137
* Datetime exception thrown from Spark with an error class.
134138
*/
135-
private[spark] class SparkDateTimeException(errorClass: String, messageParameters: Array[String])
139+
private[spark] class SparkDateTimeException(
140+
errorClass: String,
141+
messageParameters: Array[String],
142+
queryContext: String = "")
136143
extends DateTimeException(
137-
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
144+
SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
145+
with SparkThrowable {
138146

139147
override def getErrorClass: String = errorClass
140148
}
@@ -168,9 +176,11 @@ private[spark] class SparkFileNotFoundException(
168176
*/
169177
private[spark] class SparkNumberFormatException(
170178
errorClass: String,
171-
messageParameters: Array[String])
179+
messageParameters: Array[String],
180+
queryContext: String)
172181
extends NumberFormatException(
173-
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
182+
SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
183+
with SparkThrowable {
174184

175185
override def getErrorClass: String = errorClass
176186
}
@@ -226,9 +236,11 @@ private[spark] class SparkIOException(
226236
private[spark] class SparkRuntimeException(
227237
errorClass: String,
228238
messageParameters: Array[String],
229-
cause: Throwable = null)
239+
cause: Throwable = null,
240+
queryContext: String = "")
230241
extends RuntimeException(
231-
SparkThrowableHelper.getMessage(errorClass, messageParameters), cause) with SparkThrowable {
242+
SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext), cause)
243+
with SparkThrowable {
232244

233245
override def getErrorClass: String = errorClass
234246
}
@@ -274,9 +286,11 @@ private[spark] class SparkSQLException(
274286
*/
275287
private[spark] class SparkNoSuchElementException(
276288
errorClass: String,
277-
messageParameters: Array[String])
289+
messageParameters: Array[String],
290+
queryContext: String)
278291
extends NoSuchElementException(
279-
SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
292+
SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
293+
with SparkThrowable {
280294

281295
override def getErrorClass: String = errorClass
282296
}

core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ class SparkThrowableSuite extends SparkFunSuite {
127127
assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar", "baz")) ==
128128
"[DIVIDE_BY_ZERO] Division by zero. " +
129129
"To return NULL instead, use `try_divide`. If necessary set foo to \"false\" " +
130-
"(except for ANSI interval type) to bypass this error.bar")
130+
"(except for ANSI interval type) to bypass this error.")
131131
}
132132

133133
test("Error message is formatted") {

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 16 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -104,8 +104,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
104104
value.toDebugString,
105105
decimalPrecision.toString,
106106
decimalScale.toString,
107-
toSQLConf(SQLConf.ANSI_ENABLED.key),
108-
context))
107+
toSQLConf(SQLConf.ANSI_ENABLED.key)),
108+
queryContext = context)
109109
}
110110

111111
def invalidInputInCastToDatetimeError(
@@ -119,8 +119,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
119119
toSQLValue(value, from),
120120
toSQLType(from),
121121
toSQLType(to),
122-
toSQLConf(SQLConf.ANSI_ENABLED.key),
123-
errorContext))
122+
toSQLConf(SQLConf.ANSI_ENABLED.key)),
123+
queryContext = errorContext)
124124
}
125125

126126
def invalidInputSyntaxForBooleanError(
@@ -132,8 +132,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
132132
toSQLValue(s, StringType),
133133
toSQLType(StringType),
134134
toSQLType(BooleanType),
135-
toSQLConf(SQLConf.ANSI_ENABLED.key),
136-
errorContext))
135+
toSQLConf(SQLConf.ANSI_ENABLED.key)),
136+
queryContext = errorContext)
137137
}
138138

139139
def invalidInputInCastToNumberError(
@@ -146,8 +146,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
146146
toSQLValue(s, StringType),
147147
toSQLType(StringType),
148148
toSQLType(to),
149-
toSQLConf(SQLConf.ANSI_ENABLED.key),
150-
errorContext))
149+
toSQLConf(SQLConf.ANSI_ENABLED.key)),
150+
queryContext = errorContext)
151151
}
152152

153153
def cannotCastFromNullTypeError(to: DataType): Throwable = {
@@ -180,7 +180,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
180180
def divideByZeroError(context: String): ArithmeticException = {
181181
new SparkArithmeticException(
182182
errorClass = "DIVIDE_BY_ZERO",
183-
messageParameters = Array(toSQLConf(SQLConf.ANSI_ENABLED.key), context))
183+
messageParameters = Array(toSQLConf(SQLConf.ANSI_ENABLED.key)),
184+
queryContext = context)
184185
}
185186

186187
def invalidArrayIndexError(index: Int, numElements: Int): ArrayIndexOutOfBoundsException = {
@@ -218,8 +219,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
218219
errorClass = "MAP_KEY_DOES_NOT_EXIST",
219220
messageParameters = Array(
220221
toSQLValue(key, dataType),
221-
toSQLConf(SQLConf.ANSI_ENABLED.key),
222-
context))
222+
toSQLConf(SQLConf.ANSI_ENABLED.key)),
223+
queryContext = context)
223224
}
224225

225226
def invalidFractionOfSecondError(): DateTimeException = {
@@ -477,8 +478,10 @@ object QueryExecutionErrors extends QueryErrorsBase {
477478
hint: String = "",
478479
errorContext: String = ""): ArithmeticException = {
479480
val alternative = if (hint.nonEmpty) s" To return NULL instead, use '$hint'." else ""
480-
new SparkArithmeticException("ARITHMETIC_OVERFLOW",
481-
Array(message, alternative, SQLConf.ANSI_ENABLED.key, errorContext))
481+
new SparkArithmeticException(
482+
errorClass = "ARITHMETIC_OVERFLOW",
483+
messageParameters = Array(message, alternative, SQLConf.ANSI_ENABLED.key),
484+
queryContext = errorContext)
482485
}
483486

484487
def unaryMinusCauseOverflowError(originValue: Int): ArithmeticException = {

0 commit comments

Comments (0)