Skip to content

Commit e0f4410

Browse files
panbingkun authored and Max Gekk committed
[SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException
### What changes were proposed in this pull request? This PR aims to replace UnsupportedOperationException with SparkUnsupportedOperationException. ### Why are the changes needed? 1. While working on https://issues.apache.org/jira/browse/SPARK-40889, I found that `QueryExecutionErrors.unsupportedPartitionTransformError` throws **UnsupportedOperationException** (not **SparkUnsupportedOperationException**), so it does not fit into the new error framework. https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala#L71-L72 2. `QueryExecutionErrors.unsupportedPartitionTransformError` throws SparkUnsupportedOperationException, but the unit test catches `UnsupportedOperationException`. https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala#L288-L301 https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala#L904-L909 https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/core/src/main/scala/org/apache/spark/SparkException.scala#L144-L154 ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? Existing unit tests. Closes apache#38387 from panbingkun/replace_UnsupportedOperationException. Authored-by: panbingkun <pbk1982@gmail.com> Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent 276abe3 commit e0f4410

File tree

4 files changed

+23
-17
lines changed

4 files changed

+23
-17
lines changed

core/src/main/resources/error/error-classes.json

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -964,6 +964,11 @@
         "Literal for '<value>' of <type>."
       ]
     },
+    "MULTIPLE_BUCKET_TRANSFORMS" : {
+      "message" : [
+        "Multiple bucket TRANSFORMs."
+      ]
+    },
     "NATURAL_CROSS_JOIN" : {
       "message" : [
         "NATURAL CROSS JOIN."

sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,9 @@ private[sql] object CatalogV2Implicits {
           identityCols += col

         case BucketTransform(numBuckets, col, sortCol) =>
-          if (bucketSpec.nonEmpty) throw QueryExecutionErrors.multipleBucketTransformsError
+          if (bucketSpec.nonEmpty) {
+            throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError
+          }
           if (sortCol.isEmpty) {
             bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil))
           } else {

sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2623,9 +2623,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
         "format" -> format))
   }

-  def multipleBucketTransformsError(): SparkUnsupportedOperationException = {
+  def unsupportedMultipleBucketTransformsError(): SparkUnsupportedOperationException = {
     new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2279",
+      errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
       messageParameters = Map.empty)
   }

sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala

Lines changed: 13 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
 import org.mockito.invocation.InvocationOnMock

+import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.sql.{AnalysisException, SaveMode}
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{AnalysisContext, AnalysisTest, Analyzer, EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, ResolvedIdentifier, ResolvedTable, ResolveSessionCatalog, UnresolvedAttribute, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTable}
@@ -292,13 +293,12 @@ class PlanResolutionSuite extends AnalysisTest {
           |CREATE TABLE my_tab(a INT, b STRING) USING parquet
           |PARTITIONED BY ($transform)
         """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains(s"Unsupported partition transform: $transform"))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_2067",
+        parameters = Map("transform" -> transform))
     }
   }

@@ -310,13 +310,12 @@ class PlanResolutionSuite extends AnalysisTest {
           |CREATE TABLE my_tab(a INT, b STRING, c String) USING parquet
           |PARTITIONED BY ($transform)
         """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains("Multiple bucket transforms are not supported."))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
+        parameters = Map.empty)
     }
   }

0 commit comments

Comments
 (0)