Skip to content

Commit 0a84082

Browse files
panbingkun and SandishKumarHN
authored and committed
[SPARK-40790][SQL][TESTS] Check error classes in DDL parsing tests
### What changes were proposed in this pull request? This PR aims to replace 'intercept' with 'Check error classes' in DDL parsing tests, include: - AlterNamespaceSetPropertiesParserSuite - AlterTableDropPartitionParserSuite - AlterTableRenameParserSuite - AlterTableRecoverPartitionsParserSuite - DescribeTableParserSuite - TruncateTableParserSuite - AlterTableSetSerdeParserSuite - ShowPartitionsParserSuite ### Why are the changes needed? The changes improve the error framework. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? By running the modified test suite: ``` $ build/sbt "test:testOnly *ParserSuite" ``` Closes apache#38280 from panbingkun/SPARK-40790. Authored-by: panbingkun <pbk1982@gmail.com> Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent 329f51d commit 0a84082

8 files changed

+54
-41
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceSetPropertiesParserSuite.scala

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,10 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedNamespace}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.SetNamespaceProperties
2423

2524
class AlterNamespaceSetPropertiesParserSuite extends AnalysisTest {
25+
2626
test("set namespace properties") {
2727
Seq("DATABASE", "SCHEMA", "NAMESPACE").foreach { nsToken =>
2828
Seq("PROPERTIES", "DBPROPERTIES").foreach { propToken =>
@@ -40,10 +40,14 @@ class AlterNamespaceSetPropertiesParserSuite extends AnalysisTest {
4040
}
4141

4242
test("property values must be set") {
43-
val e = intercept[ParseException] {
44-
parsePlan("ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')")
45-
}
46-
assert(e.getMessage.contains(
47-
"Operation not allowed: Values must be specified for key(s): [key_without_value]"))
43+
val sql = "ALTER NAMESPACE my_db SET PROPERTIES('key_without_value', 'key_with_value'='x')"
44+
checkError(
45+
exception = parseException(parsePlan)(sql),
46+
errorClass = "_LEGACY_ERROR_TEMP_0035",
47+
parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
48+
context = ExpectedContext(
49+
fragment = sql,
50+
start = 0,
51+
stop = 78))
4852
}
4953
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionParserSuite.scala

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,11 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.DropPartitions
2423
import org.apache.spark.sql.test.SharedSparkSession
2524

2625
class AlterTableDropPartitionParserSuite extends AnalysisTest with SharedSparkSession {
26+
2727
test("drop partition") {
2828
val sql = """
2929
|ALTER TABLE table_name DROP PARTITION
@@ -92,9 +92,13 @@ class AlterTableDropPartitionParserSuite extends AnalysisTest with SharedSparkSe
9292

9393
test("drop partition from view") {
9494
val sql = "ALTER VIEW table_name DROP PARTITION (p=1)"
95-
val errMsg = intercept[ParseException] {
96-
parsePlan(sql)
97-
}.getMessage
98-
assert(errMsg.contains("Operation not allowed"))
95+
checkError(
96+
exception = parseException(parsePlan)(sql),
97+
errorClass = "_LEGACY_ERROR_TEMP_0035",
98+
parameters = Map("message" -> "ALTER VIEW ... DROP PARTITION"),
99+
context = ExpectedContext(
100+
fragment = sql,
101+
start = 0,
102+
stop = 41))
99103
}
100104
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRecoverPartitionsParserSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,17 +19,17 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.RecoverPartitions
2423
import org.apache.spark.sql.test.SharedSparkSession
2524

2625
class AlterTableRecoverPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
2726

2827
test("recover partitions without table") {
29-
val errMsg = intercept[ParseException] {
30-
parsePlan("ALTER TABLE RECOVER PARTITIONS")
31-
}.getMessage
32-
assert(errMsg.contains("Syntax error at or near 'PARTITIONS'"))
28+
val sql = "ALTER TABLE RECOVER PARTITIONS"
29+
checkError(
30+
exception = parseException(parsePlan)(sql),
31+
errorClass = "PARSE_SYNTAX_ERROR",
32+
parameters = Map("error" -> "'PARTITIONS'", "hint" -> ""))
3333
}
3434

3535
test("recover partitions of a table") {

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenameParserSuite.scala

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTableOrView}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.RenameTable
2423

2524
class AlterTableRenameParserSuite extends AnalysisTest {
@@ -42,10 +41,16 @@ class AlterTableRenameParserSuite extends AnalysisTest {
4241
}
4342

4443
test("invalid table identifiers") {
45-
Seq(
46-
"ALTER TABLE RENAME TO x.y.z",
47-
"ALTER TABLE _ RENAME TO .z").foreach { renameCmd =>
48-
intercept[ParseException] { parsePlan(renameCmd) }
49-
}
44+
val sql1 = "ALTER TABLE RENAME TO x.y.z"
45+
checkError(
46+
exception = parseException(parsePlan)(sql1),
47+
errorClass = "PARSE_SYNTAX_ERROR",
48+
parameters = Map("error" -> "'TO'", "hint" -> ""))
49+
50+
val sql2 = "ALTER TABLE _ RENAME TO .z"
51+
checkError(
52+
exception = parseException(parsePlan)(sql2),
53+
errorClass = "PARSE_SYNTAX_ERROR",
54+
parameters = Map("error" -> "'.'", "hint" -> ": extra input '.'"))
5055
}
5156
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.SetTableSerDeProperties
2423
import org.apache.spark.sql.test.SharedSparkSession
2524

@@ -30,11 +29,14 @@ class AlterTableSetSerdeParserSuite extends AnalysisTest with SharedSparkSession
3029
test("SerDe property values must be set") {
3130
val sql = "ALTER TABLE table_name SET SERDE 'serde' " +
3231
"WITH SERDEPROPERTIES('key_without_value', 'key_with_value'='x')"
33-
val errMsg = intercept[ParseException] {
34-
parsePlan(sql)
35-
}.getMessage
36-
assert(errMsg.contains("Operation not allowed"))
37-
assert(errMsg.contains("key_without_value"))
32+
checkError(
33+
exception = parseException(parsePlan)(sql),
34+
errorClass = "_LEGACY_ERROR_TEMP_0035",
35+
parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
36+
context = ExpectedContext(
37+
fragment = sql,
38+
start = 0,
39+
stop = 103))
3840
}
3941

4042
test("alter table SerDe properties by 'SET SERDE'") {

sql/core/src/test/scala/org/apache/spark/sql/execution/command/DescribeTableParserSuite.scala

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.sql.execution.command
1919

20-
import org.apache.spark.sql.AnalysisException
2120
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAttribute, UnresolvedTableOrView}
2221
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
2322
import org.apache.spark.sql.catalyst.plans.logical.{DescribeColumn, DescribeRelation}
@@ -76,9 +75,14 @@ class DescribeTableParserSuite extends AnalysisTest {
7675
UnresolvedAttribute(Seq("col")),
7776
isExtended = true))
7877

79-
val caught = intercept[AnalysisException](
80-
parsePlan("DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"))
81-
assert(caught.getMessage.contains(
82-
"The feature is not supported: DESC TABLE COLUMN for a specific partition."))
78+
val sql = "DESCRIBE TABLE t PARTITION (ds='1970-01-01') col"
79+
checkError(
80+
exception = parseException(parsePlan)(sql),
81+
errorClass = "UNSUPPORTED_FEATURE.DESC_TABLE_COLUMN_PARTITION",
82+
parameters = Map.empty,
83+
context = ExpectedContext(
84+
fragment = sql,
85+
start = 0,
86+
stop = 47))
8387
}
8488
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
2423

2524
class ShowPartitionsParserSuite extends AnalysisTest {
@@ -47,9 +46,7 @@ class ShowPartitionsParserSuite extends AnalysisTest {
4746

4847
test("empty values in non-optional partition specs") {
4948
checkError(
50-
exception = intercept[ParseException] {
51-
parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
52-
},
49+
exception = parseException(parsePlan)("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)"),
5350
errorClass = "INVALID_SQL_SYNTAX",
5451
sqlState = "42000",
5552
parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),

sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command
1919

2020
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartitionSpec, UnresolvedTable}
2121
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
22-
import org.apache.spark.sql.catalyst.parser.ParseException
2322
import org.apache.spark.sql.catalyst.plans.logical.{TruncatePartition, TruncateTable}
2423

2524
class TruncateTableParserSuite extends AnalysisTest {
@@ -47,9 +46,7 @@ class TruncateTableParserSuite extends AnalysisTest {
4746

4847
test("empty values in non-optional partition specs") {
4948
checkError(
50-
exception = intercept[ParseException] {
51-
parsePlan("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)")
52-
},
49+
exception = parseException(parsePlan)("TRUNCATE TABLE dbx.tab1 PARTITION (a='1', b)"),
5350
errorClass = "INVALID_SQL_SYNTAX",
5451
sqlState = "42000",
5552
parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),

0 commit comments

Comments (0)