
Commit 3080d82
Author: xy_xin
Message: Refine the code.
1 parent: c0c9383

File tree: 5 files changed (+55, -15 lines)


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 6 additions & 1 deletion
@@ -344,7 +344,12 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     val tableId = visitMultipartIdentifier(ctx.multipartIdentifier)
     val tableAlias = if (ctx.tableAlias() != null) {
       val ident = ctx.tableAlias().strictIdentifier()
-      if (ident != null) { Some(ident.getText) } else { None }
+      // We don't allow column aliases after the table alias.
+      if (ctx.tableAlias().identifierList() != null) {
+        throw new ParseException("Column aliases are not allowed in DELETE.",
+          ctx.tableAlias().identifierList())
+      }
+      if (ident != null) Some(ident.getText) else None
     } else {
       None
     }
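
The effect of this hunk: a DELETE target may still carry a plain table alias, but a column-alias list after that alias is now rejected at parse time instead of being silently dropped. A minimal sketch of both cases, assuming the CatalystSqlParser entry point used by the parser suites (the table name db.tbl is illustrative):

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // Plain table alias: parses into a delete statement with alias Some("t").
    CatalystSqlParser.parsePlan("DELETE FROM db.tbl AS t WHERE t.a = 2")

    // Column aliases after the table alias: now throws
    // ParseException("Column aliases are not allowed in DELETE.", ...).
    CatalystSqlParser.parsePlan("DELETE FROM db.tbl AS t(a, b) WHERE a = 2")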

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 27 additions & 2 deletions
@@ -21,10 +21,11 @@ import java.util.Locale
 
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalog.v2.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedRelation, UnresolvedStar}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAttribute, UnresolvedRelation, UnresolvedStar}
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
+import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
-import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableAlterColumnStatement, AlterTableDropColumnsStatement, AlterTableRenameColumnStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DescribeColumnStatement, DescribeTableStatement, DropTableStatement, DropViewStatement, InsertIntoStatement, QualifiedColType, ReplaceTableAsSelectStatement, ReplaceTableStatement, ShowTablesStatement}
+import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableAlterColumnStatement, AlterTableDropColumnsStatement, AlterTableRenameColumnStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DeleteFromStatement, DescribeColumnStatement, DescribeTableStatement, DropTableStatement, DropViewStatement, InsertIntoStatement, QualifiedColType, ReplaceTableAsSelectStatement, ReplaceTableStatement, ShowTablesStatement}
 import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
 import org.apache.spark.unsafe.types.UTF8String
 
@@ -764,6 +765,30 @@ class DDLParserSuite extends AnalysisTest {
     assert(exc.getMessage.contains("INSERT INTO ... IF NOT EXISTS"))
   }
 
+  test("delete from table: delete all") {
+    parseCompare("DELETE FROM testcat.ns1.ns2.tbl",
+      DeleteFromStatement(
+        Seq("testcat", "ns1", "ns2", "tbl"),
+        None,
+        None))
+  }
+
+  test("delete from table: with alias and where clause") {
+    parseCompare("DELETE FROM testcat.ns1.ns2.tbl AS t WHERE t.a = 2",
+      DeleteFromStatement(
+        Seq("testcat", "ns1", "ns2", "tbl"),
+        Some("t"),
+        Some(EqualTo(UnresolvedAttribute("t.a"), Literal(2)))))
+  }
+
+  test("delete from table: column aliases are not allowed") {
+    val exc = intercept[ParseException] {
+      parsePlan("DELETE FROM testcat.ns1.ns2.tbl AS t(a,b,c,d) WHERE d = 2")
+    }
+
+    assert(exc.getMessage.contains("Column aliases are not allowed in DELETE."))
+  }
+
   test("show tables") {
     comparePlans(
       parsePlan("SHOW TABLES"),

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 10 additions & 6 deletions
@@ -238,13 +238,17 @@ object DataSourceV2Strategy extends Strategy with PredicateHelper {
       OverwritePartitionsDynamicExec(r.table.asWritable, r.options, planLater(query)) :: Nil
 
     case DeleteFromTable(r: DataSourceV2Relation, condition) =>
+      if (condition.exists(SubqueryExpression.hasSubquery)) {
+        throw new AnalysisException(
+          s"Delete by condition with subquery is not supported: $condition")
+      }
       // fail if any filter cannot be converted. correctness depends on removing all matching data.
-      val filters = condition.map(
-        splitConjunctivePredicates(_).map {
-          f => DataSourceStrategy.translateFilter(f).getOrElse(
-            throw new AnalysisException(s"Exec update failed:" +
-              s" cannot translate expression to source filter: $f"))
-        }.toArray).getOrElse(Array.empty[Filter])
+      val filters = DataSourceStrategy.normalizeFilters(condition.toSeq, r.output)
+        .flatMap(splitConjunctivePredicates(_).map {
+          f => DataSourceStrategy.translateFilter(f).getOrElse(
+            throw new AnalysisException(s"Exec update failed:" +
+              s" cannot translate expression to source filter: $f"))
+        }).toArray
       DeleteFromTableExec(r.table.asDeletable, filters) :: Nil
 
     case WriteToContinuousDataSource(writer, query) =>
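
Two behavioral changes land in this case clause. First, the subquery guard that previously lived in V2WriteSupportCheck (removed below) now fails fast during planning. Second, the delete condition is passed through DataSourceStrategy.normalizeFilters against the relation's output before translation, which is what lets a predicate written as tbl.ID resolve to the actual column id (exercised by the new DataSourceV2SQLSuite test further down). A minimal sketch of the normalization step in isolation, assuming only the call shape visible at this call site:

    import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Literal}
    import org.apache.spark.sql.execution.datasources.DataSourceStrategy
    import org.apache.spark.sql.types.LongType

    // The relation's output column, named "id" in the table schema.
    val id = AttributeReference("id", LongType)()

    // The resolved condition for "WHERE tbl.ID = 2": the same attribute
    // (same exprId), but still carrying the user-typed name "ID".
    val cond = EqualTo(id.withName("ID"), Literal(2L))

    // normalizeFilters rewrites the attribute back to its schema name "id",
    // so translateFilter can emit a source filter on the real column name.
    val normalized = DataSourceStrategy.normalizeFilters(Seq(cond), Seq(id))
    val filters = normalized.flatMap(DataSourceStrategy.translateFilter)
    // filters == Seq(sources.EqualTo("id", 2L))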

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2WriteSupportCheck.scala

Lines changed: 0 additions & 5 deletions
@@ -51,11 +51,6 @@ object V2WriteSupportCheck extends (LogicalPlan => Unit) {
       }
     }
 
-    case DeleteFromTable(_, condition) =>
-      if (condition.exists(SubqueryExpression.hasSubquery)) {
-        failAnalysis(s"Delete by condition with subquery is not supported: $condition")
-      }
-
     case _ => // OK
   }
 }
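
This deletion is the counterpart of the guard added in DataSourceV2Strategy above: the subquery check moves out of the write-support analysis rule and into the planner, so the same error message now surfaces at planning time. The existing "DeleteFrom: fail if has subquery" test in DataSourceV2SQLSuite (visible in the next file) still covers it; a sketch of the rejected shape, with an illustrative second table name:

    // Still rejected, now by DataSourceV2Strategy rather than V2WriteSupportCheck:
    // "Delete by condition with subquery is not supported: ..."
    sql(s"DELETE FROM $t WHERE id IN (SELECT id FROM testcat.ns1.ns2.other)")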

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2SQLSuite.scala

Lines changed: 12 additions & 1 deletion
@@ -1752,7 +1752,7 @@ class DataSourceV2SQLSuite
     }
   }
 
-  test("DeleteFrom: delete aliased target table") {
+  test("DeleteFrom: delete from aliased target table") {
     val t = "testcat.ns1.ns2.tbl"
     withTable(t) {
       sql(s"CREATE TABLE $t (id bigint, data string, p int) USING foo PARTITIONED BY (id, p)")
@@ -1763,6 +1763,17 @@ class DataSourceV2SQLSuite
     }
   }
 
+  test("DeleteFrom: normalize attribute names") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      sql(s"CREATE TABLE $t (id bigint, data string, p int) USING foo PARTITIONED BY (id, p)")
+      sql(s"INSERT INTO $t VALUES (2L, 'a', 2), (2L, 'b', 3), (3L, 'c', 3)")
+      sql(s"DELETE FROM $t AS tbl WHERE tbl.ID = 2")
+      checkAnswer(spark.table(t), Seq(
+        Row(3, "c", 3)))
+    }
+  }
+
   test("DeleteFrom: fail if has subquery") {
     val t = "testcat.ns1.ns2.tbl"
     withTable(t) {