Skip to content

Commit ec1560a

Browse files
cloud-fan authored and HyukjinKwon committed
[SPARK-33364][SQL][FOLLOWUP] Refine the catalog v2 API to purge a table
### What changes were proposed in this pull request? This is a followup of #30267 Inspired by #30886, it's better to have 2 methods `def dropTable` and `def purgeTable`, than `def dropTable(ident)` and `def dropTable(ident, purge)`. ### Why are the changes needed? 1. make the APIs orthogonal. Previously, `def dropTable(ident, purge)` calls `def dropTable(ident)` and is a superset. 2. simplifies the catalog implementation a little bit. Now the `if (purge) ... else ...` check is done at the Spark side. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? existing tests Closes #30890 from cloud-fan/purgeTable. Authored-by: Wenchen Fan <wenchen@databricks.com> Signed-off-by: HyukjinKwon <gurwls223@apache.org>
1 parent 303b8c8 commit ec1560a

File tree

5 files changed

+20
-13
lines changed

5 files changed

+20
-13
lines changed

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -99,6 +99,11 @@ public boolean dropTable(Identifier ident) {
9999
return asTableCatalog().dropTable(ident);
100100
}
101101

102+
@Override
103+
public boolean purgeTable(Identifier ident) {
104+
return asTableCatalog().purgeTable(ident);
105+
}
106+
102107
@Override
103108
public void renameTable(
104109
Identifier oldIdent,

sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableCatalog.java

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -173,26 +173,23 @@ Table alterTable(
173173
boolean dropTable(Identifier ident);
174174

175175
/**
176-
* Drop a table in the catalog with an option to purge.
176+
* Drop a table in the catalog and completely remove its data by skipping a trash even if it is
177+
* supported.
177178
* <p>
178179
* If the catalog supports views and contains a view for the identifier and not a table, this
179180
* must not drop the view and must return false.
180181
* <p>
181-
* If the catalog supports the option to purge a table, this method must be overridden.
182-
* The default implementation falls back to {@link #dropTable(Identifier)} dropTable} if the
183-
* purge option is set to false. Otherwise, it throws {@link UnsupportedOperationException}.
182+
* If the catalog supports to purge a table, this method should be overridden.
183+
* The default implementation throws {@link UnsupportedOperationException}.
184184
*
185185
* @param ident a table identifier
186-
* @param purge whether a table should be purged
187186
* @return true if a table was deleted, false if no table exists for the identifier
187+
* @throws UnsupportedOperationException If table purging is not supported
188188
*
189189
* @since 3.1.0
190190
*/
191-
default boolean dropTable(Identifier ident, boolean purge) {
192-
if (purge) {
193-
throw new UnsupportedOperationException("Purge option is not supported.");
194-
}
195-
return dropTable(ident);
191+
default boolean purgeTable(Identifier ident) throws UnsupportedOperationException {
192+
throw new UnsupportedOperationException("Purge table is not supported.");
196193
}
197194

198195
/**

sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/TableCatalogSuite.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -643,6 +643,11 @@ class TableCatalogSuite extends SparkFunSuite {
643643
assert(!catalog.tableExists(testIdent))
644644
}
645645

646+
test("purgeTable") {
647+
val catalog = newCatalog()
648+
intercept[UnsupportedOperationException](catalog.purgeTable(testIdent))
649+
}
650+
646651
test("renameTable") {
647652
val catalog = newCatalog()
648653

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropTableExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ case class DropTableExec(
3535
override def run(): Seq[InternalRow] = {
3636
if (catalog.tableExists(ident)) {
3737
invalidateCache()
38-
catalog.dropTable(ident, purge)
38+
if (purge) catalog.purgeTable(ident) else catalog.dropTable(ident)
3939
} else if (!ifExists) {
4040
throw new NoSuchTableException(ident)
4141
}

sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DropTableSuite.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@ class DropTableSuite extends command.DropTableSuiteBase with CommandSuiteBase {
2929
val errMsg = intercept[UnsupportedOperationException] {
3030
sql(s"DROP TABLE $catalog.ns.tbl PURGE")
3131
}.getMessage
32-
// The default TableCatalog.dropTable implementation doesn't support the purge option.
33-
assert(errMsg.contains("Purge option is not supported"))
32+
// The default TableCatalog.purgeTable implementation throws an exception.
33+
assert(errMsg.contains("Purge table is not supported"))
3434
}
3535
}
3636

0 commit comments

Comments (0)