@@ -205,7 +205,7 @@ statement
     | (DESC | DESCRIBE) QUERY? query                                   #describeQuery
     | REFRESH TABLE multipartIdentifier                                #refreshTable
     | REFRESH (STRING | .*?)                                           #refreshResource
-    | CACHE LAZY? TABLE tableIdentifier
+    | CACHE LAZY? TABLE multipartIdentifier
         (OPTIONS options=tablePropertyList)? (AS? query)?              #cacheTable
     | UNCACHE TABLE (IF EXISTS)? tableIdentifier                       #uncacheTable
     | CLEAR CACHE                                                      #clearCache
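Switching the rule from `tableIdentifier` to `multipartIdentifier` lets the parser accept table names with any number of name parts, not just `db.table`. A minimal sketch of what now parses (assuming a running `SparkSession` named `spark`; the table names are hypothetical):

```scala
// All of these pass the parser; resolving the multi-part name against a
// catalog happens later, during analysis, not here.
spark.sql("CACHE TABLE tbl")
spark.sql("CACHE LAZY TABLE db.tbl")
spark.sql("CACHE TABLE ns1.ns2.tbl OPTIONS('storageLevel' 'DISK_ONLY')")
```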
@@ -2769,6 +2769,30 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     RepairTableStatement(visitMultipartIdentifier(ctx.multipartIdentifier()))
   }
 
+  /**
+   * Create a [[CacheTableStatement]].
+   *
+   * For example:
+   * {{{
+   *   CACHE [LAZY] TABLE multi_part_name
+   *     [OPTIONS tablePropertyList] [[AS] query]
+   * }}}
+   */
+  override def visitCacheTable(ctx: CacheTableContext): LogicalPlan = withOrigin(ctx) {
+    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+
+    val query = Option(ctx.query).map(plan)
+    val tableName = visitMultipartIdentifier(ctx.multipartIdentifier)
+    if (query.isDefined && tableName.length > 1) {
+      val catalogAndNamespace = tableName.init
+      throw new ParseException("It is not allowed to add catalog/namespace " +
+        s"prefix ${catalogAndNamespace.quoted} to " +
+        "the table name in CACHE TABLE AS SELECT", ctx)
+    }
+    val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
+    CacheTableStatement(tableName, query, ctx.LAZY != null, options)
+  }
+
   /**
    * Create a [[TruncateTableStatement]] command.
    *
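The `tableName.length > 1` check generalizes the old database-prefix check: with a multi-part name, everything but the last part (`tableName.init`) is a catalog/namespace prefix, and `quoted` from `CatalogV2Implicits` renders it as a backtick-quoted dotted name for the error message. A hedged sketch of the two paths, with `parsePlan` standing in for the parser as in `DDLParserSuite` below:

```scala
// Plain CACHE TABLE keeps the full multi-part name for later resolution.
parsePlan("CACHE TABLE a.b.c")
// => CacheTableStatement(Seq("a", "b", "c"), None, false, Map.empty)

// CACHE TABLE ... AS SELECT requires a single-part name: tableName.init is
// Seq("a", "b"), so the ParseException reports the quoted prefix `a`.`b`.
parsePlan("CACHE TABLE a.b.c AS SELECT * FROM testData")
```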
@@ -330,6 +330,15 @@ case class AnalyzeColumnStatement(
  */
 case class RepairTableStatement(tableName: Seq[String]) extends ParsedStatement
 
+/**
+ * A CACHE TABLE statement, as parsed from SQL
+ */
+case class CacheTableStatement(
+    tableName: Seq[String],
+    plan: Option[LogicalPlan],
+    isLazy: Boolean,
+    options: Map[String, String]) extends ParsedStatement
+
 /**
  * A TRUNCATE TABLE statement, as parsed from SQL
  */
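Field by field, the statement produced for `CACHE LAZY TABLE a.b.c OPTIONS('storageLevel' 'DISK_ONLY')` looks like this (values as asserted in `DDLParserSuite` below):

```scala
CacheTableStatement(
  tableName = Seq("a", "b", "c"),  // unresolved multi-part name
  plan      = None,                // Some(query) only for CACHE TABLE ... AS SELECT
  isLazy    = true,                // the LAZY keyword was present
  options   = Map("storageLevel" -> "DISK_ONLY"))
```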
@@ -1045,6 +1045,23 @@ class DDLParserSuite extends AnalysisTest {
       RepairTableStatement(Seq("a", "b", "c")))
   }
 
+  test("CACHE table") {
+    comparePlans(
+      parsePlan("CACHE TABLE a.b.c"),
+      CacheTableStatement(Seq("a", "b", "c"), None, false, Map.empty))
+
+    comparePlans(
+      parsePlan("CACHE LAZY TABLE a.b.c"),
+      CacheTableStatement(Seq("a", "b", "c"), None, true, Map.empty))
+
+    comparePlans(
+      parsePlan("CACHE LAZY TABLE a.b.c OPTIONS('storageLevel' 'DISK_ONLY')"),
+      CacheTableStatement(Seq("a", "b", "c"), None, true, Map("storageLevel" -> "DISK_ONLY")))
+
+    intercept("CACHE TABLE a.b.c AS SELECT * FROM testData",
+      "It is not allowed to add catalog/namespace prefix a.b")
+  }
+
   test("TRUNCATE table") {
     comparePlans(
       parsePlan("TRUNCATE TABLE a.b.c"),
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog, TableChange, V1Table}
 import org.apache.spark.sql.connector.expressions.Transform
-import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand}
+import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableRecoverPartitionsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AnalyzeColumnCommand, AnalyzePartitionCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, DescribeColumnCommand, DescribeTableCommand, DropTableCommand, ShowPartitionsCommand, ShowTablesCommand, TruncateTableCommand}
 import org.apache.spark.sql.execution.datasources.{CreateTable, DataSource, RefreshTable}
 import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
 import org.apache.spark.sql.internal.SQLConf
@@ -299,6 +299,10 @@ class ResolveSessionCatalog(
         v1TableName.asTableIdentifier,
         "MSCK REPAIR TABLE")
 
+    case CacheTableStatement(tableName, plan, isLazy, options) =>
+      val v1TableName = parseV1Table(tableName, "CACHE TABLE")
+      CacheTableCommand(v1TableName.asTableIdentifier, plan, isLazy, options)
+
     case TruncateTableStatement(tableName, partitionSpec) =>
       val v1TableName = parseV1Table(tableName, "TRUNCATE TABLE")
       TruncateTableCommand(
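As with the neighboring cases, the statement only becomes the v1 `CacheTableCommand` when `parseV1Table` accepts the name as a session-catalog table; otherwise resolution fails with the message asserted in `DataSourceV2SQLSuite` below. A hedged sketch of both outcomes (the `TableIdentifier` shape is inferred from `asTableIdentifier`):

```scala
// Session-catalog name: converted to the existing v1 command.
CacheTableStatement(Seq("db", "tbl"), None, isLazy = false, Map.empty)
// => CacheTableCommand(TableIdentifier("tbl", Some("db")), None, false, Map.empty)

// v2 catalog name, e.g. CACHE TABLE testcat.ns.tbl:
// => AnalysisException("CACHE TABLE is only supported with v1 tables")
```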
@@ -165,21 +165,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
     unquotedPath
   }
 
-  /**
-   * Create a [[CacheTableCommand]] logical plan.
-   */
-  override def visitCacheTable(ctx: CacheTableContext): LogicalPlan = withOrigin(ctx) {
-    val query = Option(ctx.query).map(plan)
-    val tableIdent = visitTableIdentifier(ctx.tableIdentifier)
-    if (query.isDefined && tableIdent.database.isDefined) {
-      val database = tableIdent.database.get
-      throw new ParseException(s"It is not allowed to add database prefix `$database` to " +
-        s"the table name in CACHE TABLE AS SELECT", ctx)
-    }
-    val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
-    CacheTableCommand(tableIdent, query, ctx.LAZY != null, options)
-  }
-
   /**
    * Create an [[UncacheTableCommand]] logical plan.
    */
@@ -1268,6 +1268,20 @@ class DataSourceV2SQLSuite
     }
   }
 
+  test("CACHE TABLE") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo")
+
+      testV1Command("CACHE TABLE", t)
+
+      val e = intercept[AnalysisException] {
+        sql(s"CACHE LAZY TABLE $t")
+      }
+      assert(e.message.contains("CACHE TABLE is only supported with v1 tables"))
+    }
+  }
+
   private def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
     val e = intercept[AnalysisException] {
       sql(s"$sqlCommand $sqlParams")
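Both spellings are covered: `testV1Command` exercises plain `CACHE TABLE`, and the inline `intercept` covers `CACHE LAZY TABLE`, since either form reaches the same `CacheTableStatement` case. Against a session-catalog (v1) table the same statements succeed; a hedged sketch (table name hypothetical):

```scala
spark.sql("CREATE TABLE v1tbl (id BIGINT) USING parquet")
spark.sql("CACHE LAZY TABLE v1tbl")  // resolves to the v1 CacheTableCommand
assert(spark.catalog.isCached("v1tbl"))
```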
@@ -300,7 +300,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleton
       val e = intercept[ParseException] {
         sql(s"CACHE TABLE $db.cachedTable AS SELECT 1")
       }.getMessage
-      assert(e.contains("It is not allowed to add database prefix ") &&
+      assert(e.contains("It is not allowed to add catalog/namespace prefix ") &&
         e.contains("to the table name in CACHE TABLE AS SELECT"))
     }
   }