Skip to content

Commit b9f20ba

Browse files
committed
[SPARK-28930][SQL] The Last Access Time value shall display 'UNKNOWN', as the system currently cannot evaluate the last access time, and null comment values will be shown in their capital form 'NULL' in the SQL client, to make the
display format similar to spark-shell. What changes were proposed in this pull request? If there is no comment, the Spark Scala shell shows "null" in lowercase, but everywhere else (Hive beeline/Spark beeline/Spark SQL) it is shown in capital letters as "NULL". With this patch it is shown in its capital form 'NULL' in the SQL client, making the display format consistent with Hive beeline/Spark beeline/Spark SQL. The Last Access time was also corrected: the value shall display 'UNKNOWN', as the system currently does not support evaluating the last access time. Regarding issue 2 mentioned in the JIRA — Spark SQL "desc formatted tablename" not showing the header # col_name,data_type,comment — the header appears to have been removed intentionally as part of SPARK-20954. Does this PR introduce any user-facing change? No. How was this patch tested? Locally, and an existing unit test was corrected.
1 parent dc11f3a commit b9f20ba

File tree

2 files changed

+5
-5
lines changed

2 files changed

+5
-5
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ case class CatalogTablePartition(
104104
storage: CatalogStorageFormat,
105105
parameters: Map[String, String] = Map.empty,
106106
createTime: Long = System.currentTimeMillis,
107-
lastAccessTime: Long = 0,
107+
lastAccessTime: Long = -1,
108108
stats: Option[CatalogStatistics] = None) {
109109

110110
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
@@ -117,7 +117,7 @@ case class CatalogTablePartition(
117117
}
118118
map.put("Created Time", new Date(createTime).toString)
119119
val lastAccess = {
120-
if (0 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
120+
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
121121
}
122122
map.put("Last Access", lastAccess)
123123
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
@@ -236,7 +236,7 @@ case class CatalogTable(
236236
bucketSpec: Option[BucketSpec] = None,
237237
owner: String = "",
238238
createTime: Long = System.currentTimeMillis,
239-
lastAccessTime: Long = 0,
239+
lastAccessTime: Long = -1,
240240
createVersion: String = "",
241241
properties: Map[String, String] = Map.empty,
242242
stats: Option[CatalogStatistics] = None,
@@ -321,7 +321,7 @@ case class CatalogTable(
321321
val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
322322
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
323323
val lastAccess = {
324-
if (0 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
324+
if (lastAccessTime <= 0) "UNKNOWN" else new Date(lastAccessTime).toString
325325
}
326326

327327
identifier.database.foreach(map.put("Database", _))

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -513,7 +513,7 @@ class TreeNodeSuite extends SparkFunSuite with SQLHelper {
513513
"partitionColumnNames" -> List.empty[String],
514514
"owner" -> "",
515515
"createTime" -> 0,
516-
"lastAccessTime" -> 0,
516+
"lastAccessTime" -> -1,
517517
"createVersion" -> "2.x",
518518
"tracksPartitionsInCatalog" -> false,
519519
"properties" -> JNull,

0 commit comments

Comments
 (0)