Skip to content

Commit 2568d50

Browse files
committed
[SPARK-28930][SQL] Last Access Time value shall display 'UNKNOWN' as currently the system cannot evaluate the last access time, and 'null' values will be shown in their capital form 'NULL' for the SQL client to make the
display format similar to spark-shell. What changes were proposed in this pull request? If there is no comment, the Spark Scala shell shows "null" in lowercase letters, but all other places (Hive beeline/Spark beeline/Spark SQL) show it in capital "NULL". In this patch it is shown in its capital form 'NULL' for the SQL client, to make the display format similar to Hive beeline/Spark beeline/Spark SQL. Also corrected the Last Access time: the value shall display 'UNKNOWN' as currently the system does not support last access time evaluation. Issue 2 mentioned in the JIRA: Spark SQL "DESC FORMATTED tablename" is not showing the header # col_name,data_type,comment; it seems the header was removed intentionally as part of SPARK-20954. Does this PR introduce any user-facing change? No. How was this patch tested? Tested locally and corrected a unit test.
1 parent 723faad commit 2568d50

File tree

3 files changed

+14
-10
lines changed

3 files changed

+14
-10
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ case class CatalogTablePartition(
104104
storage: CatalogStorageFormat,
105105
parameters: Map[String, String] = Map.empty,
106106
createTime: Long = System.currentTimeMillis,
107-
lastAccessTime: Long = -1,
107+
lastAccessTime: Long = 0,
108108
stats: Option[CatalogStatistics] = None) {
109109

110110
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
@@ -117,7 +117,7 @@ case class CatalogTablePartition(
117117
}
118118
map.put("Created Time", new Date(createTime).toString)
119119
val lastAccess = {
120-
if (-1 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
120+
if (0 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
121121
}
122122
map.put("Last Access", lastAccess)
123123
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
@@ -236,7 +236,7 @@ case class CatalogTable(
236236
bucketSpec: Option[BucketSpec] = None,
237237
owner: String = "",
238238
createTime: Long = System.currentTimeMillis,
239-
lastAccessTime: Long = -1,
239+
lastAccessTime: Long = 0,
240240
createVersion: String = "",
241241
properties: Map[String, String] = Map.empty,
242242
stats: Option[CatalogStatistics] = None,
@@ -320,12 +320,15 @@ case class CatalogTable(
320320
val map = new mutable.LinkedHashMap[String, String]()
321321
val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
322322
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
323+
val lastAccess = {
324+
if (0 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
325+
}
323326

324327
identifier.database.foreach(map.put("Database", _))
325328
map.put("Table", identifier.table)
326329
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
327330
map.put("Created Time", new Date(createTime).toString)
328-
map.put("Last Access", new Date(lastAccessTime).toString)
331+
map.put("Last Access", lastAccess)
329332
map.put("Created By", "Spark " + createVersion)
330333
map.put("Type", tableType.name)
331334
provider.foreach(map.put("Provider", _))

sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -510,7 +510,8 @@ abstract class DescribeCommandBase extends RunnableCommand {
510510
append(buffer, s"# ${output.head.name}", output(1).name, output(2).name)
511511
}
512512
schema.foreach { column =>
513-
append(buffer, column.name, column.dataType.simpleString, column.getComment().orNull)
513+
append(buffer, column.name, column.dataType.simpleString,
514+
column.getComment().getOrElse("NULL"))
514515
}
515516
}
516517

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1042,11 +1042,11 @@ class HiveDDLSuite
10421042

10431043
assert(sql("DESC tbl").collect().containsSlice(
10441044
Seq(
1045-
Row("a", "int", null),
1046-
Row("b", "int", null),
1045+
Row("a", "int", "NULL"),
1046+
Row("b", "int", "NULL"),
10471047
Row("# Partition Information", "", ""),
10481048
Row("# col_name", "data_type", "comment"),
1049-
Row("b", "int", null)
1049+
Row("b", "int", "NULL")
10501050
)
10511051
))
10521052
}
@@ -1617,7 +1617,7 @@ class HiveDDLSuite
16171617

16181618
val desc = sql("DESC FORMATTED t1").collect().toSeq
16191619

1620-
assert(desc.contains(Row("id", "bigint", null)))
1620+
assert(desc.contains(Row("id", "bigint", "NULL")))
16211621
}
16221622
}
16231623
}
@@ -2435,7 +2435,7 @@ class HiveDDLSuite
24352435
.select("data_type")
24362436
// check if the last access time doesnt have the default date of year
24372437
// 1970 as its a wrong access time
2438-
assert(!(desc.first.toString.contains("1970")))
2438+
assert((desc.first.toString.contains("UNKNOWN")))
24392439
}
24402440
}
24412441

0 commit comments

Comments
 (0)