
Commit

address comments
sunchao committed Sep 1, 2023
1 parent 34cf0d2 commit 62e88af
Showing 2 changed files with 14 additions and 0 deletions.
@@ -487,6 +487,19 @@ abstract class ExternalCatalogSuite extends SparkFunSuite {
     assert(part.stats.exists(_.sizeInBytes == 1))
   }
 
+  test("SPARK-45054: list partitions by filter should restore stats") {
+    val catalog = newBasicCatalog()
+    val stats = Some(CatalogStatistics(sizeInBytes = 1))
+    val newPart = CatalogTablePartition(Map("a" -> "1", "b" -> "2"), storageFormat, stats = stats)
+    catalog.alterPartitions("db2", "tbl2", Seq(newPart))
+    val tz = TimeZone.getDefault.getID
+    val parts = catalog.listPartitionsByFilter("db2", "tbl2", Seq($"a".int === 1), tz)
+
+    assert(parts.length == 1)
+    val part = parts.head
+    assert(part.stats.exists(_.sizeInBytes == 1))
+  }
+
   test("SPARK-21457: list partitions with special chars") {
     val catalog = newBasicCatalog()
     assert(catalog.listPartitions("db2", "tbl1").isEmpty)
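For context on what the new test guards against: partition-level statistics written by ANALYZE TABLE only help the optimizer if a filtered partition listing carries them back out of the metastore. The sketch below is a hypothetical end-to-end reproduction, not part of this commit; it assumes a Hive-enabled SparkSession named `spark`, and the database/table/column names (`default`, `t`, `p`) are made up.

```scala
import java.util.TimeZone
import org.apache.spark.sql.catalyst.dsl.expressions._

// Hypothetical reproduction sketch (not from this commit): write partition stats,
// then list partitions through a filter and check that the stats survive.
spark.sql("CREATE TABLE t (id INT) PARTITIONED BY (p INT) STORED AS PARQUET")
spark.sql("INSERT INTO t PARTITION (p = 1) VALUES (1)")
spark.sql("ANALYZE TABLE t PARTITION (p = 1) COMPUTE STATISTICS")

val parts = spark.sharedState.externalCatalog.listPartitionsByFilter(
  "default", "t", Seq($"p".int === 1), TimeZone.getDefault.getID)
// Before the fix this printed None even though ANALYZE stored the stats;
// with the fix the restored CatalogStatistics (sizeInBytes, rowCount) show up.
parts.foreach(p => println(p.stats))
```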
@@ -1305,6 +1305,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configuration)
     val clientPrunedPartitions =
       client.getPartitionsByFilter(rawHiveTable, predicates).map { part =>
         part.copy(spec = restorePartitionSpec(part.spec, partColNameMap))
+        restorePartitionMetadata(part, catalogTable)
       }
     prunePartitionsByFilter(catalogTable, clientPrunedPartitions, predicates, defaultTimeZoneId)
   }
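The added call matters because Spark keeps partition-level statistics as plain string entries in the partition's parameters map in the Hive metastore, and restorePartitionMetadata is the helper that turns those entries back into CatalogStatistics, which is what the new test checks. Below is a minimal standalone sketch of that restore pattern only; it is not Spark's actual implementation, and the property key names are assumptions.

```scala
import org.apache.spark.sql.catalyst.catalog.{CatalogStatistics, CatalogTablePartition}

// Minimal sketch of the "restore stats from metastore properties" idea.
// Illustrative only: the key names are assumptions, and the real
// restorePartitionMetadata in HiveExternalCatalog has its own key handling.
def restoreStatsSketch(part: CatalogTablePartition): CatalogTablePartition = {
  val sizeInBytes = part.parameters.get("spark.sql.statistics.totalSize").map(BigInt(_))
  val rowCount = part.parameters.get("spark.sql.statistics.numRows").map(BigInt(_))
  sizeInBytes match {
    case Some(size) => part.copy(stats = Some(CatalogStatistics(size, rowCount)))
    case None => part // no stats stored for this partition; leave stats as None
  }
}
```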
