Skip to content

Commit

Permalink
initial commit
Browse files Browse the repository at this point in the history
  • Loading branch information
sunchao committed Sep 1, 2023
1 parent e72ce91 commit 34cf0d2
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -475,6 +475,18 @@ abstract class ExternalCatalogSuite extends SparkFunSuite {
assert(catalog.listPartitions("db2", "tbl2", Some(Map("a" -> "unknown"))).isEmpty)
}

test("SPARK-45054: list partitions should restore stats") {
  val catalog = newBasicCatalog()

  // Attach statistics to an existing partition of db2.tbl2, then list it back
  // and verify the stats survive the round trip through the catalog.
  val expectedStats = Some(CatalogStatistics(sizeInBytes = 1))
  val partWithStats =
    CatalogTablePartition(Map("a" -> "1", "b" -> "2"), storageFormat, stats = expectedStats)
  catalog.alterPartitions("db2", "tbl2", Seq(partWithStats))

  val listed = catalog.listPartitions("db2", "tbl2", Some(Map("a" -> "1")))
  assert(listed.size == 1)
  // Stats must be restored on the listed partition, not dropped by listPartitions.
  assert(listed.head.stats.exists(_.sizeInBytes == 1))
}

test("SPARK-21457: list partitions with special chars") {
val catalog = newBasicCatalog()
assert(catalog.listPartitions("db2", "tbl1").isEmpty)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1275,13 +1275,14 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
db: String,
table: String,
partialSpec: Option[TablePartitionSpec] = None): Seq[CatalogTablePartition] = withClient {
val partColNameMap = buildLowerCasePartColNameMap(getTable(db, table))
val catalogTable = getTable(db, table)
val partColNameMap = buildLowerCasePartColNameMap(catalogTable)
val metaStoreSpec = partialSpec.map(toMetaStorePartitionSpec)
val res = client.getPartitions(db, table, metaStoreSpec)
.map { part => part.copy(spec = restorePartitionSpec(part.spec, partColNameMap))
}

metaStoreSpec match {
val parts = metaStoreSpec match {
// This might be a bug of Hive: When the partition value inside the partial partition spec
// contains dot, and we ask Hive to list partitions w.r.t. the partial partition spec, Hive
// treats dot as matching any single character and may return more partitions than we
Expand All @@ -1290,6 +1291,7 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
res.filter(p => isPartialPartitionSpec(spec, toMetaStorePartitionSpec(p.spec)))
case _ => res
}
parts.map(restorePartitionMetadata(_, catalogTable))
}

override def listPartitionsByFilter(
Expand Down

0 comments on commit 34cf0d2

Please sign in to comment.