Skip to content

Commit 436ef43

Browse files
committed
Remove unneeded prunePartitionsByFilter from backport
1 parent c74cb09 commit 436ef43

File tree

1 file changed

+0
-32
lines changed

1 file changed

+0
-32
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogUtils.scala

Lines changed: 0 additions & 32 deletions
Original file line number | Diff line number | Diff line change
@@ -123,38 +123,6 @@ object ExternalCatalogUtils {
123123
}
124124
escapePathName(col) + "=" + partitionString
125125
}
126-
127-
def prunePartitionsByFilter(
128-
catalogTable: CatalogTable,
129-
inputPartitions: Seq[CatalogTablePartition],
130-
predicates: Seq[Expression],
131-
defaultTimeZoneId: String): Seq[CatalogTablePartition] = {
132-
if (predicates.isEmpty) {
133-
inputPartitions
134-
} else {
135-
val partitionSchema = catalogTable.partitionSchema
136-
val partitionColumnNames = catalogTable.partitionColumnNames.toSet
137-
138-
val nonPartitionPruningPredicates = predicates.filterNot {
139-
_.references.map(_.name).toSet.subsetOf(partitionColumnNames)
140-
}
141-
if (nonPartitionPruningPredicates.nonEmpty) {
142-
throw new AnalysisException("Expected only partition pruning predicates: " +
143-
nonPartitionPruningPredicates)
144-
}
145-
146-
val boundPredicate =
147-
InterpretedPredicate.create(predicates.reduce(And).transform {
148-
case att: AttributeReference =>
149-
val index = partitionSchema.indexWhere(_.name == att.name)
150-
BoundReference(index, partitionSchema(index).dataType, nullable = true)
151-
})
152-
153-
inputPartitions.filter { p =>
154-
boundPredicate.eval(p.toRow(partitionSchema, defaultTimeZoneId))
155-
}
156-
}
157-
}
158126
}
159127

160128
object CatalogUtils {

0 commit comments

Comments
 (0)