File tree Expand file tree Collapse file tree
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog Expand file tree Collapse file tree Original file line number Diff line number Diff line change @@ -123,38 +123,6 @@ object ExternalCatalogUtils {
123123 }
124124 escapePathName(col) + " =" + partitionString
125125 }
126-
/**
 * Filters `inputPartitions` down to the partitions that satisfy every predicate in
 * `predicates`.
 *
 * All predicates must reference only partition columns of `catalogTable`; any predicate
 * touching a non-partition column is a caller error and raises [[AnalysisException]].
 *
 * @param catalogTable      table whose partition schema defines the evaluation row layout
 * @param inputPartitions   candidate partitions to prune
 * @param predicates        partition-pruning predicates (conjunctively combined)
 * @param defaultTimeZoneId time zone used when converting partition values to a row
 * @return the subset of `inputPartitions` for which the conjunction of `predicates`
 *         evaluates to true; `inputPartitions` unchanged when `predicates` is empty
 * @throws AnalysisException if any predicate references a non-partition column
 */
def prunePartitionsByFilter(
    catalogTable: CatalogTable,
    inputPartitions: Seq[CatalogTablePartition],
    predicates: Seq[Expression],
    defaultTimeZoneId: String): Seq[CatalogTablePartition] = {
  if (predicates.isEmpty) {
    // Nothing to prune on; keep every partition.
    inputPartitions
  } else {
    val schema = catalogTable.partitionSchema
    val partColNames = catalogTable.partitionColumnNames.toSet

    // Reject any predicate whose attribute references are not a subset of the
    // partition columns — such predicates cannot be evaluated against partition rows.
    val offending = predicates.filterNot { pred =>
      pred.references.map(_.name).toSet.subsetOf(partColNames)
    }
    if (offending.nonEmpty) {
      throw new AnalysisException("Expected only partition pruning predicates: " +
        offending)
    }

    // AND all predicates together and rebind each attribute to its ordinal in the
    // partition schema so the predicate can run over partition-value rows.
    // NOTE(review): assumes attribute names match schema field names exactly
    // (indexWhere would yield -1 on a mismatch) — guarded above by the subset check,
    // but case sensitivity is not normalized here; confirm against resolver behavior.
    val bound = predicates.reduce(And).transform {
      case att: AttributeReference =>
        val ordinal = schema.indexWhere(_.name == att.name)
        BoundReference(ordinal, schema(ordinal).dataType, nullable = true)
    }
    val boundPredicate = InterpretedPredicate.create(bound)

    // Keep only the partitions whose value row satisfies the bound predicate.
    inputPartitions.filter { part =>
      boundPredicate.eval(part.toRow(schema, defaultTimeZoneId))
    }
  }
}
158126}
159127
160128object CatalogUtils {
You can’t perform that action at this time.
0 commit comments