diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowPartitionsExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowPartitionsExec.scala
index 44d6f4495f55..c4b6aa805d58 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowPartitionsExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowPartitionsExec.scala
@@ -41,7 +41,7 @@ case class ShowPartitionsExec(
       // listPartitionByNames() should return all partitions if the partition spec
       // does not specify any partition names.
       .getOrElse((Seq.empty[String], InternalRow.empty))
-    val partitionIdentifiers = table.listPartitionByNames(names.toArray, ident)
+    val partitionIdentifiers = table.listPartitionIdentifiers(names.toArray, ident)
     // Converting partition identifiers as `InternalRow` of partition values,
     // for instance InternalRow(value0, value1, ..., valueN), to `InternalRow`s
     // with a string in the format: "col0=value0/col1=value1/.../colN=valueN".
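For context, the renamed `listPartitionIdentifiers` call returns one `InternalRow` of partition values per partition, and the code following this hunk turns each of those rows into a single "col0=value0/col1=value1/.../colN=valueN" string, as the comment above describes. The sketch below is a minimal, Spark-free illustration of that string-building step only; the object name, the toPartitionString helper, and the column names and values are hypothetical and are not part of the patch.

object PartitionStringSketch {
  // Builds "col0=value0/col1=value1/.../colN=valueN" from parallel sequences of
  // partition column names and values, mirroring the format named in the comment above.
  // This is an illustrative sketch, not Spark's actual conversion code.
  def toPartitionString(colNames: Seq[String], values: Seq[Any]): String =
    colNames.zip(values).map { case (name, value) => s"$name=$value" }.mkString("/")

  def main(args: Array[String]): Unit = {
    // Hypothetical partition identifier with two partition columns.
    println(toPartitionString(Seq("year", "month"), Seq(2020, 12)))  // prints: year=2020/month=12
  }
}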