diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
index f3e67dc4e975..29e455de74ee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
@@ -99,7 +99,8 @@ case class CatalogTablePartition(
     spec: CatalogTypes.TablePartitionSpec,
     storage: CatalogStorageFormat,
     parameters: Map[String, String] = Map.empty,
-    stats: Option[CatalogStatistics] = None) {
+    stats: Option[CatalogStatistics] = None,
+    schema: Option[StructType] = None) {
 
   def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
     val map = new mutable.LinkedHashMap[String, String]()
@@ -109,6 +110,10 @@ case class CatalogTablePartition(
     if (parameters.nonEmpty) {
       map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
     }
+    schema.foreach { s =>
+      map.put("Partition Cols",
+        s"{${s.map(f => f.name + "=" + f.dataType).mkString(", ")}}")
+    }
     stats.foreach(s => map.put("Partition Statistics", s.simpleString))
     map
   }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index da9fe2d3088b..fd3af935dbe0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -991,6 +991,7 @@
     p.storage.serde.foreach(serdeInfo.setSerializationLib)
     serdeInfo.setParameters(p.storage.properties.asJava)
     storageDesc.setSerdeInfo(serdeInfo)
+    p.schema.foreach(s => storageDesc.setCols(s.map(toHiveColumn).toList.asJava))
     tpart.setDbName(ht.getDbName)
     tpart.setTableName(ht.getTableName)
     tpart.setValues(partValues.asJava)
@@ -1020,7 +1021,9 @@ private[hive] object HiveClientImpl {
       properties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
         .map(_.asScala.toMap).orNull),
       parameters = properties,
-      stats = readHiveStats(properties))
+      stats = readHiveStats(properties),
+      schema = Option(apiPartition.getSd.getCols)
+        .map(cols => StructType(cols.asScala.map(fromHiveColumn))))
   }
 
 /**