Skip to content

Commit 995bdd8

Browse files
committed
Cleaned up DescribeHiveTableCommand
1 parent: 542977c — commit: 995bdd8

File tree

1 file changed

+9
-14
lines changed

1 file changed

+9
-14
lines changed

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala

Lines changed: 9 additions & 14 deletions
Original file line number · Diff line number · Diff line change
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema
2323

2424
import org.apache.spark.annotation.DeveloperApi
2525
import org.apache.spark.rdd.RDD
26-
import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRow, Row}
26+
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
2727
import org.apache.spark.sql.execution.{Command, LeafNode}
2828
import org.apache.spark.sql.hive.{HiveContext, MetastoreRelation}
2929

@@ -41,16 +41,11 @@ case class DescribeHiveTableCommand(
4141
extends LeafNode with Command {
4242

4343
// Strings with the format like Hive. It is used for result comparison in our unit tests.
44-
lazy val hiveString: Seq[String] = {
45-
val alignment = 20
46-
val delim = "\t"
47-
48-
sideEffectResult.map {
49-
case (name, dataType, comment) =>
50-
String.format("%-" + alignment + "s", name) + delim +
51-
String.format("%-" + alignment + "s", dataType) + delim +
52-
String.format("%-" + alignment + "s", Option(comment).getOrElse("None"))
53-
}
44+
lazy val hiveString: Seq[String] = sideEffectResult.map {
45+
case (name, dataType, comment) =>
46+
Seq(name, dataType, Option(comment).getOrElse("None"))
47+
.map(String.format(s"%-20s", _))
48+
.mkString("\t")
5449
}
5550

5651
override protected[sql] lazy val sideEffectResult: Seq[(String, String, String)] = {
@@ -60,7 +55,7 @@ case class DescribeHiveTableCommand(
6055
val columns: Seq[FieldSchema] = table.hiveQlTable.getCols
6156
val partitionColumns: Seq[FieldSchema] = table.hiveQlTable.getPartCols
6257
results ++= columns.map(field => (field.getName, field.getType, field.getComment))
63-
if (!partitionColumns.isEmpty) {
58+
if (partitionColumns.nonEmpty) {
6459
val partColumnInfo =
6560
partitionColumns.map(field => (field.getName, field.getType, field.getComment))
6661
results ++=
@@ -78,8 +73,8 @@ case class DescribeHiveTableCommand(
7873
}
7974

8075
override def executeCollect(): Array[Row] = sideEffectResult.map {
81-
case (name, dataType, comment) => Row(name, dataType, comment)
82-
}.toArray
76+
case (name, dataType, comment) => Row(name, dataType, comment)
77+
}.toArray
8378

8479
override def execute(): RDD[Row] = context.sparkContext.parallelize(executeCollect(), 1)
8580

0 commit comments

Comments (0)