Skip to content

Commit 24edb5f

Browse files
committed
address comments
1 parent ae15ea9 commit 24edb5f

3 files changed

Lines changed: 15 additions & 2 deletions

File tree

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -311,8 +311,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
311311

312312
/**
313313
* Create a [[CreateTableUsing]] or a [[CreateTableUsingAsSelect]] logical plan.
314-
*
315-
* TODO add bucketing and partitioning.
316314
*/
317315
override def visitCreateTableUsing(ctx: CreateTableUsingContext): LogicalPlan = withOrigin(ctx) {
318316
val (table, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast, RowOrd
2626
import org.apache.spark.sql.catalyst.plans.logical
2727
import org.apache.spark.sql.catalyst.plans.logical._
2828
import org.apache.spark.sql.catalyst.rules.Rule
29+
import org.apache.spark.sql.execution.command.CreateTableCommand
2930
import org.apache.spark.sql.internal.SQLConf
3031
import org.apache.spark.sql.sources.{BaseRelation, InsertableRelation}
3132

@@ -206,6 +207,11 @@ private[sql] case class PreWriteCheck(conf: SQLConf, catalog: SessionCatalog)
206207
// The relation in l is not an InsertableRelation.
207208
failAnalysis(s"$l does not allow insertion.")
208209

210+
case c: CreateTableCommand =>
211+
// Duplicates are not allowed in partitionBy
212+
// TODO: when bucketBy and sortBy are supported, we also need to reject duplicate columns there.
213+
checkDuplicates(c.table.partitionColumnNames, "Partition")
214+
209215
case c: CreateTableUsing =>
210216
// Duplicates are not allowed in partitionBy/bucketBy/sortBy columns.
211217
checkDuplicates(c.partitionColumns, "Partition")

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -207,6 +207,15 @@ class HiveDDLSuite
207207
}
208208
}
209209

210+
test("duplicate columns in partitionBy in CREATE TABLE") {
211+
withTable("boxes") {
212+
val e = intercept[AnalysisException] {
213+
sql("CREATE TABLE boxes (b INT, c INT) PARTITIONED BY (a INT, a INT)")
214+
}
215+
assert(e.getMessage.contains("Found duplicate column(s) in Partition: `a`"))
216+
}
217+
}
218+
210219
test("add/drop partitions - external table") {
211220
val catalog = spark.sessionState.catalog
212221
withTempDir { tmpDir =>

0 commit comments

Comments (0)