Commit c4eeee5

Wenchen's comments
1 parent 65a0d43 commit c4eeee5

File tree

2 files changed: +13 -23 lines changed


sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala

Lines changed: 10 additions & 20 deletions
@@ -361,6 +361,16 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   def insertInto(tableName: String): Unit = {
     import df.sparkSession.sessionState.analyzer.{AsTableIdentifier, CatalogObjectIdentifier}
 
+    assertNotBucketed("insertInto")
+
+    if (partitioningColumns.isDefined) {
+      throw new AnalysisException(
+        "insertInto() can't be used together with partitionBy(). " +
+          "Partition columns have already been defined for the table. " +
+          "It is not necessary to use partitionBy()."
+      )
+    }
+
     df.sparkSession.sessionState.sqlParser.parseMultipartIdentifier(tableName) match {
       case CatalogObjectIdentifier(Some(catalog), ident) =>
         insertInto(catalog, ident)
@@ -372,16 +382,6 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   private def insertInto(catalog: CatalogPlugin, ident: Identifier): Unit = {
     import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
 
-    assertNotBucketed("insertInto")
-
-    if (partitioningColumns.isDefined) {
-      throw new AnalysisException(
-        "insertInto() can't be used together with partitionBy(). " +
-          "Partition columns have already been defined for the table. " +
-          "It is not necessary to use partitionBy()."
-      )
-    }
-
     val table = DataSourceV2Relation.create(catalog.asTableCatalog.loadTable(ident))
 
     val command = modeForDSV2 match {
@@ -410,16 +410,6 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
   }
 
   private def insertInto(tableIdent: TableIdentifier): Unit = {
-    assertNotBucketed("insertInto")
-
-    if (partitioningColumns.isDefined) {
-      throw new AnalysisException(
-        "insertInto() can't be used together with partitionBy(). " +
-          "Partition columns have already been defined for the table. " +
-          "It is not necessary to use partitionBy()."
-      )
-    }
-
     runCommand(df.sparkSession, "insertInto") {
       InsertIntoTable(
         table = UnresolvedRelation(tableIdent),
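
With this change, the partitionBy()/insertInto() guard runs once in the public insertInto(tableName) entry point instead of being duplicated in each private overload, so every resolution path fails fast. A minimal sketch of the user-facing behavior, assuming a running SparkSession `spark` and a pre-created partitioned table (the catalog and table names are illustrative, mirroring the test suite):

import org.apache.spark.sql.functions.lit

val df = spark.range(10).withColumn("data", lit("a"))

// Throws AnalysisException up front: the target table already defines its
// partitioning, so partitionBy() is redundant with insertInto().
df.write.partitionBy("id").insertInto("testcat.ns1.ns2.tbl")

// Correct usage: omit partitionBy() and let the table's own partitioning apply.
df.write.insertInto("testcat.ns1.ns2.tbl")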

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceV2DataFrameSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -62,7 +62,7 @@ class DataSourceV2DataFrameSuite extends QueryTest with SharedSQLContext with Be
     }
   }
 
-  test("insertInto: append partitioned table - dynamic clause") {
+  test("insertInto: append partitioned table") {
     val t1 = "testcat.ns1.ns2.tbl"
     withTable(t1) {
       sql(s"CREATE TABLE $t1 (id bigint, data string) USING foo PARTITIONED BY (id)")
@@ -80,7 +80,7 @@ class DataSourceV2DataFrameSuite extends QueryTest with SharedSQLContext with Be
     }
   }
 
-  test("insertInto: overwrite - dynamic clause - static mode") {
+  test("insertInto: overwrite - static mode") {
     withSQLConf(PARTITION_OVERWRITE_MODE.key -> PartitionOverwriteMode.STATIC.toString) {
       val t1 = "testcat.ns1.ns2.tbl"
       withTable(t1) {
@@ -92,7 +92,7 @@ class DataSourceV2DataFrameSuite extends QueryTest with SharedSQLContext with Be
     }
   }
 
-  test("insertInto: overwrite - dynamic clause - dynamic mode") {
+  test("insertInto: overwrite - dynamic mode") {
     withSQLConf(PARTITION_OVERWRITE_MODE.key -> PartitionOverwriteMode.DYNAMIC.toString) {
       val t1 = "testcat.ns1.ns2.tbl"
       withTable(t1) {
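
For context on the renamed overwrite tests: the static/dynamic split is governed by the spark.sql.sources.partitionOverwriteMode setting (surfaced above as PARTITION_OVERWRITE_MODE). A hedged sketch of the difference, assuming a SparkSession `spark`, a DataFrame `df`, and an existing partitioned table `t`:

// Static mode (the default): overwrite replaces every partition matched by
// the static part of the partition spec; with no spec, the entire table.
spark.conf.set("spark.sql.sources.partitionOverwriteMode", "static")
df.write.mode("overwrite").insertInto("t")

// Dynamic mode: only the partitions present in `df` are rewritten;
// all other partitions keep their existing data.
spark.conf.set("spark.sql.sources.partitionOverwriteMode", "dynamic")
df.write.mode("overwrite").insertInto("t")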
