Skip to content

Commit b57689a

Browse files
fenzhu authored and GitHub Enterprise committed
[CARMEL-5955] Drop NOT NULL constraint when alter column type (#930)
* [CARMEL-5955] Drop NOT NULL constraint when alter column type

* refresh
1 parent 72c944a commit b57689a

3 files changed

Lines changed: 51 additions & 1 deletion

File tree

sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -542,15 +542,25 @@ case class AlterTableChangeColumnCommand(
542542
s"'${newColumn.name}' with type '${newColumn.dataType}'")
543543
}
544544

545+
var changeNullable = false
545546
val newDataSchema = table.dataSchema.fields.map { field =>
546547
if (field.name == originColumn.name) {
547548
// Create a new column from the origin column with the new comment.
548-
addComment(field, newColumn.getComment)
549+
val fieldWithComment = addComment(field, newColumn.getComment)
550+
if (!fieldWithComment.nullable && newColumn.nullable) {
551+
changeNullable = true
552+
fieldWithComment.copy(nullable = true)
553+
} else {
554+
fieldWithComment
555+
}
549556
} else {
550557
field
551558
}
552559
}
553560
catalog.alterTableDataSchema(tableName, StructType(newDataSchema))
561+
if (changeNullable && sparkSession.sessionState.conf.enforceSchemaNotNull) {
562+
catalog.refreshTable(tableName)
563+
}
554564
}
555565

556566
Seq.empty[Row]

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -743,6 +743,27 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
743743
}
744744
}
745745

746+
test("CARMEL-5955: Drop NOT NULL constraint when alter column type") {
747+
withSQLConf(SQLConf.ENFORCE_SCHEMA_NOT_NULL.key -> "true") {
748+
withTable("test") {
749+
sql("create table test(id int not null, name string) using parquet")
750+
val schema = spark.sessionState.catalog.getTableMetadata(TableIdentifier("test")).schema
751+
assert(schema.fields(0).name.equals("id") && !schema.fields(0).nullable
752+
&& schema.fields(1).name.equals("name") && schema.fields(1).nullable)
753+
754+
val e1 = intercept[SparkException] {
755+
sql("insert into test values (null, 'a')")
756+
}
757+
assert(Utils.findFirstCause(e1).getMessage.contains(
758+
"NOT NULL constraint violated for column: id"))
759+
760+
sql("alter table test change column id id int")
761+
sql("insert into test values (null, 'a')")
762+
checkAnswer(sql("select * from test"), Seq(Row(null, "a")))
763+
}
764+
}
765+
}
766+
746767
test("inner join, no matches") {
747768
checkAnswer(
748769
sql(

sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertSuite.scala

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1344,6 +1344,25 @@ class InsertSuite extends QueryTest with TestHiveSingleton with BeforeAndAfter
13441344
}
13451345
}
13461346

1347+
test("CARMEL-5955: Drop NOT NULL constraint when alter column type") {
1348+
withSQLConf(SQLConf.ENFORCE_SCHEMA_NOT_NULL.key -> "true") {
1349+
withTable("test") {
1350+
sql("create table test(id int not null, name string)")
1351+
val schema = spark.sessionState.catalog.getTableMetadata(TableIdentifier("test")).schema
1352+
assert(schema.fields(0).name.equals("id") && !schema.fields(0).nullable
1353+
&& schema.fields(1).name.equals("name") && schema.fields(1).nullable)
1354+
1355+
val e1 = intercept[SparkException] {
1356+
sql("insert into test values (null, 'a')")
1357+
}
1358+
assert(Utils.findFirstCause(e1).getMessage.contains(
1359+
"NOT NULL constraint violated for column: id"))
1360+
sql("alter table test change column id id int")
1361+
sql("insert into test values (null, 'a')")
1362+
checkAnswer(sql("select * from test"), Seq(Row(null, "a")))
1363+
}
1364+
}
1365+
}
13471366

13481367
test("Data inserting into HiveTable violates table constraints") {
13491368
withTable("constraintTable") {

0 commit comments

Comments (0)