Skip to content

Commit 470560e

Browse files
committed
Fix params not updating BULKINSERT_ARE_PARTITIONER_RECORDS_SORTED
1 parent e3eb14a commit 470560e

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -520,7 +520,8 @@ object HoodieSparkSqlWriter {
520520
if (parameters(INSERT_DROP_DUPS.key).toBoolean) {
521521
throw new HoodieException("Dropping duplicates with bulk_insert in row writer path is not supported yet")
522522
}
523-
val params = parameters.updated(HoodieWriteConfig.AVRO_SCHEMA_STRING.key, schema.toString)
523+
val params: mutable.Map[String, String] = collection.mutable.Map(parameters.toSeq: _*)
524+
params(HoodieWriteConfig.AVRO_SCHEMA_STRING.key) = schema.toString
524525
val writeConfig = DataSourceUtils.createHoodieConfig(schema.toString, path, tblName, mapAsJavaMap(params))
525526
val bulkInsertPartitionerRows: BulkInsertPartitioner[Dataset[Row]] = if (populateMetaFields) {
526527
val userDefinedBulkInsertPartitionerOpt = DataSourceUtils.createUserDefinedBulkInsertPartitionerWithRows(writeConfig)
@@ -535,7 +536,7 @@ object HoodieSparkSqlWriter {
535536
new NonSortPartitionerWithRows()
536537
}
537538
val arePartitionRecordsSorted = bulkInsertPartitionerRows.arePartitionRecordsSorted()
538-
parameters.updated(HoodieInternalConfig.BULKINSERT_ARE_PARTITIONER_RECORDS_SORTED, arePartitionRecordsSorted.toString)
539+
params(HoodieInternalConfig.BULKINSERT_ARE_PARTITIONER_RECORDS_SORTED) = arePartitionRecordsSorted.toString
539540
val isGlobalIndex = if (populateMetaFields) {
540541
SparkHoodieIndexFactory.isGlobalIndex(writeConfig)
541542
} else {

0 commit comments

Comments (0)