Skip to content

Commit afdad0e

Browse files
cloud-fan authored and dongjoon-hyun committed
[SPARK-32018][SQL][2.4] UnsafeRow.setDecimal should set null with overflowed value
backport #29125 Closes #29141 from cloud-fan/backport. Authored-by: Wenchen Fan <wenchen@databricks.com> Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
1 parent 9aeeb0f commit afdad0e

2 files changed

Lines changed: 11 additions & 1 deletion

File tree

sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -293,7 +293,7 @@ public void setDecimal(int ordinal, Decimal value, int precision) {
293293
Platform.putLong(baseObject, baseOffset + cursor, 0L);
294294
Platform.putLong(baseObject, baseOffset + cursor + 8, 0L);
295295

296-
if (value == null) {
296+
if (value == null || !value.changePrecision(precision, value.scale())) {
297297
setNullAt(ordinal);
298298
// keep the offset for future update
299299
Platform.putLong(baseObject, getFieldOffset(ordinal), cursor << 32);

sql/core/src/test/scala/org/apache/spark/sql/UnsafeRowSuite.scala

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -178,4 +178,14 @@ class UnsafeRowSuite extends SparkFunSuite {
178178
// Makes sure hashCode on unsafe array won't crash
179179
unsafeRow.getArray(0).hashCode()
180180
}
181+
182+
test("SPARK-32018: setDecimal with overflowed value") {
183+
val d1 = new Decimal().set(BigDecimal("10000000000000000000")).toPrecision(38, 18)
184+
val row = InternalRow.apply(d1)
185+
val unsafeRow = UnsafeProjection.create(Array[DataType](DecimalType(38, 18))).apply(row)
186+
assert(unsafeRow.getDecimal(0, 38, 18) === d1)
187+
val d2 = (d1 * Decimal(10)).toPrecision(39, 18)
188+
unsafeRow.setDecimal(0, d2, 38)
189+
assert(unsafeRow.getDecimal(0, 38, 18) === null)
190+
}
181191
}

0 commit comments

Comments (0)