Skip to content

Commit edcd364

Browse files
adrian-wangyhuai
authored and committed
[SPARK-7330] [SQL] avoid NPE at jdbc rdd
Thanks to nadavoosh for pointing this out in #5590. Author: Daoyuan Wang <[email protected]> Closes #5877 from adrian-wang/jdbcrdd and squashes the following commits: cc11900 [Daoyuan Wang] avoid NPE in jdbcrdd (cherry picked from commit ed9be06) Signed-off-by: Yin Huai <[email protected]>
1 parent cbf232d commit edcd364

2 files changed

Lines changed: 32 additions & 1 deletion

File tree

sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala

Lines changed: 7 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -357,7 +357,13 @@ private[sql] class JDBCRDD(
357357
conversions(i) match {
358358
case BooleanConversion => mutableRow.setBoolean(i, rs.getBoolean(pos))
359359
case DateConversion =>
360-
mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
360+
// DateUtils.fromJavaDate does not handle null value, so we need to check it.
361+
val dateVal = rs.getDate(pos)
362+
if (dateVal != null) {
363+
mutableRow.update(i, DateUtils.fromJavaDate(dateVal))
364+
} else {
365+
mutableRow.update(i, null)
366+
}
361367
case DecimalConversion =>
362368
val decimalVal = rs.getBigDecimal(pos)
363369
if (decimalVal == null) {

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 25 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -102,6 +102,8 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
102102
).executeUpdate()
103103
conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
104104
+ "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
105+
conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
106+
+ "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
105107
conn.commit()
106108
sql(
107109
s"""
@@ -125,6 +127,23 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
125127
|OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
126128
""".stripMargin.replaceAll("\n", " "))
127129

130+
conn.prepareStatement(
131+
s"""
132+
|create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
133+
|f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
134+
|m DOUBLE, n REAL, o DECIMAL(40, 20))
135+
""".stripMargin.replaceAll("\n", " ")).executeUpdate()
136+
conn.prepareStatement("insert into test.nulltypes values ("
137+
+ "null, null, null, null, null, null, null, null, null, "
138+
+ "null, null, null, null, null, null)").executeUpdate()
139+
conn.commit()
140+
sql(
141+
s"""
142+
|CREATE TEMPORARY TABLE nulltypes
143+
|USING org.apache.spark.sql.jdbc
144+
|OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
145+
""".stripMargin.replaceAll("\n", " "))
146+
128147
// Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
129148
}
130149

@@ -252,6 +271,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
252271
val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
253272
val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
254273
assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
274+
assert(rows(1).getAs[java.sql.Date](1) === null)
255275
assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
256276
}
257277

@@ -264,6 +284,11 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
264284
assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
265285
}
266286

287+
test("test types for null value") {
288+
val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.NULLTYPES").collect()
289+
assert((0 to 14).forall(i => rows(0).isNullAt(i)))
290+
}
291+
267292
test("H2 floating-point types") {
268293
val rows = sql("SELECT * FROM flttypes").collect()
269294
assert(rows(0).getDouble(0) == 1.00000000000000022) // Yes, I meant ==.

0 commit comments

Comments (0)