MsSqlServerIntegrationSuite.scala
@@ -21,6 +21,8 @@ import java.math.BigDecimal
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
 
+import org.apache.spark.sql.{DataFrame, Row}
+import org.apache.spark.sql.types._
Review comment (Member), on the added imports: Let's remove this. I'll give the updated short test code.

 import org.apache.spark.tags.DockerTest
 
 @DockerTest
@@ -119,7 +121,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     val types = row.toSeq.map(x => x.getClass.toString)
     assert(types.length == 12)
     assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Integer"))
+    assert(types(1).equals("class java.lang.Byte"))
     assert(types(2).equals("class java.lang.Short"))
     assert(types(3).equals("class java.lang.Integer"))
     assert(types(4).equals("class java.lang.Long"))
@@ -131,7 +133,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(types(10).equals("class java.math.BigDecimal"))
     assert(types(11).equals("class java.math.BigDecimal"))
     assert(row.getBoolean(0) == false)
-    assert(row.getInt(1) == 255)
+    assert(row.getByte(1) == 255.toByte)
     assert(row.getShort(2) == 32767)
     assert(row.getInt(3) == 2147483647)
     assert(row.getLong(4) == 9223372036854775807L)
@@ -202,4 +204,25 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }
+
+  test("SPARK-28151 Test write table with BYTETYPE") {
+    val tableSchema = StructType(Seq(StructField("serialNum", ByteType, true)))
+    val tableData = Seq(Row(10.toByte))
+    val df1 = spark.createDataFrame(
+      spark.sparkContext.parallelize(tableData),
+      tableSchema)
+
+    df1.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", "byteTable")
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", "byteTable")
+      .load()
+    df2.show()
+  }
 }
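
The reviewer's promised shorter test is not shown in this diff, but a round-trip check could look like the sketch below; the table name "bytecopy" is hypothetical, and spark, jdbcUrl, and the imports added at the top of this file are assumed from the suite fixtures. Note that SQL Server's TINYINT is unsigned (0..255) while Spark's ByteType is signed (-128..127), which is why the assertion earlier in the suite compares against 255.toByte (i.e. -1).

val schema = StructType(Seq(StructField("serialNum", ByteType, nullable = true)))
val df = spark.createDataFrame(
  spark.sparkContext.parallelize(Seq(Row(10.toByte))),  // value must be a Byte, not an Int
  schema)

df.write.jdbc(jdbcUrl, "bytecopy", new Properties)       // ByteType column created as TINYINT
val readBack = spark.read.jdbc(jdbcUrl, "bytecopy", new Properties)
assert(readBack.schema.head.dataType == ByteType)        // TINYINT surfaced as ByteType on read
assert(readBack.collect().head.getByte(0) == 10.toByte)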
JdbcUtils.scala
@@ -550,7 +550,7 @@ object JdbcUtils extends Logging {
 
     case ByteType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>
-        stmt.setInt(pos + 1, row.getByte(pos))
+        stmt.setByte(pos + 1, row.getByte(pos))
 
     case BooleanType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>
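
The change above binds the parameter with the JDBC accessor that matches the column's TINYINT type instead of widening the byte to an INT. A minimal sketch of the resulting setter shape (the JDBCValueSetter alias is assumed to mirror the one used inside JdbcUtils; the wrapping object is added here only so the snippet compiles standalone):

import java.sql.PreparedStatement
import org.apache.spark.sql.Row

object ByteSetterSketch {
  // Alias assumed to match JdbcUtils' internal JDBCValueSetter.
  type JDBCValueSetter = (PreparedStatement, Row, Int) => Unit

  // ByteType branch after this change: setByte keeps the bound parameter
  // TINYINT-compatible; JDBC parameter indices are 1-based, hence pos + 1.
  val byteSetter: JDBCValueSetter =
    (stmt: PreparedStatement, row: Row, pos: Int) =>
      stmt.setByte(pos + 1, row.getByte(pos))
}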
MsSqlServerDialect.scala
@@ -33,6 +33,7 @@ private object MsSqlServerDialect extends JdbcDialect {
     sqlType match {
       case java.sql.Types.SMALLINT => Some(ShortType)
       case java.sql.Types.REAL => Some(FloatType)
+      case java.sql.Types.TINYINT => Some(ByteType)
       case _ => None
     }
   }
@@ -44,6 +45,7 @@ private object MsSqlServerDialect extends JdbcDialect {
       case BooleanType => Some(JdbcType("BIT", java.sql.Types.BIT))
       case BinaryType => Some(JdbcType("VARBINARY(MAX)", java.sql.Types.VARBINARY))
       case ShortType => Some(JdbcType("SMALLINT", java.sql.Types.SMALLINT))
+      case ByteType => Some(JdbcType("TINYINT", java.sql.Types.TINYINT))
       case _ => None
     }
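
Taken together, the two dialect additions make the mapping symmetric: getCatalystType covers the read path (TINYINT -> ByteType) and getJDBCType covers the write path (ByteType -> TINYINT). A self-contained sketch of the same idea as a user-registered dialect; the object name is hypothetical, while canHandle, getCatalystType, and getJDBCType are Spark's public JdbcDialect API:

import java.sql.Types
import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects, JdbcType}
import org.apache.spark.sql.types._

object TinyIntDialectSketch extends JdbcDialect {
  override def canHandle(url: String): Boolean = url.startsWith("jdbc:sqlserver")

  // Read side: a TINYINT column is surfaced to Spark as ByteType.
  override def getCatalystType(
      sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
    sqlType match {
      case Types.TINYINT => Some(ByteType)
      case _ => None  // fall back to Spark's default mapping
    }

  // Write side: a ByteType column is created as TINYINT.
  override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
    case ByteType => Some(JdbcType("TINYINT", Types.TINYINT))
    case _ => None
  }
}

// Registering the dialect makes it take precedence for matching URLs:
// JdbcDialects.registerDialect(TinyIntDialectSketch)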