Skip to content

Commit ec5315d

Browse files
committed
Test fix to use testImplicits for simplicity
1 parent 7658bbc commit ec5315d

1 file changed

Lines changed: 6 additions & 19 deletions

File tree

external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala

Lines changed: 6 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -21,8 +21,6 @@ import java.math.BigDecimal
2121
import java.sql.{Connection, Date, Timestamp}
2222
import java.util.Properties
2323

24-
import org.apache.spark.sql.{DataFrame, Row}
25-
import org.apache.spark.sql.types._
2624
import org.apache.spark.tags.DockerTest
2725

2826
@DockerTest
@@ -205,34 +203,23 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
205203
df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
206204
}
207205

208-
test("SPARK-28151 Test write table with BYTETYPE") {
209-
val df : DataFrame = {
210-
val schema = StructType(Seq(
211-
StructField("a", ByteType, true)
212-
))
213-
val data = Seq(
214-
Row(-127.toByte),
215-
Row(0.toByte),
216-
Row(1.toByte),
217-
Row(38.toByte),
218-
Row(128.toByte)
219-
)
220-
spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
221-
}
206+
test("Write tables with BYTETYPE") {
207+
import testImplicits._
208+
val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
222209
val tablename = "bytetable"
223210
df.write
224211
.format("jdbc")
225212
.mode("overwrite")
226213
.option("url", jdbcUrl)
227214
.option("dbtable", tablename)
228215
.save()
229-
val df_copy = spark.read
216+
val df2 = spark.read
230217
.format("jdbc")
231218
.option("url", jdbcUrl)
232219
.option("dbtable", tablename)
233220
.load()
234-
assert(df.count == df_copy.count)
235-
val rows = df_copy.collect()
221+
assert(df.count == df2.count)
222+
val rows = df2.collect()
236223
val colType = rows(0).toSeq.map(x => x.getClass.toString)
237224
assert(colType(0) == "class java.lang.Byte")
238225
}

0 commit comments

Comments (0)