Skip to content

Commit c0edad2

Browse files
committed
Improve test.
1 parent 9e7a8a4 commit c0edad2

3 files changed

Lines changed: 11 additions & 20 deletions

File tree

docs/sql-programming-guide.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1333,7 +1333,7 @@ the following case-insensitive options:
13331333
<tr>
13341334
<td><code>customSchema</code></td>
13351335
<td>
1336-
The custom schema to use for reading data from JDBC connectors. For example, <code>"id DECIMAL(38, 0), name STRING"</code>. You can also specify partial fields, others use default values. For example, <code>"id DECIMAL(38, 0)"</code>. The column names should be identical to the corresponding column names of JDBC table. Users can specify the corresponding data types of Spark SQL instead of using the defaults. This option applies only to reading.
1336+
The custom schema to use for reading data from JDBC connectors. For example, <code>"id DECIMAL(38, 0), name STRING"</code>. You can also specify partial fields, and the others use the default type mapping. For example, <code>"id DECIMAL(38, 0)"</code>. The column names should be identical to the corresponding column names of the JDBC table. Users can specify the corresponding data types of Spark SQL instead of using the defaults. This option applies only to reading.
13371337
</td>
13381338
</tr>
13391339
</table>

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -784,10 +784,8 @@ object JdbcUtils extends Logging {
784784
// This is resolved by names, use the custom field dataType to replace the default dataType.
785785
val newSchema = tableSchema.map { col =>
786786
userSchema.find(f => nameEquality(f.name, col.name)) match {
787-
case Some(c) =>
788-
col.copy(dataType = c.dataType, nullable = c.nullable)
789-
case None =>
790-
col
787+
case Some(c) => col.copy(dataType = c.dataType, metadata = Metadata.empty)
788+
case None => col
791789
}
792790
}
793791
StructType(newSchema)

sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala

Lines changed: 8 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
2626

2727
import org.apache.spark.{SparkException, SparkFunSuite}
2828
import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
29+
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
2930
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
3031
import org.apache.spark.sql.execution.DataSourceScanExec
3132
import org.apache.spark.sql.execution.command.ExplainCommand
@@ -970,36 +971,28 @@ class JDBCSuite extends SparkFunSuite
970971

971972
test("jdbc API support custom schema") {
972973
val parts = Array[String]("THEID < 2", "THEID >= 2")
974+
val customSchema = "NAME STRING, THEID INT"
973975
val props = new Properties()
974-
props.put("customSchema", "name STRING, THEID BIGINT")
975-
val schema = StructType(Seq(
976-
StructField("NAME", StringType, true), StructField("THEID", LongType, true)))
976+
props.put("customSchema", customSchema)
977977
val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, props)
978978
assert(df.schema.size === 2)
979-
df.schema.zip(schema).foreach {
980-
case (c, v) =>
981-
assert(c.dataType === v.dataType)
982-
}
979+
assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
983980
assert(df.count() === 3)
984981
}
985982

986983
test("jdbc API custom schema DDL-like strings.") {
987984
withTempView("people_view") {
985+
val customSchema = "NAME STRING, THEID INT"
988986
sql(
989987
s"""
990988
|CREATE TEMPORARY VIEW people_view
991989
|USING org.apache.spark.sql.jdbc
992990
|OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass',
993-
|customSchema 'NAME STRING, THEID INT')
991+
|customSchema '$customSchema')
994992
""".stripMargin.replaceAll("\n", " "))
995-
val schema = StructType(
996-
Seq(StructField("NAME", StringType, true), StructField("THEID", IntegerType, true)))
997993
val df = sql("select * from people_view")
998-
assert(df.schema.size === 2)
999-
df.schema.zip(schema).foreach {
1000-
case (c, v) =>
1001-
assert(c.dataType === v.dataType)
1002-
}
994+
assert(df.schema.length === 2)
995+
assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
1003996
assert(df.count() === 3)
1004997
}
1005998
}

0 commit comments

Comments
 (0)