From 0de5843f208124d5632555d663ada31f73c311ed Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Mon, 15 Jul 2019 11:48:53 -0700
Subject: [PATCH 1/2] [SPARK-28201][SQL][TEST][FOLLOWUP] Update Integration
 test suite according to the new exception message

---
 .../org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 64b9837cc5fa..d4efa950aa0e 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -376,8 +376,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     val e = intercept[org.apache.spark.SparkException] {
       spark.read.jdbc(jdbcUrl, "tableWithCustomSchema", new Properties()).collect()
     }
-    assert(e.getMessage.contains(
-      "requirement failed: Decimal precision 39 exceeds max precision 38"))
+    assert(e.getMessage.contains("Decimal precision 39 exceeds max precision 38"))

     // custom schema can read data
     val props = new Properties()

From a822c713f5ac9fcca9001b22ea43e3fa792cf367 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Mon, 15 Jul 2019 13:29:19 -0700
Subject: [PATCH 2/2] Address comments

---
 .../scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index d4efa950aa0e..8cdc4a1806b2 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -376,6 +376,7 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     val e = intercept[org.apache.spark.SparkException] {
       spark.read.jdbc(jdbcUrl, "tableWithCustomSchema", new Properties()).collect()
     }
+    assert(e.getCause().isInstanceOf[ArithmeticException])
     assert(e.getMessage.contains("Decimal precision 39 exceeds max precision 38"))

     // custom schema can read data