Skip to content

Commit b7c4ea5

Browse files
committed
Add integration test suite
1 parent 1fad3e3 commit b7c4ea5

1 file changed

Lines changed: 120 additions & 0 deletions

File tree

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.jdbc.v2

import java.math.BigDecimal
import java.net.ServerSocket
import java.sql.{Connection, Date, Timestamp}
import java.util.{Properties, TimeZone}

import org.scalatest.time.SpanSugar._

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Row, SaveMode}
import org.apache.spark.sql.execution.{RowDataSourceScanExec, WholeStageCodegenExec}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCPartition, JDBCRelation}
import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.tags.DockerTest

/**
 * A Docker-based integration suite exercising the JDBC v2 (DataSource V2) Oracle
 * catalog via `ALTER TABLE` DDL commands.
 *
 * The following would be the steps to test this:
 * 1. Build Oracle database in Docker, please refer below link about how to.
 *    https://github.com/oracle/docker-images/blob/master/OracleDatabase/SingleInstance/README.md
 * 2. export ORACLE_DOCKER_IMAGE_NAME=$ORACLE_DOCKER_IMAGE_NAME
 *    Pull oracle $ORACLE_DOCKER_IMAGE_NAME image - docker pull $ORACLE_DOCKER_IMAGE_NAME
 * 3. Start docker - sudo service docker start
 * 4. Run spark test - ./build/sbt -Pdocker-integration-tests
 *    "test-only org.apache.spark.sql.jdbc.v2.OracleIntegrationSuite"
 *
 * An actual sequence of commands to run the test is as follows
 *
 *   $ git clone https://github.com/oracle/docker-images.git
 *   // Head SHA: 3e352a22618070595f823977a0fd1a3a8071a83c
 *   $ cd docker-images/OracleDatabase/SingleInstance/dockerfiles
 *   $ ./buildDockerImage.sh -v 18.4.0 -x
 *   $ export ORACLE_DOCKER_IMAGE_NAME=oracle/database:18.4.0-xe
 *   $ cd $SPARK_HOME
 *   $ ./build/sbt -Pdocker-integration-tests
 *     "test-only org.apache.spark.sql.jdbc.v2.OracleIntegrationSuite"
 *
 * It has been validated with 18.4.0 Express Edition.
 */
@DockerTest
class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSparkSession {
  import testImplicits._

  // Describes the Oracle XE container that DockerJDBCIntegrationSuite starts
  // before the tests run. The image name is taken from the environment so
  // locally-built images (see the Scaladoc above) can be used.
  override val db = new DatabaseOnDocker {
    override val imageName = sys.env("ORACLE_DOCKER_IMAGE_NAME")
    override val env = Map(
      "ORACLE_PWD" -> "oracle"
    )
    override val usesIpc = false
    override val jdbcPort: Int = 1521
    override def getJdbcUrl(ip: String, port: Int): String =
      s"jdbc:oracle:thin:system/oracle@//$ip:$port/xe"
  }

  // Register a JDBC v2 catalog named `oracle`, backed by the Docker instance,
  // so tests can address tables as `oracle.<table>`.
  override def sparkConf: SparkConf = super.sparkConf
    .set("spark.sql.catalog.oracle", classOf[JDBCTableCatalog].getName)
    .set("spark.sql.catalog.oracle.url", db.getJdbcUrl(dockerIp, externalPort))

  // Oracle XE can take several minutes to become reachable inside Docker.
  override val connectionTimeout = timeout(7.minutes)

  // No shared fixture data: each test creates and drops its own table.
  override def dataPreparation(conn: Connection): Unit = {}

  test("SPARK-33034: alter table ... add column") {
    withTable("oracle.alt_table") {
      sql("CREATE TABLE oracle.alt_table (ID STRING) USING _")
      // Multiple columns can be added in a single ADD COLUMNS clause.
      sql("ALTER TABLE oracle.alt_table ADD COLUMNS (C1 STRING, C2 STRING)")
      var t = spark.table("oracle.alt_table")
      var expectedSchema = new StructType()
        .add("ID", StringType)
        .add("C1", StringType)
        .add("C2", StringType)
      assert(t.schema === expectedSchema)
      // A subsequent single-column add appends to the existing schema.
      sql("ALTER TABLE oracle.alt_table ADD COLUMNS (C3 STRING)")
      t = spark.table("oracle.alt_table")
      expectedSchema = expectedSchema.add("C3", StringType)
      assert(t.schema === expectedSchema)
    }
  }

  test("SPARK-33034: alter table ... update column type") {
    withTable("oracle.alt_table") {
      sql("CREATE TABLE oracle.alt_table (ID INTEGER) USING _")
      sql("ALTER TABLE oracle.alt_table ALTER COLUMN id TYPE STRING")
      val t = spark.table("oracle.alt_table")
      val expectedSchema = new StructType().add("ID", StringType)
      assert(t.schema === expectedSchema)
    }
  }

  test("SPARK-33034: alter table ... update column nullability") {
    withTable("oracle.alt_table") {
      sql("CREATE TABLE oracle.alt_table (ID STRING NOT NULL) USING _")
      sql("ALTER TABLE oracle.alt_table ALTER COLUMN ID DROP NOT NULL")
      val t = spark.table("oracle.alt_table")
      val expectedSchema = new StructType().add("ID", StringType, nullable = true)
      assert(t.schema === expectedSchema)
    }
  }
}

0 commit comments

Comments
 (0)