Skip to content

Commit 554507a

Browse files
committed
adjust new suite as well
1 parent 629f3d2 commit 554507a

1 file changed

Lines changed: 20 additions & 16 deletions

File tree

sql/core/src/test/scala/org/apache/spark/sql/sources/v2/DataSourceDFWriterV2SessionCatalogSuite.scala

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,9 @@ class DataSourceDFWriterV2SessionCatalogSuite
4141
import testImplicits._
4242

4343
private val v2Format = classOf[InMemoryTableProvider].getName
44-
private val dfData = Seq((1L, "a"), (2L, "b"), (3L, "c"))
4544

4645
before {
4746
spark.conf.set(SQLConf.V2_SESSION_CATALOG.key, classOf[TestV2SessionCatalog].getName)
48-
spark.createDataFrame(dfData).toDF("id", "data").createOrReplaceTempView("source")
4947
}
5048

5149
override def afterEach(): Unit = {
@@ -55,47 +53,53 @@ class DataSourceDFWriterV2SessionCatalogSuite
5553

5654
test("saveAsTable and v2 table - table doesn't exist") {
5755
val t1 = "tbl"
58-
spark.table("source").write.format(v2Format).saveAsTable(t1)
59-
checkAnswer(spark.table(t1), spark.table("source"))
56+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
57+
df.write.format(v2Format).saveAsTable(t1)
58+
checkAnswer(spark.table(t1), df)
6059
}
6160

6261
test("saveAsTable: v2 table - table exists") {
6362
val t1 = "tbl"
63+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
6464
spark.sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format")
6565
intercept[TableAlreadyExistsException] {
66-
spark.table("source").select("id", "data").write.format(v2Format).saveAsTable(t1)
66+
df.select("id", "data").write.format(v2Format).saveAsTable(t1)
6767
}
68-
spark.table("source").write.format(v2Format).mode("append").saveAsTable(t1)
69-
checkAnswer(spark.table(t1), spark.table("source"))
68+
df.write.format(v2Format).mode("append").saveAsTable(t1)
69+
checkAnswer(spark.table(t1), df)
7070

7171
// Check that appends are by name
72-
spark.table("source").select('data, 'id).write.format(v2Format).mode("append").saveAsTable(t1)
73-
checkAnswer(spark.table(t1), spark.table("source").union(spark.table("source")))
72+
df.select('data, 'id).write.format(v2Format).mode("append").saveAsTable(t1)
73+
checkAnswer(spark.table(t1), df.union(df))
7474
}
7575

7676
test("saveAsTable: v2 table - table overwrite and table doesn't exist") {
7777
val t1 = "tbl"
78-
spark.table("source").write.format(v2Format).mode("overwrite").saveAsTable(t1)
79-
checkAnswer(spark.table(t1), spark.table("source"))
78+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
79+
df.write.format(v2Format).mode("overwrite").saveAsTable(t1)
80+
checkAnswer(spark.table(t1), df)
8081
}
8182

8283
test("saveAsTable: v2 table - table overwrite and table exists") {
8384
val t1 = "tbl"
85+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
8486
spark.sql(s"CREATE TABLE $t1 USING $v2Format AS SELECT 'c', 'd'")
85-
spark.table("source").write.format(v2Format).mode("overwrite").saveAsTable(t1)
86-
checkAnswer(spark.table(t1), spark.table("source"))
87+
df.write.format(v2Format).mode("overwrite").saveAsTable(t1)
88+
checkAnswer(spark.table(t1), df)
8789
}
8890

8991
test("saveAsTable: v2 table - ignore mode and table doesn't exist") {
9092
val t1 = "tbl"
91-
spark.table("source").write.format(v2Format).mode("ignore").saveAsTable(t1)
92-
checkAnswer(spark.table(t1), spark.table("source"))
93+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
94+
df.write.format(v2Format).mode("ignore").saveAsTable(t1)
95+
checkAnswer(spark.table(t1), df)
9396
}
9497

9598
test("saveAsTable: v2 table - ignore mode and table exists") {
9699
val t1 = "tbl"
100+
val df = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
97101
spark.sql(s"CREATE TABLE $t1 USING $v2Format AS SELECT 'c', 'd'")
98-
spark.table("source").write.format(v2Format).mode("ignore").saveAsTable(t1)
102+
df.write.format(v2Format).mode("ignore").saveAsTable(t1)
99103
checkAnswer(spark.table(t1), Seq(Row("c", "d")))
100104
}
101105
}

0 commit comments

Comments (0)