-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-19093][SQL] Cached tables are not used in SubqueryExpression #16493
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -24,6 +24,8 @@ import scala.language.postfixOps | |
| import org.scalatest.concurrent.Eventually._ | ||
|
|
||
| import org.apache.spark.CleanerListener | ||
| import org.apache.spark.sql.catalyst.expressions.{Expression, SubqueryExpression} | ||
| import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan | ||
| import org.apache.spark.sql.execution.RDDScanExec | ||
| import org.apache.spark.sql.execution.columnar._ | ||
| import org.apache.spark.sql.execution.exchange.ShuffleExchange | ||
|
|
@@ -53,6 +55,16 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext | |
| maybeBlock.nonEmpty | ||
| } | ||
|
|
||
| private def getNumInMemoryRelations(plan: LogicalPlan): Int = { | ||
| var sum = plan.collect { case _: InMemoryRelation => 1 }.sum | ||
| plan.transformAllExpressions { | ||
| case e: SubqueryExpression => | ||
| sum += getNumInMemoryRelations(e.plan) | ||
| e | ||
| } | ||
| sum | ||
| } | ||
|
|
||
| test("withColumn doesn't invalidate cached dataframe") { | ||
| var evalCount = 0 | ||
| val myUDF = udf((x: String) => { evalCount += 1; "result" }) | ||
|
|
@@ -565,4 +577,67 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext | |
| case i: InMemoryRelation => i | ||
| }.size == 1) | ||
| } | ||
|
|
||
| test("SPARK-19093 Caching in side subquery") { | ||
| withTempView("t1") { | ||
| Seq(1).toDF("c1").createOrReplaceTempView("t1") | ||
| spark.catalog.cacheTable("t1") | ||
| val cachedPlan = | ||
| sql( | ||
| """ | ||
| |SELECT * FROM t1 | ||
| |WHERE | ||
| |NOT EXISTS (SELECT * FROM t1) | ||
| """.stripMargin).queryExecution.optimizedPlan | ||
| assert( | ||
| cachedPlan.collect { | ||
| case i: InMemoryRelation => i | ||
| }.size == 2) | ||
| spark.catalog.uncacheTable("t1") | ||
| } | ||
| } | ||
|
|
||
| test("SPARK-19093 scalar and nested predicate query") { | ||
|
|
||
|
|
||
|
||
| withTempView("t1", "t2", "t3", "t4") { | ||
| Seq(1).toDF("c1").createOrReplaceTempView("t1") | ||
| Seq(2).toDF("c1").createOrReplaceTempView("t2") | ||
| Seq(1).toDF("c1").createOrReplaceTempView("t3") | ||
| Seq(1).toDF("c1").createOrReplaceTempView("t4") | ||
| spark.catalog.cacheTable("t1") | ||
| spark.catalog.cacheTable("t2") | ||
| spark.catalog.cacheTable("t3") | ||
| spark.catalog.cacheTable("t4") | ||
|
|
||
| // Nested predicate subquery | ||
| val cachedPlan = | ||
| sql( | ||
| """ | ||
| |SELECT * FROM t1 | ||
| |WHERE | ||
| |c1 IN (SELECT c1 FROM t2 WHERE c1 IN (SELECT c1 FROM t3 WHERE c1 = 1)) | ||
| """.stripMargin).queryExecution.optimizedPlan | ||
| assert (getNumInMemoryRelations(cachedPlan) == 3) | ||
|
|
||
| // Scalar subquery and predicate subquery | ||
| val cachedPlan2 = | ||
| sql( | ||
| """ | ||
| |SELECT * FROM (SELECT max(c1) FROM t1 GROUP BY c1) | ||
| |WHERE | ||
| |c1 = (SELECT max(c1) FROM t2 GROUP BY c1) | ||
| |OR | ||
| |EXISTS (SELECT c1 FROM t3) | ||
| |OR | ||
| |c1 IN (SELECT c1 FROM t4) | ||
| """.stripMargin).queryExecution.optimizedPlan | ||
| assert (getNumInMemoryRelations(cachedPlan2) == 4) | ||
|
|
||
| spark.catalog.uncacheTable("t1") | ||
| spark.catalog.uncacheTable("t2") | ||
| spark.catalog.uncacheTable("t3") | ||
| spark.catalog.uncacheTable("t4") | ||
|
||
| } | ||
| } | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The same comment applies here.