@@ -23,8 +23,8 @@ import org.apache.spark.sql.catalyst.expressions.{DynamicPruningExpression, Expr
 import org.apache.spark.sql.catalyst.plans.ExistenceJoin
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, AdaptiveSparkPlanHelper}
-import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec, ShuffleExchangeExec}
-import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, SortMergeJoinExec}
+import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ReusedExchangeExec}
+import org.apache.spark.sql.execution.joins.BroadcastHashJoinExec
 import org.apache.spark.sql.execution.streaming.{MemoryStream, StreamingQueryWrapper}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
@@ -1251,9 +1251,10 @@ abstract class DynamicPartitionPruningSuiteBase
         |SELECT * FROM view1 v1 join view1 v2 WHERE v1.store_id = v2.store_id
       """.stripMargin)

-    val executedPlan = df.queryExecution.executedPlan
     checkPartitionPruningPredicate(df, false, false)
-    val reuseExchangeNodes = executedPlan.collect { case se: ReusedExchangeExec => se }
+    val reuseExchangeNodes = df.queryExecution.executedPlan.collect {
+      case se: ReusedExchangeExec => se
+    }
     assert(reuseExchangeNodes.size == 1, "Expected plan to contain 1 ReusedExchangeExec " +
       s"nodes. Found ${reuseExchangeNodes.size}")

0 commit comments