-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-23128][SQL] A new approach to do adaptive execution in Spark SQL #24706
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 4 commits
52f0222
1c665d2
a9b4209
cbfbc4e
4255421
a656e42
ac0794d
62af5be
77a668b
3e85e74
bd6a364
9eaf307
e2fa8e3
55450e9
4b0755d
6e547d7
baef964
ec59f88
9af4eb1
5b5ac2e
da33bd7
5688cb4
a40b771
37905f5
eb8fe75
4481085
8570ec0
237c067
d6e040b
e265104
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -40,9 +40,11 @@ import org.apache.spark.sql.execution.metric.SQLMetric | |
| import org.apache.spark.sql.types.DataType | ||
|
|
||
| object SparkPlan { | ||
| // a TreeNode tag in SparkPlan, to carry its original logical plan. The planner will add this tag | ||
| // when converting a logical plan to a physical plan. | ||
| /** The original [[LogicalPlan]] from which this [[SparkPlan]] is converted. */ | ||
| val LOGICAL_PLAN_TAG = TreeNodeTag[LogicalPlan]("logical_plan") | ||
|
|
||
| /** The [[LogicalPlan]] inherited from its ancestor. */ | ||
| val LOGICAL_PLAN_INHERITED_TAG = TreeNodeTag[LogicalPlan]("logical_plan_inherited") | ||
| } | ||
|
|
||
| /** | ||
|
|
@@ -79,6 +81,34 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ | |
| super.makeCopy(newArgs) | ||
| } | ||
|
|
||
| /** | ||
| * @return The logical plan this plan is linked to. | ||
| */ | ||
| def logicalLink: Option[LogicalPlan] = | ||
| getTagValue(SparkPlan.LOGICAL_PLAN_TAG) | ||
| .orElse(getTagValue(SparkPlan.LOGICAL_PLAN_INHERITED_TAG)) | ||
|
|
||
| /** | ||
| * Set logical plan link recursively if unset. | ||
| */ | ||
| def setLogicalLink(logicalPlan: LogicalPlan): Unit = { | ||
| setLogicalLink(logicalPlan, false) | ||
| } | ||
|
|
||
| private def setLogicalLink(logicalPlan: LogicalPlan, inherited: Boolean = false): Unit = { | ||
| if (logicalLink.isDefined) { | ||
| return | ||
| } | ||
|
|
||
| val tag = if (inherited) { | ||
| SparkPlan.LOGICAL_PLAN_INHERITED_TAG | ||
| } else { | ||
| SparkPlan.LOGICAL_PLAN_TAG | ||
| } | ||
| setTagValue(tag, logicalPlan) | ||
| children.foreach(_.setLogicalLink(logicalPlan, true)) | ||
|
||
| } | ||
|
|
||
| /** | ||
| * @return All metrics containing metrics of this SparkPlan. | ||
| */ | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We probably need to update the doc for this config. It isn't enabled when runtime query re-optimization is true.
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Let's leave it now and see if we should use the existing config
`spark.sql.adaptive.enabled` instead.