Skip to content

Commit dc5127e

Browse files
author
kai
committed
code style fixes
1 parent b5a4efa commit dc5127e

File tree

4 files changed

+13
-12
lines changed

4 files changed

+13
-12
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashOuterJoin.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.joins
2020
import org.apache.spark.annotation.DeveloperApi
2121
import org.apache.spark.rdd.RDD
2222
import org.apache.spark.sql.catalyst.expressions._
23-
import org.apache.spark.sql.catalyst.plans.physical.UnspecifiedDistribution
23+
import org.apache.spark.sql.catalyst.plans.physical.{Distribution, UnspecifiedDistribution}
2424
import org.apache.spark.sql.catalyst.plans.{JoinType, LeftOuter, RightOuter}
2525
import org.apache.spark.sql.execution.{BinaryNode, SparkPlan}
2626
import org.apache.spark.util.ThreadUtils
@@ -53,7 +53,7 @@ case class BroadcastHashOuterJoin(
5353
}
5454
}
5555

56-
override def requiredChildDistribution =
56+
override def requiredChildDistribution: Seq[Distribution] =
5757
UnspecifiedDistribution :: UnspecifiedDistribution :: Nil
5858

5959
private[this] lazy val (buildPlan, streamedPlan) = joinType match {

sql/core/src/main/scala/org/apache/spark/sql/execution/joins/ShuffledHashOuterJoin.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.joins
2020
import org.apache.spark.annotation.DeveloperApi
2121
import org.apache.spark.rdd.RDD
2222
import org.apache.spark.sql.catalyst.expressions._
23-
import org.apache.spark.sql.catalyst.plans.physical.ClusteredDistribution
23+
import org.apache.spark.sql.catalyst.plans.physical.{Distribution, ClusteredDistribution}
2424
import org.apache.spark.sql.catalyst.plans.{FullOuter, JoinType, LeftOuter, RightOuter}
2525
import org.apache.spark.sql.execution.{BinaryNode, SparkPlan}
2626

@@ -40,7 +40,7 @@ case class ShuffledHashOuterJoin(
4040
left: SparkPlan,
4141
right: SparkPlan) extends BinaryNode with HashOuterJoin {
4242

43-
override def requiredChildDistribution: Seq[ClusteredDistribution] =
43+
override def requiredChildDistribution: Seq[Distribution] =
4444
ClusteredDistribution(leftKeys) :: ClusteredDistribution(rightKeys) :: Nil
4545

4646
protected override def doExecute(): RDD[InternalRow] = {

sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,8 @@ class JoinSuite extends QueryTest with BeforeAndAfterEach {
8787
classOf[ShuffledHashOuterJoin]),
8888
("SELECT * FROM testData right join testData2 ON key = a and key = 2",
8989
classOf[ShuffledHashOuterJoin]),
90-
("SELECT * FROM testData full outer join testData2 ON key = a", classOf[ShuffledHashOuterJoin]),
90+
("SELECT * FROM testData full outer join testData2 ON key = a",
91+
classOf[ShuffledHashOuterJoin]),
9192
("SELECT * FROM testData left JOIN testData2 ON (key * a != key + a)",
9293
classOf[BroadcastNestedLoopJoin]),
9394
("SELECT * FROM testData right JOIN testData2 ON (key * a != key + a)",

sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanTest.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -99,9 +99,9 @@ class SparkPlanTest extends SparkFunSuite {
9999
* @param expectedAnswer the expected result in a [[Seq]] of [[Product]]s.
100100
*/
101101
protected def checkAnswer[A <: Product : TypeTag](
102-
input: DataFrame,
103-
planFunction: SparkPlan => SparkPlan,
104-
expectedAnswer: Seq[A]): Unit = {
102+
input: DataFrame,
103+
planFunction: SparkPlan => SparkPlan,
104+
expectedAnswer: Seq[A]): Unit = {
105105
val expectedRows = expectedAnswer.map(Row.fromTuple)
106106
checkAnswer(input, planFunction, expectedRows)
107107
}
@@ -115,10 +115,10 @@ class SparkPlanTest extends SparkFunSuite {
115115
* @param expectedAnswer the expected result in a [[Seq]] of [[Product]]s.
116116
*/
117117
protected def checkAnswer[A <: Product : TypeTag](
118-
left: DataFrame,
119-
right: DataFrame,
120-
planFunction: (SparkPlan, SparkPlan) => SparkPlan,
121-
expectedAnswer: Seq[A]): Unit = {
118+
left: DataFrame,
119+
right: DataFrame,
120+
planFunction: (SparkPlan, SparkPlan) => SparkPlan,
121+
expectedAnswer: Seq[A]): Unit = {
122122
val expectedRows = expectedAnswer.map(Row.fromTuple)
123123
checkAnswer(left, right, planFunction, expectedRows)
124124
}

0 commit comments

Comments (0)