Skip to content

Commit 440d277

Browse files
Fixes to imports; add back requiredChildDistribution (lost when merging)
1 parent 208d5f6 commit 440d277

2 files changed

Lines changed: 3 additions & 4 deletions

File tree

sql/core/src/main/scala/org/apache/spark/sql/execution/joins.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,9 @@ case class LeftSemiJoinHash(
170170

171171
val buildSide = BuildRight
172172

173+
override def requiredChildDistribution =
174+
ClusteredDistribution(leftKeys) :: ClusteredDistribution(rightKeys) :: Nil
175+
173176
override def output = left.output
174177

175178
def execute() = {

sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,14 +27,10 @@ import parquet.hadoop.ParquetOutputFormat
2727
import parquet.hadoop.metadata.CompressionCodecName
2828
import parquet.schema.MessageType
2929

30-
import org.apache.spark.sql.SQLContext
3130
import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, UnresolvedException}
3231
import org.apache.spark.sql.catalyst.expressions.Attribute
3332
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, LeafNode}
3433

35-
// Implicits
36-
import scala.collection.JavaConversions._
37-
3834
/**
3935
* Relation that consists of data stored in a Parquet columnar format.
4036
*

0 commit comments

Comments (0)