Skip to content

Commit b4e0b1d

Browse files
committed
Fix style issues
1 parent 71e8b9f commit b4e0b1d

File tree

3 files changed

+9
-4
lines changed

3 files changed

+9
-4
lines changed

core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -707,7 +707,8 @@ object JavaPairRDD {

   private[spark]
   def cogroupResult2ToJava[K: ClassTag, V, W1, W2](
-      rdd: RDD[(K, (Iterator[V], Iterator[W1], Iterator[W2]))]): RDD[(K, (JIterator[V], JIterator[W1], JIterator[W2]))] = {
+      rdd: RDD[(K, (Iterator[V], Iterator[W1], Iterator[W2]))])
+      : RDD[(K, (JIterator[V], JIterator[W1], JIterator[W2]))] = {
     rddToPairRDDFunctions(rdd)
       .mapValues(x => (asJavaIterator(x._1), asJavaIterator(x._2), asJavaIterator(x._3)))
   }

core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -456,7 +456,8 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)])
    * For each key k in `this` or `other`, return a resulting RDD that contains a tuple with the
    * list of values for that key in `this` as well as `other`.
    */
-  def cogroup[W](other: RDD[(K, W)], partitioner: Partitioner): RDD[(K, (Iterator[V], Iterator[W]))] = {
+  def cogroup[W](other: RDD[(K, W)], partitioner: Partitioner)
+      : RDD[(K, (Iterator[V], Iterator[W]))] = {
     if (partitioner.isInstanceOf[HashPartitioner] && getKeyClass().isArray) {
       throw new SparkException("Default partitioner cannot partition array keys.")
     }
@@ -477,7 +478,9 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)])
     }
     val cg = new CoGroupedRDD[K](Seq(self, other1, other2), partitioner)
     cg.mapValues { case Seq(vs, w1s, w2s) =>
-      (vs.asInstanceOf[Seq[V]].iterator, w1s.asInstanceOf[Seq[W1]].iterator, w2s.asInstanceOf[Seq[W2]].iterator)
+      (vs.asInstanceOf[Seq[V]].iterator,
+       w1s.asInstanceOf[Seq[W1]].iterator,
+       w2s.asInstanceOf[Seq[W2]].iterator)
     }
   }


streaming/src/main/scala/org/apache/spark/streaming/dstream/PairDStreamFunctions.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,8 @@ class PairDStreamFunctions[K: ClassTag, V: ClassTag](self: DStream[(K,V)])
    *                       the new DStream will generate RDDs); must be a multiple of this
    *                       DStream's batching interval
    */
-  def groupByKeyAndWindow(windowDuration: Duration, slideDuration: Duration): DStream[(K, Iterator[V])] =
+  def groupByKeyAndWindow(windowDuration: Duration, slideDuration: Duration)
+      : DStream[(K, Iterator[V])] =
   {
     groupByKeyAndWindow(windowDuration, slideDuration, defaultPartitioner())
   }

0 commit comments

Comments (0)