Skip to content

Commit 737819a

Browse files
committed
SPARK-1095 add explicit return types to APIs.
1 parent 3ddc8bb commit 737819a

File tree

3 files changed

+11
-10
lines changed

3 files changed

+11
-10
lines changed

core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
126126
def subtract(other: JavaRDD[T], p: Partitioner): JavaRDD[T] =
127127
wrapRDD(rdd.subtract(other, p))
128128

129-
def generator = rdd.generator
129+
def generator: String = rdd.generator
130130

131131
override def toString = rdd.toString
132132

core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
7474
* of the original partition.
7575
*/
7676
def mapPartitionsWithIndex[R: ClassTag](
77-
f: JFunction2[Int, java.util.Iterator[T], java.util.Iterator[R]],
77+
f: JFunction2[Integer, java.util.Iterator[T], java.util.Iterator[R]],
7878
preservesPartitioning: Boolean = false): JavaRDD[R] =
7979
new JavaRDD(rdd.mapPartitionsWithIndex(((a,b) => f(a,asJavaIterator(b))),
8080
preservesPartitioning))

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
package org.apache.spark.api.java
1919

20+
import java.util
2021
import java.util.{Map => JMap}
2122

2223
import scala.collection.JavaConversions
@@ -92,23 +93,23 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
9293

9394
private[spark] val env = sc.env
9495

95-
def isLocal = sc.isLocal
96+
def isLocal: java.lang.Boolean = sc.isLocal
9697

97-
def sparkUser = sc.sparkUser
98+
def sparkUser: String = sc.sparkUser
9899

99-
def master = sc.master
100+
def master: String = sc.master
100101

101-
def appName = sc.appName
102+
def appName: String = sc.appName
102103

103-
def jars = JavaConversions.seqAsJavaList(sc.jars)
104+
def jars: util.List[String] = sc.jars
104105

105-
def startTime = sc.startTime
106+
def startTime: java.lang.Long = sc.startTime
106107

107108
/** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
108-
def defaultParallelism = sc.defaultParallelism
109+
def defaultParallelism: Integer = sc.defaultParallelism
109110

110111
/** Default min number of partitions for Hadoop RDDs when not given by user */
111-
def defaultMinSplits = sc.defaultMinSplits
112+
def defaultMinSplits: Integer = sc.defaultMinSplits
112113

113114
/** Distribute a local Scala collection to form an RDD. */
114115
def parallelize[T](list: java.util.List[T], numSlices: Int): JavaRDD[T] = {

0 commit comments

Comments (0)