diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index 0e8d551e4b2ab..645c674dec7d5 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -80,7 +80,7 @@ private[spark] object ResultTask {
  *
  * See [[org.apache.spark.scheduler.Task]] for more information.
  *
- * @param stageId id of the stage this task belongs to
+ * @param _stageId id of the stage this task belongs to
  * @param rdd input to func
  * @param func a function to apply on a partition of the RDD
  * @param _partitionId index of the number in the RDD
@@ -89,13 +89,13 @@ private[spark] object ResultTask {
  *   input RDD's partitions).
  */
 private[spark] class ResultTask[T, U](
-    stageId: Int,
+    _stageId: Int,
     var rdd: RDD[T],
     var func: (TaskContext, Iterator[T]) => U,
     _partitionId: Int,
     @transient locs: Seq[TaskLocation],
     var outputId: Int)
-  extends Task[U](stageId, _partitionId) with Externalizable {
+  extends Task[U](_stageId, _partitionId) with Externalizable {
 
   def this() = this(0, null, null, 0, null, 0)
 
@@ -134,7 +134,7 @@ private[spark] class ResultTask[T, U](
   }
 
   override def readExternal(in: ObjectInput) {
-    val stageId = in.readInt()
+    stageId = in.readInt()
     val numBytes = in.readInt()
     val bytes = new Array[Byte](numBytes)
     in.readFully(bytes)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index ed0f56f1abdf5..9cc498c812828 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -90,19 +90,19 @@ private[spark] object ShuffleMapTask {
  *
  * See [[org.apache.spark.scheduler.Task]] for more information.
  *
- * @param stageId id of the stage this task belongs to
+ * @param _stageId id of the stage this task belongs to
  * @param rdd the final RDD in this stage
  * @param dep the ShuffleDependency
  * @param _partitionId index of the number in the RDD
  * @param locs preferred task execution locations for locality scheduling
  */
 private[spark] class ShuffleMapTask(
-    stageId: Int,
+    _stageId: Int,
     var rdd: RDD[_],
     var dep: ShuffleDependency[_,_],
     _partitionId: Int,
     @transient private var locs: Seq[TaskLocation])
-  extends Task[MapStatus](stageId, _partitionId)
+  extends Task[MapStatus](_stageId, _partitionId)
   with Externalizable
   with Logging {
 
@@ -128,7 +128,7 @@ private[spark] class ShuffleMapTask(
   }
 
   override def readExternal(in: ObjectInput) {
-    val stageId = in.readInt()
+    stageId = in.readInt()
     val numBytes = in.readInt()
     val bytes = new Array[Byte](numBytes)
     in.readFully(bytes)
diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 5871edeb856ad..a81af571902ff 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -40,7 +40,7 @@ import org.apache.spark.util.ByteBufferInputStream
  * @param stageId id of the stage this task belongs to
  * @param partitionId index of the number in the RDD
  */
-private[spark] abstract class Task[T](val stageId: Int, var partitionId: Int) extends Serializable {
+private[spark] abstract class Task[T](var stageId: Int, var partitionId: Int) extends Serializable {
 
   final def run(attemptId: Long): T = {
     context = new TaskContext(stageId, partitionId, attemptId, runningLocally = false)
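
Note (not part of the patch): the three hunks above fix one bug. Previously,
readExternal declared `val stageId = in.readInt()`, which introduced a local
variable shadowing the constructor parameter, so the deserialized value was
silently discarded and a reconstituted task kept the no-arg constructor's
stageId of 0. Making `stageId` a `var` on Task and assigning the field directly
fixes this. The self-contained sketch below (a hypothetical DemoTask, not
Spark's actual class) illustrates why Externalizable forces this shape: the
runtime builds the object via its no-arg constructor and then mutates it in
readExternal, so the restored field cannot be a `val`.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, Externalizable,
  ObjectInput, ObjectInputStream, ObjectOutput, ObjectOutputStream}

// Hypothetical stand-in for a Spark task, reduced to the serialization logic.
class DemoTask(var stageId: Int, var partitionId: Int) extends Externalizable {
  // Externalizable requires a public no-arg constructor; the runtime calls
  // this first, then readExternal to restore the real state.
  def this() = this(0, 0)

  override def writeExternal(out: ObjectOutput): Unit = {
    out.writeInt(stageId)
    out.writeInt(partitionId)
  }

  override def readExternal(in: ObjectInput): Unit = {
    // Assigns the field itself. Writing `val stageId = in.readInt()` here
    // (the original bug) would still advance the stream but leave the field
    // at its no-arg-constructor default of 0.
    stageId = in.readInt()
    partitionId = in.readInt()
  }

  override def toString = s"DemoTask(stageId=$stageId, partitionId=$partitionId)"
}

object DemoTask {
  def main(args: Array[String]): Unit = {
    val buf = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(buf)
    oos.writeObject(new DemoTask(7, 3))
    oos.close()

    val ois = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
    // Prints DemoTask(stageId=7, partitionId=3); with the shadowing bug it
    // would print stageId=0.
    println(ois.readObject())
  }
}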