Skip to content

Commit b3f5577

Browse files
committed
We can include the last element in the last slice in general for an inclusive
range, hence eliminating the need to check for Int.MaxValue or Int.MinValue.
1 parent 7d39b9e commit b3f5577

1 file changed

Lines changed: 2 additions & 4 deletions

File tree

core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -128,11 +128,9 @@ private object ParallelCollectionRDD {
128128
}
129129
seq match {
130130
case r: Range => {
131-
// 1 to Int.MaxValue and (-2 to Int.MinValue by -1) can trigger exclusive range int overflow
132-
val needsInclusiveRange = r.isInclusive && (r.end == Int.MaxValue || r.end == Int.MinValue)
133131
positions(r.length, numSlices).zipWithIndex.map({ case ((start, end), index) =>
134-
// If the range needs to be inclusive, include the last element in the last slice
135-
if (needsInclusiveRange && index == numSlices - 1) {
132+
// If the range is inclusive, include the last element in the last slice
133+
if (r.isInclusive && index == numSlices - 1) {
136134
new Range.Inclusive(r.start + start * r.step, r.end, r.step)
137135
}
138136
else {

0 commit comments

Comments (0)