File tree Expand file tree Collapse file tree
main/java/org/apache/spark/util/collection/unsafe/sort
test/java/org/apache/spark/util/collection/unsafe/sort
sql/core/src/main/java/org/apache/spark/sql/execution Expand file tree Collapse file tree Original file line number Diff line number Diff line change @@ -174,9 +174,12 @@ public void reset() {
174174 if (consumer != null ) {
175175 consumer .freeArray (array );
176176 // the call to consumer.allocateArray may trigger a spill
177- // which in turn access this instance and eventually re-enter this method and try to free the array again.
178- // by setting the array to null and its length to 0 we effectively make the spill code-path a no-op.
179- // setting the array to null also indicates that it has already been de-allocated which prevents a double de-allocation in free().
177+ // which in turn accesses this instance and eventually re-enters this method
178+ // and tries to free the array again.
179+ // by setting the array to null and its length to 0
180+ // we effectively make the spill code-path a no-op.
181+ // setting the array to null also indicates that it has already been
182+ // de-allocated which prevents a double de-allocation in free().
180183 array = null ;
181184 usableCapacity = 0 ;
182185 pos = 0 ;
Original file line number Diff line number Diff line change @@ -470,17 +470,20 @@ public void testOOMDuringSpill() throws Exception {
470470 insertNumber (sorter , i );
471471 }
472472 // we expect the next insert to attempt growing the pointersArray
473- // first allocation is expected to fail, then a spill is triggered which attempts another allocation
473+ // first allocation is expected to fail, then a spill is
474+ // triggered which attempts another allocation
474475 // which also fails and we expect to see this OOM here.
475476 // the original code messed with a released array within the spill code
476477 // and ended up with a failed assertion.
477- // we also expect the location of the OOM to be org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset
478+ // we also expect the location of the OOM to be
479+ // org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset
478480 memoryManager .markconsequentOOM (2 );
479481 try {
480482 insertNumber (sorter , 1024 );
481483 fail ("expected OutOfMmoryError but it seems operation surprisingly succeeded" );
482484 }
483- // we expect an OutOfMemoryError here, anything else (i.e the original NPE is a failure)
485+ // we expect an OutOfMemoryError here; anything else
486+ // (i.e. the original NPE) is a failure
484487 catch (OutOfMemoryError oom ){
485488 String oomStackTrace = Utils .exceptionString (oom );
486489 assertThat ("expected OutOfMemoryError in org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset" ,
Original file line number Diff line number Diff line change @@ -259,7 +259,7 @@ public int compare(
259259 Object baseObj2 ,
260260 long baseOff2 ,
261261 int baseLen2 ) {
262- // Note that since ordering doesn't need the total length of the record, we just pass -1
262+ // Note that since ordering doesn't need the total length of the record, we just pass -1
263263 // into the row.
264264 row1 .pointTo (baseObj1 , baseOff1 + 4 , -1 );
265265 row2 .pointTo (baseObj2 , baseOff2 + 4 , -1 );
You can’t perform that action at this time.
0 commit comments