@@ -19,7 +19,7 @@ package org.apache.spark.util
1919
2020import java.util.concurrent._
2121
22- import scala.concurrent.{Await, Awaitable, ExecutionContext, ExecutionContextExecutor}
22+ import scala.concurrent.{Awaitable, ExecutionContext, ExecutionContextExecutor}
2323import scala.concurrent.duration.Duration
2424import scala.concurrent.forkjoin.{ForkJoinPool => SForkJoinPool, ForkJoinWorkerThread => SForkJoinWorkerThread}
2525import scala.util.control.NonFatal
@@ -180,31 +180,20 @@ private[spark] object ThreadUtils {
180180
181181 // scalastyle:off awaitresult
182182 /**
183- * Preferred alternative to `Await.result()`. This method wraps and re-throws any exceptions
184- * thrown by the underlying `Await` call, ensuring that this thread's stack trace appears in
185- * logs.
186- */
187- @throws(classOf[SparkException])
188- def awaitResult[T](awaitable: Awaitable[T], atMost: Duration): T = {
189- try {
190- Await.result(awaitable, atMost)
191- // scalastyle:on awaitresult
192- } catch {
193- case NonFatal(t) =>
194- throw new SparkException("Exception thrown in awaitResult: ", t)
195- }
196- }
197-
198- /**
199- * Calls `Awaitable.result` directly to avoid using `ForkJoinPool`'s `BlockingContext`, wraps
200- * and re-throws any exceptions with nice stack track.
183+ * Preferred alternative to `Await.result()`.
184+ *
185+ * This method wraps and re-throws any exceptions thrown by the underlying `Await` call, ensuring
186+ * that this thread's stack trace appears in logs.
201187 *
202- * Codes running in the user's thread may be in a thread of Scala ForkJoinPool. As concurrent
203- executions in ForkJoinPool may see some [[ThreadLocal]] value unexpectedly, this method
204- * basically prevents ForkJoinPool from running other tasks in the current waiting thread.
188+ * In addition, it calls `Awaitable.result` directly to avoid using `ForkJoinPool`'s
189+ * `BlockingContext`. Codes running in the user's thread may be in a thread of Scala ForkJoinPool.
190+ * As concurrent executions in ForkJoinPool may see some [[ThreadLocal]] value unexpectedly, this
191+ * method basically prevents ForkJoinPool from running other tasks in the current waiting thread.
192+ * In general, we should use this method because many places in Spark use [[ThreadLocal]] and it's
193+ * hard to debug when [[ThreadLocal]]s leak to other tasks.
205194 */
206195 @throws(classOf[SparkException])
207- def awaitResultInForkJoinSafely[T](awaitable: Awaitable[T], atMost: Duration): T = {
196+ def awaitResult[T](awaitable: Awaitable[T], atMost: Duration): T = {
208197 try {
209198 // `awaitPermission` is not actually used anywhere so it's safe to pass in null here.
210199 // See SPARK-13747.
@@ -215,4 +204,5 @@ private[spark] object ThreadUtils {
215204 throw new SparkException("Exception thrown in awaitResult: ", t)
216205 }
217206 }
207+ // scalastyle:on awaitresult
218208}
0 commit comments