Skip to content

Commit 9d0d4ed

Browse files
committed
[SPARK-35595][TESTS] Support multiple loggers in testing method withLogAppender
### What changes were proposed in this pull request? A test case of AdaptiveQueryExecSuite has become flaky because there are too many debug logs in the RootLogger: https://github.com/Yikun/spark/runs/2715222392?check_suite_focus=true https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/139125/testReport/ To fix it, this change supports multiple loggers in the testing method withLogAppender, so that the LogAppender captures only the targeted log output. ### Why are the changes needed? To fix a flaky test case and to reduce unnecessary memory cost in tests. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Unit test Closes #32725 from gengliangwang/fixFlakyLogAppender. Authored-by: Gengliang Wang <gengliang@apache.org> Signed-off-by: Gengliang Wang <gengliang@apache.org>
1 parent 0ad5ae5 commit 9d0d4ed

File tree

3 files changed

+21
-11
lines changed

3 files changed

+21
-11
lines changed

core/src/test/scala/org/apache/spark/SparkFunSuite.scala

Lines changed: 16 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -217,19 +217,27 @@ abstract class SparkFunSuite
217217
*/
218218
protected def withLogAppender(
219219
appender: Appender,
220-
loggerName: Option[String] = None,
220+
loggerNames: Seq[String] = Seq.empty,
221221
level: Option[Level] = None)(
222222
f: => Unit): Unit = {
223-
val logger = loggerName.map(Logger.getLogger).getOrElse(Logger.getRootLogger)
224-
val restoreLevel = logger.getLevel
225-
logger.addAppender(appender)
226-
if (level.isDefined) {
227-
logger.setLevel(level.get)
223+
val loggers = if (loggerNames.nonEmpty) {
224+
loggerNames.map(Logger.getLogger)
225+
} else {
226+
Seq(Logger.getRootLogger)
227+
}
228+
val restoreLevels = loggers.map(_.getLevel)
229+
loggers.foreach { logger =>
230+
logger.addAppender(appender)
231+
if (level.isDefined) {
232+
logger.setLevel(level.get)
233+
}
228234
}
229235
try f finally {
230-
logger.removeAppender(appender)
236+
loggers.foreach(_.removeAppender(appender))
231237
if (level.isDefined) {
232-
logger.setLevel(restoreLevel)
238+
loggers.zipWithIndex.foreach { case (logger, i) =>
239+
logger.setLevel(restoreLevels(i))
240+
}
233241
}
234242
}
235243
}

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -516,7 +516,7 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
516516

517517
test("SPARK-25113: should log when there exists generated methods above HugeMethodLimit") {
518518
val appender = new LogAppender("huge method limit")
519-
withLogAppender(appender, loggerName = Some(classOf[CodeGenerator[_, _]].getName)) {
519+
withLogAppender(appender, loggerNames = Seq(classOf[CodeGenerator[_, _]].getName)) {
520520
val x = 42
521521
val expr = HugeCodeIntExpression(x)
522522
val proj = GenerateUnsafeProjection.generate(Seq(expr))

sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -823,7 +823,7 @@ class AdaptiveQueryExecSuite
823823
val logAppender = new LogAppender("adaptive execution")
824824
withLogAppender(
825825
logAppender,
826-
loggerName = Some(AdaptiveSparkPlanExec.getClass.getName.dropRight(1)),
826+
loggerNames = Seq(AdaptiveSparkPlanExec.getClass.getName.dropRight(1)),
827827
level = Some(Level.TRACE)) {
828828
withSQLConf(
829829
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
@@ -1613,7 +1613,9 @@ class AdaptiveQueryExecSuite
16131613
val testDf = df.groupBy("index")
16141614
.agg(sum($"pv").alias("pv"))
16151615
.join(dim, Seq("index"))
1616-
withLogAppender(testAppender, level = Some(Level.DEBUG)) {
1616+
val loggerNames =
1617+
Seq(classOf[BroadcastQueryStageExec].getName, classOf[ShuffleQueryStageExec].getName)
1618+
withLogAppender(testAppender, loggerNames, level = Some(Level.DEBUG)) {
16171619
withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
16181620
val result = testDf.collect()
16191621
assert(result.length == 26)

0 commit comments

Comments
 (0)