diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index 93d1acdd2d15..c8b1afeebac0 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -323,10 +323,7 @@ private[spark] class Executor(
     val threadName = s"Executor task launch worker for task $taskId"
     val taskName = taskDescription.name
     val mdcProperties = taskDescription.properties.asScala
-      .filter(_._1.startsWith("mdc.")).map { item =>
-        val key = item._1.substring(4)
-        (key, item._2)
-      }.toSeq
+      .filter(_._1.startsWith("mdc.")).toSeq
 
     /** If specified, this task has been killed and this option contains the reason. */
     @volatile private var reasonIfKilled: Option[String] = None
@@ -705,7 +702,7 @@ private[spark] class Executor(
       MDC.clear()
       mdc.foreach { case (key, value) => MDC.put(key, value) }
       // avoid the taskName being overridden by the user
-      MDC.put("taskName", taskName)
+      MDC.put("mdc.taskName", taskName)
     }
 
   /**
diff --git a/docs/configuration.md b/docs/configuration.md
index 420942f7b7bb..706c2552b1d1 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -2955,11 +2955,11 @@ Spark uses [log4j](http://logging.apache.org/log4j/) for logging. You can config
 `log4j.properties` file in the `conf` directory. One way to start is to copy the existing
 `log4j.properties.template` located there.
 
-By default, Spark adds 1 record to the MDC (Mapped Diagnostic Context): `taskName`, which shows something
-like `task 1.0 in stage 0.0`. You can add `%X{taskName}` to your patternLayout in
+By default, Spark adds 1 record to the MDC (Mapped Diagnostic Context): `mdc.taskName`, which shows something
+like `task 1.0 in stage 0.0`. You can add `%X{mdc.taskName}` to your patternLayout in
 order to print it in the logs.
-Moreover, you can use `spark.sparkContext.setLocalProperty("mdc." + name, "value")` to add user specific data into MDC.
-The key in MDC will be the string after the `mdc.` prefix.
+Moreover, you can use `spark.sparkContext.setLocalProperty(s"mdc.$name", "value")` to add user specific data into MDC.
+The key in the MDC will be the full string `mdc.$name`; the `mdc.` prefix is no longer stripped.
 
 # Overriding configuration directory
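
To make the documented behavior concrete, here is a minimal sketch of the user-facing side of this change. It assumes a local `SparkSession`; the `mdc.appStage` key, the app name, and the log pattern mentioned afterwards are illustrative choices for this example, not part of the patch.

```scala
// Minimal sketch (assumes a Spark build with the patch above): a driver-side
// local property prefixed with "mdc." is shipped with each task and copied
// into the executor's MDC verbatim, i.e. the "mdc." prefix is kept.
import org.apache.spark.sql.SparkSession

object MdcExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("mdc-example") // illustrative app name
      .master("local[2]")
      .getOrCreate()

    // The MDC key seen by log4j is now "mdc.appStage", not "appStage".
    // ("mdc.appStage" is a hypothetical key chosen for this example.)
    spark.sparkContext.setLocalProperty("mdc.appStage", "ingest")

    // Executor-side log lines emitted while these tasks run carry both the
    // user key above and the built-in "mdc.taskName" entry.
    spark.sparkContext.parallelize(1 to 4).foreach { i =>
      println(s"processing element $i")
    }

    spark.stop()
  }
}
```

To surface these values in the logs, the patternLayout in `log4j.properties` would reference the prefixed keys, e.g. `%X{mdc.taskName} %X{mdc.appStage}`. Note that a layout still using the old `%X{taskName}` key would stop matching after this change, since the built-in entry is now stored under `mdc.taskName`.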