@@ -25,7 +25,7 @@ import scala.collection.JavaConverters._
 import com.google.common.io.Files

 import org.apache.spark.{SecurityManager, SparkConf}
-import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
+import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
 import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
 import org.apache.spark.internal.Logging
 import org.apache.spark.rpc.RpcEndpointRef

@@ -142,7 +142,13 @@ private[deploy] class ExecutorRunner(
   private def fetchAndRunExecutor() {
     try {
       // Launch the process
-      val builder = CommandUtils.buildProcessBuilder(appDesc.command, new SecurityManager(conf),
+      val subsCommand = Command(appDesc.command.mainClass,
+        appDesc.command.arguments, appDesc.command.environment,
+        appDesc.command.classPathEntries,
+        appDesc.command.libraryPathEntries,
+        appDesc.command.javaOpts.map {
+          opt => Utils.substituteExecIdWildCard(opt, execId.toString) })
+      val builder = CommandUtils.buildProcessBuilder(subsCommand, new SecurityManager(conf),
         memory, sparkHome.getAbsolutePath, substituteVariables)
       val command = builder.command()
       val formattedCommand = command.asScala.mkString("\"", "\" \"", "\"")
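Since org.apache.spark.deploy.Command is a case class in Spark, the same substitution could be expressed more compactly with copy. A sketch of an equivalent form, not part of the diff:

    // Equivalent sketch, assuming Command is a case class as in Spark:
    // copy() re-lists only the one field that actually changes.
    val subsCommand = appDesc.command.copy(
      javaOpts = appDesc.command.javaOpts.map(Utils.substituteExecIdWildCard(_, execId.toString)))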
@@ -166,14 +166,15 @@ private[spark] class CoarseMesosSchedulerBackend(
       environment.addVariables(
         Environment.Variable.newBuilder().setName("SPARK_CLASSPATH").setValue(cp).build())
     }
-    val extraJavaOpts = conf.get("spark.executor.extraJavaOptions", "")
+    var extraJavaOpts = conf.get("spark.executor.extraJavaOptions", "")
[Review comment on the line above — Member (@BryanCutler)]
would it be a little cleaner to do the following

    val extraJavaOpts = conf.getOption("spark.executor.extraJavaOptions").map {
      Utils.substituteExecIdWildCard(_, taskId)
    }.getOrElse("")

[Reply — Author]
Thanks @BryanCutler for the suggestion, I have addressed it in the latest.
     // Set the environment variable through a command prefix
     // to append to the existing value of the variable
     val prefixEnv = conf.getOption("spark.executor.extraLibraryPath").map { p =>
       Utils.libraryPathEnvPrefix(Seq(p))
     }.getOrElse("")

+    extraJavaOpts = Utils.substituteExecIdWildCard(extraJavaOpts, taskId)
     environment.addVariables(
       Environment.Variable.newBuilder()
         .setName("SPARK_EXECUTOR_OPTS")
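For reference, with the reviewer's suggestion folded in, this hunk would read roughly as follows — a sketch only, since the "latest" commit is not part of this single-commit view:

    // Suggested form: substitution inside the Option pipeline keeps
    // extraJavaOpts a val and drops the reassignment after prefixEnv.
    val extraJavaOpts = conf.getOption("spark.executor.extraJavaOptions")
      .map(Utils.substituteExecIdWildCard(_, taskId))
      .getOrElse("")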
@@ -102,12 +102,13 @@ private[spark] class MesosSchedulerBackend(
       environment.addVariables(
         Environment.Variable.newBuilder().setName("SPARK_CLASSPATH").setValue(cp).build())
     }
-    val extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions").getOrElse("")
+    var extraJavaOpts = sc.conf.getOption("spark.executor.extraJavaOptions").getOrElse("")

     val prefixEnv = sc.conf.getOption("spark.executor.extraLibraryPath").map { p =>
       Utils.libraryPathEnvPrefix(Seq(p))
     }.getOrElse("")

+    extraJavaOpts = Utils.substituteExecIdWildCard(extraJavaOpts, execId)
     environment.addVariables(
       Environment.Variable.newBuilder()
         .setName("SPARK_EXECUTOR_OPTS")
7 changes: 7 additions & 0 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -2286,6 +2286,13 @@ private[spark] object Utils extends Logging {
     log.info(s"Started daemon with process name: ${Utils.getProcessName()}")
     SignalUtils.registerLogger(log)
   }
+
+  /**
+   * Replaces all the @execid@ occurrences with the Executor Id.
+   */
+  def substituteExecIdWildCard(opt: String, execId: String): String = {
+    opt.replace("@execid@", execId)
+  }
 }

 /**
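A quick usage sketch with hypothetical inputs. String.replace performs literal (non-regex) replacement, so only the exact @execid@ token is rewritten and any other '@' passes through untouched:

    Utils.substituteExecIdWildCard("-Xloggc:/tmp/@execid@.gc", "7")
    // => "-Xloggc:/tmp/7.gc"
    Utils.substituteExecIdWildCard("-Duser=alice@example.com", "7")
    // => "-Duser=alice@example.com" (no @execid@ token, so unchanged)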
5 changes: 5 additions & 0 deletions docs/configuration.md
@@ -275,6 +275,11 @@ Apart from these, the following properties are also available, and may be useful
     Note that it is illegal to set Spark properties or maximum heap size (-Xmx) settings with this
     option. Spark properties should be set using a SparkConf object or the spark-defaults.conf file
     used with the spark-submit script. Maximum heap size settings can be set with spark.executor.memory.
+
+    The following symbol, if present, will be interpolated: @execid@ is replaced by Executor Id.
+    Any other occurrences of '@' will go unchanged. For example, to enable verbose gc logging to
+    a file named for the Executor Id in /tmp, pass a 'value' of:
+    -verbose:gc -Xloggc:/tmp/@execid@.gc
   </td>
 </tr>
 <tr>
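To make the documented example concrete, a minimal sketch of setting the option programmatically (hypothetical application code; the -Xloggc value mirrors the docs above):

    import org.apache.spark.SparkConf

    // Hypothetical: each executor writes its GC log to /tmp/<executor id>.gc
    val conf = new SparkConf()
      .setAppName("gc-logging-example")
      .set("spark.executor.extraJavaOptions", "-verbose:gc -Xloggc:/tmp/@execid@.gc")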
@@ -148,7 +148,8 @@ private[yarn] class ExecutorRunnable(

     // Set extra Java options for the executor, if defined
     sparkConf.get(EXECUTOR_JAVA_OPTIONS).foreach { opts =>
-      javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell)
+      val subsOpt = Utils.substituteExecIdWildCard(opts, slaveId.toString)
+      javaOpts ++= Utils.splitCommandString(subsOpt).map(YarnSparkHadoopUtil.escapeForShell)
     }
     sys.env.get("SPARK_JAVA_OPTS").foreach { opts =>
       javaOpts ++= Utils.splitCommandString(opts).map(YarnSparkHadoopUtil.escapeForShell)
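On the YARN path the substitution happens before the options string is split and shell-escaped. A hypothetical walk-through, assuming slaveId.toString is "1":

    val opts = "-verbose:gc -Xloggc:/tmp/@execid@.gc"
    val subsOpt = Utils.substituteExecIdWildCard(opts, "1")
    // subsOpt == "-verbose:gc -Xloggc:/tmp/1.gc"
    val javaOpts = Utils.splitCommandString(subsOpt).map(YarnSparkHadoopUtil.escapeForShell)
    // two shell-escaped tokens: "-verbose:gc" and "-Xloggc:/tmp/1.gc"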