Skip to content

Commit 978e994

Browse files
committed
Update SparkSubmit.scala
1 parent 83686af commit 978e994

1 file changed

Lines changed: 7 additions & 7 deletions

File tree

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -588,7 +588,7 @@ private[spark] class SparkSubmit extends Logging {
588588
OptionAssigner(args.deployMode, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
589589
confKey = SUBMIT_DEPLOY_MODE.key),
590590
OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, confKey = "spark.app.name"),
591-
OptionAssigner(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
591+
OptionAssignerWrapper(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
592592
OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
593593
confKey = DRIVER_MEMORY.key),
594594
OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
@@ -604,13 +604,13 @@ private[spark] class SparkSubmit extends Logging {
604604
OptionAssigner(args.pyFiles, ALL_CLUSTER_MGRS, CLUSTER, confKey = SUBMIT_PYTHON_FILES.key),
605605

606606
// Propagate attributes for dependency resolution at the driver side
607-
OptionAssigner(args.packages, STANDALONE | MESOS | KUBERNETES,
607+
OptionAssignerWrapper(args.packages, STANDALONE | MESOS | KUBERNETES,
608608
CLUSTER, confKey = "spark.jars.packages"),
609-
OptionAssigner(args.repositories, STANDALONE | MESOS | KUBERNETES,
609+
OptionAssignerWrapper(args.repositories, STANDALONE | MESOS | KUBERNETES,
610610
CLUSTER, confKey = "spark.jars.repositories"),
611-
OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
611+
OptionAssignerWrapper(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
612612
CLUSTER, confKey = "spark.jars.ivy"),
613-
OptionAssigner(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
613+
OptionAssignerWrapper(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
614614
CLUSTER, confKey = "spark.jars.excludes"),
615615

616616
// Yarn only
@@ -646,7 +646,7 @@ private[spark] class SparkSubmit extends Logging {
646646
confKey = DRIVER_CORES.key),
647647
OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
648648
confKey = DRIVER_SUPERVISE.key),
649-
OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
649+
OptionAssignerWrapper(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
650650

651651
// An internal option used only for spark-shell to add user jars to repl's classloader,
652652
// previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to
@@ -1483,7 +1483,7 @@ private case class OptionAssigner(
14831483
confKey: String = null,
14841484
mergeFn: Option[(String, String) => String] = None)
14851485

1486-
private object OptionAssigner {
1486+
private object OptionAssignerWrapper {
14871487
def apply(
14881488
value: Option[String],
14891489
clusterManager: Int,

0 commit comments

Comments (0)