Commit 3fba7b7

[SPARK-3490] Disable SparkUI for tests (backport into 0.9)

Committed by: Andrew Or

Branch-1.2 #2363 (original) · Branch-1.1 #2415 · Branch-1.0 #3959 · Branch-0.9 #3961 (this PR)

Author: Andrew Or <[email protected]>

Closes #3961 from andrewor14/ui-ports-0.9 and squashes the following commits:

8644997 [Andrew Or] Disable UI for tests

1 parent 7d007d3 · commit 3fba7b7
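For context, a minimal sketch of what the new flag enables on the test side (assumptions: a local master, and a suite living under org.apache.spark, since the ui field is private[spark]):

    import org.apache.spark.{SparkConf, SparkContext}

    // Disable the web UI for a single test, using the flag this commit introduces.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("ui-disabled-test")
      .set("spark.ui.enabled", "false")
    val sc = new SparkContext(conf)
    assert(sc.ui.isEmpty) // ui is now Option[SparkUI]; None when the UI is disabled
    sc.stop()

The default remains true, so existing applications keep their UI; only callers that opt out (or the build changes below) see the new behavior.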

File tree: 7 files changed, +23 −10 lines

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 10 additions & 4 deletions
@@ -155,8 +155,14 @@ class SparkContext(
     new MetadataCleaner(MetadataCleanerType.SPARK_CONTEXT, this.cleanup, conf)
 
   // Initialize the Spark UI
-  private[spark] val ui = new SparkUI(this)
-  ui.bind()
+  private[spark] val ui: Option[SparkUI] =
+    if (conf.getBoolean("spark.ui.enabled", true)) {
+      Some(new SparkUI(this))
+    } else {
+      // For tests, do not enable the UI
+      None
+    }
+  ui.foreach(_.bind())
 
   val startTime = System.currentTimeMillis()
 
@@ -202,7 +208,7 @@ class SparkContext(
   @volatile private[spark] var dagScheduler = new DAGScheduler(taskScheduler)
   dagScheduler.start()
 
-  ui.start()
+  ui.foreach(_.start())
 
   /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */
   val hadoopConfiguration = {
@@ -777,7 +783,7 @@ class SparkContext(
 
   /** Shut down the SparkContext. */
   def stop() {
-    ui.stop()
+    ui.foreach(_.stop())
     // Do this only if not stopped already - best case effort.
     // prevent NPE if stopped more than once.
     val dagSchedulerCopy = dagScheduler
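The remaining call sites below no longer assume a UI exists: each maps over the Option and, where an address is needed, falls back to an empty string when the UI is disabled.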

core/src/main/scala/org/apache/spark/scheduler/cluster/SimrSchedulerBackend.scala

Lines changed: 3 additions & 2 deletions
@@ -44,16 +44,17 @@ private[spark] class SimrSchedulerBackend(
 
     val conf = new Configuration()
     val fs = FileSystem.get(conf)
+    val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
 
     logInfo("Writing to HDFS file: " + driverFilePath)
     logInfo("Writing Akka address: " + driverUrl)
-    logInfo("Writing Spark UI Address: " + sc.ui.appUIAddress)
+    logInfo("Writing Spark UI Address: " + appUIAddress)
 
     // Create temporary file to prevent race condition where executors get empty driverUrl file
     val temp = fs.create(tmpPath, true)
     temp.writeUTF(driverUrl)
     temp.writeInt(maxCores)
-    temp.writeUTF(sc.ui.appUIAddress)
+    temp.writeUTF(appUIAddress)
     temp.close()
 
     // "Atomic" rename

core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala

Lines changed: 3 additions & 2 deletions
@@ -51,8 +51,9 @@ private[spark] class SparkDeploySchedulerBackend(
     val command = Command(
       "org.apache.spark.executor.CoarseGrainedExecutorBackend", args, sc.executorEnvs)
     val sparkHome = sc.getSparkHome().getOrElse(null)
-    val appDesc = new ApplicationDescription(appName, maxCores, sc.executorMemory, command, sparkHome,
-      "http://" + sc.ui.appUIAddress)
+    val appUIAddress = sc.ui.map { x => "http://" + x.appUIAddress }.getOrElse("")
+    val appDesc = new ApplicationDescription(
+      appName, maxCores, sc.executorMemory, command, sparkHome, appUIAddress)
 
     client = new AppClient(sc.env.actorSystem, masters, appDesc, this, conf)
     client.start()

pom.xml

Lines changed: 4 additions & 0 deletions
@@ -636,6 +636,10 @@
           <filereports>${project.build.directory}/SparkTestSuite.txt</filereports>
           <argLine>-Xms64m -Xmx3g</argLine>
           <stderr />
+          <systemProperties>
+            <spark.testing>true</spark.testing>
+            <spark.ui.enabled>false</spark.ui.enabled>
+          </systemProperties>
         </configuration>
         <executions>
           <execution>
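These <systemProperties> entries become JVM system properties in the forked test JVM. In 0.9 a default-constructed SparkConf copies every spark.* system property into the conf, which is how the flag reaches the conf.getBoolean("spark.ui.enabled", true) check in SparkContext. A minimal sketch of that wiring, simulating the build's property by hand:

    import org.apache.spark.SparkConf

    // The build sets this for the test JVM; here we set it manually.
    System.setProperty("spark.ui.enabled", "false")
    val conf = new SparkConf() // loadDefaults = true: picks up all spark.* properties
    assert(!conf.getBoolean("spark.ui.enabled", true)) // same check SparkContext performs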

project/SparkBuild.scala

Lines changed: 1 addition & 0 deletions
@@ -149,6 +149,7 @@ object SparkBuild extends Build {
     fork := true,
     javaOptions in Test += "-Dspark.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
+    javaOptions in Test += "-Dspark.ui.enabled=false",
     javaOptions += "-Xmx3g",
     // Show full stack trace and duration in test cases.
     testOptions in Test += Tests.Argument("-oDF"),
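Because this build already sets fork := true, the extra javaOptions entry applies to every forked test JVM, so plain sbt test runs now start with the UI disabled, matching the Maven configuration above.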

yarn/alpha/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala

Lines changed: 1 addition & 1 deletion
@@ -227,7 +227,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, conf: Configuration,
     assert(sparkContext != null || count >= numTries)
 
     if (null != sparkContext) {
-      uiAddress = sparkContext.ui.appUIAddress
+      uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
       this.yarnAllocator = YarnAllocationHandler.newAllocator(
         yarnConf,
         resourceManager,

yarn/stable/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala

Lines changed: 1 addition & 1 deletion
@@ -199,7 +199,7 @@ class ApplicationMaster(args: ApplicationMasterArguments, conf: Configuration,
     assert(sparkContext != null || numTries >= maxNumTries)
 
     if (sparkContext != null) {
-      uiAddress = sparkContext.ui.appUIAddress
+      uiAddress = sparkContext.ui.map(_.appUIAddress).getOrElse("")
       this.yarnAllocator = YarnAllocationHandler.newAllocator(
         yarnConf,
         amClient,
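In both the alpha and stable YARN application masters, an empty string is now registered as the tracking URL whenever the UI is disabled, the same fallback the other backends use above.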
