Commit 2a38090

[SPARK-32175][SPARK-32175][FOLLOWUP] Remove flaky test added in SPARK-32175
### What changes were proposed in this pull request?

This PR removes a test added in SPARK-32175 (#29002).

### Why are the changes needed?

That test is flaky. It could be mitigated by increasing the timeout, but it is simpler to remove the test. See also the [discussion](#29002 (comment)).

### Does this PR introduce _any_ user-facing change?

No.

Closes #29314 from sarutak/remove-flaky-test.

Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: Kousuke Saruta <sarutak@oss.nttdata.com>
(cherry picked from commit 9d7b1d9)
Signed-off-by: Kousuke Saruta <sarutak@oss.nttdata.com>
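For context, the timeout mentioned above is presumably the 30-second bound on the spark-submit run inside the removed test (see the diff below). The mitigation not taken here would have amounted to roughly the following sketch; the 2-minute value and the surrounding test context are illustrative only, not part of this commit:

    // Hypothetical mitigation (NOT what this commit does): widen the bound on the
    // spark-submit run so that local-cluster startup plus the plugin's 8-second
    // init sleep fits comfortably. `args` is the Seq built in the removed test below.
    import scala.concurrent.duration._

    SparkSubmitSuite.runSparkSubmit(args, timeout = 2.minutes)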
1 parent b40df01 commit 2a38090

1 file changed: 0 additions & 67 deletions

File tree

core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala

Lines changed: 0 additions & 67 deletions
@@ -403,73 +403,6 @@ class ExecutorSuite extends SparkFunSuite
     assert(taskMetrics.getMetricValue("JVMHeapMemory") > 0)
   }
 
-  test("SPARK-32175: Plugin initialization should start after heartbeater started") {
-    withTempDir { tempDir =>
-      val sparkPluginCodeBody =
-        """
-          |@Override
-          |public org.apache.spark.api.plugin.ExecutorPlugin executorPlugin() {
-          |  return new TestExecutorPlugin();
-          |}
-          |
-          |@Override
-          |public org.apache.spark.api.plugin.DriverPlugin driverPlugin() { return null; }
-        """.stripMargin
-      val executorPluginBody =
-        """
-          |@Override
-          |public void init(
-          |    org.apache.spark.api.plugin.PluginContext ctx,
-          |    java.util.Map<String, String> extraConf) {
-          |  try {
-          |    Thread.sleep(8 * 1000);
-          |  } catch (InterruptedException e) {
-          |    throw new RuntimeException(e);
-          |  }
-          |}
-        """.stripMargin
-
-      val compiledExecutorPlugin = TestUtils.createCompiledClass(
-        "TestExecutorPlugin",
-        tempDir,
-        "",
-        null,
-        Seq.empty,
-        Seq("org.apache.spark.api.plugin.ExecutorPlugin"),
-        executorPluginBody)
-
-      val thisClassPath =
-        sys.props("java.class.path").split(File.pathSeparator).map(p => new File(p).toURI.toURL)
-      val compiledSparkPlugin = TestUtils.createCompiledClass(
-        "TestSparkPlugin",
-        tempDir,
-        "",
-        null,
-        Seq(tempDir.toURI.toURL) ++ thisClassPath,
-        Seq("org.apache.spark.api.plugin.SparkPlugin"),
-        sparkPluginCodeBody)
-
-      val jarUrl = TestUtils.createJar(
-        Seq(compiledSparkPlugin, compiledExecutorPlugin),
-        new File(tempDir, "testPlugin.jar"))
-
-      val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
-      val args = Seq(
-        "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
-        "--name", "testApp",
-        "--master", "local-cluster[1,1,1024]",
-        "--conf", "spark.plugins=TestSparkPlugin",
-        "--conf", "spark.storage.blockManagerSlaveTimeoutMs=" + 5 * 1000,
-        "--conf", "spark.network.timeoutInterval=" + 1000,
-        "--conf", "spark.executor.heartbeatInterval=" + 1000,
-        "--conf", "spark.executor.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.driver.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.ui.enabled=false",
-        unusedJar.toString)
-      SparkSubmitSuite.runSparkSubmit(args, timeout = 30.seconds)
-    }
-  }
-
   private def createMockEnv(conf: SparkConf, serializer: JavaSerializer): SparkEnv = {
     val mockEnv = mock[SparkEnv]
     val mockRpcEnv = mock[RpcEnv]
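As a reading aid for the diff above: the removed test depended on a timing relationship between its configuration values. The plugin's init sleep was longer than the block-manager slave timeout, so the executor could only stay alive if the heartbeater was already running before plugin initialization began. A minimal, self-contained sketch of that arithmetic follows; the object and value names are hypothetical, the numbers are copied from the diff:

    // Recap of the timing the removed test relied on; numbers come from the diff above.
    object RemovedTestTimingSketch {
      val pluginInitSleepMs   = 8 * 1000  // ExecutorPlugin.init blocks for 8 seconds
      val slaveTimeoutMs      = 5 * 1000  // spark.storage.blockManagerSlaveTimeoutMs
      val heartbeatIntervalMs = 1000      // spark.executor.heartbeatInterval
      val submitTimeoutMs     = 30 * 1000 // bound passed to SparkSubmitSuite.runSparkSubmit

      def main(args: Array[String]): Unit = {
        // Had the heartbeater started only after plugin init, no heartbeat could have been
        // sent for longer than the slave timeout, and the executor would be marked lost.
        assert(pluginInitSleepMs > slaveTimeoutMs && heartbeatIntervalMs < slaveTimeoutMs)
        // The whole spark-submit run (cluster startup + the 8 s sleep + the app itself)
        // had to finish within 30 seconds, which is what made the run timing-sensitive.
        println(s"Margin left after the plugin sleep: ${submitTimeoutMs - pluginInitSleepMs} ms")
      }
    }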
