-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-25922][K8] Spark Driver/Executor "spark-app-selector" label mismatch #23322
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
89c438c
ed2c4ca
fa45013
5ed8e1a
db3fceb
af73427
778acb4
bf4bc0e
8844f3d
45ded64
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -18,12 +18,14 @@ package org.apache.spark.scheduler.cluster.k8s | |
|
|
||
| import java.util.concurrent.ExecutorService | ||
|
|
||
| import io.fabric8.kubernetes.client.KubernetesClient | ||
| import scala.concurrent.{ExecutionContext, Future} | ||
|
|
||
| import io.fabric8.kubernetes.client.KubernetesClient | ||
|
|
||
| import org.apache.spark.SparkContext | ||
| import org.apache.spark.deploy.k8s.Config._ | ||
| import org.apache.spark.deploy.k8s.Constants._ | ||
| import org.apache.spark.deploy.k8s.KubernetesUtils | ||
| import org.apache.spark.rpc.{RpcAddress, RpcEnv} | ||
| import org.apache.spark.scheduler.{ExecutorLossReason, TaskSchedulerImpl} | ||
| import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SchedulerBackendUtils} | ||
|
|
@@ -60,6 +62,43 @@ private[spark] class KubernetesClusterSchedulerBackend( | |
| removeExecutor(executorId, reason) | ||
| } | ||
|
|
||
/**
 * The Kubernetes application ID for this job, lazily initialized on the
 * first call to [[applicationId]] via [[bindApplicationId]].
 *
 * NOTE(review): mutable state with no synchronization — assumed to be read
 * and written only from the scheduler's own thread; confirm no concurrent
 * callers of applicationId().
 */
var appId: Option[String] = None

/**
 * Get the application ID associated with the job.
 *
 * Returns the cached [[appId]] if already set; otherwise binds it first
 * through [[bindApplicationId]] and returns the freshly bound value.
 *
 * @return The application ID
 */
override def applicationId(): String = {
  // appId is Option[String], so the original `map(_.toString)` was a
  // redundant identity transformation; getOrElse alone suffices.
  appId.getOrElse {
    logInfo("Initializing Application ID.")
    bindApplicationId()
    // Safe: bindApplicationId() always assigns Some(...) to appId.
    appId.get
  }
}
|
|
||
/**
 * Compute the application ID and cache it in [[appId]].
 *
 * In cluster mode the ID is read from "spark.app.id", which spark-submit
 * sets in the driver's environment; if it is not present yet, the
 * locally generated ID from the superclass is used and a warning is
 * logged. In client mode a fresh Kubernetes application ID is generated.
 */
def bindApplicationId(): Unit = {
  val wasSparkSubmittedInClusterMode = conf.get(KUBERNETES_DRIVER_SUBMIT_CHECK)
  val appIdString =
    if (wasSparkSubmittedInClusterMode) {
      // Cluster mode: prefer the appId propagated from the driver env.
      // getOption already yields Option[String], so no identity map needed.
      conf.getOption("spark.app.id").getOrElse {
        logWarning("Application ID is not initialized yet in cluster mode.")
        super.applicationId
      }
    } else {
      // Client mode: generate a new appId.
      KubernetesUtils.generateAppId()
    }
  appId = Some(appIdString)
}
|
|
||
| override def start(): Unit = { | ||
| super.start() | ||
| if (!Utils.isDynamicAllocationEnabled(conf)) { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Unnecessary empty line.