Skip to content

Commit a958920

Browse files
committed
Included a new ability to bake krb5.conf into your Docker images without failing
1 parent f3a0ffb commit a958920

1 file changed

Lines changed: 42 additions & 25 deletions

File tree

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/hadooputils/HadoopBootstrapUtil.scala

Lines changed: 42 additions & 25 deletions
Original file line number | Diff line number | Diff line change
@@ -24,11 +24,11 @@ import scala.collection.JavaConverters._
2424
import com.google.common.io.Files
2525
import io.fabric8.kubernetes.api.model._
2626

27-
import org.apache.spark.SparkException
2827
import org.apache.spark.deploy.k8s.Constants._
2928
import org.apache.spark.deploy.k8s.SparkPod
29+
import org.apache.spark.internal.Logging
3030

31-
private[spark] object HadoopBootstrapUtil {
31+
private[spark] object HadoopBootstrapUtil extends Logging {
3232

3333
/**
3434
* Mounting the DT secret for both the Driver and the executors
@@ -78,36 +78,39 @@ private[spark] object HadoopBootstrapUtil {
7878
.build()
7979
}
8080

81-
// Breaking up Volume Creation for clarity
82-
val configMapVolume = preConfigMapVolume.getOrElse(
83-
createConfigMapVolume.getOrElse(
84-
throw new SparkException("Must specify krb5.conf file locally or via ConfigMap")
85-
)
86-
)
81+
// Breaking up Volume creation for clarity
82+
val configMapVolume = preConfigMapVolume.orElse(createConfigMapVolume)
83+
if (configMapVolume.isEmpty) {
84+
logInfo("You have not specified a krb5.conf file locally or via a ConfigMap. " +
85+
"Make sure that you have the krb5.conf locally on the Driver and Executor images")
86+
}
8787

88-
val kerberizedPod = new PodBuilder(pod.pod)
89-
.editOrNewSpec()
88+
val kerberizedPodWithDTSecret = new PodBuilder(pod.pod)
89+
.editOrNewSpec()
9090
.addNewVolume()
9191
.withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
9292
.withNewSecret()
9393
.withSecretName(dtSecretName)
9494
.endSecret()
9595
.endVolume()
96-
.addNewVolumeLike(configMapVolume)
97-
.endVolume()
9896
.endSpec()
97+
.build()
98+
99+
// Optionally add the krb5.conf ConfigMap
100+
val kerberizedPod = configMapVolume.map { cmVolume =>
101+
new PodBuilder(kerberizedPodWithDTSecret)
102+
.editSpec()
103+
.addNewVolumeLike(cmVolume)
104+
.endVolume()
105+
.endSpec()
99106
.build()
107+
}.getOrElse(kerberizedPodWithDTSecret)
100108

101-
val kerberizedContainer = new ContainerBuilder(pod.container)
109+
val kerberizedContainerWithMounts = new ContainerBuilder(pod.container)
102110
.addNewVolumeMount()
103111
.withName(SPARK_APP_HADOOP_SECRET_VOLUME_NAME)
104112
.withMountPath(SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR)
105113
.endVolumeMount()
106-
.addNewVolumeMount()
107-
.withName(KRB_FILE_VOLUME)
108-
.withMountPath(KRB_FILE_DIR_PATH + "/krb5.conf")
109-
.withSubPath("krb5.conf")
110-
.endVolumeMount()
111114
.addNewEnv()
112115
.withName(ENV_HADOOP_TOKEN_FILE_LOCATION)
113116
.withValue(s"$SPARK_APP_HADOOP_CREDENTIALS_BASE_DIR/$dtSecretItemKey")
@@ -117,7 +120,22 @@ private[spark] object HadoopBootstrapUtil {
117120
.withValue(userName)
118121
.endEnv()
119122
.build()
120-
SparkPod(kerberizedPod, kerberizedContainer)
123+
124+
// Optionally add the krb5.conf Volume Mount
125+
val kerberizedContainer =
126+
if (configMapVolume.isDefined) {
127+
new ContainerBuilder(kerberizedContainerWithMounts)
128+
.addNewVolumeMount()
129+
.withName(KRB_FILE_VOLUME)
130+
.withMountPath(KRB_FILE_DIR_PATH + "/krb5.conf")
131+
.withSubPath("krb5.conf")
132+
.endVolumeMount()
133+
.build()
134+
} else {
135+
kerberizedContainerWithMounts
136+
}
137+
138+
SparkPod(kerberizedPod, kerberizedContainer)
121139
}
122140

123141
/**
@@ -130,9 +148,9 @@ private[spark] object HadoopBootstrapUtil {
130148
def bootstrapSparkUserPod(sparkUserName: String, pod: SparkPod): SparkPod = {
131149
val envModifiedContainer = new ContainerBuilder(pod.container)
132150
.addNewEnv()
133-
.withName(ENV_SPARK_USER)
134-
.withValue(sparkUserName)
135-
.endEnv()
151+
.withName(ENV_SPARK_USER)
152+
.withValue(sparkUserName)
153+
.endEnv()
136154
.build()
137155
SparkPod(pod.pod, envModifiedContainer)
138156
}
@@ -235,9 +253,8 @@ private[spark] object HadoopBootstrapUtil {
235253
.withNewMetadata()
236254
.withName(configMapName)
237255
.endMetadata()
238-
.addToData(
239-
Map(file.toPath.getFileName.toString ->
240-
Files.toString(file, StandardCharsets.UTF_8)).asJava)
256+
.addToData(Map(file.toPath.getFileName.toString ->
257+
Files.toString(file, StandardCharsets.UTF_8)).asJava)
241258
.build()
242259
}
243260

0 commit comments

Comments
 (0)