58 changes: 58 additions & 0 deletions project/SparkBuild.scala
@@ -374,6 +374,8 @@ object SparkBuild extends PomBuild {
// SPARK-14738 - Remove docker tests from main Spark build
// enable(DockerIntegrationTests.settings)(dockerIntegrationTests)

enable(KubernetesIntegrationTests.settings)(kubernetesIntegrationTests)

/**
* Adds the ability to run the spark shell directly from SBT without building an assembly
* jar.
@@ -458,6 +460,62 @@ object DockerIntegrationTests {
)
}

/**
* These settings run a hardcoded configuration of the Kubernetes integration tests using
* minikube. Docker images will have the "dev" tag, and will be overwritten every time the
* integration tests are run. The integration tests are actually bound to the "test" phase,
* so running "test" on this module will run the integration tests.
*
* There are two ways to run the tests:
* - the "test" task builds the docker images and runs the tests, so it's a little slow.
* - the "run-its" task just runs the tests against a pre-built set of images.
*
* Note that this does not use the shell scripts that the maven build uses, which are more
* configurable. This is meant as a quick way for developers to run these tests against their
* local changes.
*/
object KubernetesIntegrationTests {
import BuildCommons._

val dockerBuild = TaskKey[Unit]("docker-imgs", "Build the docker images for ITs.")
val runITs = TaskKey[Unit]("run-its", "Only run ITs, skip image build.")
val imageTag = settingKey[String]("Tag to use for images built during the test.")
val namespace = settingKey[String]("Namespace where to run pods.")

// Hack: this variable is used to control whether to build docker images. It's updated by
// the tasks below in a non-obvious way, so that you get the functionality described in
// the scaladoc above.
private var shouldBuildImage = true

lazy val settings = Seq(
imageTag := "dev",
namespace := "default",
dockerBuild := {
if (shouldBuildImage) {
val dockerTool = s"$sparkHome/bin/docker-image-tool.sh"
val cmd = Seq(dockerTool, "-m", "-t", imageTag.value, "build")
Process(cmd).!
}
shouldBuildImage = true
},
runITs := Def.taskDyn {
shouldBuildImage = false
Def.task {
(test in Test).value
}
}.value,
test in Test := (test in Test).dependsOn(dockerBuild).value,
javaOptions in Test ++= Seq(
"-Dspark.kubernetes.test.deployMode=minikube",
s"-Dspark.kubernetes.test.imageTag=${imageTag.value}",
s"-Dspark.kubernetes.test.namespace=${namespace.value}",
s"-Dspark.kubernetes.test.unpackSparkDir=$sparkHome"
),
// Force packaging before building images, so that the latest code is tested.
dockerBuild := dockerBuild.dependsOn(packageBin in Compile in assembly).value
)
}
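For reference, a typical invocation from the sbt shell could look like the sketch below. This assumes the module's sbt project id is kubernetes-integration-tests; the task names come from the TaskKeys above, and a running minikube is expected since the test javaOptions pin the deploy mode to minikube:

    > project kubernetes-integration-tests
    > test        (builds the "dev"-tagged docker images via docker-image-tool.sh, then runs the integration tests)
    > run-its     (skips the image build and runs the tests against the existing images)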

/**
* Overrides to work around sbt's dependency resolution being different from Maven's.
*/
6 changes: 1 addition & 5 deletions resource-managers/kubernetes/integration-tests/pom.xml
@@ -145,14 +145,10 @@
<executions>
<execution>
<id>test</id>
<phase>none</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
Contributor: Do we not need this anymore because the integration test is hidden behind the profile?

Contributor Author: We don't need this because I disabled the execution the proper way a few lines above.

<!-- The negative pattern below prevents integration tests such as
KubernetesSuite from running in the test phase. -->
<suffixes>(?&lt;!Suite)</suffixes>
</configuration>
</execution>
<execution>
<id>integration-test</id>
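For context on the removed pattern: the scalatest-maven-plugin matches discovered class names against the suffixes regex, and the negative lookbehind (?<!Suite) admits only names that do not end in "Suite", which is what kept KubernetesSuite out of the regular test phase. A minimal Scala sketch of that matching behavior, assuming the suffix regex is effectively anchored at the end of the class name:

    import java.util.regex.Pattern

    object SuffixPatternDemo extends App {
      // Zero-width pattern: matches at the end of the name only when the
      // preceding characters are not "Suite".
      val suffixes = Pattern.compile("(?<!Suite)$")

      Seq("KubernetesSuite", "UtilsTest").foreach { name =>
        val inTestPhase = suffixes.matcher(name).find()
        // KubernetesSuite -> false (excluded), UtilsTest -> true (included)
        println(s"$name included in test phase: $inTestPhase")
      }
    }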
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala
@@ -19,33 +19,33 @@ package org.apache.spark.deploy.k8s.integrationtest
import java.io.File
import java.nio.file.{Path, Paths}
import java.util.UUID
import java.util.regex.Pattern

import com.google.common.io.PatternFilenameFilter
import scala.collection.JavaConverters._

import com.google.common.base.Charsets
import com.google.common.io.Files
import io.fabric8.kubernetes.api.model.Pod
import io.fabric8.kubernetes.client.{KubernetesClientException, Watcher}
import io.fabric8.kubernetes.client.Watcher.Action
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, Tag}
import org.scalatest.Matchers
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
import org.scalatest.time.{Minutes, Seconds, Span}
import scala.collection.JavaConverters._

import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.k8s.integrationtest.TestConfig._
import org.apache.spark.{SPARK_VERSION, SparkFunSuite}
import org.apache.spark.deploy.k8s.integrationtest.backend.{IntegrationTestBackend, IntegrationTestBackendFactory}
import org.apache.spark.internal.Logging

private[spark] class KubernetesSuite extends SparkFunSuite
class KubernetesSuite extends SparkFunSuite
with BeforeAndAfterAll with BeforeAndAfter with BasicTestsSuite with SecretsTestsSuite
with PythonTestsSuite with ClientModeTestsSuite with PodTemplateSuite
with Logging with Eventually with Matchers {

import KubernetesSuite._

private var sparkHomeDir: Path = _
private var pyImage: String = _
private var rImage: String = _
protected var sparkHomeDir: Path = _
protected var pyImage: String = _
protected var rImage: String = _

protected var image: String = _
protected var testBackend: IntegrationTestBackend = _
@@ -66,6 +66,31 @@ private[spark] class KubernetesSuite extends SparkFunSuite
private val extraExecTotalMemory =
s"${(1024 + memOverheadConstant*1024 + additionalMemory).toInt}Mi"

/**
* Build the image ref for the given image name, taking the repo and tag from the
* test configuration.
*/
private def testImageRef(name: String): String = {
val tag = sys.props.get("spark.kubernetes.test.imageTagFile")
.map { path =>
val tagFile = new File(path)
require(tagFile.isFile,
s"No file found for image tag at ${tagFile.getAbsolutePath}.")
Files.toString(tagFile, Charsets.UTF_8).trim
}
.orElse(sys.props.get("spark.kubernetes.test.imageTag"))
.getOrElse {
throw new IllegalArgumentException(
"One of spark.kubernetes.test.imageTagFile or " +
"spark.kubernetes.test.imageTag is required.")
}
val repo = sys.props.get("spark.kubernetes.test.imageRepo")
.map { _ + "/" }
.getOrElse("")

s"$repo$name:$tag"
}
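// For instance, with the sbt settings above (imageTag "dev" passed through
// -Dspark.kubernetes.test.imageTag, and no repo configured), testImageRef("spark")
// resolves to "spark:dev"; adding -Dspark.kubernetes.test.imageRepo=docker.io/myrepo
// (a hypothetical repo) would yield "docker.io/myrepo/spark:dev".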

override def beforeAll(): Unit = {
super.beforeAll()
// The scalatest-maven-plugin gives system properties that are referenced but not set null
@@ -82,17 +107,16 @@ private[spark] class KubernetesSuite extends SparkFunSuite
sparkHomeDir = Paths.get(sparkDirProp)
require(sparkHomeDir.toFile.isDirectory,
s"No directory found for spark home specified at $sparkHomeDir.")
val imageTag = getTestImageTag
val imageRepo = getTestImageRepo
image = s"$imageRepo/spark:$imageTag"
pyImage = s"$imageRepo/spark-py:$imageTag"
rImage = s"$imageRepo/spark-r:$imageTag"

val sparkDistroExamplesJarFile: File = sparkHomeDir.resolve(Paths.get("examples", "jars"))
.toFile
.listFiles(new PatternFilenameFilter(Pattern.compile("^spark-examples_.*\\.jar$")))(0)
containerLocalSparkDistroExamplesJar = s"local:///opt/spark/examples/jars/" +
s"${sparkDistroExamplesJarFile.getName}"
image = testImageRef("spark")
pyImage = testImageRef("spark-py")
rImage = testImageRef("spark-r")

val scalaVersion = scala.util.Properties.versionNumberString
.split("\\.")
.take(2)
.mkString(".")
containerLocalSparkDistroExamplesJar =
s"local:///opt/spark/examples/jars/spark-examples_$scalaVersion-${SPARK_VERSION}.jar"
testBackend = IntegrationTestBackendFactory.getTestBackend
testBackend.initialize()
kubernetesTestComponents = new KubernetesTestComponents(testBackend.getKubernetesClient)
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PythonTestsSuite.scala
@@ -16,18 +16,14 @@
*/
package org.apache.spark.deploy.k8s.integrationtest

import org.apache.spark.deploy.k8s.integrationtest.TestConfig.{getTestImageRepo, getTestImageTag}

private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>

import PythonTestsSuite._
import KubernetesSuite.k8sTestTag

private val pySparkDockerImage =
s"${getTestImageRepo}/spark-py:${getTestImageTag}"
test("Run PySpark on simple pi.py example", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.container.image", pySparkDockerImage)
.set("spark.kubernetes.container.image", pyImage)
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_PI,
mainClass = "",
@@ -41,7 +37,7 @@ private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>

test("Run PySpark with Python2 to test a pyfiles example", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.container.image", pySparkDockerImage)
.set("spark.kubernetes.container.image", pyImage)
.set("spark.kubernetes.pyspark.pythonVersion", "2")
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_FILES,
@@ -59,7 +55,7 @@ private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>

test("Run PySpark with Python3 to test a pyfiles example", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.container.image", pySparkDockerImage)
.set("spark.kubernetes.container.image", pyImage)
.set("spark.kubernetes.pyspark.pythonVersion", "3")
runSparkApplicationAndVerifyCompletion(
appResource = PYSPARK_FILES,
@@ -77,7 +73,7 @@ private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>

test("Run PySpark with memory customization", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.container.image", pySparkDockerImage)
.set("spark.kubernetes.container.image", pyImage)
.set("spark.kubernetes.pyspark.pythonVersion", "3")
.set("spark.kubernetes.memoryOverheadFactor", s"$memOverheadConstant")
.set("spark.executor.pyspark.memory", s"${additionalMemory}m")
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/RTestsSuite.scala
@@ -16,16 +16,13 @@
*/
package org.apache.spark.deploy.k8s.integrationtest

import org.apache.spark.deploy.k8s.integrationtest.TestConfig.{getTestImageRepo, getTestImageTag}

private[spark] trait RTestsSuite { k8sSuite: KubernetesSuite =>

import RTestsSuite._
import KubernetesSuite.k8sTestTag

test("Run SparkR on simple dataframe.R example", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.container.image", s"${getTestImageRepo}/spark-r:${getTestImageTag}")
sparkAppConf.set("spark.kubernetes.container.image", rImage)
runSparkApplicationAndVerifyCompletion(
appResource = SPARK_R_DATAFRAME_TEST,
mainClass = "",

This file was deleted.