diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index 3ae4633c79b0..dbc787e8c389 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -181,9 +181,16 @@ List<String> buildClassPath(String appClassPath) throws IOException {
       }
     }
 
     if (isTesting) {
+      boolean isGitHubActionsBuild = System.getenv("GITHUB_ACTIONS") != null;
       for (String project : projects) {
-        addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
-          project, scala));
+        if (isGitHubActionsBuild) {
+          // In GitHub Actions build, SBT option 'crossPaths' is disabled so the Scala version
+          // directory is not created, see SPARK-32408. This is a temporary workaround.
+          addToClassPath(cp, String.format("%s/%s/target/test-classes", sparkHome, project));
+        } else {
+          addToClassPath(cp, String.format("%s/%s/target/scala-%s/test-classes", sparkHome,
+            project, scala));
+        }
       }
     }
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 198405a1d29c..b7301e22b8b0 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1035,8 +1035,10 @@ object TestSettings {
       Seq("-eNCXEHLOPQMDF")
     }.getOrElse(Nil): _*),
     testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
-    // Required to detect Junit tests for each project, see also https://github.com/sbt/junit-interface/issues/35
-    crossPaths := false,
+    // This is currently only disabled in GitHub Actions build as a temporary workaround. See SPARK-32408.
+    // It is required to detect Junit tests for each project, see also
+    // https://github.com/sbt/junit-interface/issues/35
+    crossPaths := sys.env.get("GITHUB_ACTIONS").isEmpty,
     // Enable Junit testing.
     libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test",
     // `parallelExecutionInTest` controls whether test suites belonging to the same SBT project
diff --git a/python/pyspark/sql/tests/test_dataframe.py b/python/pyspark/sql/tests/test_dataframe.py
index 30c3fd4c8d16..c53a83c296cb 100644
--- a/python/pyspark/sql/tests/test_dataframe.py
+++ b/python/pyspark/sql/tests/test_dataframe.py
@@ -850,9 +850,10 @@ def setUpClass(cls):
 
         SPARK_HOME = _find_spark_home()
         filename_pattern = (
-            "sql/core/target/scala-*/test-classes/org/apache/spark/sql/"
+            "sql/core/target/**/test-classes/org/apache/spark/sql/"
             "TestQueryExecutionListener.class")
-        cls.has_listener = bool(glob.glob(os.path.join(SPARK_HOME, filename_pattern)))
+        cls.has_listener = bool(glob.glob(
+            os.path.join(SPARK_HOME, filename_pattern), recursive=True))
 
         if cls.has_listener:
             # Note that 'spark.sql.queryExecutionListeners' is a static immutable configuration.
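The Python changes above and below all rely on the same glob behavior: with recursive=True, the "**" wildcard matches zero or more path segments, so a single pattern covers both the sbt layout with crossPaths enabled (target/scala-2.12/test-classes) and the flat layout produced when crossPaths is disabled (target/test-classes). A minimal sketch of that behavior; the directory names and Foo.class are hypothetical, chosen only to mirror the two layouts:

import glob
import os
import tempfile

# Create both layouts the new pattern must match: the sbt layout with
# crossPaths enabled (target/scala-2.12/test-classes) and the flat layout
# produced when crossPaths is disabled (target/test-classes).
root = tempfile.mkdtemp()
for sub in ("target/scala-2.12/test-classes", "target/test-classes"):
    os.makedirs(os.path.join(root, sub))
    open(os.path.join(root, sub, "Foo.class"), "w").close()

# With recursive=True, "**" matches zero or more directories, so one
# pattern finds the class file in both layouts.
print(sorted(glob.glob(
    os.path.join(root, "target/**/test-classes/Foo.class"), recursive=True)))

# The old "scala-*" pattern only matched the crossPaths-enabled layout.
print(glob.glob(os.path.join(root, "target/scala-*/test-classes/Foo.class")))

The first print reports both class files; the second reports only the scala-2.12 one, which is why the tests below would be skipped in a GitHub Actions build without this change.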
diff --git a/python/pyspark/sql/tests/test_session.py b/python/pyspark/sql/tests/test_session.py
index 5e4166e6f8e7..ab484edd4e7c 100644
--- a/python/pyspark/sql/tests/test_session.py
+++ b/python/pyspark/sql/tests/test_session.py
@@ -326,9 +326,9 @@ def setUpClass(cls):
 
         SPARK_HOME = _find_spark_home()
         filename_pattern = (
-            "sql/core/target/scala-*/test-classes/org/apache/spark/sql/"
+            "sql/core/target/**/test-classes/org/apache/spark/sql/"
             "SparkSessionExtensionSuite.class")
-        if not glob.glob(os.path.join(SPARK_HOME, filename_pattern)):
+        if not glob.glob(os.path.join(SPARK_HOME, filename_pattern), recursive=True):
             raise unittest.SkipTest(
                 "'org.apache.spark.sql.SparkSessionExtensionSuite' is not "
                 "available. Will skip the related tests.")
diff --git a/python/pyspark/testing/utils.py b/python/pyspark/testing/utils.py
index cda902b6f44d..ebba811299cc 100644
--- a/python/pyspark/testing/utils.py
+++ b/python/pyspark/testing/utils.py
@@ -155,10 +155,10 @@ def search_jar(project_relative_path, sbt_jar_name_prefix, mvn_jar_name_prefix):
     # Search jar in the project dir using the jar name_prefix for both sbt build and maven
     # build because the artifact jars are in different directories.
     sbt_build = glob.glob(os.path.join(
-        project_full_path, "target/scala-*/%s*.jar" % sbt_jar_name_prefix))
+        project_full_path, "target/**/%s*.jar" % sbt_jar_name_prefix), recursive=True)
     maven_build = glob.glob(os.path.join(
         project_full_path, "target/%s*.jar" % mvn_jar_name_prefix))
-    jar_paths = sbt_build + maven_build
+    jar_paths = set(sbt_build + maven_build)
     jars = [jar for jar in jar_paths if not jar.endswith(ignored_jar_suffixes)]
 
     if not jars:
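One subtlety in the search_jar change: because "**" also matches zero directories, the recursive sbt glob can now pick up jars sitting directly under target/, overlapping with what the maven glob returns, so jar_paths is collapsed into a set to drop any such duplicates. A minimal sketch of the resulting lookup, assuming the same two-glob structure; find_jars is a hypothetical stand-in for search_jar:

import glob
import os

def find_jars(project_full_path, sbt_jar_name_prefix, mvn_jar_name_prefix):
    # "target/**/..." with recursive=True descends into target/scala-*/ but,
    # since "**" also matches zero directories, it hits target/ itself; any
    # jar matched by both globs would otherwise appear twice in the result.
    sbt_build = glob.glob(os.path.join(
        project_full_path, "target/**/%s*.jar" % sbt_jar_name_prefix), recursive=True)
    maven_build = glob.glob(os.path.join(
        project_full_path, "target/%s*.jar" % mvn_jar_name_prefix))
    # set() deduplicates paths returned by both patterns.
    return sorted(set(sbt_build + maven_build))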