Skip to content

Commit 4e6da9c

Browse files
committed
Merge remote-tracking branch 'upstream/master' into upgrade
2 parents 40fc8ee + a6088e5 commit 4e6da9c

130 files changed

Lines changed: 3899 additions & 3210 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

.github/workflows/build_and_test.yml

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -414,8 +414,14 @@ jobs:
414414
cd docs
415415
bundle exec jekyll build
416416
417-
java-11:
418-
name: Java 11 build with Maven
417+
java-11-17:
418+
name: Java ${{ matrix.java }} build with Maven
419+
strategy:
420+
fail-fast: false
421+
matrix:
422+
java:
423+
- 11
424+
- 17-ea
419425
runs-on: ubuntu-20.04
420426
steps:
421427
- name: Checkout Spark repository
@@ -446,19 +452,20 @@ jobs:
446452
uses: actions/cache@v2
447453
with:
448454
path: ~/.m2/repository
449-
key: java11-maven-${{ hashFiles('**/pom.xml') }}
455+
key: java${{ matrix.java }}-maven-${{ hashFiles('**/pom.xml') }}
450456
restore-keys: |
451-
java11-maven-
452-
- name: Install Java 11
457+
java${{ matrix.java }}-maven-
458+
- name: Install Java ${{ matrix.java }}
453459
uses: actions/setup-java@v1
454460
with:
455-
java-version: 11
461+
java-version: ${{ matrix.java }}
456462
- name: Build with Maven
457463
run: |
458464
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
459465
export MAVEN_CLI_OPTS="--no-transfer-progress"
466+
export JAVA_VERSION=${{ matrix.java }}
460467
# It uses Maven's 'install' intentionally, see https://github.com/apache/spark/pull/26414.
461-
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=11 install
468+
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=${JAVA_VERSION/-ea} install
462469
rm -rf ~/.m2/repository/org/apache/spark
463470
464471
scala-213:

core/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@
5757
</dependency>
5858
<dependency>
5959
<groupId>org.apache.xbean</groupId>
60-
<artifactId>xbean-asm7-shaded</artifactId>
60+
<artifactId>xbean-asm9-shaded</artifactId>
6161
</dependency>
6262
<dependency>
6363
<groupId>org.apache.hadoop</groupId>

core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,9 @@ import scala.collection.JavaConverters._
2424
import scala.collection.mutable.{Map, Set, Stack}
2525

2626
import org.apache.commons.lang3.ClassUtils
27-
import org.apache.xbean.asm7.{ClassReader, ClassVisitor, Handle, MethodVisitor, Type}
28-
import org.apache.xbean.asm7.Opcodes._
29-
import org.apache.xbean.asm7.tree.{ClassNode, MethodNode}
27+
import org.apache.xbean.asm9.{ClassReader, ClassVisitor, Handle, MethodVisitor, Type}
28+
import org.apache.xbean.asm9.Opcodes._
29+
import org.apache.xbean.asm9.tree.{ClassNode, MethodNode}
3030

3131
import org.apache.spark.{SparkEnv, SparkException}
3232
import org.apache.spark.internal.Logging
@@ -664,7 +664,7 @@ private[spark] object IndylambdaScalaClosures extends Logging {
664664
val currentClass = currentId.cls
665665
val currentMethodNode = methodNodeById(currentId)
666666
logTrace(s" scanning ${currentId.cls.getName}.${currentId.name}${currentId.desc}")
667-
currentMethodNode.accept(new MethodVisitor(ASM7) {
667+
currentMethodNode.accept(new MethodVisitor(ASM9) {
668668
val currentClassName = currentClass.getName
669669
val currentClassInternalName = currentClassName.replace('.', '/')
670670

@@ -744,7 +744,7 @@ private[spark] class ReturnStatementInClosureException
744744
extends SparkException("Return statements aren't allowed in Spark closures")
745745

746746
private class ReturnStatementFinder(targetMethodName: Option[String] = None)
747-
extends ClassVisitor(ASM7) {
747+
extends ClassVisitor(ASM9) {
748748
override def visitMethod(access: Int, name: String, desc: String,
749749
sig: String, exceptions: Array[String]): MethodVisitor = {
750750

@@ -758,15 +758,15 @@ private class ReturnStatementFinder(targetMethodName: Option[String] = None)
758758
val isTargetMethod = targetMethodName.isEmpty ||
759759
name == targetMethodName.get || name == targetMethodName.get.stripSuffix("$adapted")
760760

761-
new MethodVisitor(ASM7) {
761+
new MethodVisitor(ASM9) {
762762
override def visitTypeInsn(op: Int, tp: String): Unit = {
763763
if (op == NEW && tp.contains("scala/runtime/NonLocalReturnControl") && isTargetMethod) {
764764
throw new ReturnStatementInClosureException
765765
}
766766
}
767767
}
768768
} else {
769-
new MethodVisitor(ASM7) {}
769+
new MethodVisitor(ASM9) {}
770770
}
771771
}
772772
}
@@ -790,7 +790,7 @@ private[util] class FieldAccessFinder(
790790
findTransitively: Boolean,
791791
specificMethod: Option[MethodIdentifier[_]] = None,
792792
visitedMethods: Set[MethodIdentifier[_]] = Set.empty)
793-
extends ClassVisitor(ASM7) {
793+
extends ClassVisitor(ASM9) {
794794

795795
override def visitMethod(
796796
access: Int,
@@ -805,7 +805,7 @@ private[util] class FieldAccessFinder(
805805
return null
806806
}
807807

808-
new MethodVisitor(ASM7) {
808+
new MethodVisitor(ASM9) {
809809
override def visitFieldInsn(op: Int, owner: String, name: String, desc: String): Unit = {
810810
if (op == GETFIELD) {
811811
for (cl <- fields.keys if cl.getName == owner.replace('/', '.')) {
@@ -845,7 +845,7 @@ private[util] class FieldAccessFinder(
845845
}
846846
}
847847

848-
private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM7) {
848+
private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM9) {
849849
var myName: String = null
850850

851851
// TODO: Recursively find inner closures that we indirectly reference, e.g.
@@ -860,7 +860,7 @@ private class InnerClosureFinder(output: Set[Class[_]]) extends ClassVisitor(ASM
860860

861861
override def visitMethod(access: Int, name: String, desc: String,
862862
sig: String, exceptions: Array[String]): MethodVisitor = {
863-
new MethodVisitor(ASM7) {
863+
new MethodVisitor(ASM9) {
864864
override def visitMethodInsn(
865865
op: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = {
866866
val argTypes = Type.getArgumentTypes(desc)

dev/deps/spark-deps-hadoop-2.7-hive-2.3

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,8 +31,8 @@ bonecp/0.8.0.RELEASE//bonecp-0.8.0.RELEASE.jar
3131
breeze-macros_2.12/1.0//breeze-macros_2.12-1.0.jar
3232
breeze_2.12/1.0//breeze_2.12-1.0.jar
3333
cats-kernel_2.12/2.0.0-M4//cats-kernel_2.12-2.0.0-M4.jar
34-
chill-java/0.9.5//chill-java-0.9.5.jar
35-
chill_2.12/0.9.5//chill_2.12-0.9.5.jar
34+
chill-java/0.10.0//chill-java-0.10.0.jar
35+
chill_2.12/0.10.0//chill_2.12-0.10.0.jar
3636
commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
3737
commons-cli/1.2//commons-cli-1.2.jar
3838
commons-codec/1.15//commons-codec-1.15.jar
@@ -234,7 +234,7 @@ threeten-extra/1.5.0//threeten-extra-1.5.0.jar
234234
transaction-api/1.1//transaction-api-1.1.jar
235235
univocity-parsers/2.9.1//univocity-parsers-2.9.1.jar
236236
velocity/1.5//velocity-1.5.jar
237-
xbean-asm7-shaded/4.16//xbean-asm7-shaded-4.16.jar
237+
xbean-asm9-shaded/4.20//xbean-asm9-shaded-4.20.jar
238238
xercesImpl/2.12.0//xercesImpl-2.12.0.jar
239239
xml-apis/1.4.01//xml-apis-1.4.01.jar
240240
xmlenc/0.52//xmlenc-0.52.jar

dev/deps/spark-deps-hadoop-3.2-hive-2.3

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@ breeze-macros_2.12/1.0//breeze-macros_2.12-1.0.jar
2828
breeze_2.12/1.0//breeze_2.12-1.0.jar
2929
cats-kernel_2.12/2.0.0-M4//cats-kernel_2.12-2.0.0-M4.jar
3030
checker-qual/2.5.2//checker-qual-2.5.2.jar
31-
chill-java/0.9.5//chill-java-0.9.5.jar
32-
chill_2.12/0.9.5//chill_2.12-0.9.5.jar
31+
chill-java/0.10.0//chill-java-0.10.0.jar
32+
chill_2.12/0.10.0//chill_2.12-0.10.0.jar
3333
commons-cli/1.2//commons-cli-1.2.jar
3434
commons-codec/1.15//commons-codec-1.15.jar
3535
commons-collections/3.2.2//commons-collections-3.2.2.jar
@@ -211,7 +211,7 @@ threeten-extra/1.5.0//threeten-extra-1.5.0.jar
211211
transaction-api/1.1//transaction-api-1.1.jar
212212
univocity-parsers/2.9.1//univocity-parsers-2.9.1.jar
213213
velocity/1.5//velocity-1.5.jar
214-
xbean-asm7-shaded/4.16//xbean-asm7-shaded-4.16.jar
214+
xbean-asm9-shaded/4.20//xbean-asm9-shaded-4.20.jar
215215
xz/1.8//xz-1.8.jar
216216
zjsonpatch/0.3.0//zjsonpatch-0.3.0.jar
217217
zookeeper-jute/3.6.2//zookeeper-jute-3.6.2.jar

dev/run-tests.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -122,19 +122,19 @@ def determine_modules_to_test(changed_modules, deduplicated=True):
122122
['graphx', 'examples']
123123
>>> [x.name for x in determine_modules_to_test([modules.sql])]
124124
... # doctest: +NORMALIZE_WHITESPACE
125-
['sql', 'avro', 'hive', 'mllib', 'sql-kafka-0-10', 'examples',
125+
['sql', 'avro', 'docker-integration-tests', 'hive', 'mllib', 'sql-kafka-0-10', 'examples',
126126
'hive-thriftserver', 'pyspark-sql', 'repl', 'sparkr',
127127
'pyspark-mllib', 'pyspark-pandas', 'pyspark-pandas-slow', 'pyspark-ml']
128128
>>> sorted([x.name for x in determine_modules_to_test(
129129
... [modules.sparkr, modules.sql], deduplicated=False)])
130130
... # doctest: +NORMALIZE_WHITESPACE
131-
['avro', 'examples', 'hive', 'hive-thriftserver', 'mllib',
131+
['avro', 'docker-integration-tests', 'examples', 'hive', 'hive-thriftserver', 'mllib',
132132
'pyspark-ml', 'pyspark-mllib', 'pyspark-pandas', 'pyspark-pandas-slow', 'pyspark-sql',
133133
'repl', 'sparkr', 'sql', 'sql-kafka-0-10']
134134
>>> sorted([x.name for x in determine_modules_to_test(
135135
... [modules.sql, modules.core], deduplicated=False)])
136136
... # doctest: +NORMALIZE_WHITESPACE
137-
['avro', 'catalyst', 'core', 'examples', 'graphx', 'hive',
137+
['avro', 'catalyst', 'core', 'docker-integration-tests', 'examples', 'graphx', 'hive',
138138
'hive-thriftserver', 'mllib', 'mllib-local', 'pyspark-core', 'pyspark-ml', 'pyspark-mllib',
139139
'pyspark-pandas', 'pyspark-pandas-slow', 'pyspark-resource', 'pyspark-sql',
140140
'pyspark-streaming', 'repl', 'root', 'sparkr', 'sql', 'sql-kafka-0-10', 'streaming',

0 commit comments

Comments
 (0)