
Commit 8d12df8

Update MimaExcludes now Spark 1.6 is in Maven.
1 parent 6c5bbd6 commit 8d12df8

1 file changed: project/MimaExcludes.scala

Lines changed: 1 addition & 136 deletions
@@ -53,142 +53,7 @@ object MimaExcludes {
         // SPARK-12481
         ProblemFilters.exclude[IncompatibleTemplateDefProblem](
           "org.apache.spark.mapred.SparkHadoopMapRedUtil")
-      ) ++
-      // When 1.6 is officially released, update this exclusion list.
-      Seq(
-        MimaBuild.excludeSparkPackage("deploy"),
-        MimaBuild.excludeSparkPackage("network"),
-        MimaBuild.excludeSparkPackage("unsafe"),
-        // These are needed if checking against the sbt build, since they are part of
-        // the maven-generated artifacts in 1.3.
-        excludePackage("org.spark-project.jetty"),
-        MimaBuild.excludeSparkPackage("unused"),
-        // SQL execution is considered private.
-        excludePackage("org.apache.spark.sql.execution"),
-        // SQL columnar is considered private.
-        excludePackage("org.apache.spark.sql.columnar"),
-        // The shuffle package is considered private.
-        excludePackage("org.apache.spark.shuffle"),
-        // The collections utilities are considered private.
-        excludePackage("org.apache.spark.util.collection")
-      ) ++
-      MimaBuild.excludeSparkClass("streaming.flume.FlumeTestUtils") ++
-      MimaBuild.excludeSparkClass("streaming.flume.PollingFlumeTestUtils") ++
-      Seq(
-        // MiMa does not deal properly with sealed traits
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.ml.classification.LogisticRegressionSummary.featuresCol")
-      ) ++ Seq(
-        // SPARK-11530
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.mllib.feature.PCAModel.this")
-      ) ++ Seq(
-        // SPARK-10381 Fix types / units in private AskPermissionToCommitOutput RPC message.
-        // This class is marked as `private` but MiMa still seems to be confused by the change.
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.task"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.copy$default$2"),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.copy"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.taskAttempt"),
-        ProblemFilters.exclude[IncompatibleResultTypeProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.copy$default$3"),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.this"),
-        ProblemFilters.exclude[IncompatibleMethTypeProblem](
-          "org.apache.spark.scheduler.AskPermissionToCommitOutput.apply")
-      ) ++ Seq(
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.shuffle.FileShuffleBlockResolver$ShuffleFileGroup")
-      ) ++ Seq(
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.ml.regression.LeastSquaresAggregator.add"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.ml.regression.LeastSquaresCostFun.this"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.clearLastInstantiatedContext"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.setLastInstantiatedContext"),
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.sql.SQLContext$SQLSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.detachSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.tlSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.defaultSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.currentSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.openSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.setSession"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.sql.SQLContext.createSession")
-      ) ++ Seq(
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.SparkContext.preferredNodeLocationData_="),
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.rdd.MapPartitionsWithPreparationRDD"),
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.rdd.MapPartitionsWithPreparationRDD$"),
-        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SparkSQLParser")
-      ) ++ Seq(
-        // SPARK-11485
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.DataFrameHolder.df"),
-        // SPARK-11541 mark various JDBC dialects as private
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.productElement"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.productArity"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.canEqual"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.productIterator"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.productPrefix"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.toString"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.NoopDialect.hashCode"),
-        ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.jdbc.PostgresDialect$"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.productElement"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.productArity"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.canEqual"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.productIterator"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.productPrefix"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.toString"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.hashCode"),
-        ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.jdbc.NoopDialect$")
-      ) ++ Seq (
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.status.api.v1.ApplicationInfo.this"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.status.api.v1.StageData.this")
-      ) ++ Seq(
-        // SPARK-11766 add toJson to Vector
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.mllib.linalg.Vector.toJson")
-      ) ++ Seq(
-        // SPARK-9065 Support message handler in Kafka Python API
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.streaming.kafka.KafkaUtilsPythonHelper.createDirectStream"),
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.streaming.kafka.KafkaUtilsPythonHelper.createRDD")
-      ) ++ Seq(
-        // SPARK-4557 Changed foreachRDD to use VoidFunction
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.streaming.api.java.JavaDStreamLike.foreachRDD")
-      ) ++ Seq(
-        // SPARK-11996 Make the executor thread dump work again
-        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.executor.ExecutorEndpoint"),
-        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.executor.ExecutorEndpoint$"),
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.storage.BlockManagerMessages$GetRpcHostPortForExecutor"),
-        ProblemFilters.exclude[MissingClassProblem](
-          "org.apache.spark.storage.BlockManagerMessages$GetRpcHostPortForExecutor$")
-      ) ++ Seq(
-        // SPARK-3580 Add getNumPartitions method to JavaRDD
-        ProblemFilters.exclude[MissingMethodProblem](
-          "org.apache.spark.api.java.JavaRDDLike.getNumPartitions")
-      ) ++
-      // SPARK-11314: YARN backend moved to yarn sub-module and MiMa complains even though it's a
-      // private class.
-      MimaBuild.excludeSparkClass("scheduler.cluster.YarnSchedulerBackend$YarnSchedulerEndpoint")
+      )
     case v if v.startsWith("1.6") =>
       Seq(
         MimaBuild.excludeSparkPackage("deploy"),
