
Commit d80740c

psuter authored and Curtis Howard committed
Upgrade Scala to 2.11.12 (apache#32)
* Upgrade Scala to 2.11.12

  - Slightly modifies the Spark REPL code to reflect internal changes in Scala tooling
  - This code was ported from apache#21495

(cherry picked from commit 3e52a9160875ec5c145c4e9fa0106ff7d1f380b2)
(cherry picked from commit f5a3901)
(cherry picked from commit a72488d)
1 parent e81d324 commit d80740c

File tree

7 files changed: +47, -39 lines

LICENSE

Lines changed: 6 additions & 6 deletions

@@ -242,18 +242,18 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
      (BSD licence) ANTLR ST4 4.0.4 (org.antlr:ST4:4.0.4 - http://www.stringtemplate.org)
      (BSD licence) ANTLR StringTemplate (org.antlr:stringtemplate:3.2.1 - http://www.stringtemplate.org)
      (BSD License) Javolution (javolution:javolution:5.5.1 - http://javolution.org)
-     (BSD) JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
+     (BSD) JLine (jline:jline:2.14.3 - https://github.com/jline/jline2)
      (BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.3 - http://paranamer.codehaus.org/paranamer)
      (BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.6 - http://paranamer.codehaus.org/paranamer)
      (BSD 3 Clause) Scala (http://www.scala-lang.org/download/#License)
      (Interpreter classes (all .scala files in repl/src/main/scala
      except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
      and for SerializableMapWrapper in JavaUtils.scala)
-     (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
-     (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
-     (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
-     (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
-     (BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+     (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.12 - http://www.scala-lang.org/)
+     (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.12 - http://www.scala-lang.org/)
+     (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.12 - http://www.scala-lang.org/)
+     (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.12 - http://www.scala-lang.org/)
+     (BSD-like) Scalap (org.scala-lang:scalap:2.11.12 - http://www.scala-lang.org/)
      (BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
      (BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
      (BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 3 additions & 3 deletions

@@ -78,7 +78,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
   private val MESOS = 4
   private val LOCAL = 8
   private val KUBERNETES = 16
-  private val COOK = 32
+  private val COOK = 32
   private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

   // Deploy modes
@@ -597,7 +597,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
     // Other options
     OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
       confKey = "spark.executor.cores"),
-    OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
+    OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
       ALL_DEPLOY_MODES, confKey = "spark.executor.memory"),
     OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
       confKey = "spark.cores.max"),
@@ -606,7 +606,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
     OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
     OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
       confKey = "spark.jars"),
-    OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
+    OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
       CLUSTER, confKey = "spark.driver.memory"),
     OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
       confKey = "spark.driver.cores"),

dev/deps/spark-deps-hadoop-2.6

Lines changed: 5 additions & 5 deletions

@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
 jets3t-0.9.4.jar
 jetty-6.1.26.jar
 jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
 joda-time-2.9.3.jar
 jodd-core-3.5.2.jar
 jpam-1.1.jar
@@ -171,10 +171,10 @@ parquet-jackson-1.8.3.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.7.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
 scala-xml_2.11-1.0.5.jar
 scalap-2.11.8.jar
 shapeless_2.11-2.3.2.jar

dev/deps/spark-deps-hadoop-2.7

Lines changed: 5 additions & 5 deletions

@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
 jets3t-0.9.4.jar
 jetty-6.1.26.jar
 jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
 joda-time-2.9.3.jar
 jodd-core-3.5.2.jar
 jpam-1.1.jar
@@ -172,10 +172,10 @@ parquet-jackson-1.8.3.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.7.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
 scala-xml_2.11-1.0.5.jar
 scalap-2.11.8.jar
 shapeless_2.11-2.3.2.jar

pom.xml

Lines changed: 3 additions & 3 deletions

@@ -159,7 +159,7 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <!-- managed up from 3.2.1 for SPARK-11652 -->
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
+    <scala.version>2.11.12</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
     <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
@@ -849,7 +849,7 @@
       <dependency>
         <groupId>org.scala-lang.modules</groupId>
         <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
-        <version>1.0.4</version>
+        <version>1.1.0</version>
       </dependency>
       <dependency>
         <groupId>org.scala-lang</groupId>
@@ -860,7 +860,7 @@
       <dependency>
         <groupId>jline</groupId>
         <artifactId>jline</artifactId>
-        <version>2.12.1</version>
+        <version>2.14.3</version>
       </dependency>
       <dependency>
         <groupId>org.scalatest</groupId>

repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala

Lines changed: 9 additions & 15 deletions

@@ -36,7 +36,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def this() = this(None, new JPrintWriter(Console.out, true))

   override def createInterpreter(): Unit = {
-    intp = new SparkILoopInterpreter(settings, out)
+    intp = new SparkILoopInterpreter(settings, out, initializeSpark)
   }

   val initializationCommands: Seq[String] = Seq(
@@ -73,11 +73,15 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
     "import org.apache.spark.sql.functions._"
   )

-  def initializeSpark() {
-    intp.beQuietDuring {
-      savingReplayStack { // remove the commands from session history.
-        initializationCommands.foreach(processLine)
+  def initializeSpark(): Unit = {
+    if (!intp.reporter.hasErrors) {
+      // `savingReplayStack` removes the commands from session history.
+      savingReplayStack {
+        initializationCommands.foreach(intp quietRun _)
       }
+    } else {
+      throw new RuntimeException(s"Scala $versionString interpreter encountered " +
+        "errors during initialization")
     }
   }

@@ -101,16 +105,6 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   /** Available commands */
   override def commands: List[LoopCommand] = standardCommands

-  /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
-   * sees any files, so that the Spark context is visible in those files. This is a bit of a
-   * hack, but there isn't another hook available to us at this point.
-   */
-  override def loadFiles(settings: Settings): Unit = {
-    initializeSpark()
-    super.loadFiles(settings)
-  }
-
   override def resetCommand(line: String): Unit = {
     super.resetCommand(line)
     initializeSpark()
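
In the new `initializeSpark`, `savingReplayStack` keeps the bootstrap commands out of the `:replay` history by snapshotting the replay state around the block. Conceptually it is a save-and-restore wrapper; the following is an illustrative sketch of that pattern under that assumption (hypothetical `ReplayStackSketch`, not the actual `scala.tools.nsc.interpreter` implementation):

    object ReplayStackSketch {
      // Stands in for the REPL's record of user commands that `:replay` re-runs.
      private var replayStack: List[String] = Nil

      def record(cmd: String): Unit = replayStack ::= cmd

      // Run `body`, then restore the stack to its prior state, so commands
      // recorded inside `body` never show up in the replay history.
      def savingReplayStack[T](body: => T): T = {
        val saved = replayStack
        try body finally replayStack = saved
      }

      // Usage: commands recorded inside the block are discarded afterwards.
      // savingReplayStack { record("spark.sparkContext.setLogLevel(...)") }
    }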

repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala

Lines changed: 16 additions & 2 deletions

@@ -21,8 +21,22 @@ import scala.collection.mutable
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter._

-class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain(settings, out) {
-  self =>
+class SparkILoopInterpreter(settings: Settings, out: JPrintWriter, initializeSpark: () => Unit)
+  extends IMain(settings, out) { self =>
+
+  /**
+   * We override `initializeSynchronous` to initialize Spark *after* `intp` is properly initialized
+   * and *before* the REPL sees any files in the private `loadInitFiles` functions, so that
+   * the Spark context is visible in those files.
+   *
+   * This is a bit of a hack, but there isn't another hook available to us at this point.
+   *
+   * See the discussion in Scala community https://github.com/scala/bug/issues/10913 for detail.
+   */
+  override def initializeSynchronous(): Unit = {
+    super.initializeSynchronous()
+    initializeSpark()
+  }

   override lazy val memberHandlers = new {
     val intp: self.type = self
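
The new override leans on the template-method shape of the interpreter's startup: the superclass finishes its synchronous initialization, then the injected `initializeSpark` callback runs, and only afterwards are init files loaded, so they can see the Spark context. A self-contained sketch of that ordering (hypothetical class names, not the real nsc interpreter API):

    // Minimal sketch of the hook ordering (hypothetical names, not the nsc API).
    class BaseRepl {
      def initializeSynchronous(): Unit = println("1. interpreter internals ready")
      private def loadInitFiles(): Unit = println("3. init files see the Spark context")
      def start(): Unit = { initializeSynchronous(); loadInitFiles() }
    }

    class SparkRepl(initializeSpark: () => Unit) extends BaseRepl {
      // Bootstrap Spark after the interpreter is ready, before init files load.
      override def initializeSynchronous(): Unit = {
        super.initializeSynchronous()
        initializeSpark()
      }
    }

    object HookOrderingDemo extends App {
      new SparkRepl(() => println("2. SparkSession/SparkContext created")).start()
    }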
