diff --git a/LICENSE b/LICENSE
index c2b0d72663b55..e5d60718fb0fd 100644
--- a/LICENSE
+++ b/LICENSE
@@ -242,18 +242,18 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
(BSD licence) ANTLR ST4 4.0.4 (org.antlr:ST4:4.0.4 - http://www.stringtemplate.org)
(BSD licence) ANTLR StringTemplate (org.antlr:stringtemplate:3.2.1 - http://www.stringtemplate.org)
(BSD License) Javolution (javolution:javolution:5.5.1 - http://javolution.org)
- (BSD) JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
+ (BSD) JLine (jline:jline:2.14.3 - https://github.com/jline/jline2)
(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.3 - http://paranamer.codehaus.org/paranamer)
(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.6 - http://paranamer.codehaus.org/paranamer)
(BSD 3 Clause) Scala (http://www.scala-lang.org/download/#License)
(Interpreter classes (all .scala files in repl/src/main/scala
except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
and for SerializableMapWrapper in JavaUtils.scala)
- (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
- (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
- (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
- (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
- (BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+ (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.12 - http://www.scala-lang.org/)
+ (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.12 - http://www.scala-lang.org/)
+ (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.12 - http://www.scala-lang.org/)
+ (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.12 - http://www.scala-lang.org/)
+ (BSD-like) Scalap (org.scala-lang:scalap:2.11.12 - http://www.scala-lang.org/)
(BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
(BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
(BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 2ebada3f59e7f..122e99592ee4d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -77,7 +77,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
private val MESOS = 4
private val LOCAL = 8
private val KUBERNETES = 16
- private val COOK = 32
+ private val COOK = 32
private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES
// Deploy modes
@@ -582,7 +582,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
// Other options
OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
confKey = "spark.executor.cores"),
- OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
+ OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
ALL_DEPLOY_MODES, confKey = "spark.executor.memory"),
OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
confKey = "spark.cores.max"),
@@ -591,7 +591,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
confKey = "spark.jars"),
- OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
+ OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
CLUSTER, confKey = "spark.driver.memory"),
OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
confKey = "spark.driver.cores"),
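Note on the constants touched above: each cluster manager is a distinct power of two, so an `OptionAssigner`'s applicability is a bitwise-AND test against its manager mask. Below is a minimal self-contained sketch of that mechanism; the `YARN = 1` and `STANDALONE = 2` values and the `applies` helper are illustrative assumptions, since the hunks above do not show them.

```scala
object ClusterManagerFlagsSketch {
  // Power-of-two flags mirroring SparkSubmit; YARN and STANDALONE values
  // are assumed here because the hunk above does not include them.
  val YARN = 1
  val STANDALONE = 2
  val MESOS = 4
  val LOCAL = 8
  val KUBERNETES = 16
  val COOK = 32
  val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

  // Hypothetical helper: an option applies when the chosen manager's bit
  // is present in the assigner's mask.
  def applies(chosenManager: Int, assignerMask: Int): Boolean =
    (assignerMask & chosenManager) != 0

  def main(args: Array[String]): Unit = {
    val executorMemoryMask = STANDALONE | MESOS | COOK | YARN | KUBERNETES
    println(applies(COOK, executorMemoryMask))  // true: Cook submissions set spark.executor.memory
    println(applies(LOCAL, executorMemoryMask)) // false: LOCAL's bit is absent from the mask
  }
}
```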
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 3b2a88537ca50..faf626cbb0721 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
jets3t-0.9.4.jar
jetty-6.1.26.jar
jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
@@ -171,10 +171,10 @@ parquet-jackson-1.8.2.jar
protobuf-java-2.5.0.jar
py4j-0.10.6.jar
pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.3.2.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index 757da00788e54..c2cc831f22c67 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
jets3t-0.9.4.jar
jetty-6.1.26.jar
jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
@@ -172,10 +172,10 @@ parquet-jackson-1.8.2.jar
protobuf-java-2.5.0.jar
py4j-0.10.6.jar
pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.3.2.jar
diff --git a/pom.xml b/pom.xml
index 118604560996f..4dea7a36be44b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -159,7 +159,7 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
+    <scala.version>2.11.12</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
     <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
@@ -850,7 +850,7 @@
       <dependency>
         <groupId>org.scala-lang.modules</groupId>
         <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
-        <version>1.0.4</version>
+        <version>1.1.0</version>
       </dependency>
       <dependency>
         <groupId>org.scala-lang</groupId>
@@ -861,7 +861,7 @@
       <dependency>
         <groupId>jline</groupId>
         <artifactId>jline</artifactId>
-        <version>2.12.1</version>
+        <version>2.14.3</version>
       </dependency>
       <dependency>
         <groupId>org.scalatest</groupId>
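After rebuilding against the bumped pom, the resolved versions can be sanity-checked at runtime. A small sketch under the assumption that the upgraded jars are on the classpath; whether jline's jar manifest carries an Implementation-Version is not guaranteed, hence the `Option` guard.

```scala
object VersionCheckSketch {
  def main(args: Array[String]): Unit = {
    // The Scala standard library reports its own version string.
    println(scala.util.Properties.versionNumberString) // expect 2.11.12

    // jline 2.x may expose its version through the jar manifest.
    val pkg = classOf[jline.console.ConsoleReader].getPackage
    println(Option(pkg.getImplementationVersion).getOrElse("unknown")) // expect 2.14.3
  }
}
```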
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index e69441a475e9a..a44051b351e19 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -36,7 +36,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
def this() = this(None, new JPrintWriter(Console.out, true))
override def createInterpreter(): Unit = {
- intp = new SparkILoopInterpreter(settings, out)
+ intp = new SparkILoopInterpreter(settings, out, initializeSpark)
}
val initializationCommands: Seq[String] = Seq(
@@ -73,11 +73,15 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
"import org.apache.spark.sql.functions._"
)
- def initializeSpark() {
- intp.beQuietDuring {
- savingReplayStack { // remove the commands from session history.
- initializationCommands.foreach(processLine)
+ def initializeSpark(): Unit = {
+ if (!intp.reporter.hasErrors) {
+ // `savingReplayStack` removes the commands from session history.
+ savingReplayStack {
+ initializationCommands.foreach(intp quietRun _)
}
+ } else {
+ throw new RuntimeException(s"Scala $versionString interpreter encountered " +
+ "errors during initialization")
}
}
@@ -101,16 +105,6 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
/** Available commands */
override def commands: List[LoopCommand] = standardCommands
- /**
- * We override `loadFiles` because we need to initialize Spark *before* the REPL
- * sees any files, so that the Spark context is visible in those files. This is a bit of a
- * hack, but there isn't another hook available to us at this point.
- */
- override def loadFiles(settings: Settings): Unit = {
- initializeSpark()
- super.loadFiles(settings)
- }
-
override def resetCommand(line: String): Unit = {
super.resetCommand(line)
initializeSpark()
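The rewrite above also drops the explicit `intp.beQuietDuring { ... processLine ... }` wrapper in favor of `intp.quietRun`, which interprets a line of code on the `IMain` directly with the usual REPL echo suppressed. A minimal standalone sketch of that API (requires `scala-compiler` on the classpath; not Spark code):

```scala
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.IMain

object QuietRunSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings
    settings.usejavacp.value = true // compile against the JVM classpath
    val intp = new IMain(settings)

    // quietRun interprets the line but suppresses the REPL echo, which is
    // why the Spark initialization commands no longer spam the console.
    intp.quietRun("val x = 40 + 2") // no "x: Int = 42" is printed

    // The binding is still visible to later lines.
    intp.interpret("println(x)") // prints 42
    intp.close()
  }
}
```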
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
index e736607a9a6b9..4e63816402a10 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
@@ -21,8 +21,22 @@ import scala.collection.mutable
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter._
-class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain(settings, out) {
- self =>
+class SparkILoopInterpreter(settings: Settings, out: JPrintWriter, initializeSpark: () => Unit)
+ extends IMain(settings, out) { self =>
+
+ /**
+ * We override `initializeSynchronous` to initialize Spark *after* `intp` is properly initialized
+ * and *before* the REPL sees any files in the private `loadInitFiles` functions, so that
+ * the Spark context is visible in those files.
+ *
+ * This is a bit of a hack, but there isn't another hook available to us at this point.
+ *
+   * See the discussion in the Scala community, https://github.com/scala/bug/issues/10913, for details.
+ */
+ override def initializeSynchronous(): Unit = {
+ super.initializeSynchronous()
+ initializeSpark()
+ }
override lazy val memberHandlers = new {
val intp: self.type = self
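To summarize the new wiring: `SparkILoop` hands `initializeSpark` to the interpreter, and the interpreter fires it once its own synchronous setup completes, before any init files are loaded. The sketch below uses hypothetical, simplified shapes; only the callback pattern mirrors the diff.

```scala
// Hypothetical names for illustration; not the actual Spark classes.
class SketchInterpreter(initializeSpark: () => Unit) {
  def initializeSynchronous(): Unit = {
    // ... the real superclass (IMain) finishes compiler setup here ...
    initializeSpark() // runs before the REPL loads any user init files
  }
}

class SketchLoop {
  private var intp: SketchInterpreter = _
  def initializeSpark(): Unit = println("spark, sc and implicits are now bound")
  def createInterpreter(): Unit = { intp = new SketchInterpreter(initializeSpark _) }
  def start(): Unit = intp.initializeSynchronous()
}

object WiringSketch extends App {
  val loop = new SketchLoop
  loop.createInterpreter()
  loop.start() // prints the message once interpreter setup completes
}
```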