Upgrade Scala to 2.11.12 (#32)
* Upgrade Scala to 2.11.12

- Slightly modifies the Spark REPL code to reflect internal changes in Scala tooling
- This code was ported from apache#21495

(cherry picked from commit 3e52a9160875ec5c145c4e9fa0106ff7d1f380b2)
psuter authored and Curtis Howard committed Jul 26, 2018
1 parent eba08e2 commit f5a3901
Showing 7 changed files with 47 additions and 39 deletions.
12 changes: 6 additions & 6 deletions LICENSE
@@ -242,18 +242,18 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
(BSD licence) ANTLR ST4 4.0.4 (org.antlr:ST4:4.0.4 - http://www.stringtemplate.org)
(BSD licence) ANTLR StringTemplate (org.antlr:stringtemplate:3.2.1 - http://www.stringtemplate.org)
(BSD License) Javolution (javolution:javolution:5.5.1 - http://javolution.org)
-(BSD) JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
+(BSD) JLine (jline:jline:2.14.3 - https://github.com/jline/jline2)
(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.3 - http://paranamer.codehaus.org/paranamer)
(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.6 - http://paranamer.codehaus.org/paranamer)
(BSD 3 Clause) Scala (http://www.scala-lang.org/download/#License)
(Interpreter classes (all .scala files in repl/src/main/scala
except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
and for SerializableMapWrapper in JavaUtils.scala)
-(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scalap (org.scala-lang:scalap:2.11.12 - http://www.scala-lang.org/)
(BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
(BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
(BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -77,7 +77,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
  private val MESOS = 4
  private val LOCAL = 8
  private val KUBERNETES = 16
-  private val COOK = 32
+  private val COOK = 32
  private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

  // Deploy modes
@@ -582,7 +582,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
      // Other options
      OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
        confKey = "spark.executor.cores"),
-      OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
+      OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
        ALL_DEPLOY_MODES, confKey = "spark.executor.memory"),
      OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
        confKey = "spark.cores.max"),
@@ -591,7 +591,7 @@
      OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
      OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
        confKey = "spark.jars"),
-      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
+      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
        CLUSTER, confKey = "spark.driver.memory"),
      OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
        confKey = "spark.driver.cores"),
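For readers skimming the constants above: each cluster manager is a distinct power of two, so a set of supported managers packs into a single Int, and OptionAssigner can test membership with a bitwise AND. Below is a minimal sketch of the scheme; the YARN = 1 and STANDALONE = 2 values are assumptions, since those constants sit just above the visible hunk.

object ClusterManagerMask {
  // Flag values mirror the diff context above; YARN and STANDALONE are assumed.
  val YARN = 1
  val STANDALONE = 2
  val MESOS = 4
  val LOCAL = 8
  val KUBERNETES = 16
  val COOK = 32
  val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

  // A set of managers is a bitwise OR of flags; membership is a bitwise AND.
  def supports(mask: Int, manager: Int): Boolean = (mask & manager) != 0

  def main(args: Array[String]): Unit = {
    val executorMemoryMgrs = STANDALONE | MESOS | COOK | YARN | KUBERNETES
    println(supports(executorMemoryMgrs, COOK))  // true
    println(supports(executorMemoryMgrs, LOCAL)) // false
  }
}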
10 changes: 5 additions & 5 deletions dev/deps/spark-deps-hadoop-2.6
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
jets3t-0.9.4.jar
jetty-6.1.26.jar
jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
@@ -171,10 +171,10 @@ parquet-jackson-1.8.2.jar
protobuf-java-2.5.0.jar
py4j-0.10.6.jar
pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.3.2.jar
10 changes: 5 additions & 5 deletions dev/deps/spark-deps-hadoop-2.7
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
jets3t-0.9.4.jar
jetty-6.1.26.jar
jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
joda-time-2.9.3.jar
jodd-core-3.5.2.jar
jpam-1.1.jar
@@ -172,10 +172,10 @@ parquet-jackson-1.8.2.jar
protobuf-java-2.5.0.jar
py4j-0.10.6.jar
pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
scala-xml_2.11-1.0.5.jar
scalap-2.11.8.jar
shapeless_2.11-2.3.2.jar
6 changes: 3 additions & 3 deletions pom.xml
@@ -159,7 +159,7 @@
    <commons.math3.version>3.4.1</commons.math3.version>
    <!-- managed up from 3.2.1 for SPARK-11652 -->
    <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
+    <scala.version>2.11.12</scala.version>
    <scala.binary.version>2.11</scala.binary.version>
    <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
    <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
@@ -850,7 +850,7 @@
      <dependency>
        <groupId>org.scala-lang.modules</groupId>
        <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
-        <version>1.0.4</version>
+        <version>1.1.0</version>
      </dependency>
      <dependency>
        <groupId>org.scala-lang</groupId>
@@ -861,7 +861,7 @@
      <dependency>
        <groupId>jline</groupId>
        <artifactId>jline</artifactId>
-        <version>2.12.1</version>
+        <version>2.14.3</version>
      </dependency>
      <dependency>
        <groupId>org.scalatest</groupId>
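As a quick way to confirm the bump took effect at runtime, the Scala library reports its own version through scala.util.Properties; note that versionString is also what the new REPL error message below interpolates. A small, hypothetical standalone check:

object ScalaVersionCheck {
  def main(args: Array[String]): Unit = {
    // Prints e.g. "version 2.11.12" and "2.11.12" respectively.
    println(scala.util.Properties.versionString)
    println(scala.util.Properties.versionNumberString)
    require(scala.util.Properties.versionNumberString.startsWith("2.11.12"),
      "unexpected Scala runtime version")
  }
}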
SparkILoop.scala
@@ -36,7 +36,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
  def this() = this(None, new JPrintWriter(Console.out, true))

  override def createInterpreter(): Unit = {
-    intp = new SparkILoopInterpreter(settings, out)
+    intp = new SparkILoopInterpreter(settings, out, initializeSpark)
  }

  val initializationCommands: Seq[String] = Seq(
@@ -73,11 +73,15 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
"import org.apache.spark.sql.functions._"
)

-  def initializeSpark() {
-    intp.beQuietDuring {
-      savingReplayStack { // remove the commands from session history.
-        initializationCommands.foreach(processLine)
+  def initializeSpark(): Unit = {
+    if (!intp.reporter.hasErrors) {
+      // `savingReplayStack` removes the commands from session history.
+      savingReplayStack {
+        initializationCommands.foreach(intp quietRun _)
      }
+    } else {
+      throw new RuntimeException(s"Scala $versionString interpreter encountered " +
+        "errors during initialization")
    }
  }

@@ -101,16 +105,6 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
  /** Available commands */
  override def commands: List[LoopCommand] = standardCommands

-  /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
-   * sees any files, so that the Spark context is visible in those files. This is a bit of a
-   * hack, but there isn't another hook available to us at this point.
-   */
-  override def loadFiles(settings: Settings): Unit = {
-    initializeSpark()
-    super.loadFiles(settings)
-  }
-
  override def resetCommand(line: String): Unit = {
    super.resetCommand(line)
    initializeSpark()
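One Scala detail in the createInterpreter change above: initializeSpark is a parameterless method, while the interpreter's new constructor parameter (shown in the next file) expects a () => Unit function value, so the compiler eta-expands the method reference automatically. An illustrative sketch; the names here are hypothetical:

object EtaExpansionSketch {
  def greet(): Unit = println("hello")

  // With an expected type of () => Unit, the parameterless method `greet`
  // eta-expands to a Function0 value automatically. This is how SparkILoop
  // can pass its initializeSpark method straight into the interpreter's
  // new initializeSpark: () => Unit constructor parameter.
  val f: () => Unit = greet

  def main(args: Array[String]): Unit = f()
}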
SparkILoopInterpreter.scala
@@ -21,8 +21,22 @@ import scala.collection.mutable
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter._

-class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain(settings, out) {
-  self =>
+class SparkILoopInterpreter(settings: Settings, out: JPrintWriter, initializeSpark: () => Unit)
+  extends IMain(settings, out) { self =>

+  /**
+   * We override `initializeSynchronous` to initialize Spark *after* `intp` is properly initialized
+   * and *before* the REPL sees any files in the private `loadInitFiles` functions, so that
+   * the Spark context is visible in those files.
+   *
+   * This is a bit of a hack, but there isn't another hook available to us at this point.
+   *
+   * See the discussion in the Scala community, https://github.com/scala/bug/issues/10913, for detail.
+   */
+  override def initializeSynchronous(): Unit = {
+    super.initializeSynchronous()
+    initializeSpark()
+  }
+
  override lazy val memberHandlers = new {
    val intp: self.type = self
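Taken together, the two REPL files move Spark initialization out of the removed loadFiles override and into initializeSynchronous, reached through the callback handed to the interpreter's constructor. A stripped-down sketch of that control flow, using simplified stand-in classes rather than the real ILoop/IMain machinery:

// Stand-ins for scala.tools.nsc.interpreter.{ILoop, IMain}; illustrative only.
class Interpreter(initialize: () => Unit) {
  def initializeSynchronous(): Unit = {
    // ... compiler and interpreter setup would happen here ...
    initialize() // Spark comes up only after the interpreter is ready.
  }
}

class Loop {
  var intp: Interpreter = _

  def initializeSpark(): Unit =
    println("spark: SparkSession created, implicits imported")

  def createInterpreter(): Unit = {
    // Mirrors the diff: the loop hands its initializer to the interpreter.
    intp = new Interpreter(initializeSpark _)
  }

  def run(): Unit = {
    createInterpreter()
    intp.initializeSynchronous() // runs before any user input or files load
  }
}

object ReplWiringSketch {
  def main(args: Array[String]): Unit = (new Loop).run()
}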
