diff --git a/core/pom.xml b/core/pom.xml
index 30449f4397d5b..2373a9cc50966 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -174,7 +174,7 @@
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index e121b162ad9e6..3ff9ef449f429 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -24,11 +24,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.

-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }

-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
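
A note on this change: in ScalaTest 2.x, `BeforeAndAfterAll` no longer hands a `configMap` to `beforeAll`/`afterAll`, so the overrides become parameterless. A minimal sketch of the new shape (the suite and property names here are illustrative, not from the patch):

```scala
import org.scalatest.{BeforeAndAfterAll, FunSuite}

// Minimal sketch of the ScalaTest 2.x BeforeAndAfterAll contract:
// the hooks take no configMap parameter anymore.
class ExampleSuite extends FunSuite with BeforeAndAfterAll {
  override def beforeAll() {
    System.setProperty("example.flag", "true")  // illustrative property
  }

  override def afterAll() {
    System.clearProperty("example.flag")
  }

  test("flag is visible to tests") {
    assert(System.getProperty("example.flag") === "true")
  }
}
```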
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 395d5680f25b7..7ab806e3a0edc 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -233,8 +233,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {
     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)

     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
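
The hoisting above is motivated by ScalaTest 2.x's `assert`, which is implemented as a macro that inspects the asserted expression; moving the long dependency chain into a local `val` leaves the macro a simple boolean. A sketch of the pattern (the helper name is hypothetical):

```scala
import org.apache.spark.rdd.{RDD, ShuffledRDD}

// Hypothetical helper showing the hoist-then-assert pattern: compute the
// complex boolean first, then hand a plain identifier to the assert macro.
def assertShuffledUpstream(rdd: RDD[_]): Unit = {
  val isShuffled = rdd.dependencies.head.rdd.isInstanceOf[ShuffledRDD[_, _, _]]
  assert(isShuffled, "expected a ShuffledRDD upstream of " + rdd)
}
```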
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 6ea1744fb664e..2d61ed0a308f6 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{FetchFailed, Success, TaskEndReason}
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}
/**
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
@@ -45,7 +45,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
* DAGScheduler#submitWaitingStages (via test utility functions like runEvent or respondToTaskSet)
* and capturing the resulting TaskSets from the mock TaskScheduler.
*/
-class DAGSchedulerSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
+class DAGSchedulerSuite extends FunSuiteLike with BeforeAndAfter with LocalSparkContext {
val conf = new SparkConf
/** Set of TaskSets the DAGScheduler has requested executed. */
val taskSets = scala.collection.mutable.Buffer[TaskSet]()
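
For context on the `FunSuite` → `FunSuiteLike` switch: ScalaTest 2.x turned the style traits into classes and added `*Like` trait variants for suites that must mix the style into another class hierarchy. A sketch of the distinction (the base class here is hypothetical):

```scala
import org.scalatest.FunSuiteLike

// Hypothetical base class a suite might be forced to extend.
class TestHarness

// FunSuite is a class in ScalaTest 2.x, so it cannot be combined with
// another superclass; the FunSuiteLike trait can.
class HarnessBackedSuite extends TestHarness with FunSuiteLike {
  test("example") {
    assert(2 + 2 === 4)
  }
}
```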
diff --git a/pom.xml b/pom.xml
index dfe5653290b31..b41fcc9a94f3d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,7 +107,7 @@
     <java.version>1.6</java.version>
-    <scala.version>2.10.3</scala.version>
+    <scala.version>2.10.4</scala.version>
     <scala.binary.version>2.10</scala.binary.version>
     <mesos.version>0.13.0</mesos.version>
     4.1.2
@@ -373,7 +373,7 @@
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.2.0</version>
         <scope>test</scope>
       </dependency>
@@ -389,20 +389,26 @@
       <dependency>
         <groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.scalacheck</groupId>
         <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
         <scope>test</scope>
       </dependency>
@@ -609,6 +615,8 @@
             <arg>-unchecked</arg>
             <arg>-deprecation</arg>
+            <arg>-feature</arg>
+            <arg>-language:postfixOps</arg>
           </args>
           <jvmArgs>
             <jvmArg>-Xms64m</jvmArg>
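
The two new scalac flags work together: `-feature` makes Scala 2.10 warn on uses of optional language features such as postfix operator notation, and `-language:postfixOps` enables that feature build-wide so existing test code written postfix compiles warning-free. A small illustration:

```scala
import scala.concurrent.duration._

object PostfixExample {
  // `10 seconds` uses postfix operator notation; under -feature this needs
  // either `import scala.language.postfixOps` in the file or the compiler
  // flag -language:postfixOps to compile without a warning.
  val timeout = 10 seconds
}
```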
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ecdf38623756c..57742f8eca134 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -135,7 +135,7 @@ object SparkBuild extends Build {
def sharedSettings = Defaults.defaultSettings ++ Seq(
organization := "org.apache.spark",
version := "0.9.2-SNAPSHOT",
- scalaVersion := "2.10.3",
+ scalaVersion := "2.10.4",
scalacOptions := Seq("-Xmax-classfile-name", "120", "-unchecked", "-deprecation",
"-target:" + SCALAC_JVM_VERSION),
javacOptions := Seq("-target", JAVAC_JVM_VERSION, "-source", JAVAC_JVM_VERSION),
@@ -224,12 +224,13 @@ object SparkBuild extends Build {
"org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
/** Workaround for SPARK-959. Dependency used by org.eclipse.jetty. Fixed in ivy 2.3.0. */
"org.eclipse.jetty.orbit" % "javax.servlet" % "2.5.0.v201103041518" artifacts Artifact("javax.servlet", "jar", "jar"),
- "org.scalatest" %% "scalatest" % "1.9.1" % "test",
- "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
+ "org.scalatest" %% "scalatest" % "2.2.0" % "test",
+ "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
"com.novocode" % "junit-interface" % "0.10" % "test",
- "org.easymock" % "easymock" % "3.1" % "test",
- "org.mockito" % "mockito-all" % "1.8.5" % "test",
+ "org.easymock" % "easymockclassextension" % "3.1" % "test",
+ "org.mockito" % "mockito-all" % "1.9.0" % "test",
"commons-io" % "commons-io" % "2.4" % "test"
+ "junit" % "junit" % "4.10" % "test"
),
testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
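
With `junit` plus the already-present `junit-interface` on the test classpath, sbt can also discover plain JUnit tests. A minimal hypothetical example:

```scala
import org.junit.Test
import org.junit.Assert.assertEquals

// Hypothetical JUnit 4 test in Scala; junit-interface lets sbt's `test`
// task discover and run it alongside the ScalaTest suites.
class JUnitExampleSuite {
  @Test
  def additionWorks() {
    assertEquals(4, 2 + 2)
  }
}
```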
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 8203b8f6122e1..578fa31217975 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -56,12 +56,14 @@ class ReplSuite extends FunSuite {
}
   def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
       "Interpreter output did not contain '" + message + "':\n" + output)
   }

   def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
       "Interpreter output contained '" + message + "':\n" + output)
   }
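
Same motivation as in RDDSuite: evaluating the condition into a local `val` before calling `assert(condition, clue)` keeps ScalaTest 2.x's assert macro happy while preserving the diagnostic message. Hypothetical usage of the refactored helpers:

```scala
// Hypothetical usage inside a REPL test; runInterpreter is assumed to be
// the suite's utility that feeds input to the interpreter and returns output.
val output = runInterpreter("local", "val x = 2 + 2")
assertContains("x: Int = 4", output)
assertDoesNotContain("error", output)
```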
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
index 95f1588b86195..99b3485e6dbcc 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -90,9 +90,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 5)
     assert(third.size === 5)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
test("repartition (fewer partitions)") {
@@ -109,9 +109,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 2)
     assert(third.size === 2)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }
test("groupByKey") {