From 1aff39c675a87f2d3f3be22a3955ddc2e4997d7b Mon Sep 17 00:00:00 2001
From: Joshi
Date: Fri, 26 Jun 2015 11:15:14 -0700
Subject: [PATCH] Fix for SparkContext stop behavior

---
 .../src/test/scala/org/apache/spark/SparkContextSuite.scala | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index d79ec5cddf195..35cd2b1e1fff1 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -30,6 +30,7 @@ import org.apache.spark.util.Utils
 
 import scala.concurrent.Await
 import scala.concurrent.duration.Duration
+import org.scalatest.Matchers._
 
 class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
 
@@ -274,16 +275,13 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
   }
 
   test("calling multiple sc.stop() must not throw any exception") {
-    try {
+    noException should be thrownBy {
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
       val cnt = sc.parallelize(1 to 4).count()
       sc.cancelAllJobs()
       sc.stop()
       // call stop second time
       sc.stop()
-    } catch {
-      case e: Exception =>
-        fail("calling multiple sc.stop() must not throw any exception", e);
     }
   }
 }