fix local-cluster mode for ExternalShuffleServiceSuite
aarondav committed May 8, 2015
1 parent 59e5e38 commit e901eb2
Showing 2 changed files with 7 additions and 1 deletion.
ExternalShuffleService.scala
@@ -40,7 +40,10 @@ private[deploy]
 class ExternalShuffleService(sparkConf: SparkConf, securityManager: SecurityManager)
   extends Logging {
 
-  private val enabled = sparkConf.getBoolean("spark.shuffle.service.enabled", false)
+  // Check both if shuffle service is enabled, and that the worker should actually host the
+  // shuffle service in that case. (The latter is currently only used for testing.)
+  private val enabled = sparkConf.getBoolean("spark.shuffle.service.enabled", false) &&
+    sparkConf.getBoolean("spark.worker.shouldHostShuffleServiceIfEnabled", true)
   private val port = sparkConf.getInt("spark.shuffle.service.port", 7337)
   private val useSasl: Boolean = securityManager.isAuthenticationEnabled()
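
To make the gating above concrete, here is a minimal standalone sketch (not Spark's own code; the object and method names are invented for illustration) of how the two configuration keys combine: the worker hosts the shuffle service only when spark.shuffle.service.enabled is true and spark.worker.shouldHostShuffleServiceIfEnabled has not been set to false.

import org.apache.spark.SparkConf

// Illustrative sketch only: mirrors the gating expression added in the diff above.
object ShuffleServiceGateSketch {
  def wouldHostShuffleService(conf: SparkConf): Boolean =
    conf.getBoolean("spark.shuffle.service.enabled", false) &&
      conf.getBoolean("spark.worker.shouldHostShuffleServiceIfEnabled", true)

  def main(args: Array[String]): Unit = {
    // Typical deployment: service enabled, second key left at its default of true.
    val normal = new SparkConf(false).set("spark.shuffle.service.enabled", "true")
    // Test setup: service enabled, but the worker is told not to host it.
    val testMode = new SparkConf(false)
      .set("spark.shuffle.service.enabled", "true")
      .set("spark.worker.shouldHostShuffleServiceIfEnabled", "false")

    println(wouldHostShuffleService(normal))   // true
    println(wouldHostShuffleService(testMode)) // false
  }
}

Because the new key defaults to true, existing deployments are unaffected; only a test that brings its own shuffle server needs to opt out.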
ExternalShuffleServiceSuite.scala
@@ -43,6 +43,9 @@ class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll {
     conf.set("spark.shuffle.manager", "sort")
     conf.set("spark.shuffle.service.enabled", "true")
     conf.set("spark.shuffle.service.port", server.getPort.toString)
+
+    // local-cluster mode starts a Worker which would start its own shuffle service without this:
+    conf.set("spark.worker.shouldHostShuffleServiceIfEnabled", "false")
   }
 
   override def afterAll() {
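
For context, a hedged sketch of how these settings might be combined with local-cluster mode outside the suite. This is not the actual test: the suite starts its own in-process shuffle server and passes server.getPort, whereas the sketch assumes an externally running service on the default port 7337.

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative sketch only; object name and job are invented for this example.
object LocalClusterExternalShuffleSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .set("spark.shuffle.manager", "sort")
      .set("spark.shuffle.service.enabled", "true")
      // Port of an already-running shuffle service; the real suite uses server.getPort.
      .set("spark.shuffle.service.port", "7337")
      // Without this, each Worker started by local-cluster mode would also bring up
      // its own shuffle service alongside the one the test already provides.
      .set("spark.worker.shouldHostShuffleServiceIfEnabled", "false")

    // local-cluster[numWorkers, coresPerWorker, memoryPerWorkerMB]
    val sc = new SparkContext("local-cluster[2,1,512]", "external-shuffle-sketch", conf)
    try {
      // A job with a shuffle stage, so blocks are served through the external service.
      val counts = sc.parallelize(1 to 1000, 4).map(i => (i % 10, 1)).reduceByKey(_ + _)
      println(counts.collect().sorted.mkString(", "))
    } finally {
      sc.stop()
    }
  }
}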
