From e3fa36f259b7ede73bc148891e2635bf41221660 Mon Sep 17 00:00:00 2001
From: Patrick Wendell
Date: Wed, 15 Jan 2014 13:55:14 -0800
Subject: [PATCH] Merge pull request #442 from pwendell/standalone

Workers should use working directory as spark home if it's not specified

If users don't set SPARK_HOME in their environment file when launching
an application, the standalone cluster should default to the spark home
of the worker.

(cherry picked from commit 59f475c79fc8fd6d3485e4d0adf6768b6a9225a4)
Signed-off-by: Patrick Wendell
---
 .../main/scala/org/apache/spark/deploy/worker/Worker.scala | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 5182dcbb2abfd..312560d7063a4 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -209,8 +209,11 @@ private[spark] class Worker(
       logWarning("Invalid Master (" + masterUrl + ") attempted to launch executor.")
     } else {
       logInfo("Asked to launch executor %s/%d for %s".format(appId, execId, appDesc.name))
+      // TODO (pwendell): We should make sparkHome an Option[String] in
+      // ApplicationDescription to be more explicit about this.
+      val effectiveSparkHome = Option(execSparkHome_).getOrElse(sparkHome.getAbsolutePath)
       val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
-        self, workerId, host, new File(execSparkHome_), workDir, akkaUrl, ExecutorState.RUNNING)
+        self, workerId, host, new File(effectiveSparkHome), workDir, akkaUrl, ExecutorState.RUNNING)
       executors(appId + "/" + execId) = manager
       manager.start()
       coresUsed += cores_
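
The core of the patch is a null-safe fallback: wrap the possibly-null executor
spark home in Option, and fall back to the worker's own home directory when it
is absent. Below is a minimal, self-contained sketch of that
Option(...).getOrElse(...) idiom; resolveSparkHome, requestedSparkHome, and
workerSparkHome are hypothetical names for illustration, not Spark APIs.

import java.io.File

object SparkHomeFallback {
  // Hypothetical stand-in for the patched logic: requestedSparkHome plays the
  // role of execSparkHome_ (null when the app did not set SPARK_HOME), and
  // workerSparkHome plays the role of the worker's sparkHome.
  def resolveSparkHome(requestedSparkHome: String, workerSparkHome: File): File = {
    // Option(x) is None when x is null, so getOrElse supplies the worker's
    // directory as the default -- the same fallback the patched Worker performs.
    val effective = Option(requestedSparkHome).getOrElse(workerSparkHome.getAbsolutePath)
    new File(effective)
  }

  def main(args: Array[String]): Unit = {
    val workerHome = new File("/opt/spark")
    println(resolveSparkHome("/custom/spark", workerHome)) // prints /custom/spark
    println(resolveSparkHome(null, workerHome))            // prints /opt/spark
  }
}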