From 6f64a9bac938b9c47b3408d7fdc34ce8048a61d0 Mon Sep 17 00:00:00 2001
From: Andrew Or
Date: Mon, 28 Jul 2014 15:11:04 -0700
Subject: [PATCH] Revert changes in YARN

There is currently no good way to handle quoted arguments and backslashes
in YARN. The new code does not do any escaping, which is fine for standalone
mode (which uses Java's ProcessBuilder) but not for YARN mode. I will open
a separate JIRA for this.
---
 .../scala/org/apache/spark/deploy/yarn/ClientBase.scala  | 5 ++++-
 .../apache/spark/deploy/yarn/ExecutorRunnableUtil.scala  | 8 +++++++-
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
index cb4cc7b119066..d6c96554c040f 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala
@@ -386,7 +386,10 @@ trait ClientBase extends Logging {
     // Forward the Spark configuration to the application master / executors.
     // TODO: it might be nicer to pass these as an internal environment variable rather than
     // as Java options, due to complications with string parsing of nested quotes.
-    javaOpts ++= Utils.sparkJavaOpts(sparkConf)
+    // TODO: Use Utils.sparkJavaOpts here once we figure out how to deal with quotes and backslashes
+    for ((k, v) <- sparkConf.getAll) {
+      javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\""
+    }
 
     if (args.amClass == classOf[ApplicationMaster].getName) {
       sparkConf.getOption("spark.driver.extraJavaOptions")
diff --git a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
index eb22eeeb0098e..d4769d34e7440 100644
--- a/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnableUtil.scala
@@ -67,7 +67,13 @@ trait ExecutorRunnableUtil extends Logging {
     // registers with the Scheduler and transfers the spark configs. Since the Executor backend
     // uses Akka to connect to the scheduler, the akka settings are needed as well as the
     // authentication settings.
-    javaOpts ++= Utils.sparkJavaOpts(sparkConf, SparkConf.isExecutorStartupConf)
+    // TODO: Use Utils.sparkJavaOpts here once we figure out how to deal with quotes and backslashes
+    sparkConf.getAll.
+      filter { case (k, v) => k.startsWith("spark.auth") || k.startsWith("spark.akka") }.
+      foreach { case (k, v) => javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\"" }
+
+    sparkConf.getAkkaConf.
+      foreach { case (k, v) => javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\"" }
 
     // Commenting it out for now - so that people can refer to the properties if required. Remove
     // it once cpuset version is pushed out.
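
A minimal sketch (not part of the patch) of the escaping both hunks apply.
YARN launches containers through a generated shell script, so a config value
containing a space must be wrapped in escaped quotes (\" ... \") to survive
as one token; standalone mode is unaffected because Java's ProcessBuilder
passes arguments as an array. The config entries below are made-up values,
not keys the patch requires:

    import scala.collection.mutable.ListBuffer

    object EscapingDemo {
      def main(args: Array[String]): Unit = {
        val javaOpts = ListBuffer[String]()
        // Hypothetical config entries; note the space in the first value.
        val conf = Seq(
          "spark.app.name" -> "My App",
          "spark.akka.frameSize" -> "10")
        // Same construction as the patched code: -Dkey=\"value\"
        for ((k, v) <- conf) {
          javaOpts += "-D" + k + "=" + "\\\"" + v + "\\\""
        }
        // Prints: -Dspark.app.name=\"My App\"
        //         -Dspark.akka.frameSize=\"10\"
        javaOpts.foreach(println)
      }
    }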