Keep "ExecutorLauncher" as the main class for client-mode AM.
Marcelo Vanzin committed Aug 18, 2014
Parent: 91beabb · Commit: 0e4be3d
Showing 2 changed files with 25 additions and 5 deletions.
ApplicationMaster.scala (file path not preserved in this capture; name inferred from the hunk header):

```diff
@@ -413,11 +413,11 @@ object ApplicationMaster extends Logging {
 
   private var master: ApplicationMaster = _
 
-  def main(argStrings: Array[String]) = {
+  def main(args: Array[String]) = {
     SignalLogger.register(log)
-    val args = new ApplicationMasterArguments(argStrings)
+    val amArgs = new ApplicationMasterArguments(args)
     SparkHadoopUtil.get.runAsSparkUser { () =>
-      master = new ApplicationMaster(args, new YarnRMClientImpl(args))
+      master = new ApplicationMaster(amArgs, new YarnRMClientImpl(amArgs))
       master.run()
     }
   }
@@ -427,3 +427,15 @@ object ApplicationMaster extends Logging {
   }
 
 }
+
+/**
+ * This object does not provide any special functionality. It exists so that it's easy to tell
+ * apart the client-mode AM from the cluster-mode AM when using tools such as ps or jps.
+ */
+object ExecutorLauncher {
+
+  def main(args: Array[String]) = {
+    ApplicationMaster.main(args)
+  }
+
+}
```
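Why a do-nothing forwarder is enough: `ps` and `jps -l` report a JVM by the main class it was started with, so launching the same code through a differently named entry point is all it takes to tell the client-mode and cluster-mode AMs apart. The sketch below is illustrative only; the `sun.java.command` property is a HotSpot detail, and `RealMain`/`Alias` are made-up names, not part of this commit:

```scala
// Illustrative sketch, not from this commit. On HotSpot JVMs the
// "sun.java.command" system property records the main class the JVM was
// launched with, which is what tools like `jps -l` report. Starting the
// process via Alias therefore shows "Alias" even though all the work
// happens in RealMain, mirroring how ExecutorLauncher delegates to
// ApplicationMaster.
object RealMain {
  def main(args: Array[String]): Unit = {
    println(sys.props.getOrElse("sun.java.command", "<not available>"))
  }
}

object Alias {
  // A do-nothing forwarder, like ExecutorLauncher.
  def main(args: Array[String]): Unit = RealMain.main(args)
}
```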
ClientBase.scala (file path not preserved in this capture; name inferred from the hunk header):

```diff
@@ -312,6 +312,8 @@ trait ClientBase extends Logging {
     val amContainer = Records.newRecord(classOf[ContainerLaunchContext])
     amContainer.setLocalResources(localResources)
 
+    val isLaunchingDriver = args.userClass != null
+
     // In cluster mode, if the deprecated SPARK_JAVA_OPTS is set, we need to propagate it to
     // executors. But we can't just set spark.executor.extraJavaOptions, because the driver's
     // SparkContext will not let that set spark* system properties, which is expected behavior for
@@ -320,7 +322,7 @@ trait ClientBase extends Logging {
     // Note that to warn the user about the deprecation in cluster mode, some code from
     // SparkConf#validateSettings() is duplicated here (to avoid triggering the condition
     // described above).
-    if (args.userClass != null) {
+    if (isLaunchingDriver) {
       sys.env.get("SPARK_JAVA_OPTS").foreach { value =>
         val warning =
           s"""
@@ -380,7 +382,7 @@ trait ClientBase extends Logging {
       javaOpts += YarnSparkHadoopUtil.escapeForShell(s"-D$k=$v")
     }
 
-    if (args.userClass != null) {
+    if (isLaunchingDriver) {
       sparkConf.getOption("spark.driver.extraJavaOptions")
         .orElse(sys.env.get("SPARK_JAVA_OPTS"))
         .foreach(opts => javaOpts += opts)
@@ -394,6 +396,12 @@ trait ClientBase extends Logging {
     } else {
       Nil
     }
+    val amClass =
+      if (isLaunchingDriver) {
+        classOf[ApplicationMaster].getName()
+      } else {
+        classOf[ApplicationMaster].getName().replace("ApplicationMaster", "ExecutorLauncher")
+      }
     val amArgs =
       Seq(classOf[ApplicationMaster].getName()) ++ userClass ++
       (if (args.userJar != null) Seq("--jar", args.userJar) else Nil) ++
```
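The new `amClass` value selects the AM entry point by deploy mode; the visible hunk ends before `amClass` is consumed, so the line where it replaces the hard-coded class name falls outside this excerpt. A self-contained sketch of the selection logic follows; the fully qualified name is an assumption for illustration, since the diff only shows that the client-mode name is derived from the cluster-mode one by string replacement:

```scala
// Minimal sketch of the mode-based entry-point selection added above.
object AmClassDemo {
  def amClassFor(isLaunchingDriver: Boolean): String = {
    // Fully qualified name assumed for illustration; not shown in the diff.
    val clusterModeClass = "org.apache.spark.deploy.yarn.ApplicationMaster"
    if (isLaunchingDriver) {
      clusterModeClass
    } else {
      // Same implementation, different name, so ps/jps can tell the modes apart.
      clusterModeClass.replace("ApplicationMaster", "ExecutorLauncher")
    }
  }

  def main(args: Array[String]): Unit = {
    println(amClassFor(isLaunchingDriver = true))   // cluster mode: AM runs the user class
    println(amClassFor(isLaunchingDriver = false))  // client mode: AM only manages executors
  }
}
```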
