From 909da96e1471886a01a9e1def93630c4fd40e74a Mon Sep 17 00:00:00 2001
From: Peter Toth
Date: Tue, 10 Jan 2023 18:05:52 +0900
Subject: [PATCH] [SPARK-41958][CORE] Disallow arbitrary custom classpath with
 proxy user in cluster mode

### What changes were proposed in this pull request?

This PR proposes to disallow an arbitrary custom classpath with a proxy user in cluster mode by default.

### Why are the changes needed?

To prevent arbitrary classpath entries from being injected into the Spark cluster when a proxy user is specified.

### Does this PR introduce _any_ user-facing change?

Yes. Users who rely on a custom classpath with a proxy user in cluster mode must re-enable it by setting `spark.submit.proxyUser.allowCustomClasspathInClusterMode` to `true` (see the sketch after the patch).

### How was this patch tested?

Manually tested.

Closes #39474 from Ngone51/dev.

Lead-authored-by: Peter Toth
Co-authored-by: Yi Wu
Signed-off-by: Hyukjin Kwon
---
 .../org/apache/spark/deploy/SparkSubmit.scala    | 15 +++++++++++++++
 .../apache/spark/internal/config/package.scala   |  7 +++++++
 2 files changed, 22 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 65e4367b33afe..a701b0ea60702 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -306,6 +306,10 @@ private[spark] class SparkSubmit extends Logging {
     val isKubernetesClient = clusterManager == KUBERNETES && deployMode == CLIENT
     val isKubernetesClusterModeDriver = isKubernetesClient &&
       sparkConf.getBoolean("spark.kubernetes.submitInDriver", false)
+    val isCustomClasspathInClusterModeDisallowed =
+      !sparkConf.get(ALLOW_CUSTOM_CLASSPATH_BY_PROXY_USER_IN_CLUSTER_MODE) &&
+      args.proxyUser != null &&
+      (isYarnCluster || isMesosCluster || isStandAloneCluster || isKubernetesCluster)
 
     if (!isMesosCluster && !isStandAloneCluster) {
       // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
@@ -887,6 +891,13 @@ private[spark] class SparkSubmit extends Logging {
 
     sparkConf.set("spark.app.submitTime", System.currentTimeMillis().toString)
 
+    if (childClasspath.nonEmpty && isCustomClasspathInClusterModeDisallowed) {
+      childClasspath.clear()
+      logWarning(s"Ignore classpath ${childClasspath.mkString(", ")} with proxy user specified " +
+        s"in Cluster mode when ${ALLOW_CUSTOM_CLASSPATH_BY_PROXY_USER_IN_CLUSTER_MODE.key} is " +
+        s"disabled")
+    }
+
     (childArgs.toSeq, childClasspath.toSeq, sparkConf, childMainClass)
   }
 
@@ -940,6 +951,10 @@ private[spark] class SparkSubmit extends Logging {
       logInfo(s"Classpath elements:\n${childClasspath.mkString("\n")}")
       logInfo("\n")
     }
+    assert(!(args.deployMode == "cluster" && args.proxyUser != null && childClasspath.nonEmpty) ||
+      sparkConf.get(ALLOW_CUSTOM_CLASSPATH_BY_PROXY_USER_IN_CLUSTER_MODE),
+      s"Classpath of spark-submit should not change in cluster mode if proxy user is specified " +
+      s"when ${ALLOW_CUSTOM_CLASSPATH_BY_PROXY_USER_IN_CLUSTER_MODE.key} is disabled")
     val loader = getSubmitClassLoader(sparkConf)
     for (jar <- childClasspath) {
       addJarToClasspath(jar, loader)
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index eb6ac8b765b01..be210cfe59b3d 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -2461,4 +2461,11 @@ package object config {
       .version("3.4.0")
       .timeConf(TimeUnit.MILLISECONDS)
       .createWithDefaultString("5s")
+
+  private[spark] val ALLOW_CUSTOM_CLASSPATH_BY_PROXY_USER_IN_CLUSTER_MODE =
+    ConfigBuilder("spark.submit.proxyUser.allowCustomClasspathInClusterMode")
+      .internal()
+      .version("3.4.0")
+      .booleanConf
+      .createWithDefault(false)
 }
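
For reference, below is a minimal standalone sketch of the guard this patch adds. It is not the actual `SparkSubmit` code; the object, method, and parameter names are made up for illustration. The idea it shows: when a proxy user is specified in any cluster mode and the new config is left at its default of `false`, user-supplied classpath entries are dropped and a warning is logged.

```scala
import scala.collection.mutable.ArrayBuffer

// Hypothetical sketch of the proxy-user classpath guard; names are illustrative only.
object ProxyUserClasspathGuardSketch {

  // Mirrors isCustomClasspathInClusterModeDisallowed: the custom classpath is rejected
  // only when the config is disabled, a proxy user is set, and we are in a cluster mode.
  def dropDisallowedClasspath(
      childClasspath: ArrayBuffer[String],
      proxyUser: Option[String],
      isClusterMode: Boolean,
      allowCustomClasspath: Boolean): Unit = {
    val disallowed = !allowCustomClasspath && proxyUser.isDefined && isClusterMode
    if (childClasspath.nonEmpty && disallowed) {
      // Log before clearing so the warning can list the entries being ignored.
      println(s"WARN Ignoring classpath ${childClasspath.mkString(", ")} for proxy user " +
        s"${proxyUser.get} in cluster mode")
      childClasspath.clear()
    }
  }

  def main(args: Array[String]): Unit = {
    val cp = ArrayBuffer("/tmp/custom.jar")
    dropDisallowedClasspath(cp, proxyUser = Some("alice"), isClusterMode = true,
      allowCustomClasspath = false)
    assert(cp.isEmpty) // entries are dropped under the default (disabled) setting
  }
}
```

To restore the previous behavior, the config added in `package.scala` can be turned back on, for example with `--conf spark.submit.proxyUser.allowCustomClasspathInClusterMode=true` on spark-submit or via spark-defaults.conf.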