From 8a1e172b513ba58763336de83f94e00ceaa69255 Mon Sep 17 00:00:00 2001
From: HyukjinKwon
Date: Wed, 24 Feb 2021 20:38:03 -0800
Subject: [PATCH] [SPARK-34520][CORE] Remove unused SecurityManager references

### What changes were proposed in this pull request?

This is a followup of https://github.com/apache/spark/pull/24033 and https://github.com/apache/spark/pull/30945.
Many of the references to `SecurityManager` were introduced in SPARK-1189, and the related usages were removed later in https://github.com/apache/spark/pull/24033 and https://github.com/apache/spark/pull/30945. This PR proposes to remove them.

### Why are the changes needed?

For better readability of the code.

### Does this PR introduce _any_ user-facing change?

No, dev-only.

### How was this patch tested?

Manually compiled. GitHub Actions and the Jenkins build should test it as well.

Closes #31636 from HyukjinKwon/SPARK-34520.

Authored-by: HyukjinKwon
Signed-off-by: Dongjoon Hyun
---
 .../scala/org/apache/spark/SparkEnv.scala          |  7 +++--
 .../spark/broadcast/BroadcastFactory.scala         |  3 +--
 .../spark/broadcast/BroadcastManager.scala         |  9 +++----
 .../broadcast/TorrentBroadcastFactory.scala        |  5 ++--
 .../spark/deploy/ExternalShuffleService.scala      |  3 +--
 .../apache/spark/deploy/master/Master.scala        |  4 +--
 .../apache/spark/deploy/worker/Worker.scala        |  2 +-
 .../apache/spark/metrics/MetricsSystem.scala       | 27 +++++++------------
 .../spark/metrics/sink/ConsoleSink.scala           |  5 ++--
 .../apache/spark/metrics/sink/CsvSink.scala        |  5 ++--
 .../spark/metrics/sink/GraphiteSink.scala          |  5 ++--
 .../apache/spark/metrics/sink/JmxSink.scala        |  5 ++--
 .../spark/metrics/sink/MetricsServlet.scala        |  7 ++---
 .../metrics/sink/PrometheusServlet.scala           |  7 ++---
 .../apache/spark/metrics/sink/Slf4jSink.scala      |  6 +----
 .../spark/metrics/sink/StatsdSink.scala            |  6 +----
 .../apache/spark/MapOutputTrackerSuite.scala       |  3 +--
 .../spark/metrics/MetricsSystemSuite.scala         | 26 +++++++++---------
 .../metrics/sink/GraphiteSinkSuite.scala           |  8 +++---
 .../metrics/sink/PrometheusServletSuite.scala      |  2 +-
 .../spark/metrics/sink/StatsdSinkSuite.scala       |  5 ++--
 .../spark/scheduler/DAGSchedulerSuite.scala        |  2 +-
 .../BlockManagerReplicationSuite.scala             |  2 +-
 .../spark/storage/BlockManagerSuite.scala          |  2 +-
 .../cluster/mesos/MesosClusterScheduler.scala      |  5 ++--
 .../spark/deploy/yarn/ApplicationMaster.scala      |  3 +--
 .../streaming/ReceivedBlockHandlerSuite.scala      |  2 +-
 27 files changed, 64 insertions(+), 102 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 9fc60ac3990fc..ed8dc43b16c96 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -314,7 +314,7 @@ object SparkEnv extends Logging {
       }
     }
 
-    val broadcastManager = new BroadcastManager(isDriver, conf, securityManager)
+    val broadcastManager = new BroadcastManager(isDriver, conf)
 
     val mapOutputTracker = if (isDriver) {
       new MapOutputTrackerMaster(conf, broadcastManager, isLocal)
@@ -397,14 +397,13 @@ object SparkEnv extends Logging {
       // Don't start metrics system right now for Driver.
      // We need to wait for the task scheduler to give us an app ID.
       // Then we can start the metrics system.
-      MetricsSystem.createMetricsSystem(MetricsSystemInstances.DRIVER, conf, securityManager)
+      MetricsSystem.createMetricsSystem(MetricsSystemInstances.DRIVER, conf)
     } else {
       // We need to set the executor ID before the MetricsSystem is created because sources and
       // sinks specified in the metrics configuration file will want to incorporate this executor's
       // ID into the metrics they report.
       conf.set(EXECUTOR_ID, executorId)
-      val ms = MetricsSystem.createMetricsSystem(MetricsSystemInstances.EXECUTOR, conf,
-        securityManager)
+      val ms = MetricsSystem.createMetricsSystem(MetricsSystemInstances.EXECUTOR, conf)
       ms.start(conf.get(METRICS_STATIC_SOURCES_ENABLED))
       ms
     }
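The `SparkEnv` wiring above now reduces to plain two-argument calls. A minimal sketch of the resulting driver-side construction (hypothetical object name and placeholder IDs; the snippet has to sit in the `org.apache.spark` package because both classes are `private[spark]`):

```scala
package org.apache.spark

import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.metrics.{MetricsSystem, MetricsSystemInstances}

object DriverWiringSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder IDs so that metric registry names resolve without warnings.
    val conf = new SparkConf(false)
      .set("spark.app.id", "app-local-0000")
      .set("spark.executor.id", "driver")

    // The two-argument forms after this patch; no SecurityManager is threaded through.
    val broadcastManager = new BroadcastManager(true, conf)
    val metricsSystem = MetricsSystem.createMetricsSystem(MetricsSystemInstances.DRIVER, conf)
    metricsSystem.start()
    metricsSystem.stop()
  }
}
```

Call sites that still need a `SecurityManager` for other purposes (for example `ExternalShuffleService` below) keep constructing one; it just no longer flows into `BroadcastManager` or `MetricsSystem`.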
diff --git a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
index ece4ae6ab0310..9891582501b8b 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -19,7 +19,6 @@ package org.apache.spark.broadcast
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.SparkConf
 
 /**
@@ -29,7 +28,7 @@ import org.apache.spark.SparkConf
  */
 private[spark] trait BroadcastFactory {
 
-  def initialize(isDriver: Boolean, conf: SparkConf, securityMgr: SecurityManager): Unit
+  def initialize(isDriver: Boolean, conf: SparkConf): Unit
 
   /**
    * Creates a new broadcast variable.
diff --git a/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
index c93cadf1ab3e8..989a1941d1791 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastManager.scala
@@ -24,15 +24,12 @@ import scala.reflect.ClassTag
 
 import org.apache.commons.collections.map.{AbstractReferenceMap, ReferenceMap}
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.SparkConf
 import org.apache.spark.api.python.PythonBroadcast
 import org.apache.spark.internal.Logging
 
 private[spark] class BroadcastManager(
-    val isDriver: Boolean,
-    conf: SparkConf,
-    securityManager: SecurityManager)
-  extends Logging {
+    val isDriver: Boolean, conf: SparkConf) extends Logging {
 
   private var initialized = false
   private var broadcastFactory: BroadcastFactory = null
@@ -44,7 +41,7 @@ private[spark] class BroadcastManager(
     synchronized {
       if (!initialized) {
         broadcastFactory = new TorrentBroadcastFactory
-        broadcastFactory.initialize(isDriver, conf, securityManager)
+        broadcastFactory.initialize(isDriver, conf)
         initialized = true
       }
     }
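With the `securityMgr` parameter gone, `BroadcastFactory.initialize` is a plain two-argument contract. A sketch of what a custom factory has to provide after this change (hypothetical `LoggingBroadcastFactory`; the member signatures mirror `TorrentBroadcastFactory` as shown in the diff below):

```scala
package org.apache.spark.broadcast

import scala.reflect.ClassTag

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging

// Hypothetical factory that just logs its lifecycle; broadcast creation and
// cleanup are delegated to the torrent implementation.
private[spark] class LoggingBroadcastFactory extends BroadcastFactory with Logging {

  // Two-argument initialize: the SecurityManager parameter no longer exists.
  override def initialize(isDriver: Boolean, conf: SparkConf): Unit = {
    logInfo(s"Initialized broadcast factory (isDriver = $isDriver)")
  }

  override def newBroadcast[T: ClassTag](value: T, isLocal: Boolean, id: Long): Broadcast[T] = {
    logInfo(s"Creating broadcast $id")
    new TorrentBroadcast[T](value, id)
  }

  override def unbroadcast(id: Long, removeFromDriver: Boolean, blocking: Boolean): Unit = {
    TorrentBroadcast.unpersist(id, removeFromDriver, blocking)
  }

  override def stop(): Unit = logInfo("Stopped broadcast factory")
}
```

`TorrentBroadcastFactory` keeps `initialize` empty, so dropping the extra parameter loses nothing.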
diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcastFactory.scala
index 65fb5186afae1..6846e1967c4d6 100644
--- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcastFactory.scala
@@ -19,7 +19,7 @@ package org.apache.spark.broadcast
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.SparkConf
 
 /**
  * A [[org.apache.spark.broadcast.Broadcast]] implementation that uses a BitTorrent-like
@@ -28,8 +28,7 @@ import org.apache.spark.{SecurityManager, SparkConf}
  */
 private[spark] class TorrentBroadcastFactory extends BroadcastFactory {
 
-  override def initialize(isDriver: Boolean, conf: SparkConf,
-      securityMgr: SecurityManager): Unit = { }
+  override def initialize(isDriver: Boolean, conf: SparkConf): Unit = { }
 
   override def newBroadcast[T: ClassTag](value_ : T, isLocal: Boolean, id: Long): Broadcast[T] = {
     new TorrentBroadcast[T](value_, id)
diff --git a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
index ebfff89308886..eff1e15659fc4 100644
--- a/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ExternalShuffleService.scala
@@ -44,8 +44,7 @@ private[deploy] class ExternalShuffleService(sparkConf: SparkConf,
     securityManager: SecurityManager) extends Logging {
 
   protected val masterMetricsSystem =
-    MetricsSystem.createMetricsSystem(MetricsSystemInstances.SHUFFLE_SERVICE,
-      sparkConf, securityManager)
+    MetricsSystem.createMetricsSystem(MetricsSystemInstances.SHUFFLE_SERVICE, sparkConf)
 
   private val enabled = sparkConf.get(config.SHUFFLE_SERVICE_ENABLED)
   private val port = sparkConf.get(config.SHUFFLE_SERVICE_PORT)
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 471a3c1b45c39..c964e343ca6c9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -87,9 +87,9 @@ private[deploy] class Master(
   Utils.checkHost(address.host)
 
   private val masterMetricsSystem =
-    MetricsSystem.createMetricsSystem(MetricsSystemInstances.MASTER, conf, securityMgr)
+    MetricsSystem.createMetricsSystem(MetricsSystemInstances.MASTER, conf)
   private val applicationMetricsSystem =
-    MetricsSystem.createMetricsSystem(MetricsSystemInstances.APPLICATIONS, conf, securityMgr)
+    MetricsSystem.createMetricsSystem(MetricsSystemInstances.APPLICATIONS, conf)
   private val masterSource = new MasterSource(this)
 
   // After onStart, webUi will be set
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index adc953286625a..05e8e5a6b6766 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -190,7 +190,7 @@ private[deploy] class Worker(
   private var connectionAttemptCount = 0
 
   private val metricsSystem =
-    MetricsSystem.createMetricsSystem(MetricsSystemInstances.WORKER, conf, securityMgr)
+    MetricsSystem.createMetricsSystem(MetricsSystemInstances.WORKER, conf)
   private val workerSource = new WorkerSource(this)
 
   val reverseProxy = conf.get(UI_REVERSE_PROXY)
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 48f816f649d36..b0c424bdc3f99 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -25,7 +25,7 @@ import scala.collection.mutable
 import com.codahale.metrics.{Metric, MetricRegistry}
 import org.eclipse.jetty.servlet.ServletContextHandler
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.SparkConf
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.metrics.sink.{MetricsServlet, PrometheusServlet, Sink}
@@ -68,10 +68,7 @@ import org.apache.spark.util.Utils
  * [options] represent the specific property of this source or sink.
  */
 private[spark] class MetricsSystem private (
-    val instance: String,
-    conf: SparkConf,
-    securityMgr: SecurityManager)
-  extends Logging {
+    val instance: String, conf: SparkConf) extends Logging {
 
   private[this] val metricsConfig = new MetricsConfig(conf)
 
@@ -200,21 +197,18 @@ private[spark] class MetricsSystem private (
       try {
         if (kv._1 == "servlet") {
           val servlet = Utils.classForName[MetricsServlet](classPath)
-            .getConstructor(
-              classOf[Properties], classOf[MetricRegistry], classOf[SecurityManager])
-            .newInstance(kv._2, registry, securityMgr)
+            .getConstructor(classOf[Properties], classOf[MetricRegistry])
+            .newInstance(kv._2, registry)
           metricsServlet = Some(servlet)
         } else if (kv._1 == "prometheusServlet") {
           val servlet = Utils.classForName[PrometheusServlet](classPath)
-            .getConstructor(
-              classOf[Properties], classOf[MetricRegistry], classOf[SecurityManager])
-            .newInstance(kv._2, registry, securityMgr)
+            .getConstructor(classOf[Properties], classOf[MetricRegistry])
+            .newInstance(kv._2, registry)
           prometheusServlet = Some(servlet)
         } else {
           val sink = Utils.classForName[Sink](classPath)
-            .getConstructor(
-              classOf[Properties], classOf[MetricRegistry], classOf[SecurityManager])
-            .newInstance(kv._2, registry, securityMgr)
+            .getConstructor(classOf[Properties], classOf[MetricRegistry])
+            .newInstance(kv._2, registry)
           sinks += sink
         }
       } catch {
@@ -242,9 +236,8 @@ private[spark] object MetricsSystem {
     }
   }
 
-  def createMetricsSystem(
-      instance: String, conf: SparkConf, securityMgr: SecurityManager): MetricsSystem = {
-    new MetricsSystem(instance, conf, securityMgr)
+  def createMetricsSystem(instance: String, conf: SparkConf): MetricsSystem = {
+    new MetricsSystem(instance, conf)
   }
 }
 
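The reflective lookup above is the contract change in one place: a sink named in the metrics configuration (for example `*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink`) must now expose a `(Properties, MetricRegistry)` constructor. A condensed sketch of that lookup (hypothetical object; the real code path above additionally special-cases the `servlet` and `prometheusServlet` keys against `MetricsServlet` and `PrometheusServlet`):

```scala
package org.apache.spark.metrics

import java.util.Properties

import com.codahale.metrics.MetricRegistry

import org.apache.spark.metrics.sink.Sink
import org.apache.spark.util.Utils

object SinkLoadingSketch {
  // After this patch the constructor looked up reflectively takes only
  // (Properties, MetricRegistry); no SecurityManager is passed along.
  def loadSink(classPath: String, props: Properties, registry: MetricRegistry): Sink = {
    Utils.classForName[Sink](classPath)
      .getConstructor(classOf[Properties], classOf[MetricRegistry])
      .newInstance(props, registry)
  }

  def main(args: Array[String]): Unit = {
    val props = new Properties()
    props.put("period", "10")
    props.put("unit", "seconds")
    val sink = loadSink("org.apache.spark.metrics.sink.ConsoleSink", props, new MetricRegistry)
    sink.start()
    sink.stop()
  }
}
```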
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index bfd23168e4003..c8a3e4488a019 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -22,11 +22,10 @@ import java.util.concurrent.TimeUnit
 
 import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.metrics.MetricsSystem
 
-private[spark] class ConsoleSink(val property: Properties, val registry: MetricRegistry,
-    securityMgr: SecurityManager) extends Sink {
+private[spark] class ConsoleSink(
+    val property: Properties, val registry: MetricRegistry) extends Sink {
   val CONSOLE_DEFAULT_PERIOD = 10
   val CONSOLE_DEFAULT_UNIT = "SECONDS"
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index 579b8e0c0e984..101691f640029 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -23,11 +23,10 @@ import java.util.concurrent.TimeUnit
 
 import com.codahale.metrics.{CsvReporter, MetricRegistry}
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.metrics.MetricsSystem
 
-private[spark] class CsvSink(val property: Properties, val registry: MetricRegistry,
-    securityMgr: SecurityManager) extends Sink {
+private[spark] class CsvSink(
+    val property: Properties, val registry: MetricRegistry) extends Sink {
   val CSV_KEY_PERIOD = "period"
   val CSV_KEY_UNIT = "unit"
   val CSV_KEY_DIR = "directory"
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
index 6ce64cd3543fe..1c59e191db531 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala
@@ -23,11 +23,10 @@ import java.util.concurrent.TimeUnit
 import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
 import com.codahale.metrics.graphite.{Graphite, GraphiteReporter, GraphiteUDP}
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.metrics.MetricsSystem
 
-private[spark] class GraphiteSink(val property: Properties, val registry: MetricRegistry,
-    securityMgr: SecurityManager) extends Sink {
+private[spark] class GraphiteSink(
+    val property: Properties, val registry: MetricRegistry) extends Sink {
   val GRAPHITE_DEFAULT_PERIOD = 10
   val GRAPHITE_DEFAULT_UNIT = "SECONDS"
   val GRAPHITE_DEFAULT_PREFIX = ""
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index a7b7b5573cfe8..7ca581aee6ba6 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -22,10 +22,9 @@ import java.util.Properties
 import com.codahale.metrics.MetricRegistry
 import com.codahale.metrics.jmx.JmxReporter
 
-import org.apache.spark.SecurityManager
-
-private[spark] class JmxSink(val property: Properties, val registry: MetricRegistry,
-    securityMgr: SecurityManager) extends Sink {
+private[spark] class JmxSink(
+    val property: Properties, val registry: MetricRegistry) extends Sink {
 
   val reporter: JmxReporter = JmxReporter.forRegistry(registry).build()
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 7dd27d4fb9bf3..46d2c6821fea1 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -26,14 +26,11 @@ import com.codahale.metrics.json.MetricsModule
 import com.fasterxml.jackson.databind.ObjectMapper
 import org.eclipse.jetty.servlet.ServletContextHandler
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.SparkConf
 import org.apache.spark.ui.JettyUtils._
 
 private[spark] class MetricsServlet(
-    val property: Properties,
-    val registry: MetricRegistry,
-    securityMgr: SecurityManager)
-  extends Sink {
+    val property: Properties, val registry: MetricRegistry) extends Sink {
 
   val SERVLET_KEY_PATH = "path"
   val SERVLET_KEY_SAMPLE = "sample"
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala
index 7cc2665ee7eee..c087ee7c000c3 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/PrometheusServlet.scala
@@ -23,7 +23,7 @@ import javax.servlet.http.HttpServletRequest
 import com.codahale.metrics.MetricRegistry
 import org.eclipse.jetty.servlet.ServletContextHandler
 
-import org.apache.spark.{SecurityManager, SparkConf}
+import org.apache.spark.SparkConf
 import org.apache.spark.ui.JettyUtils._
 
 /**
@@ -34,10 +34,7 @@ import org.apache.spark.ui.JettyUtils._
  * in terms of key string format.
 */
 private[spark] class PrometheusServlet(
-    val property: Properties,
-    val registry: MetricRegistry,
-    securityMgr: SecurityManager)
-  extends Sink {
+    val property: Properties, val registry: MetricRegistry) extends Sink {
 
   val SERVLET_KEY_PATH = "path"
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
index 968d5ca809e72..728687f8f78ba 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/Slf4jSink.scala
@@ -22,14 +22,10 @@ import java.util.concurrent.TimeUnit
 
 import com.codahale.metrics.{MetricRegistry, Slf4jReporter}
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.metrics.MetricsSystem
 
 private[spark] class Slf4jSink(
-    val property: Properties,
-    val registry: MetricRegistry,
-    securityMgr: SecurityManager)
-  extends Sink {
+    val property: Properties, val registry: MetricRegistry) extends Sink {
   val SLF4J_DEFAULT_PERIOD = 10
   val SLF4J_DEFAULT_UNIT = "SECONDS"
 
diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/StatsdSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/StatsdSink.scala
index 61e74e05169cc..c6e7bcccd4ce9 100644
--- a/core/src/main/scala/org/apache/spark/metrics/sink/StatsdSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/StatsdSink.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.TimeUnit
 
 import com.codahale.metrics.MetricRegistry
 
-import org.apache.spark.SecurityManager
 import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.MetricsSystem
 
@@ -41,10 +40,7 @@ private[spark] object StatsdSink {
 }
 
 private[spark] class StatsdSink(
-    val property: Properties,
-    val registry: MetricRegistry,
-    securityMgr: SecurityManager)
-  extends Sink with Logging {
+    val property: Properties, val registry: MetricRegistry) extends Sink with Logging {
   import StatsdSink._
 
   val host = property.getProperty(STATSD_KEY_HOST, STATSD_DEFAULT_HOST)
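All of the bundled sinks above now share the same two-argument shape, and the same applies to any external sink wired in through `spark.metrics.conf`, since `MetricsSystem` finds the constructor reflectively. A minimal hypothetical sink under those assumptions:

```scala
package org.apache.spark.metrics.sink

import java.util.Properties

import com.codahale.metrics.MetricRegistry

// Hypothetical sink showing the constructor shape MetricsSystem expects
// after this patch: (Properties, MetricRegistry), with no SecurityManager.
private[spark] class StdoutCountSink(
    val property: Properties, val registry: MetricRegistry) extends Sink {

  override def start(): Unit = report()

  override def stop(): Unit = { }

  override def report(): Unit =
    println(s"${registry.getMetrics.size()} metrics currently registered")
}
```

It would be enabled with something like `*.sink.stdoutcount.class=org.apache.spark.metrics.sink.StdoutCountSink` in `metrics.properties` (a hypothetical entry, following the `[instance].sink.[sink_name].[options]` scheme).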
diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index b5b68f639ffc9..20b040f7c810d 100644
--- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -35,8 +35,7 @@ class MapOutputTrackerSuite extends SparkFunSuite {
   private val conf = new SparkConf
 
   private def newTrackerMaster(sparkConf: SparkConf = conf) = {
-    val broadcastManager = new BroadcastManager(true, sparkConf,
-      new SecurityManager(sparkConf))
+    val broadcastManager = new BroadcastManager(true, sparkConf)
     new MapOutputTrackerMaster(sparkConf, broadcastManager, true)
   }
 
diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index 70b6c9a112142..31d8492510f06 100644
--- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -40,7 +40,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
   }
 
   test("MetricsSystem with default config") {
-    val metricsSystem = MetricsSystem.createMetricsSystem("default", conf, securityMgr)
+    val metricsSystem = MetricsSystem.createMetricsSystem("default", conf)
     metricsSystem.start()
     val sources = PrivateMethod[ArrayBuffer[Source]](Symbol("sources"))
     val sinks = PrivateMethod[ArrayBuffer[Sink]](Symbol("sinks"))
@@ -51,7 +51,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
   }
 
   test("MetricsSystem with sources add") {
-    val metricsSystem = MetricsSystem.createMetricsSystem("test", conf, securityMgr)
+    val metricsSystem = MetricsSystem.createMetricsSystem("test", conf)
     metricsSystem.start()
     val sources = PrivateMethod[ArrayBuffer[Source]](Symbol("sources"))
     val sinks = PrivateMethod[ArrayBuffer[Sink]](Symbol("sinks"))
@@ -77,7 +77,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = MetricsSystemInstances.DRIVER
-    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = driverMetricsSystem.buildRegistryName(source)
     assert(metricName === s"$appId.$executorId.${source.sourceName}")
@@ -93,7 +93,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = MetricsSystemInstances.DRIVER
-    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = driverMetricsSystem.buildRegistryName(source)
     assert(metricName === source.sourceName)
@@ -109,7 +109,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.app.id", appId)
 
     val instanceName = MetricsSystemInstances.DRIVER
-    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val driverMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = driverMetricsSystem.buildRegistryName(source)
     assert(metricName === source.sourceName)
@@ -127,7 +127,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     assert(metricName === s"$appId.$executorId.${source.sourceName}")
@@ -143,7 +143,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     assert(metricName === source.sourceName)
@@ -159,7 +159,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.app.id", appId)
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     assert(metricName === source.sourceName)
@@ -177,7 +177,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = "testInstance"
-    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = testMetricsSystem.buildRegistryName(source)
 
@@ -201,7 +201,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set(METRICS_NAMESPACE, "${spark.app.name}")
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     assert(metricName === s"$appName.$executorId.${source.sourceName}")
@@ -219,7 +219,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set(METRICS_NAMESPACE, namespaceToResolve)
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     // If the user set the spark.metrics.namespace property to an expansion of another property
@@ -239,7 +239,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set(METRICS_NAMESPACE, "${spark.app.name}")
 
     val instanceName = MetricsSystemInstances.EXECUTOR
-    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val executorMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = executorMetricsSystem.buildRegistryName(source)
     assert(metricName === source.sourceName)
@@ -260,7 +260,7 @@ class MetricsSystemSuite extends SparkFunSuite with BeforeAndAfter with PrivateM
     conf.set("spark.executor.id", executorId)
 
     val instanceName = "testInstance"
-    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf, securityMgr)
+    val testMetricsSystem = MetricsSystem.createMetricsSystem(instanceName, conf)
 
     val metricName = testMetricsSystem.buildRegistryName(source)
 
diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
index 2369218830215..cf34121fe73dc 100644
--- a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
 
 import com.codahale.metrics._
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.SparkFunSuite
 
 class GraphiteSinkSuite extends SparkFunSuite {
 
@@ -32,9 +32,8 @@ class GraphiteSinkSuite extends SparkFunSuite {
     props.put("host", "127.0.0.1")
     props.put("port", "54321")
     val registry = new MetricRegistry
-    val securityMgr = new SecurityManager(new SparkConf(false))
 
-    val sink = new GraphiteSink(props, registry, securityMgr)
+    val sink = new GraphiteSink(props, registry)
 
     val gauge = new Gauge[Double] {
       override def getValue: Double = 1.23
@@ -55,9 +54,8 @@ class GraphiteSinkSuite extends SparkFunSuite {
     props.put("port", "54321")
     props.put("regex", "local-[0-9]+.driver.(CodeGenerator|BlockManager)")
     val registry = new MetricRegistry
-    val securityMgr = new SecurityManager(new SparkConf(false))
 
-    val sink = new GraphiteSink(props, registry, securityMgr)
+    val sink = new GraphiteSink(props, registry)
 
     val gauge = new Gauge[Double] {
       override def getValue: Double = 1.23
diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala
index 080ca0e41f793..4b5b41c14a21e 100644
--- a/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/sink/PrometheusServletSuite.scala
@@ -69,5 +69,5 @@ class PrometheusServletSuite extends SparkFunSuite with PrivateMethodTester {
   }
 
   private def createPrometheusServlet(): PrometheusServlet =
-    new PrometheusServlet(new Properties, new MetricRegistry, securityMgr = null)
+    new PrometheusServlet(new Properties, new MetricRegistry)
 }
diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala
index 3d4b8c868d6fc..ff883633d5e7a 100644
--- a/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/sink/StatsdSinkSuite.scala
@@ -24,11 +24,10 @@ import java.util.concurrent.TimeUnit._
 
 import com.codahale.metrics._
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
+import org.apache.spark.SparkFunSuite
 import org.apache.spark.metrics.sink.StatsdSink._
 
 class StatsdSinkSuite extends SparkFunSuite {
-  private val securityMgr = new SecurityManager(new SparkConf(false))
   private val defaultProps = Map(
     STATSD_KEY_PREFIX -> "spark",
     STATSD_KEY_PERIOD -> "1",
@@ -61,7 +60,7 @@ class StatsdSinkSuite extends SparkFunSuite {
     defaultProps.foreach(e => props.put(e._1, e._2))
     props.put(STATSD_KEY_PORT, socket.getLocalPort.toString)
     val registry = new MetricRegistry
-    val sink = new StatsdSink(props, registry, securityMgr)
+    val sink = new StatsdSink(props, registry)
     try {
       testCode(socket, sink)
     } finally {
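The suites above now build sinks with the two-argument constructor directly. A condensed, hypothetical variant of the `StatsdSinkSuite` setup (assuming the suite's property keys and an ephemeral UDP socket; placed in `org.apache.spark.metrics.sink` because the sink is `private[spark]`):

```scala
package org.apache.spark.metrics.sink

import java.net.{DatagramPacket, DatagramSocket}
import java.nio.charset.StandardCharsets.UTF_8
import java.util.Properties

import com.codahale.metrics.MetricRegistry

import StatsdSink._

object StatsdSinkSketch {
  def main(args: Array[String]): Unit = {
    val socket = new DatagramSocket() // picks an ephemeral local UDP port
    try {
      val props = new Properties
      props.put(STATSD_KEY_HOST, "127.0.0.1")
      props.put(STATSD_KEY_PORT, socket.getLocalPort.toString)
      props.put(STATSD_KEY_PREFIX, "spark")
      props.put(STATSD_KEY_PERIOD, "1")

      val registry = new MetricRegistry
      registry.counter("counter").inc()

      val sink = new StatsdSink(props, registry) // two-argument constructor
      sink.report()

      val buffer = new Array[Byte](1024)
      val packet = new DatagramPacket(buffer, buffer.length)
      socket.receive(packet)
      println(new String(packet.getData, 0, packet.getLength, UTF_8))
    } finally {
      socket.close()
    }
  }
}
```

The real suite additionally sets a receive timeout and the remaining default properties; this sketch keeps only what the two-argument constructor needs.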
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 6b332ec1298f5..4c74e4fbb3728 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -319,7 +319,7 @@ class DAGSchedulerSuite extends SparkFunSuite with TempLocalSparkContext with Ti
     cacheLocations.clear()
     results.clear()
     securityMgr = new SecurityManager(sc.getConf)
-    broadcastManager = new BroadcastManager(true, sc.getConf, securityMgr)
+    broadcastManager = new BroadcastManager(true, sc.getConf)
     mapOutputTracker = spy(new MyMapOutputTrackerMaster(sc.getConf, broadcastManager))
     blockManagerMaster = spy(new MyBlockManagerMaster(sc.getConf))
     scheduler = new DAGScheduler(
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
index 1e9b48102616f..495747b2c7c11 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala
@@ -55,7 +55,7 @@ trait BlockManagerReplicationBehavior extends SparkFunSuite
   protected var rpcEnv: RpcEnv = null
   protected var master: BlockManagerMaster = null
   protected lazy val securityMgr = new SecurityManager(conf)
-  protected lazy val bcastManager = new BroadcastManager(true, conf, securityMgr)
+  protected lazy val bcastManager = new BroadcastManager(true, conf)
   protected lazy val mapOutputTracker = new MapOutputTrackerMaster(conf, bcastManager, true)
   protected lazy val shuffleManager = new SortShuffleManager(conf)
 
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 82d7abfddd82b..055ee0debeb12 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -79,7 +79,7 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterE
   var master: BlockManagerMaster = null
   var liveListenerBus: LiveListenerBus = null
   val securityMgr = new SecurityManager(new SparkConf(false))
-  val bcastManager = new BroadcastManager(true, new SparkConf(false), securityMgr)
+  val bcastManager = new BroadcastManager(true, new SparkConf(false))
   val mapOutputTracker = new MapOutputTrackerMaster(new SparkConf(false), bcastManager, true)
   val shuffleManager = new SortShuffleManager(new SparkConf(false))
 
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
index c7e0869e4bd5c..16cffd03135df 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala
@@ -30,7 +30,7 @@ import org.apache.mesos.Protos.{SlaveID => AgentID, TaskState => MesosTaskState,
 import org.apache.mesos.Protos.Environment.Variable
 import org.apache.mesos.Protos.TaskStatus.Reason
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkException, TaskState}
+import org.apache.spark.{SparkConf, SparkException, TaskState}
 import org.apache.spark.deploy.mesos.{config, MesosDriverDescription}
 import org.apache.spark.deploy.rest.{CreateSubmissionResponse, KillSubmissionResponse, SubmissionStatusResponse}
 import org.apache.spark.internal.config._
@@ -125,8 +125,7 @@ private[spark] class MesosClusterScheduler(
   extends Scheduler with MesosSchedulerUtils with MesosScheduler {
   var frameworkUrl: String = _
   private val metricsSystem =
-    MetricsSystem.createMetricsSystem(MetricsSystemInstances.MESOS_CLUSTER, conf,
-      new SecurityManager(conf))
+    MetricsSystem.createMetricsSystem(MetricsSystemInstances.MESOS_CLUSTER, conf)
   private val master = conf.get("spark.master")
   private val appName = conf.get("spark.app.name")
   private val queuedCapacity = conf.get(config.MAX_DRIVERS)
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index eb927a3c296c0..e7377e05479c5 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -481,8 +481,7 @@ private[spark] class ApplicationMaster(
       rpcEnv.setupEndpoint("YarnAM", new AMEndpoint(rpcEnv, driverRef))
 
       allocator.allocateResources()
-      val ms = MetricsSystem.createMetricsSystem(MetricsSystemInstances.APPLICATION_MASTER,
-        sparkConf, securityMgr)
+      val ms = MetricsSystem.createMetricsSystem(MetricsSystemInstances.APPLICATION_MASTER, sparkConf)
       val prefix = _sparkConf.get(YARN_METRICS_NAMESPACE).getOrElse(appId)
       ms.registerSource(new ApplicationMasterSource(prefix, allocator))
       // do not register static sources in this case as per SPARK-25277
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
index 913ab1f46d59e..425e39c5980a1 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockHandlerSuite.scala
@@ -71,7 +71,7 @@ abstract class BaseReceivedBlockHandlerSuite(enableEncryption: Boolean)
   val hadoopConf = new Configuration()
   val streamId = 1
   val securityMgr = new SecurityManager(conf, encryptionKey)
-  val broadcastManager = new BroadcastManager(true, conf, securityMgr)
+  val broadcastManager = new BroadcastManager(true, conf)
   val mapOutputTracker = new MapOutputTrackerMaster(conf, broadcastManager, true)
   val shuffleManager = new SortShuffleManager(conf)
   val serializer = new KryoSerializer(conf)