[SPARK-18204][WEBUI] Remove SparkUI.appUIAddress
## What changes were proposed in this pull request?

Removing the `appUIAddress` attribute since it is no longer in use; callers now use `webUrl` instead.
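
For reference, a minimal sketch of what the caller-side change looks like (not part of this commit's diff). `WebUrlExample` and `uiAddress` are hypothetical names, and the sketch assumes Spark-internal code, since `SparkContext.ui` is `private[spark]`:

```scala
package org.apache.spark

// Hypothetical illustration only: SparkContext.ui is private[spark], so this
// sketch assumes it lives inside the org.apache.spark package.
object WebUrlExample {
  // Before this change: sc.ui.map(_.appUIAddress).getOrElse("")
  // After this change: webUrl already carries the scheme (http:// or https://),
  // so no extra prefixing with appUIHostPort is needed.
  def uiAddress(sc: SparkContext): String =
    sc.ui.map(_.webUrl).getOrElse("")
}
```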
## How was this patch tested?

Local build

Author: Jacek Laskowski <[email protected]>

Closes #15603 from jaceklaskowski/sparkui-fixes.
jaceklaskowski authored and srowen committed Nov 2, 2016
1 parent 98ede49 commit 70a5db7
Showing 11 changed files with 36 additions and 44 deletions.
@@ -93,7 +93,7 @@ private[spark] class StandaloneSchedulerBackend(
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
- val appUIAddress = sc.ui.map(_.appUIAddress).getOrElse("")
+ val webUrl = sc.ui.map(_.webUrl).getOrElse("")
val coresPerExecutor = conf.getOption("spark.executor.cores").map(_.toInt)
// If we're using dynamic allocation, set our initial executor limit to 0 for now.
// ExecutorAllocationManager will send the real initial limit to the Master later.
@@ -103,8 +103,8 @@ private[spark] class StandaloneSchedulerBackend(
} else {
None
}
- val appDesc = new ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
-   appUIAddress, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
+ val appDesc = ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
+   webUrl, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit)
client = new StandaloneAppClient(sc.env.rpcEnv, masters, appDesc, this, conf)
client.start()
launcherBackend.setState(SparkAppHandle.State.SUBMITTED)
13 changes: 3 additions & 10 deletions core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -82,7 +82,7 @@ private[spark] class SparkUI private (
initialize()

def getSparkUser: String = {
- environmentListener.systemProperties.toMap.get("user.name").getOrElse("<unknown>")
+ environmentListener.systemProperties.toMap.getOrElse("user.name", "<unknown>")
}

def getAppName: String = appName
@@ -94,16 +94,9 @@ private[spark] class SparkUI private (
/** Stop the server behind this web interface. Only valid after bind(). */
override def stop() {
super.stop()
logInfo("Stopped Spark web UI at %s".format(appUIAddress))
logInfo(s"Stopped Spark web UI at $webUrl")
}

- /**
-  * Return the application UI host:port. This does not include the scheme (http://).
-  */
- private[spark] def appUIHostPort = publicHostName + ":" + boundPort
-
- private[spark] def appUIAddress = s"http://$appUIHostPort"
-
def getSparkUI(appId: String): Option[SparkUI] = {
if (appId == this.appId) Some(this) else None
}
@@ -136,7 +129,7 @@ private[spark] class SparkUI private (
private[spark] abstract class SparkUITab(parent: SparkUI, prefix: String)
extends WebUITab(parent, prefix) {

- def appName: String = parent.getAppName
+ def appName: String = parent.appName

}

8 changes: 4 additions & 4 deletions core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -56,8 +56,8 @@ private[spark] abstract class WebUI(
private val className = Utils.getFormattedClassName(this)

def getBasePath: String = basePath
- def getTabs: Seq[WebUITab] = tabs.toSeq
- def getHandlers: Seq[ServletContextHandler] = handlers.toSeq
+ def getTabs: Seq[WebUITab] = tabs
+ def getHandlers: Seq[ServletContextHandler] = handlers
def getSecurityManager: SecurityManager = securityManager

/** Attach a tab to this UI, along with all of its attached pages. */
@@ -133,7 +133,7 @@ private[spark] abstract class WebUI(
def initialize(): Unit

/** Bind to the HTTP server behind this web interface. */
- def bind() {
+ def bind(): Unit = {
assert(!serverInfo.isDefined, s"Attempted to bind $className more than once!")
try {
val host = Option(conf.getenv("SPARK_LOCAL_IP")).getOrElse("0.0.0.0")
@@ -156,7 +156,7 @@ private[spark] abstract class WebUI(
def boundPort: Int = serverInfo.map(_.boundPort).getOrElse(-1)

/** Stop the server behind this web interface. Only valid after bind(). */
- def stop() {
+ def stop(): Unit = {
assert(serverInfo.isDefined,
s"Attempted to stop $className before binding to a server!")
serverInfo.get.stop()
@@ -289,8 +289,8 @@ private[ui] class AllJobsPage(parent: JobsTab) extends WebUIPage("") {
val startTime = listener.startTime
val endTime = listener.endTime
val activeJobs = listener.activeJobs.values.toSeq
- val completedJobs = listener.completedJobs.reverse.toSeq
- val failedJobs = listener.failedJobs.reverse.toSeq
+ val completedJobs = listener.completedJobs.reverse
+ val failedJobs = listener.failedJobs.reverse

val activeJobsTable =
jobsTable(request, "active", "activeJob", activeJobs, killEnabled = parent.killEnabled)
16 changes: 8 additions & 8 deletions core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -473,7 +473,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(1 to 10).map{x => Thread.sleep(10000); x}.countAsync()
eventually(timeout(5 seconds), interval(50 milliseconds)) {
val url = new URL(
- sc.ui.get.appUIAddress.stripSuffix("/") + "/stages/stage/kill/?id=0")
+ sc.ui.get.webUrl.stripSuffix("/") + "/stages/stage/kill/?id=0")
// SPARK-6846: should be POST only but YARN AM doesn't proxy POST
getResponseCode(url, "GET") should be (200)
getResponseCode(url, "POST") should be (200)
@@ -486,7 +486,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(1 to 10).map{x => Thread.sleep(10000); x}.countAsync()
eventually(timeout(5 seconds), interval(50 milliseconds)) {
val url = new URL(
- sc.ui.get.appUIAddress.stripSuffix("/") + "/jobs/job/kill/?id=0")
+ sc.ui.get.webUrl.stripSuffix("/") + "/jobs/job/kill/?id=0")
// SPARK-6846: should be POST only but YARN AM doesn't proxy POST
getResponseCode(url, "GET") should be (200)
getResponseCode(url, "POST") should be (200)
@@ -620,7 +620,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
test("live UI json application list") {
withSpark(newSparkContext()) { sc =>
val appListRawJson = HistoryServerSuite.getUrl(new URL(
- sc.ui.get.appUIAddress + "/api/v1/applications"))
+ sc.ui.get.webUrl + "/api/v1/applications"))
val appListJsonAst = JsonMethods.parse(appListRawJson)
appListJsonAst.children.length should be (1)
val attempts = (appListJsonAst \ "attempts").children
@@ -640,7 +640,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
sc.parallelize(Seq(1, 2, 3)).map(identity).groupBy(identity).map(identity).groupBy(identity)
rdd.count()

- val stage0 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage0 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=0&attempt=0&expandDagViz=true").mkString
assert(stage0.contains("digraph G {\n subgraph clusterstage_0 {\n " +
"label=&quot;Stage 0&quot;;\n subgraph "))
@@ -651,7 +651,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
assert(stage0.contains("{\n label=&quot;groupBy&quot;;\n " +
"2 [label=&quot;MapPartitionsRDD [2]"))

- val stage1 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage1 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=1&attempt=0&expandDagViz=true").mkString
assert(stage1.contains("digraph G {\n subgraph clusterstage_1 {\n " +
"label=&quot;Stage 1&quot;;\n subgraph "))
@@ -662,7 +662,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
assert(stage1.contains("{\n label=&quot;groupBy&quot;;\n " +
"5 [label=&quot;MapPartitionsRDD [5]"))

- val stage2 = Source.fromURL(sc.ui.get.appUIAddress +
+ val stage2 = Source.fromURL(sc.ui.get.webUrl +
"/stages/stage/?id=2&attempt=0&expandDagViz=true").mkString
assert(stage2.contains("digraph G {\n subgraph clusterstage_2 {\n " +
"label=&quot;Stage 2&quot;;\n subgraph "))
@@ -687,7 +687,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}

def goToUi(ui: SparkUI, path: String): Unit = {
- go to (ui.appUIAddress.stripSuffix("/") + path)
+ go to (ui.webUrl.stripSuffix("/") + path)
}

def parseDate(json: JValue): Long = {
@@ -699,6 +699,6 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}

def apiUrl(ui: SparkUI, path: String): URL = {
- new URL(ui.appUIAddress + "/api/v1/applications/" + ui.sc.get.applicationId + "/" + path)
+ new URL(ui.webUrl + "/api/v1/applications/" + ui.sc.get.applicationId + "/" + path)
}
}
13 changes: 6 additions & 7 deletions core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -66,7 +66,7 @@ class UISuite extends SparkFunSuite {
withSpark(newSparkContext()) { sc =>
// test if the ui is visible, and all the expected tabs are visible
eventually(timeout(10 seconds), interval(50 milliseconds)) {
- val html = Source.fromURL(sc.ui.get.appUIAddress).mkString
+ val html = Source.fromURL(sc.ui.get.webUrl).mkString
assert(!html.contains("random data that should not be present"))
assert(html.toLowerCase.contains("stages"))
assert(html.toLowerCase.contains("storage"))
@@ -176,19 +176,18 @@ class UISuite extends SparkFunSuite {
}
}

test("verify appUIAddress contains the scheme") {
test("verify webUrl contains the scheme") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
- val uiAddress = ui.appUIAddress
- val uiHostPort = ui.appUIHostPort
- assert(uiAddress.equals("http://" + uiHostPort))
+ val uiAddress = ui.webUrl
+ assert(uiAddress.startsWith("http://") || uiAddress.startsWith("https://"))
}
}

test("verify appUIAddress contains the port") {
test("verify webUrl contains the port") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
- val splitUIAddress = ui.appUIAddress.split(':')
+ val splitUIAddress = ui.webUrl.split(':')
val boundPort = ui.boundPort
assert(splitUIAddress(2).toInt == boundPort)
}
@@ -158,7 +158,7 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
sc.sparkUser,
sc.appName,
sc.conf,
sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.appUIAddress)),
sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.webUrl)),
None,
None,
sc.conf.getOption("spark.mesos.driver.frameworkId")
@@ -77,7 +77,7 @@ private[spark] class MesosFineGrainedSchedulerBackend(
sc.sparkUser,
sc.appName,
sc.conf,
sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.appUIAddress)),
sc.conf.getOption("spark.mesos.driver.webui.url").orElse(sc.ui.map(_.webUrl)),
Option.empty,
Option.empty,
sc.conf.getOption("spark.mesos.driver.frameworkId")
@@ -92,13 +92,13 @@ class UISeleniumSuite
val sparkUI = ssc.sparkContext.ui.get

eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/"))
+ go to (sparkUI.webUrl.stripSuffix("/"))
find(cssSelector( """ul li a[href*="streaming"]""")) should not be (None)
}

eventually(timeout(10 seconds), interval(50 milliseconds)) {
// check whether streaming page exists
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming")
val h3Text = findAll(cssSelector("h3")).map(_.text).toSeq
h3Text should contain("Streaming Statistics")

@@ -180,23 +180,23 @@ class UISeleniumSuite
jobDetails should contain("Completed Stages:")

// Check a batch page without id
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming/batch/")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming/batch/")
webDriver.getPageSource should include ("Missing id parameter")

// Check a non-exist batch
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming/batch/?id=12345")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming/batch/?id=12345")
webDriver.getPageSource should include ("does not exist")
}

ssc.stop(false)

eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/"))
+ go to (sparkUI.webUrl.stripSuffix("/"))
find(cssSelector( """ul li a[href*="streaming"]""")) should be(None)
}

eventually(timeout(10 seconds), interval(50 milliseconds)) {
- go to (sparkUI.appUIAddress.stripSuffix("/") + "/streaming")
+ go to (sparkUI.webUrl.stripSuffix("/") + "/streaming")
val h3Text = findAll(cssSelector("h3")).map(_.text).toSeq
h3Text should not contain("Streaming Statistics")
}
@@ -406,7 +406,7 @@ private[spark] class ApplicationMaster(
sc.getConf.get("spark.driver.host"),
sc.getConf.get("spark.driver.port"),
isClusterMode = true)
- registerAM(sc.getConf, rpcEnv, driverRef, sc.ui.map(_.appUIAddress).getOrElse(""),
+ registerAM(sc.getConf, rpcEnv, driverRef, sc.ui.map(_.webUrl).getOrElse(""),
securityMgr)
} else {
// Sanity check; should never happen in normal operation, since sc should only be null
@@ -44,7 +44,7 @@ private[spark] class YarnClientSchedulerBackend(
val driverHost = conf.get("spark.driver.host")
val driverPort = conf.get("spark.driver.port")
val hostport = driverHost + ":" + driverPort
- sc.ui.foreach { ui => conf.set("spark.driver.appUIAddress", ui.appUIAddress) }
+ sc.ui.foreach { ui => conf.set("spark.driver.appUIAddress", ui.webUrl) }

val argsArrayBuf = new ArrayBuffer[String]()
argsArrayBuf += ("--arg", hostport)
