Skip to content

Commit

Permalink
[SPARK-10144] [UI] Actually show peak execution memory by default
Browse files Browse the repository at this point in the history
The peak execution memory metric was introduced in SPARK-8735. That was before Tungsten was enabled by default, so it assumed that `spark.sql.unsafe.enabled` must be explicitly set to true. The result is that the memory is not displayed by default.

Author: Andrew Or <[email protected]>

Closes #8345 from andrewor14/show-memory-default.
  • Loading branch information
Andrew Or authored and yhuai committed Aug 24, 2015
1 parent 9ce0c7a commit 662bb96
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 6 deletions.
6 changes: 2 additions & 4 deletions core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,7 @@ private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
// if we find that it's okay.
private val MAX_TIMELINE_TASKS = parent.conf.getInt("spark.ui.timeline.tasks.maximum", 1000)

private val displayPeakExecutionMemory =
parent.conf.getOption("spark.sql.unsafe.enabled").exists(_.toBoolean)
private val displayPeakExecutionMemory = parent.conf.getBoolean("spark.sql.unsafe.enabled", true)

def render(request: HttpServletRequest): Seq[Node] = {
progressListener.synchronized {
Expand Down Expand Up @@ -1193,8 +1192,7 @@ private[ui] class TaskPagedTable(
desc: Boolean) extends PagedTable[TaskTableRowData] {

// We only track peak memory used for unsafe operators
private val displayPeakExecutionMemory =
conf.getOption("spark.sql.unsafe.enabled").exists(_.toBoolean)
private val displayPeakExecutionMemory = conf.getBoolean("spark.sql.unsafe.enabled", true)

override def tableId: String = "task-table"

Expand Down
8 changes: 6 additions & 2 deletions core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -33,14 +33,18 @@ class StagePageSuite extends SparkFunSuite with LocalSparkContext {

// Verifies that the "peak execution memory" metric in the rendered stage page
// tracks the "spark.sql.unsafe.enabled" flag: shown when the flag is "true",
// hidden when "false", and — the behavior this commit adds — shown by default
// when the flag is not set at all.
// NOTE(review): this block is a captured diff, not plain source. Each
// duplicated `val conf` / `val conf2` pair below is the pre-change line
// immediately followed by its post-change replacement; only the second of
// each pair exists in the committed file.
test("peak execution memory only displayed if unsafe is enabled") {
val unsafeConf = "spark.sql.unsafe.enabled"
// Pre-change line (removed by this commit):
val conf = new SparkConf().set(unsafeConf, "true")
// Post-change line — presumably `new SparkConf(false)` skips loading system
// properties so the test is isolated from the environment; confirm against
// the SparkConf constructor docs.
val conf = new SparkConf(false).set(unsafeConf, "true")
// Case 1: flag explicitly "true" -> metric must appear in the page HTML.
val html = renderStagePage(conf).toString().toLowerCase
val targetString = "peak execution memory"
assert(html.contains(targetString))
// Disable unsafe and make sure it's not there
// Pre-change line (removed by this commit):
val conf2 = new SparkConf().set(unsafeConf, "false")
// Post-change line:
val conf2 = new SparkConf(false).set(unsafeConf, "false")
// Case 2: flag explicitly "false" -> metric must NOT appear.
val html2 = renderStagePage(conf2).toString().toLowerCase
assert(!html2.contains(targetString))
// Avoid setting anything; it should be displayed by default
// Case 3 (new in this commit): flag unset -> metric appears by default,
// matching the getBoolean(..., true) default introduced in StagePage.scala.
val conf3 = new SparkConf(false)
val html3 = renderStagePage(conf3).toString().toLowerCase
assert(html3.contains(targetString))
}

/**
Expand Down

0 comments on commit 662bb96

Please sign in to comment.