Skip to content

Commit

Permalink
small cleanup of accidental changes
Browse files · Browse the repository at this point in the history
  • Loading branch information
squito committed Feb 6, 2015
1 parent d1a8c92 commit b252e7a
Show file tree
Hide file tree
Showing 14 changed files with 5 additions and 95 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import org.json4s.JsonDSL._
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner
import org.json4s._

private[spark] object JsonProtocol {
def writeWorkerInfo(obj: WorkerInfo) = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,6 @@ package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import org.json4s.JValue
import org.json4s.JsonDSL._

import scala.xml.Node

import org.apache.spark.ui.{WebUIPage, UIUtils}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,6 @@ class HistoryServer(
* this UI with the event logs in the provided base directory.
*/
def initialize() {
//earlier handlers take precedence
attachPage(new HistoryPage(this))

val jsonHandler = new JsonRequestHandler(this, securityManager)
Expand Down
22 changes: 0 additions & 22 deletions core/src/main/scala/org/apache/spark/ui/env/EnvironmentPage.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,35 +19,13 @@ package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import org.json4s.{JObject, JValue}
import org.json4s.JsonDSL._

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
private val listener = parent.listener

override def renderJson(request: HttpServletRequest): JValue = {
val jvmInfoJson =
("Runtime Information" -> listener.jvmInformation.foldLeft(JObject())(_ ~ _))
val sparkPropertiesJson =
("Spark Properties" -> listener.sparkProperties.foldLeft(JObject())(_ ~ _))
val systemPropertiesJson =
("System Properties" -> listener.systemProperties.foldLeft(JObject())(_ ~ _))
val classPathEntriesJson =
("Classpath Entries" -> listener.classpathEntries.foldLeft(JObject())(_ ~ _))

val environmentJson =
jvmInfoJson ~
sparkPropertiesJson ~
systemPropertiesJson ~
classPathEntriesJson

environmentJson
}

def render(request: HttpServletRequest): Seq[Node] = {
val runtimeInformationTable = UIUtils.listingTable(
propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,6 @@ import org.apache.spark.status.api.ExecutorSummary

import scala.xml.Node

import org.json4s.JValue
import org.json4s.JsonDSL._

import org.apache.spark.ui.{ToolTips, UIUtils, WebUIPage}
import org.apache.spark.util.Utils

Expand Down Expand Up @@ -159,6 +156,7 @@ private[ui] class ExecutorsPage(
}

private[spark] object ExecutorsPage {
/** Represent an executor's info as a map given a storage status index */
def getExecInfo(listener: ExecutorsListener, statusId: Int): ExecutorSummary = {
val status = listener.storageStatusList(statusId)
val execId = status.blockManagerId.executorId
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,6 @@

package org.apache.spark.ui.jobs

import org.json4s.{JInt, JNothing, JObject, JString, JValue}
import org.json4s.JsonDSL._

import scala.xml.{Node, NodeSeq}

import javax.servlet.http.HttpServletRequest
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,6 @@ package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.util.JsonProtocol
import org.json4s.JValue
import org.json4s.JsonDSL._

import scala.xml.{Node, NodeSeq}

import org.apache.spark.scheduler.Schedulable
Expand Down
4 changes: 0 additions & 4 deletions core/src/main/scala/org/apache/spark/ui/jobs/JobPage.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,6 @@

package org.apache.spark.ui.jobs

import org.apache.spark.util.JsonProtocol
import org.json4s.{JNothing, JValue}
import org.json4s.JsonDSL._

import scala.collection.mutable
import scala.xml.{NodeSeq, Node}

Expand Down
35 changes: 1 addition & 34 deletions core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,49 +19,16 @@ package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.util.JsonProtocol

import scala.xml.Node

import org.json4s.JValue
import org.json4s.JsonDSL._

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}

/** Page showing specific pool details */
private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
private val sc = parent.sc
private val listener = parent.listener

override def renderJson(request: HttpServletRequest): JValue = {
listener.synchronized {
val poolName = request.getParameter("poolname")
val poolToActiveStages = listener.poolToActiveStages
val activeStages = poolToActiveStages.get(poolName) match {
case Some(s) => s.values.map {
case info: StageInfo =>
JsonProtocol.stageInfoToJson(info)
}
case None => Seq[JValue]()
}

val pools:Option[Schedulable] = sc.flatMap{_.getPoolForName(poolName)}

val poolListJson =
pools.map { schedulable =>
("Pool Name" -> schedulable.name) ~
("Minimum Share" -> schedulable.minShare) ~
("Pool Weight" -> schedulable.weight) ~
("Active Stages" -> activeStages) ~
("Running Tasks" -> schedulable.runningTasks) ~
("Scheduling Mode" -> schedulable.schedulingMode.toString)
}

poolListJson
}
}

def render(request: HttpServletRequest): Seq[Node] = {
listener.synchronized {
val poolName = request.getParameter("poolname")
Expand Down
5 changes: 1 addition & 4 deletions core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,9 @@ import org.apache.commons.lang3.StringEscapeUtils

import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}

import org.json4s.{JNothing, JValue}

import org.apache.spark.ui.{ToolTips, WebUIPage, UIUtils}
import org.apache.spark.ui.jobs.UIData._
import org.apache.spark.util.{JsonProtocol, Utils, Distribution}
import org.apache.spark.util.{Utils, Distribution}

/** Page showing statistics and task list for a given stage */
private[ui] class StagePage(parent: StagesTab) extends WebUIPage("stage") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,9 @@ package org.apache.spark.ui.storage

import javax.servlet.http.HttpServletRequest

import org.apache.spark.status.api.{RDDPartitionInfo, RDDDataDistribution}

import scala.xml.Node

import org.json4s.{JNothing, JValue}
import org.json4s.JsonDSL._

import org.apache.spark.storage._
import org.apache.spark.status.api.{RDDPartitionInfo, RDDDataDistribution}
import org.apache.spark.ui.{WebUIPage, UIUtils}
import org.apache.spark.util.Utils

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,9 @@ package org.apache.spark.ui.storage
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.json4s.JValue
import org.json4s.JsonDSL._

import org.apache.spark.storage.RDDInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}
import org.apache.spark.util.{JsonProtocol, Utils}
import org.apache.spark.util.Utils

/** Page showing list of RDD's currently stored in the cluster */
private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ package org.apache.spark.util

import java.util.{Properties, UUID}

import org.apache.spark.deploy.history.ApplicationHistoryInfo
import org.apache.spark.scheduler.cluster.ExecutorInfo

import scala.collection.JavaConverters._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -280,10 +280,6 @@ class JsonProtocolSuite extends FunSuite {
assertEquals(expectedJobEnd, JsonProtocol.jobEndFromJson(oldEndEvent))
}

test("new UI json methods") {
pending
}

/** -------------------------- *
| Helper test running methods |
* --------------------------- */
Expand Down

0 comments on commit b252e7a

Please sign in to comment.