diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
index 8f8b2975aab8d..6d185e2e0d529 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
@@ -91,7 +91,8 @@ private[spark] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {
             ++
             appTable
           } else {
-            Did you specify the correct logging directory? Please verify your setting of spark.history.fs.logDirectory and whether you have the permissions to
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index 273240d904474..aca446208a038 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -53,7 +53,8 @@ class HistoryServer(
   private val appLoader = new CacheLoader[String, SparkUI] {
     override def load(key: String): SparkUI = {
-      val ui = provider.getAppUI(key).getOrElse(throw new NoSuchElementException(s"no app with key $key"))
+      val ui = provider.getAppUI(key).getOrElse(
+        throw new NoSuchElementException(s"no app with key $key"))
       attachSparkUI(ui)
       ui
     }
diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index f7742c8d11baa..2c6f3f2e7ec40 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -29,7 +29,8 @@ import org.apache.spark.util.AkkaUtils
  */
 private[spark]
 class MasterWebUI(val master: Master, requestedPort: Int)
-  extends WebUI(master.securityMgr, requestedPort, master.conf, name = "MasterUI") with Logging with UIRoot {
+  extends WebUI(master.securityMgr, requestedPort, master.conf, name = "MasterUI") with Logging
+  with UIRoot {

   val masterActorRef = master.self
   val timeout = AkkaUtils.askTimeout(master.conf)
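The HistoryServer hunk above is only a re-wrap, but the surrounding pattern may be unfamiliar: appLoader is a Guava CacheLoader that a LoadingCache invokes on a cache miss, and a missing key fails fast with NoSuchElementException. Here is a minimal, self-contained sketch of that load-or-fail pattern; the String-valued getAppUI and the map of apps are hypothetical stand-ins for the real provider.getAppUI and SparkUI:

```scala
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

object AppCacheSketch {
  // Hypothetical stand-in for provider.getAppUI: returns None for unknown apps.
  private val apps = Map("app-1" -> "ui for app-1")
  private def getAppUI(key: String): Option[String] = apps.get(key)

  // The loader runs once per missing key; later get() calls hit the cache.
  private val appLoader = new CacheLoader[String, String] {
    override def load(key: String): String = {
      getAppUI(key).getOrElse(
        throw new NoSuchElementException(s"no app with key $key"))
    }
  }

  val cache: LoadingCache[String, String] =
    CacheBuilder.newBuilder().maximumSize(50).build[String, String](appLoader)

  def main(args: Array[String]): Unit = {
    println(cache.get("app-1")) // loads via appLoader, then served from cache
    // Note: an unchecked exception thrown by the loader surfaces from get()
    // wrapped in com.google.common.cache.UncheckedExecutionException.
  }
}
```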
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
index ed153fc308f63..c8b01cc1bc8e8 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllRDDResource.scala
@@ -30,11 +30,12 @@ class AllRDDResource(uiRoot: UIRoot) {
     @PathParam("appId") appId: String
   ): Seq[RDDStorageInfo] = {
     uiRoot.withSparkUI(appId) { ui =>
-      //should all access on storageListener also be synchronized?
+      // should all access on storageListener also be synchronized?
       val storageStatusList = ui.storageListener.storageStatusList
       val rddInfos = ui.storageListener.rddInfoList
       rddInfos.map{rddInfo =>
-        AllRDDResource.getRDDStorageInfo(rddInfo.id, rddInfo, storageStatusList, includeDetails = false)
+        AllRDDResource.getRDDStorageInfo(rddInfo.id, rddInfo, storageStatusList,
+          includeDetails = false)
       }
     }
@@ -44,7 +45,10 @@ class AllRDDResource(uiRoot: UIRoot) {

 object AllRDDResource {

-  def getRDDStorageInfo(rddId: Int, listener: StorageListener, includeDetails: Boolean): Option[RDDStorageInfo] = {
+  def getRDDStorageInfo(
+      rddId: Int,
+      listener: StorageListener,
+      includeDetails: Boolean): Option[RDDStorageInfo] = {
     val storageStatusList = listener.storageStatusList
     listener.rddInfoList.find(_.id == rddId).map{rddInfo =>
       getRDDStorageInfo(rddId, rddInfo, storageStatusList, includeDetails)
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 9605c1f69775b..0ea4c3d59f1f0 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -46,7 +46,8 @@ class ApplicationListResource(uiRoot: UIRoot) {
       allApps.filter{app =>
         val statusOk = (app.completed && includeCompleted) ||
           (!app.completed && includeRunning)
-        val dateOk = app.startTime.getTime >= minDate.timestamp && app.startTime.getTime <= maxDate.timestamp
+        val dateOk = app.startTime.getTime >= minDate.timestamp &&
+          app.startTime.getTime <= maxDate.timestamp
         statusOk && dateOk
       }
     }
@@ -64,7 +65,9 @@ object ApplicationsListResource {
     )
   }

-  def convertApplicationInfo(internal: InternalApplicationInfo, completed: Boolean): ApplicationInfo = {
+  def convertApplicationInfo(
+      internal: InternalApplicationInfo,
+      completed: Boolean): ApplicationInfo = {
     ApplicationInfo(
       id = internal.id,
       name = internal.desc.name,
@@ -75,4 +78,4 @@ object ApplicationsListResource {
     )
   }
-}
\ No newline at end of file
+}
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
index 5cc2a0a4b727b..4d7aba0456925 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
@@ -36,4 +36,4 @@ class ExecutorListResource(uiRoot: UIRoot) {
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala
index 6092cbdcc5f2e..8b6dba1b74b43 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/JsonRootResource.scala
@@ -126,4 +126,4 @@ class NotFoundException(msg: String) extends WebApplicationException(
     .status(Response.Status.NOT_FOUND)
     .entity(msg)
     .build()
-)
\ No newline at end of file
+)
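The ApplicationListResource hunk above only re-wraps the filter predicate, but the logic is worth spelling out: an application is kept when its completed/running status matches the requested set and its start time falls inside the [minDate, maxDate] window. A runnable sketch of that predicate, with a simplified AppInfo standing in for the richer application-info type and raw longs standing in for the parsed date parameters:

```scala
object AppFilterSketch {
  // Simplified stand-in for the application info the resource filters.
  case class AppInfo(id: String, completed: Boolean, startTime: Long)

  def filterApps(
      allApps: Seq[AppInfo],
      includeCompleted: Boolean,
      includeRunning: Boolean,
      minDate: Long,
      maxDate: Long): Seq[AppInfo] = {
    allApps.filter { app =>
      // Keep apps whose status matches the requested set...
      val statusOk = (app.completed && includeCompleted) ||
        (!app.completed && includeRunning)
      // ...and whose start time falls inside the requested window.
      val dateOk = app.startTime >= minDate && app.startTime <= maxDate
      statusOk && dateOk
    }
  }

  def main(args: Array[String]): Unit = {
    val apps = Seq(AppInfo("a", completed = true, 100L), AppInfo("b", completed = false, 200L))
    // Only running apps started at or after t = 150: keeps "b".
    println(filterApps(apps, includeCompleted = false, includeRunning = true, 150L, Long.MaxValue))
  }
}
```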
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
index 748dc840e7495..00cfd098a2561 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala
@@ -40,11 +40,12 @@ class OneStageResource(uiRoot: UIRoot) {
       case Some((status,stageInfo)) =>
         val stageUiData = listener.synchronized{
           listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
-            getOrElse{ throw new SparkException("failed to get full stage data for stage: " + stageInfo.stageId +
-              ":" + stageInfo.attemptId)
+            getOrElse{ throw new SparkException("failed to get full stage data for stage: " +
+              stageInfo.stageId + ":" + stageInfo.attemptId)
             }
         }
-        AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData, includeDetails = true)
+        AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData,
+          includeDetails = true)
       case None =>
         throw new NotFoundException("unknown stage: " + stageId)
     }
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/RDDStorageInfo.scala b/core/src/main/scala/org/apache/spark/status/api/v1/RDDStorageInfo.scala
index da268a9bb0dc5..59d99e8374d12 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/RDDStorageInfo.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/RDDStorageInfo.scala
@@ -16,8 +16,8 @@
  */
 package org.apache.spark.status.api.v1

-//Q: should Tachyon size go in here as well? currently the UI only shows it on the overall storage page ... does
-// anybody pay attention to it?
+// Q: should Tachyon size go in here as well? currently the UI only shows it on the overall storage
+// page ... does anybody pay attention to it?
 case class RDDStorageInfo(
   id: Int,
   name: String,
@@ -43,4 +43,4 @@ case class RDDPartitionInfo(
   memoryUsed: Long,
   diskUsed: Long,
   executors: Seq[String]
-)
\ No newline at end of file
+)
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/TaskMetrics.scala b/core/src/main/scala/org/apache/spark/status/api/v1/TaskMetrics.scala
index de90874a6baed..acae428f3139e 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/TaskMetrics.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/TaskMetrics.scala
@@ -53,4 +53,4 @@ case class ShuffleWriteMetrics(
   bytesWritten: Long,
   writeTime: Long,
   recordsWritten: Long
-)
\ No newline at end of file
+)
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
index a81a5f61ed75c..1fda6f1a09d9d 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
@@ -38,7 +38,8 @@ private[ui] class ExecutorsPage(
     val maxMem = storageStatusList.map(_.maxMem).sum
     val memUsed = storageStatusList.map(_.memUsed).sum
     val diskUsed = storageStatusList.map(_.diskUsed).sum
-    val execInfo = for (statusId <- 0 until storageStatusList.size) yield ExecutorsPage.getExecInfo(listener, statusId)
+    val execInfo = for (statusId <- 0 until storageStatusList.size) yield
+      ExecutorsPage.getExecInfo(listener, statusId)
     val execInfoSorted = execInfo.sortBy(_.id)
     val logsExist = execInfo.filter(_.executorLogs.nonEmpty).nonEmpty
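A note on the OneStageResource hunk above: the lookup runs inside listener.synchronized because the listener's maps are mutated from the event bus, and getOrElse turns a missing (stageId, attemptId) entry into an immediate exception rather than a null. A small sketch of that synchronized lookup-or-fail pattern, with plain types and with IllegalStateException standing in for Spark's SparkException:

```scala
object StageLookupSketch {
  import scala.collection.mutable

  // (stageId, attemptId) -> stage data. The map is written by one thread and
  // read by others, so reads go through `synchronized` as well as writes.
  private val stageIdToData = mutable.Map((1, 0) -> "stage 1, attempt 0 data")

  def fullStageData(stageId: Int, attemptId: Int): String = synchronized {
    stageIdToData.get((stageId, attemptId)).getOrElse {
      // IllegalStateException stands in for SparkException here.
      throw new IllegalStateException(
        "failed to get full stage data for stage: " + stageId + ":" + attemptId)
    }
  }

  def main(args: Array[String]): Unit = {
    println(fullStageData(1, 0)) // hits the map
    // fullStageData(2, 0) would throw: no data for that (stage, attempt) pair.
  }
}
```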
diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index 0c616ef129cd0..53e0886bc8e8d 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -34,14 +34,15 @@ private[ui] class RDDPage(parent: StorageTab) extends WebUIPage("rdd") {
     val parameterId = request.getParameter("id")
     require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")
     val rddId = parameterId.toInt
-    val rddStorageInfo = AllRDDResource.getRDDStorageInfo(rddId, listener, includeDetails = true).getOrElse {
-      // Rather than crashing, render an "RDD Not Found" page
-      return UIUtils.headerSparkPage("RDD Not Found", Seq[Node](), parent)
+    val rddStorageInfo = AllRDDResource.getRDDStorageInfo(rddId, listener,
+      includeDetails = true).getOrElse {
+        // Rather than crashing, render an "RDD Not Found" page
+        return UIUtils.headerSparkPage("RDD Not Found", Seq[Node](), parent)
     }

     // Worker table
-    val workerTable = UIUtils.listingTable(workerHeader, workerRow, rddStorageInfo.dataDistribution.get,
-      id = Some("rdd-storage-by-worker-table"))
+    val workerTable = UIUtils.listingTable(workerHeader, workerRow,
+      rddStorageInfo.dataDistribution.get, id = Some("rdd-storage-by-worker-table"))

     // Block table
     val blockTable = UIUtils.listingTable(blockHeader, blockRow, rddStorageInfo.partitions.get,
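One pattern worth flagging in the RDDPage hunk: the return inside getOrElse is a non-local return. Because getOrElse takes its default by name, the return exits the enclosing render method with the fallback page instead of merely producing a value for getOrElse. A hedged sketch with plain strings standing in for the Seq[Node] results and the UIUtils helpers, and a hypothetical getRDDStorageInfo:

```scala
object RddPageSketch {
  // Hypothetical stand-in for AllRDDResource.getRDDStorageInfo.
  private def getRDDStorageInfo(rddId: Int): Option[String] =
    if (rddId == 0) Some("details for RDD 0") else None

  def render(rddId: Int): String = {
    val rddStorageInfo = getRDDStorageInfo(rddId).getOrElse {
      // Rather than crashing, render an "RDD Not Found" page. `return` here is
      // non-local: it exits render itself, not just the getOrElse thunk.
      return "RDD Not Found"
    }
    s"RDD page: $rddStorageInfo"
  }

  def main(args: Array[String]): Unit = {
    println(render(0)) // RDD page: details for RDD 0
    println(render(7)) // RDD Not Found
  }
}
```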