From a325563337c673151f025148c0e0366dc89cdc1d Mon Sep 17 00:00:00 2001
From: Imran Rashid
Date: Mon, 27 Apr 2015 12:07:11 -0500
Subject: [PATCH] style

---
 .../apache/spark/deploy/master/Master.scala   |  1 -
 .../spark/status/api/v1/AllJobsResource.scala | 12 +++----
 .../status/api/v1/AllStagesResource.scala     |  7 ++--
 .../api/v1/ApplicationListResource.scala      |  3 +-
 .../spark/status/api/v1/SimpleDateParam.scala |  2 +-
 .../org/apache/spark/status/api/v1/api.scala  | 12 +------
 .../scala/org/apache/spark/ui/SparkUI.scala   |  3 +-
 .../apache/spark/ui/exec/ExecutorsPage.scala  | 30 ++++++++---------
 .../deploy/history/HistoryServerSuite.scala   | 32 +++++++++----------
 9 files changed, 43 insertions(+), 59 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index e7809084e233d..cedb8c62eb773 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -768,7 +768,6 @@ private[master] class Master(
     if (inProgressExists) {
       // Event logging is enabled for this application, but the application is still in progress
       logWarning(s"Application $appName is still in progress, it may be terminated abnormally.")
-      return None
     }
 
     val (eventLogFile, status) = if (inProgressExists) {
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala
index 034c4635da97e..6ef34a57e220d 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllJobsResource.scala
@@ -16,8 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util
-import java.util.Date
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs._
 import javax.ws.rs.core.MediaType
 
@@ -32,16 +31,15 @@ private[v1] class AllJobsResource(uiRoot: UIRoot) {
   @GET
   def jobsList(
       @PathParam("appId") appId: String,
-      @QueryParam("status") statuses: java.util.List[JobExecutionStatus]
+      @QueryParam("status") statuses: JList[JobExecutionStatus]
   ): Seq[JobData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val statusToJobs: Seq[(JobExecutionStatus, Seq[JobUIData])] =
         AllJobsResource.getStatusToJobs(ui)
-      val adjStatuses: util.List[JobExecutionStatus] = {
+      val adjStatuses: JList[JobExecutionStatus] = {
         if (statuses.isEmpty) {
-          java.util.Arrays.asList(JobExecutionStatus.values(): _*)
-        }
-        else {
+          Arrays.asList(JobExecutionStatus.values(): _*)
+        } else {
           statuses
         }
       }
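A note on the import change in AllJobsResource above: renaming java.util.List to JList at the import site keeps the JAX-RS parameter types short without shadowing scala.List, and the "no filter means all statuses" fallback works by splatting the enum's values() array into Arrays.asList. The sketch below shows the same idiom in isolation; JListDefaults and the use of TimeUnit as a stand-in enum are illustrative assumptions, not code from this patch:

    import java.util.{Arrays, List => JList}
    import java.util.concurrent.TimeUnit

    object JListDefaults {
      // If the caller supplied no values, fall back to "all of them",
      // mirroring the adjStatuses logic in jobsList. TimeUnit stands in
      // for JobExecutionStatus so the sketch compiles without Spark.
      def orAll(requested: JList[TimeUnit]): JList[TimeUnit] =
        if (requested.isEmpty) Arrays.asList(TimeUnit.values(): _*) else requested
    }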
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
index c135ce1bae9ef..b866e1e7c545b 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util.Date
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs.{GET, PathParam, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 
@@ -33,14 +33,14 @@ private[v1] class AllStagesResource(uiRoot: UIRoot) {
   @GET
   def stageList(
       @PathParam("appId") appId: String,
-      @QueryParam("status") statuses: java.util.List[StageStatus]
+      @QueryParam("status") statuses: JList[StageStatus]
   ): Seq[StageData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val listener = ui.jobProgressListener
       val stageAndStatus = AllStagesResource.stagesAndStatus(ui)
       val adjStatuses = {
         if (statuses.isEmpty()) {
-          java.util.Arrays.asList(StageStatus.values(): _*)
+          Arrays.asList(StageStatus.values(): _*)
         } else {
           statuses
         }
@@ -279,5 +279,4 @@ private[v1] object AllStagesResource {
       recordsWritten = internal.shuffleRecordsWritten
     )
   }
-
 }
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 7cb826bbc7b1c..9f9055de20347 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -16,8 +16,7 @@
  */
 package org.apache.spark.status.api.v1
 
-import java.util.Date
-import java.util.{Arrays, List => JList}
+import java.util.{Arrays, Date, List => JList}
 import javax.ws.rs.{DefaultValue, GET, Produces, QueryParam}
 import javax.ws.rs.core.MediaType
 
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala b/core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala
index 7c61c31fbb4df..cee29786c3019 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/SimpleDateParam.scala
@@ -27,7 +27,7 @@ import scala.util.Try
 private[v1] class SimpleDateParam(val originalValue: String) {
   val timestamp: Long = {
     SimpleDateParam.formats.collectFirst {
-      case fmt if Try{ fmt.parse(originalValue) }.isSuccess =>
+      case fmt if Try(fmt.parse(originalValue)).isSuccess =>
         fmt.parse(originalValue).getTime()
     }.getOrElse(
       throw new WebApplicationException(
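One thing worth noting about the SimpleDateParam hunk above: the guard Try(fmt.parse(originalValue)).isSuccess parses the value once to test the format and then parses it a second time to extract the timestamp. A single-parse alternative is sketched below; the formats list and the parseTimestamp name are assumptions for illustration, not Spark's actual API:

    import java.text.SimpleDateFormat
    import scala.util.Try

    object DateParse {
      // Candidate formats, tried in order; the first successful parse wins.
      private val formats = Seq(
        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSz"),
        new SimpleDateFormat("yyyy-MM-dd"))

      // Each format is applied exactly once: Try(...).toOption maps a
      // parse failure to None, and headOption keeps the first success.
      def parseTimestamp(s: String): Option[Long] =
        formats.view.flatMap(fmt => Try(fmt.parse(s)).toOption).headOption.map(_.getTime)
    }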
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index 62721c6c44a9b..5705ac2fa9217 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -220,7 +220,6 @@ class OutputMetricDistributions(
     val recordsWritten: IndexedSeq[Double]
 )
-
 
 class ShuffleReadMetricDistributions(
     val readBytes: IndexedSeq[Double],
     val readRecords: IndexedSeq[Double],
@@ -241,13 +240,4 @@ class AccumulableInfo (
     val id: Long,
     val name: String,
     val update: Option[String],
-    val value: String) {
-
-  override def equals(other: Any): Boolean = other match {
-    case acc: AccumulableInfo =>
-      this.id == acc.id && this.name == acc.name &&
-        this.value == acc.value
-    case _ => false
-  }
-}
-
+    val value: String)
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 7f230b2b48fea..bb3a1ba3364f4 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -19,8 +19,7 @@ package org.apache.spark.ui
 
 import java.util.Date
 
-import org.apache.spark.status.api.v1.{ApplicationInfo, JsonRootResource}
-import org.apache.spark.status.api.v1.UIRoot
+import org.apache.spark.status.api.v1.{ApplicationInfo, JsonRootResource, UIRoot}
 import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext}
 import org.apache.spark.scheduler._
 import org.apache.spark.storage.StorageStatusListener
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
index a628b5397c299..b247e4cdc3bd4 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
@@ -28,21 +28,21 @@ import org.apache.spark.util.Utils
 
 // This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
 private[ui] case class ExecutorSummaryInfo(
-  id: String,
-  hostPort: String,
-  rddBlocks: Int,
-  memoryUsed: Long,
-  diskUsed: Long,
-  activeTasks: Int,
-  failedTasks: Int,
-  completedTasks: Int,
-  totalTasks: Int,
-  totalDuration: Long,
-  totalInputBytes: Long,
-  totalShuffleRead: Long,
-  totalShuffleWrite: Long,
-  maxMemory: Long,
-  executorLogs: Map[String, String])
+    id: String,
+    hostPort: String,
+    rddBlocks: Int,
+    memoryUsed: Long,
+    diskUsed: Long,
+    activeTasks: Int,
+    failedTasks: Int,
+    completedTasks: Int,
+    totalTasks: Int,
+    totalDuration: Long,
+    totalInputBytes: Long,
+    totalShuffleRead: Long,
+    totalShuffleWrite: Long,
+    maxMemory: Long,
+    executorLogs: Map[String, String])
 
 
 private[ui] class ExecutorsPage(
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 00f9b382dd857..6d90ff026f23b 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -52,6 +52,7 @@ class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with
     server.bind()
     port = server.boundPort
   }
+
   def stop(): Unit = {
     server.stop()
   }
@@ -102,27 +103,26 @@ class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with
     "stage with accumulable json" -> "applications/local-1426533911241/stages/0/0",
     "rdd list storage json" -> "applications/local-1422981780767/storage/rdd",
     "one rdd storage json" -> "applications/local-1422981780767/storage/rdd/0"
-    // TODO multi-attempt stages
   )
 
   // run a bunch of characterization tests -- just verify the behavior is the same as what is saved
   // in the test resource folder
   cases.foreach { case (name, path) =>
-      test(name) {
-        val (code, jsonOpt, errOpt) = getContentAndCode(path)
-        code should be (HttpServletResponse.SC_OK)
-        jsonOpt should be ('defined)
-        errOpt should be (None)
-        val json = jsonOpt.get
-        val exp = IOUtils.toString(new FileInputStream(
-          new File(expRoot, path + "/json_expectation")))
-        // compare the ASTs so formatting differences don't cause failures
-        import org.json4s._
-        import org.json4s.jackson.JsonMethods._
-        val jsonAst = parse(json)
-        val expAst = parse(exp)
-        assertValidDataInJson(jsonAst, expAst)
-      }
+    test(name) {
+      val (code, jsonOpt, errOpt) = getContentAndCode(path)
+      code should be (HttpServletResponse.SC_OK)
+      jsonOpt should be ('defined)
+      errOpt should be (None)
+      val json = jsonOpt.get
+      val exp = IOUtils.toString(new FileInputStream(
+        new File(expRoot, path + "/json_expectation")))
+      // compare the ASTs so formatting differences don't cause failures
+      import org.json4s._
+      import org.json4s.jackson.JsonMethods._
+      val jsonAst = parse(json)
+      val expAst = parse(exp)
+      assertValidDataInJson(jsonAst, expAst)
+    }
   }
 
   test("security") {
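The characterization tests touched in the last hunk compare parsed JSON ASTs rather than raw strings, so formatting differences in the json_expectation files cannot fail the suite. A minimal stand-in for that comparison is sketched below; sameJson is a simplification of the suite's assertValidDataInJson helper, shown only to illustrate the json4s idiom:

    import org.json4s._
    import org.json4s.jackson.JsonMethods._

    object JsonAstCheck {
      // parse builds a JValue AST; comparing ASTs ignores whitespace and,
      // for JObject values, field order -- unlike raw string comparison.
      def sameJson(actual: String, expected: String): Boolean =
        parse(actual) == parse(expected)
    }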