From dc8a7fe8ba9b5e6ab49a8ec80fb25d5bb378564e Mon Sep 17 00:00:00 2001
From: Imran Rashid
Date: Tue, 28 Apr 2015 14:02:17 -0500
Subject: [PATCH] style, fix errant comments

---
 .../org/apache/spark/status/api/v1/AllStagesResource.scala | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
index edfa7f1b1b86a..2bef722061745 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala
@@ -287,10 +287,7 @@ private[v1] object AllStagesResource {
  * Helper for getting distributions from nested metric types. Many of the metrics we want are
  * contained in options inside TaskMetrics (eg., ShuffleWriteMetrics). This makes it easy to handle
  * the options (returning None if the metrics are all empty), and extract the quantiles for each
- * metric. After creating an instance, call metricOption to get the result type
- *
- * **getSubMetrics** -- pulls out the submetric of interest from the overall task metrics
- * **build** -- create the result container. Generally this will just call submetricQuantiles for each
+ * metric. After creating an instance, call metricOption to get the result type.
  */
 private[v1] abstract class MetricHelper[I,O](
     rawMetrics: Seq[InternalTaskMetrics],
@@ -299,7 +296,7 @@ private[v1] abstract class MetricHelper[I,O](
   def getSubmetrics(raw: InternalTaskMetrics): Option[I]
   def build: O
   val data: Seq[I] = rawMetrics.flatMap(getSubmetrics)
-  /** applies the given function to all input metrics, and returns the quantiles*/
+  /** applies the given function to all input metrics, and returns the quantiles */
   def submetricQuantiles(f: I => Double): IndexedSeq[Double] = {
     Distribution(data.map { d => f(d) }).get.getQuantiles(quantiles)
   }
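
For context on the MetricHelper pattern the touched scaladoc describes, here is a minimal, self-contained Scala sketch. All names in it are simplified stand-ins for illustration only (a local TaskMetrics, ShuffleWriteMetrics, quantilesOf, and ShuffleWriteQuantiles), not Spark's real InternalTaskMetrics, Distribution, or v1 result classes, and the metricOption body shown is an assumption based solely on the comment's "returning None if the metrics are all empty".

```scala
// Minimal sketch of the MetricHelper pattern described in the scaladoc above.
// All types here are simplified stand-ins, NOT Spark's real classes.
object MetricHelperSketch {

  // Hypothetical stand-ins for Spark's internal metric types.
  case class ShuffleWriteMetrics(bytesWritten: Double, recordsWritten: Double)
  case class TaskMetrics(shuffleWriteMetrics: Option[ShuffleWriteMetrics])

  // Simplified quantile calculation standing in for Spark's Distribution utility.
  // Assumes `data` is non-empty (callers guard on that below).
  def quantilesOf(data: Seq[Double], probabilities: Array[Double]): IndexedSeq[Double] = {
    val sorted = data.sorted.toIndexedSeq
    probabilities.toIndexedSeq.map { p =>
      sorted(math.min((p * sorted.size).toInt, sorted.size - 1))
    }
  }

  // Subclasses say how to pull their submetric out of TaskMetrics (getSubmetrics)
  // and how to assemble the final result object (build).
  abstract class MetricHelper[I, O](rawMetrics: Seq[TaskMetrics], quantiles: Array[Double]) {
    def getSubmetrics(raw: TaskMetrics): Option[I]
    def build: O

    val data: Seq[I] = rawMetrics.flatMap(getSubmetrics)

    /** Applies the given function to all input metrics and returns the quantiles. */
    def submetricQuantiles(f: I => Double): IndexedSeq[Double] =
      quantilesOf(data.map(f), quantiles)

    /** None when no task had this submetric, otherwise the built result (assumed behavior). */
    def metricOption: Option[O] = if (data.isEmpty) None else Some(build)
  }

  // Hypothetical result container and a concrete helper for shuffle-write metrics.
  case class ShuffleWriteQuantiles(
      bytesWritten: IndexedSeq[Double],
      recordsWritten: IndexedSeq[Double])

  def main(args: Array[String]): Unit = {
    val tasks = Seq(
      TaskMetrics(Some(ShuffleWriteMetrics(100.0, 10.0))),
      TaskMetrics(Some(ShuffleWriteMetrics(300.0, 30.0))),
      TaskMetrics(None))

    val helper = new MetricHelper[ShuffleWriteMetrics, ShuffleWriteQuantiles](
        tasks, Array(0.0, 0.5, 1.0)) {
      override def getSubmetrics(raw: TaskMetrics): Option[ShuffleWriteMetrics] =
        raw.shuffleWriteMetrics
      override def build: ShuffleWriteQuantiles = ShuffleWriteQuantiles(
        bytesWritten = submetricQuantiles(_.bytesWritten),
        recordsWritten = submetricQuantiles(_.recordsWritten))
    }

    // Some(ShuffleWriteQuantiles(Vector(100.0, 300.0, 300.0), Vector(10.0, 30.0, 30.0)))
    println(helper.metricOption)
  }
}
```

The concrete helper only has to supply getSubmetrics and build; the quantile extraction and the all-tasks-empty case are handled once in the base class, which is the point of the pattern the removed comment lines were restating.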