remove a few warnings #1720

Merged (3 commits) on Sep 24, 2017
Changes from 1 commit
project/plugins.sbt (2 changes: 1 addition & 1 deletion)
@@ -18,4 +18,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.6.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.3.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "1.0")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.0.2")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.1.1")
@@ -54,7 +54,7 @@ private[scalding] class ConfPropertiesHfsTap(
* Changes here however will not show up in the hadoop UI
*/
trait HfsConfPropertySetter extends HfsTapProvider {
@deprecated("Tap config is deprecated, use sourceConfig or sinkConfig directly. In cascading configs applied to sinks can leak to sources in the step writing to the sink.")
@deprecated("Tap config is deprecated, use sourceConfig or sinkConfig directly. In cascading configs applied to sinks can leak to sources in the step writing to the sink.", "2017-01-12")
Collaborator: Might have been nice if we could tie this to a Scalding version, e.g. deprecated since 0.17.1 or so.

Collaborator (author): I can fix that.

def tapConfig: Config = Config.empty

def sourceConfig: Config = Config.empty
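Following the review thread above, tying the deprecation to a release instead of a date would look roughly like the sketch below. The 0.17.2 version string and the object name are hypothetical, used only to illustrate the second ("since") argument of @deprecated.

object DeprecatedSinceSketch {
  // "0.17.2" is a made-up release marker standing in for the "2017-01-12" date
  // that the change above uses as @deprecated's "since" argument.
  @deprecated("Tap config is deprecated, use sourceConfig or sinkConfig directly.", "0.17.2")
  def tapConfig: Map[String, String] = Map.empty

  def main(args: Array[String]): Unit =
    // compiling this call emits a deprecation warning that mentions 0.17.2
    println(tapConfig.size)
}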
@@ -37,7 +37,7 @@ trait PipeOperationsConversions {
}

class ListPipesOperation(op: List[Pipe] => Pipe) extends PipeOperation {
-def apply(pipes: List[RichPipe]): Pipe = op(pipes.map(_.pipe).toList)
+def apply(pipes: List[RichPipe]): Pipe = op(pipes.map(_.pipe))
}

implicit val fromSingleRichPipeFunctionToOperation: (RichPipe => RichPipe) => OnePipeOperation = (op: RichPipe => RichPipe) => new OnePipeOperation(op(_).pipe)
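The .toList dropped in the hunk above is redundant because map on a List already returns a List. A tiny standalone illustration (the names are made up, not Scalding code):

object RedundantToListSketch {
  def main(args: Array[String]): Unit = {
    val pipes: List[String] = List("a", "bb", "ccc")
    // map on a List is already a List, so no extra .toList conversion is needed
    val lengths: List[Int] = pipes.map(_.length)
    println(lengths) // List(1, 2, 3)
  }
}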
@@ -93,11 +93,7 @@ object FieldsProviderImpl {
case tpe if tpe =:= typeOf[Float] => true
case tpe if tpe =:= typeOf[Double] => true
case tpe if tpe =:= typeOf[String] => true
-case tpe =>
-optionInner(c)(tpe) match {
-case Some(t) => isNumbered(t)
-case None => false
-}
+case tpe => optionInner(c)(tpe).exists(isNumbered)
}

object FieldBuilder {
@@ -163,7 +159,7 @@
.declarations
.collect { case m: MethodSymbol if m.isCaseAccessor => m }
.map { accessorMethod =>
-val fieldName = accessorMethod.name.toTermName.toString
+val fieldName = accessorMethod.name.toString
val fieldType = accessorMethod.returnType.asSeenFrom(outerTpe, outerTpe.typeSymbol.asClass)
(fieldType, fieldName)
}.toVector
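The first FieldsProviderImpl hunk folds an explicit Option match into Option#exists. A self-contained sketch of the equivalence, with a made-up predicate instead of the macro's isNumbered:

object OptionExistsSketch {
  def verbose(opt: Option[Int]): Boolean = opt match {
    case Some(t) => t > 0
    case None    => false
  }

  // exists applies the predicate when the Option is defined and is false on None,
  // which is exactly the shape of the match above
  def concise(opt: Option[Int]): Boolean = opt.exists(_ > 0)

  def main(args: Array[String]): Unit = {
    assert(verbose(Some(3)) == concise(Some(3)))
    assert(verbose(Some(-1)) == concise(Some(-1)))
    assert(verbose(None) == concise(None))
    println("Option#exists agrees with the explicit match in all three cases")
  }
}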
@@ -342,13 +342,10 @@ case class Product[R, C, C2, V](left: Matrix2[R, C, V],

override lazy val toTypedPipe: TypedPipe[(R, C2, V)] = {
expressions match {
-case Some(m) => m.get(this) match {
-case Some(pipe) => pipe
-case None => {
-val result = computePipe()
-m.put(this, result)
-result
-}
+case Some(m) => m.get(this).getOrElse {
+val result = computePipe()
+m.put(this, result)
+result
}
case None => optimizedSelf.toTypedPipe
}
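The Product hunk above collapses a nested Option match into getOrElse over a cache lookup. A sketch of the same compute-then-put shape with a plain mutable.Map; the real code caches TypedPipes keyed by the expression node, so everything here is illustrative only:

import scala.collection.mutable

object GetOrElseCacheSketch {
  private val cache = mutable.Map.empty[String, Int]

  private def computePipe(key: String): Int = {
    println(s"computing $key") // printed only on a cache miss
    key.length
  }

  // getOrElse takes its default by name, so the compute/put/return block runs
  // only when the key is absent, mirroring the old case None branch
  def cached(key: String): Int =
    cache.get(key).getOrElse {
      val result = computePipe(key)
      cache.put(key, result)
      result
    }

  def main(args: Array[String]): Unit = {
    cached("scalding")
    cached("scalding") // the second call is served from the cache
  }
}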
@@ -1,6 +1,6 @@
package com.twitter.scalding.reducer_estimation

-import com.twitter.scalding.estimation.{Common, FlowStepHistory, FlowStrategyInfo}
+import com.twitter.scalding.estimation.{ Common, FlowStepHistory, FlowStrategyInfo }
import org.apache.hadoop.mapred.JobConf
import org.slf4j.LoggerFactory

@@ -41,8 +41,7 @@ abstract class RatioBasedEstimator extends ReducerHistoryEstimator {
override protected def estimate(
info: FlowStrategyInfo,
conf: JobConf,
-history: Seq[FlowStepHistory]
-): Option[Int] = {
+history: Seq[FlowStepHistory]): Option[Int] = {
val threshold = RatioBasedEstimator.getInputRatioThreshold(conf)
val inputBytes = Common.totalInputSize(info.step)

@@ -25,20 +25,19 @@ object ReducerEstimatorStepStrategy extends FlowStepStrategy[JobConf] {
final override def apply(
flow: Flow[JobConf],
preds: JList[FlowStep[JobConf]],
-step: FlowStep[JobConf]
-): Unit = {
+step: FlowStep[JobConf]): Unit = {
val conf = step.getConfig
// for steps with reduce phase, mapred.reduce.tasks is set in the jobconf at this point
// so we check that to determine if this is a map-only step.
conf.getNumReduceTasks match {
case 0 => LOG.info(s"${ flow.getName } is a map-only step. Skipping reducer estimation.")
case 0 => LOG.info(s"${flow.getName} is a map-only step. Skipping reducer estimation.")
case _ =>
if (skipReducerEstimation(step)) {
LOG.info(
s"""
-|Flow step ${ step.getName } was configured with reducers
-|set explicitly (${ Config.WithReducersSetExplicitly }=true) and the estimator
-|explicit override turned off (${ Config.ReducerEstimatorOverride }=false). Skipping
+|Flow step ${step.getName} was configured with reducers
+|set explicitly (${Config.WithReducersSetExplicitly}=true) and the estimator
+|explicit override turned off (${Config.ReducerEstimatorOverride}=false). Skipping
|reducer estimation.
""".stripMargin)
} else {
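The comment in this hunk explains that mapred.reduce.tasks is already set in the jobconf at this point, so a value of 0 identifies a map-only step. A small sketch of that check against a freshly built JobConf (the helper name is invented; this is not the strategy's actual wiring):

import org.apache.hadoop.mapred.JobConf

object MapOnlyCheckSketch {
  // zero configured reduce tasks means there is no reduce phase to size
  def isMapOnly(conf: JobConf): Boolean = conf.getNumReduceTasks == 0

  def main(args: Array[String]): Unit = {
    val conf = new JobConf()
    conf.setNumReduceTasks(0)
    println(s"map-only: ${isMapOnly(conf)}") // map-only: true
  }
}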
@@ -105,16 +105,15 @@ trait BasicRuntimeReducerEstimator extends ReducerHistoryEstimator {
override protected def estimate(
info: FlowStrategyInfo,
conf: JobConf,
-history: Seq[FlowStepHistory]
-): Option[Int] = {
+history: Seq[FlowStepHistory]): Option[Int] = {
val reduceTimes: Seq[Seq[Double]] = getReduceTimes(history)

LOG.info(
s"""|
|History items have the following numbers of tasks:
-| ${ history.map(_.tasks.length) },
+| ${history.map(_.tasks.length)},
|and the following numbers of tasks have valid task histories:
-| ${ reduceTimes.map(_.length) }""".stripMargin)
+| ${reduceTimes.map(_.length)}""".stripMargin)

// total time taken in the step = time per reducer * number of reducers
val jobTimes: Seq[Option[Double]] = reduceTimes
@@ -148,16 +147,15 @@ trait InputScaledRuntimeReducerEstimator extends ReducerHistoryEstimator {
override protected def estimate(
info: FlowStrategyInfo,
conf: JobConf,
-history: Seq[FlowStepHistory]
-): Option[Int] = {
+history: Seq[FlowStepHistory]): Option[Int] = {
val reduceTimes: Seq[Seq[Double]] = getReduceTimes(history)

LOG.info(
s"""|
|History items have the following numbers of tasks:
-| ${ history.map(_.tasks.length) },
+| ${history.map(_.tasks.length)},
|and the following numbers of tasks have valid task histories:
-| ${ reduceTimes.map(_.length) }""".stripMargin)
+| ${reduceTimes.map(_.length)}""".stripMargin)

// total time taken in the step = time per reducer * number of reducers
val jobTimes: Seq[Option[Double]] = reduceTimes
@@ -188,7 +186,7 @@ trait InputScaledRuntimeReducerEstimator extends ReducerHistoryEstimator {

LOG.info(
s"""
-| - HDFS bytes read: ${ history.map(_.hdfsBytesRead) }
+| - HDFS bytes read: ${history.map(_.hdfsBytesRead)}
| - Time-to-byte-ratios: $timeToByteRatios
| - Typical type-to-byte-ratio: $typicalTimeToByteRatio
| - Desired runtime: $desiredRuntime
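The comments in these hunks note that the total time taken in a step is the time per reducer multiplied by the number of reducers. A worked sketch of turning that identity into a reducer count, with made-up numbers and names (not the estimator's actual code):

object RuntimeEstimateSketch {
  // sum the historical reduce-task times to get the step's total reduce time,
  // then divide by the runtime we would like each reducer to take
  def estimateReducers(reduceTimesMs: Seq[Double], desiredRuntimeMs: Double): Int = {
    val totalTimeMs = reduceTimesMs.sum
    math.max(1, math.ceil(totalTimeMs / desiredRuntimeMs).toInt)
  }

  def main(args: Array[String]): Unit = {
    // four historical reducers at roughly 30 minutes each, targeting 10-minute reducers
    val historicalMinutes = Seq(30.0, 29.0, 31.0, 30.0)
    println(estimateReducers(historicalMinutes.map(_ * 60 * 1000), 10 * 60 * 1000)) // 12
  }
}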
@@ -13,12 +13,12 @@ object CalendarOps {
if (currentField > field) {
currentField match {
case Calendar.DAY_OF_MONTH => cal.set(currentField, 1)
-case Calendar.DAY_OF_WEEK_IN_MONTH => () // Skip
-case Calendar.DAY_OF_WEEK => () // Skip
-case Calendar.DAY_OF_YEAR => () // Skip
-case Calendar.WEEK_OF_MONTH => () // Skip
-case Calendar.WEEK_OF_YEAR => () // Skip
-case Calendar.HOUR_OF_DAY => () // Skip
+case Calendar.DAY_OF_WEEK_IN_MONTH |
+Calendar.DAY_OF_WEEK |
+Calendar.DAY_OF_YEAR |
+Calendar.WEEK_OF_MONTH |
+Calendar.WEEK_OF_YEAR |
+Calendar.HOUR_OF_DAY => () // Skip
case _ => cal.set(currentField, 0)
}

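The CalendarOps hunk merges six identical case arms into a single arm with | alternatives. A standalone sketch of the same construct (the skips helper is invented for illustration):

import java.util.Calendar

object PatternAlternativesSketch {
  // one arm with | alternatives replaces six arms that all shared the same body
  def skips(field: Int): Boolean = field match {
    case Calendar.DAY_OF_WEEK_IN_MONTH |
      Calendar.DAY_OF_WEEK |
      Calendar.DAY_OF_YEAR |
      Calendar.WEEK_OF_MONTH |
      Calendar.WEEK_OF_YEAR |
      Calendar.HOUR_OF_DAY => true
    case _ => false
  }

  def main(args: Array[String]): Unit = {
    println(skips(Calendar.DAY_OF_WEEK))  // true
    println(skips(Calendar.DAY_OF_MONTH)) // false
  }
}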
@@ -833,7 +833,7 @@ object Boxed {
private[scalding] def nextCached[K](cacheKey: Option[AnyRef]): (K => Boxed[K], Class[Boxed[K]]) =
cacheKey match {
case Some(cls) =>
-val untypedRes = Option(boxedCache.get(cls)) match {
+val untypedRes = Option(boxedCache.get(cls)) match { // linter:ignore
case Some(r) => r
case None =>
val r = next[Any]()
@@ -245,7 +245,7 @@ object JavaStreamEnrichments {
s.write(i)
} else {
// the linter does not like us repeating ourselves here
-s.write(-1)
+s.write(-1) // linter:ignore
s.write(-1) // linter:ignore
writeInt(i)
}
@@ -41,7 +41,7 @@ object CaseClassOrderedBuf {
.map { accessorMethod =>
val fieldType = accessorMethod.returnType.asSeenFrom(outerType, outerType.typeSymbol.asClass)
val b: TreeOrderedBuf[c.type] = dispatcher(fieldType)
-(fieldType, accessorMethod.name.toTermName, b)
+(fieldType, accessorMethod.name, b)
}.toList

new TreeOrderedBuf[c.type] {
@@ -76,11 +76,11 @@ object ProductOrderedBuf {
outerType
.declarations
.collect { case m: MethodSymbol => m }
-.filter(m => m.name.toTermName.toString.startsWith("_"))
+.filter(m => m.name.toString.startsWith("_"))
.map { accessorMethod =>
val fieldType = accessorMethod.returnType.asSeenFrom(outerType, outerType.typeSymbol.asClass)
val b: TreeOrderedBuf[c.type] = dispatcher(fieldType)
-(fieldType, accessorMethod.name.toTermName, b)
+(fieldType, accessorMethod.name, b)
}.toList

new TreeOrderedBuf[c.type] {
@@ -48,11 +48,11 @@ object SealedTraitOrderedBuf {

val dispatcher = buildDispatcher

-val subClasses: List[Type] = knownDirectSubclasses.map(_.asType.toType).toList
+val subClasses: List[Type] = knownDirectSubclasses.map(_.asType.toType)

val subData: List[(Int, Type, TreeOrderedBuf[c.type])] = subClasses.map { t =>
(t, dispatcher(t))
-}.zipWithIndex.map{ case ((tpe, tbuf), idx) => (idx, tpe, tbuf) }.toList
+}.zipWithIndex.map { case ((tpe, tbuf), idx) => (idx, tpe, tbuf) }

require(subData.nonEmpty, "Unable to parse any subtypes for the sealed trait, error. This must be an error.")
