Commit a3c92e7

add missing lazys and null handling
peter-toth committed Nov 20, 2019
1 parent e0b4ca6 commit a3c92e7
Showing 3 changed files with 3 additions and 3 deletions.
LocalTableScanExec.scala
@@ -59,7 +59,7 @@ case class LocalTableScanExec(
   }

   override protected def stringArgs: Iterator[Any] = {
-    if (rows.isEmpty) {
+    if (rows == null || rows.isEmpty) {
       Iterator("<empty>", output)
     } else {
       Iterator(output)
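
Why the guard: stringArgs is used when the plan's tree string is rendered, which can happen while rows is still null, e.g. on a copy of the operator whose rows were never populated or did not survive serialization. Below is a minimal, runnable sketch of the guarded pattern; LocalScan and its fields are hypothetical stand-ins, not Spark's actual operator.

// LocalScan is a hypothetical stand-in for LocalTableScanExec, not Spark's API.
case class LocalScan(rows: Seq[String], output: Seq[String]) {
  // Mirrors the patched stringArgs: check for null before isEmpty, otherwise
  // rendering the tree string of an operator whose rows were never set
  // would throw a NullPointerException.
  def stringArgs: Iterator[Any] =
    if (rows == null || rows.isEmpty) Iterator("<empty>", output)
    else Iterator(output)
}

object StringArgsDemo extends App {
  println(LocalScan(null, Seq("a")).stringArgs.mkString(", "))      // <empty>, List(a)
  println(LocalScan(Seq("r1"), Seq("a")).stringArgs.mkString(", ")) // List(a)
}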
HashAggregateExec.scala
@@ -92,7 +92,7 @@ case class HashAggregateExec(

   // This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash
   // map and/or the sort-based aggregation once it has processed a given number of input rows.
-  private val testFallbackStartsAt: Option[(Int, Int)] = {
+  private lazy val testFallbackStartsAt: Option[(Int, Int)] = {
     sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match {
       case null | "" => None
       case fallbackStartsAt =>
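
Turning testFallbackStartsAt into a lazy val defers the sqlContext.getConf call from operator construction to first use, which is presumably what the commit title's "missing lazys" refers to: the conf should not be read in contexts where session state is unavailable or never needed. A small runnable sketch of the eager-vs-lazy difference, with illustrative names only:

object LazyValDemo extends App {
  def readConf(key: String): String = {
    println(s"reading $key")   // side effect makes evaluation order visible
    "5,10"
  }

  class EagerNode {
    val fallback: String = readConf("testFallbackStartsAt")      // runs at construction
  }

  class LazyNode {
    lazy val fallback: String = readConf("testFallbackStartsAt") // runs on first access
  }

  new EagerNode               // prints "reading testFallbackStartsAt" immediately
  val node = new LazyNode     // prints nothing yet
  println(node.fallback)      // prints "reading testFallbackStartsAt", then "5,10"
}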
ShuffleExchangeExec.scala
@@ -59,7 +59,7 @@ case class ShuffleExchangeExec(

   override def nodeName: String = "Exchange"

-  private val serializer: Serializer =
+  private lazy val serializer: Serializer =
     new UnsafeRowSerializer(child.output.size, longMetric("dataSize"))

   @transient lazy val inputRDD: RDD[InternalRow] = child.execute()
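
Same pattern for serializer: its initializer calls longMetric("dataSize"), and with lazy val that lookup runs on first use during execution instead of at instantiation, matching the neighbouring @transient lazy val inputRDD. A hedged sketch of why deferring such an initializer helps; the Plan class and its metrics map are invented for illustration:

object LazySerializerDemo extends App {
  class Plan(metrics: Map[String, Long]) {
    // Deferred, like the patched field: the metric is only looked up on
    // first access, so merely constructing the Plan cannot fail here.
    lazy val serializer: String =
      s"UnsafeRowSerializer(dataSize=${metrics("dataSize")})"
  }

  val incomplete = new Plan(Map.empty)   // fine: the initializer has not run
  // incomplete.serializer               // would throw NoSuchElementException
  val ready = new Plan(Map("dataSize" -> 0L))
  println(ready.serializer)              // UnsafeRowSerializer(dataSize=0)
}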
