Skip to content

Commit a3c92e7

Browse files
committed
Add missing `lazy` modifiers and null handling
1 parent e0b4ca6 commit a3c92e7

File tree

3 files changed

+3
-3
lines changed

3 files changed

+3
-3
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScanExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ case class LocalTableScanExec(
59  59
}
60  60

61  61
override protected def stringArgs: Iterator[Any] = {
62-
if (rows.isEmpty) {
62+
if (rows == null || rows.isEmpty) {
63  63
Iterator("<empty>", output)
64  64
} else {
65  65
Iterator(output)

sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ case class HashAggregateExec(
92  92

93  93
// This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash
94  94
// map and/or the sort-based aggregation once it has processed a given number of input rows.
95-
private val testFallbackStartsAt: Option[(Int, Int)] = {
95+
private lazy val testFallbackStartsAt: Option[(Int, Int)] = {
96  96
sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match {
97  97
case null | "" => None
98  98
case fallbackStartsAt =>

sql/core/src/main/scala/org/apache/spark/sql/execution/exchange/ShuffleExchangeExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ case class ShuffleExchangeExec(
59  59

60  60
override def nodeName: String = "Exchange"
61  61

62-
private val serializer: Serializer =
62+
private lazy val serializer: Serializer =
63  63
new UnsafeRowSerializer(child.output.size, longMetric("dataSize"))
64  64

65  65
@transient lazy val inputRDD: RDD[InternalRow] = child.execute()

0 commit comments

Comments (0)