From c60a44d4c9b59a613764484dedbe1cbbc514ad9b Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Thu, 18 Jun 2015 11:01:16 -0700
Subject: [PATCH] Manually bind references

---
 .../scala/org/apache/spark/sql/execution/SortSuite.scala  | 8 ++++----
 .../org/apache/spark/sql/execution/SparkPlanTest.scala    | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SortSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SortSuite.scala
index a890904f90a85..5f2db13d8202d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SortSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SortSuite.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark.sql.execution
 
-import org.apache.spark.sql.catalyst.dsl.expressions._
-import org.apache.spark.sql.catalyst.expressions.{Ascending, SortOrder}
+import org.apache.spark.sql.catalyst.expressions.{BoundReference, Ascending, SortOrder}
+import org.apache.spark.sql.types.{IntegerType, StringType}
 
 class SortSuite extends SparkPlanTest {
 
@@ -31,8 +31,8 @@ class SortSuite extends SparkPlanTest {
     )
 
     val sortOrder = Seq(
-      SortOrder('_1, Ascending),
-      SortOrder('_2, Ascending)
+      SortOrder(BoundReference(0, StringType, nullable = false), Ascending),
+      SortOrder(BoundReference(1, IntegerType, nullable = false), Ascending)
     )
 
     checkAnswer(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanTest.scala
index ed8c761b69093..b4f37cf8f69ae 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkPlanTest.scala
@@ -60,7 +60,7 @@ class SparkPlanTest extends SparkFunSuite {
       planFunction: SparkPlan => SparkPlan,
       expectedAnswer: Seq[A]): Unit = {
     val inputDf = TestSQLContext.createDataFrame(input)
-    val expectedRows = expectedAnswer.map(t => Row.apply(t))
+    val expectedRows = expectedAnswer.map(Row.fromTuple)
     SparkPlanTest.checkAnswer(inputDf, planFunction, expectedRows) match {
       case Some(errorMessage) => fail(errorMessage)
       case None =>