Decouples ParquetTest and TestSQLContext
liancheng committed Dec 10, 2014
Parent: 7b43a68 · Commit: aa2cb2e
Showing 5 changed files with 20 additions and 5 deletions.
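
In short: `ParquetTest` previously pulled its helpers from the global `TestSQLContext` via a wildcard import; after this commit it declares an abstract `sqlContext` member and imports from that instead, so each suite decides which `SQLContext` its Parquet helpers run against. A minimal sketch of the pattern, simplified from the diff below (`ParquetTestSketch` and `SomeParquetSuite` are hypothetical names; `SQLContext` and `TestSQLContext` are the real Spark SQL classes):

```scala
import org.apache.spark.sql.SQLContext

trait ParquetTestSketch {
  // Bound by each concrete test suite, not hard-coded to TestSQLContext.
  val sqlContext: SQLContext

  // Importing from the abstract member brings in sparkContext, sql,
  // parquetFile, and the implicit conversions of whichever context
  // the suite supplies.
  import sqlContext._

  protected def configuration = sparkContext.hadoopConfiguration
}

class SomeParquetSuite extends ParquetTestSketch {
  // Most suites in this commit simply keep using the shared context.
  val sqlContext = org.apache.spark.sql.test.TestSQLContext
}
```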
ParquetTest.scala
@@ -23,9 +23,8 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 import scala.util.Try
 
-import org.apache.spark.sql.SchemaRDD
+import org.apache.spark.sql.{SQLContext, SchemaRDD}
 import org.apache.spark.sql.catalyst.util
-import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.util.Utils
 
 /**
@@ -36,7 +35,11 @@ import org.apache.spark.util.Utils
  * Especially, `Tuple1.apply` can be used to easily wrap a single type/value.
  */
 trait ParquetTest {
-  protected val configuration = sparkContext.hadoopConfiguration
+  val sqlContext: SQLContext
+
+  import sqlContext._
+
+  protected def configuration = sparkContext.hadoopConfiguration
 
   /**
    * Sets all SQL configurations specified in `pairs`, calls `f`, and then restore all SQL
@@ -86,7 +89,7 @@ trait ParquetTest {
       (data: Seq[T])
       (f: String => Unit): Unit = {
     withTempPath { file =>
-      sparkContext.parallelize(data).toSchemaRDD.saveAsParquetFile(file.getCanonicalPath)
+      sparkContext.parallelize(data).saveAsParquetFile(file.getCanonicalPath)
       f(file.getCanonicalPath)
     }
   }
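
For context, a sketch of how a suite might exercise the `withParquetFile` helper shown above (illustrative only, not part of the commit; the suite name and test data are made up, while `parquetFile` is the `SQLContext` reader method brought in by `import sqlContext._`):

```scala
import org.apache.spark.sql.test.TestSQLContext
import org.scalatest.FunSuite

class ParquetRoundTripSuite extends FunSuite with ParquetTest {
  val sqlContext = TestSQLContext
  import sqlContext._  // parquetFile and the implicit RDD conversions

  test("round-trips a small dataset") {
    // Writes three rows to a temporary Parquet file, reads them back,
    // and checks the count; the temp path is cleaned up by the helper.
    withParquetFile((1 to 3).map(i => (i, i.toString))) { path =>
      assert(parquetFile(path).collect().size === 3)
    }
  }
}
```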
ParquetFilterSuite.scala
@@ -22,6 +22,7 @@ import parquet.filter2.predicate.{FilterPredicate, Operators}
 
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions.{Literal, Predicate, Row}
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}
/**
Expand All @@ -34,6 +35,8 @@ import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}
* @todo Add test cases for `IsNull` and `IsNotNull` after merging PR #3367
*/
class ParquetFilterSuite extends QueryTest with ParquetTest {
val sqlContext = TestSQLContext

private def checkFilterPushdown(
rdd: SchemaRDD,
output: Seq[Symbol],
@@ -76,7 +79,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest {
         case s: Seq[_] => s.map(_.asInstanceOf[Row].getAs[Array[Byte]](0).mkString(","))
         case s => Seq(s.asInstanceOf[Array[Byte]].mkString(","))
       }
-      assert(actual.sameElements(expected))
+      assert(actual === expected)
     }
     checkFilterPushdown(rdd, output, predicate, filterClass, checkBinaryAnswer _, expectedResult)
   }
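
The assertion change above is a small test-quality fix: `sameElements` collapses the comparison to a bare `Boolean`, so a failure reports nothing about the operands, whereas ScalaTest's `===` typically reports both sides. An illustrative comparison (the values are made up):

```scala
val actual   = Seq("1,2,3", "4,5,6")
val expected = Seq("1,2,3", "7,8,9")

// Fails with only an opaque "assertion failed":
assert(actual.sameElements(expected))

// Fails with a message naming both operands, e.g.
// "List(1,2,3, 4,5,6) did not equal List(1,2,3, 7,8,9)":
assert(actual === expected)
```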
ParquetIOSuite.scala
@@ -35,6 +35,7 @@ import parquet.schema.{MessageType, MessageTypeParser}
 
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.expressions.Row
 import org.apache.spark.sql.catalyst.types.DecimalType
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 import org.apache.spark.sql.{QueryTest, SQLConf, SchemaRDD}

@@ -62,6 +63,8 @@ private[parquet] class TestGroupWriteSupport(schema: MessageType) extends WriteS
  * A test suite that tests basic Parquet I/O.
  */
 class ParquetIOSuite extends QueryTest with ParquetTest {
+  val sqlContext = TestSQLContext
+
   /**
    * Writes `data` to a Parquet file, reads it back and check file contents.
    */
ParquetQuerySuite2.scala
@@ -19,12 +19,15 @@ package org.apache.spark.sql.parquet
 
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.expressions.Row
+import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext._
 
 /**
  * A test suite that tests various Parquet queries.
  */
 class ParquetQuerySuite2 extends QueryTest with ParquetTest {
+  val sqlContext = TestSQLContext
+
   test("simple projection") {
     withParquetTable((0 until 10).map(i => (i, i.toString)), "t") {
       checkAnswer(sql("SELECT _1 FROM t"), (0 until 10).map(Row.apply(_)))
ParquetSchemaSuite.scala
@@ -25,8 +25,11 @@ import parquet.schema.MessageTypeParser
 
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.types.{BinaryType, IntegerType, StructField, StructType}
+import org.apache.spark.sql.test.TestSQLContext
 
 class ParquetSchemaSuite extends FunSuite with ParquetTest {
+  val sqlContext = TestSQLContext
+
   /**
    * Checks whether the reflected Parquet message type for product type `T` conforms `messageType`.
    */
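
Note that `ParquetSchemaSuite` extends plain `FunSuite` rather than `QueryTest`, which underlines the point of the refactoring: any suite can now mix in `ParquetTest` as long as it binds `sqlContext`. A hypothetical suite could even supply its own isolated context instead of the shared one (sketch only; the class name, app name, and master URL are illustrative, and starting a second SparkContext in the same JVM needs care in real test code):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.scalatest.FunSuite

class IsolatedParquetSuite extends FunSuite with ParquetTest {
  // A private SQLContext: Parquet helpers from the trait now run against
  // this context rather than the globally shared TestSQLContext.
  val sqlContext = new SQLContext(
    new SparkContext(new SparkConf().setMaster("local").setAppName("isolated")))
}
```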
