[SPARK-18296][SQL] Use consistent naming for expression test suites
## What changes were proposed in this pull request?
We have an undocumented naming convention: expression unit tests are called ExpressionsSuite, and the end-to-end tests are called FunctionsSuite. It would be good to make all test suites consistent with this naming convention.
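
As an illustration of the convention, here is a minimal, hypothetical sketch of the two suite styles (the class names and test bodies below are examples, not code from this patch; in the real tree the two styles live in separate modules, catalyst vs. sql/core, so they would be separate files):

```scala
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.catalyst.expressions.{BitwiseOr, ExpressionEvalHelper, Literal}
import org.apache.spark.sql.test.SharedSQLContext

// A *ExpressionsSuite unit-tests a Catalyst expression by evaluating the
// expression tree directly, without running a query.
class ExampleBitwiseExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
  test("bitwise OR") {
    checkEvaluation(BitwiseOr(Literal(1), Literal(2)), 3)
  }
}

// A *FunctionsSuite tests the corresponding SQL/DataFrame function end to end,
// by running a real query through the engine.
class ExampleMathFunctionsSuite extends QueryTest with SharedSQLContext {
  test("abs") {
    checkAnswer(spark.sql("SELECT abs(-1)"), Row(1))
  }
}
```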

## How was this patch tested?
This is a test-only naming change.

Author: Reynold Xin <[email protected]>

Closes apache#15793 from rxin/SPARK-18296.
rxin authored and uzadude committed Jan 27, 2017
Parent: 033ca3a · Commit: b168e75
Showing 6 changed files with 8 additions and 9 deletions.
BitwiseFunctionsSuite.scala → BitwiseExpressionsSuite.scala

@@ -21,7 +21,7 @@ import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.types._


-class BitwiseFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class BitwiseExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

   import IntegralLiteralTestUtils._

CollectionFunctionsSuite.scala → CollectionExpressionsSuite.scala

@@ -20,8 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.types._

-
-class CollectionFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

   test("Array and Map Size") {
     val a0 = Literal.create(Seq(1, 2, 3), ArrayType(IntegerType))
MathFunctionsSuite.scala → MathExpressionsSuite.scala (expression unit tests)

@@ -29,7 +29,7 @@ import org.apache.spark.sql.catalyst.optimizer.SimpleTestOptimizer
 import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
 import org.apache.spark.sql.types._

-class MathFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class MathExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

   import IntegralLiteralTestUtils._

MiscFunctionsSuite.scala → MiscExpressionsSuite.scala

@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.types._

-class MiscFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class MiscExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

   test("assert_true") {
     intercept[RuntimeException] {
NullFunctionsSuite.scala → NullExpressionsSuite.scala

@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.expressions.objects.AssertNotNull
 import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
 import org.apache.spark.sql.types._

-class NullFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+class NullExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

   def testAllTypes(testFunc: (Any, DataType) => Unit): Unit = {
     testFunc(false, BooleanType)
MathExpressionsSuite.scala → MathFunctionsSuite.scala (end-to-end SQL function tests)

@@ -23,13 +23,13 @@ import org.apache.spark.sql.functions._
 import org.apache.spark.sql.functions.{log => logarithm}
 import org.apache.spark.sql.test.SharedSQLContext

-private object MathExpressionsTestData {
+private object MathFunctionsTestData {
   case class DoubleData(a: java.lang.Double, b: java.lang.Double)
   case class NullDoubles(a: java.lang.Double)
 }

-class MathExpressionsSuite extends QueryTest with SharedSQLContext {
-  import MathExpressionsTestData._
+class MathFunctionsSuite extends QueryTest with SharedSQLContext {
+  import MathFunctionsTestData._
   import testImplicits._

   private lazy val doubleData = (1 to 10).map(i => DoubleData(i * 0.2 - 1, i * -0.2 + 1)).toDF()
