Skip to content

Commit

Permalink
[SQL][Minor] rename DataTypeParser.apply to DataTypeParser.parse
Browse files Browse the repository at this point in the history
Rename DataTypeParser.apply to DataTypeParser.parse to make call sites clearer and more readable.
/cc rxin

Author: wangfei <[email protected]>

Closes apache#5710 from scwf/apply and squashes the following commits:

c319977 [wangfei] rename apply to parse
  • Loading branch information
scwf authored and rxin committed Apr 27, 2015
1 parent ca55dc9 commit d188b8b
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ object PhysicalOperation extends PredicateHelper {
}

def collectAliases(fields: Seq[Expression]): Map[Attribute, Expression] = fields.collect {
case a @ Alias(child, _) => a.toAttribute.asInstanceOf[Attribute] -> child
case a @ Alias(child, _) => a.toAttribute -> child
}.toMap

def substitute(aliases: Map[Attribute, Expression])(expr: Expression): Expression = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ private[sql] object DataTypeParser {
override val lexical = new SqlLexical
}

def apply(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
def parse(dataTypeString: String): DataType = dataTypeParser.toDataType(dataTypeString)
}

/** The exception thrown from the [[DataTypeParser]]. */
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ class DataTypeParserSuite extends FunSuite {

def checkDataType(dataTypeString: String, expectedDataType: DataType): Unit = {
test(s"parse ${dataTypeString.replace("\n", "")}") {
assert(DataTypeParser(dataTypeString) === expectedDataType)
assert(DataTypeParser.parse(dataTypeString) === expectedDataType)
}
}

def unsupported(dataTypeString: String): Unit = {
test(s"$dataTypeString is not supported") {
intercept[DataTypeException](DataTypeParser(dataTypeString))
intercept[DataTypeException](DataTypeParser.parse(dataTypeString))
}
}

Expand Down
2 changes: 1 addition & 1 deletion sql/core/src/main/scala/org/apache/spark/sql/Column.scala
Original file line number Diff line number Diff line change
Expand Up @@ -647,7 +647,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
*
* @group expr_ops
*/
def cast(to: String): Column = cast(DataTypeParser(to))
def cast(to: String): Column = cast(DataTypeParser.parse(to))

/**
* Returns an ordering used in sorting.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -871,7 +871,7 @@ private[hive] case class MetastoreRelation


private[hive] object HiveMetastoreTypes {
def toDataType(metastoreType: String): DataType = DataTypeParser(metastoreType)
def toDataType(metastoreType: String): DataType = DataTypeParser.parse(metastoreType)

def toMetastoreType(dt: DataType): String = dt match {
case ArrayType(elementType, _) => s"array<${toMetastoreType(elementType)}>"
Expand Down

0 comments on commit d188b8b

Please sign in to comment.