From bdf44b4e71f180ebf970ef40d5850d558d23a073 Mon Sep 17 00:00:00 2001
From: bomeng
Date: Sat, 26 Mar 2016 23:09:43 -0700
Subject: [PATCH] add test cases and update code style

---
 .../sql/execution/command/commands.scala      |  8 ++---
 .../org/apache/spark/sql/SQLQuerySuite.scala  | 29 +++++++++++++++++++
 2 files changed, 32 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
index e3bc3cb42bdb8..32b6f4c0164ff 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
@@ -446,10 +446,8 @@ case class SetDatabaseCommand(databaseName: String) extends RunnableCommand {
  * }}}
  */
 case class CreateFunction(
-    functionName: String,
-    alias: String,
-    resources: Seq[(String, String)],
-    isTemp: Boolean)(sql: String) extends RunnableCommand {
+    functionName: String, alias: String, resources: Seq[(String, String)],
+    isTemp: Boolean)(sql: String) extends RunnableCommand {
   override def run(sqlContext: SQLContext): Seq[Row] = {
     val catalog = sqlContext.sessionState.catalog
     val functionIdentifier = FunctionIdentifier(functionName, Some(catalog.getCurrentDatabase))
@@ -458,4 +456,4 @@ case class CreateFunction(
   }
 
   override val output: Seq[Attribute] = Seq.empty
-}
\ No newline at end of file
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index c958eac266d61..9809a1a23fa4b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -21,10 +21,13 @@ import java.math.MathContext
 import java.sql.Timestamp
 
 import org.apache.spark.AccumulatorSuite
+import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
+import org.apache.spark.sql.catalyst.catalog.CatalogFunction
 import org.apache.spark.sql.catalyst.expressions.SortOrder
 import org.apache.spark.sql.catalyst.plans.logical.Aggregate
 import org.apache.spark.sql.execution.aggregate
+import org.apache.spark.sql.execution.command.CreateFunction
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoin, CartesianProduct, SortMergeJoin}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
@@ -2376,4 +2379,30 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       Row("r3c1x", "r3c2", "t1r3c3", "r3c2", "t1r3c3") :: Nil)
     }
   }
+
+  test("SPARK-14123") {
+    val sql1 =
+      """
+        |CREATE TEMPORARY FUNCTION helloworld1 AS
+        |'spark.example.SimpleUDFExample1' USING JAR '/path/to/jar1',
+        |JAR '/path/to/jar2'
+      """.stripMargin
+    val sql2 =
+      """
+        |CREATE FUNCTION helloworld2 AS
+        |'spark.example.SimpleUDFExample2' USING ARCHIVE '/path/to/archive',
+        |FILE '/path/to/file'
+      """.stripMargin
+    sql(sql1)
+    sql(sql2)
+
+    val catalog = sqlContext.sessionState.catalog
+    val id1 = FunctionIdentifier("helloworld1", Some(catalog.getCurrentDatabase))
+    val id2 = FunctionIdentifier("helloworld2", Some(catalog.getCurrentDatabase))
+
+    val f1 = catalog.getFunction(id1)
+    val f2 = catalog.getFunction(id2)
+    assert(f1 == CatalogFunction(id1, "spark.example.SimpleUDFExample1"))
+    assert(f2 == CatalogFunction(id2, "spark.example.SimpleUDFExample2"))
+  }
 }
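
Note (not part of the patch): the new test drives the CREATE [TEMPORARY] FUNCTION ... AS '<class>' USING JAR/ARCHIVE/FILE syntax end to end, from SQL text through the CreateFunction command into the session catalog. Below is a minimal usage sketch of that same path, assuming a session where `sqlContext` is in scope (as in spark-shell of this vintage); the class name and JAR path are hypothetical placeholders mirroring the ones in the test, and the code uses only the APIs visible in the diff above, so it is a sketch rather than a verbatim excerpt from the codebase.

// Register a temporary function backed by a hypothetical UDF class shipped in a JAR.
// 'spark.example.SimpleUDFExample' and '/path/to/jar' are placeholders, as in the test.
sqlContext.sql(
  """
    |CREATE TEMPORARY FUNCTION helloworld AS
    |'spark.example.SimpleUDFExample' USING JAR '/path/to/jar'
  """.stripMargin)

// The statement is planned as the CreateFunction command patched above; its run()
// method builds a FunctionIdentifier qualified by the current database and registers
// the function in the session catalog, where it can then be looked up:
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogFunction

val catalog = sqlContext.sessionState.catalog
val id = FunctionIdentifier("helloworld", Some(catalog.getCurrentDatabase))
assert(catalog.getFunction(id) == CatalogFunction(id, "spark.example.SimpleUDFExample"))

Note that the test asserts the catalog entry by value in the same way, which is why both the temporary and the permanent variants can be checked through catalog.getFunction with an identifier qualified by the current database.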