diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
index 964f0a7a7b4e7..e3bc3cb42bdb8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/commands.scala
@@ -21,8 +21,9 @@ import java.util.NoSuchElementException
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.catalyst.catalog.CatalogFunction
 import org.apache.spark.sql.{Dataset, Row, SQLContext}
-import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, TableIdentifier}
+import org.apache.spark.sql.catalyst.{CatalystTypeConverters, FunctionIdentifier, InternalRow, TableIdentifier}
 import org.apache.spark.sql.catalyst.errors.TreeNodeException
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.catalyst.plans.logical
@@ -434,3 +435,27 @@ case class SetDatabaseCommand(databaseName: String) extends RunnableCommand {
 
   override val output: Seq[Attribute] = Seq.empty
 }
+
+/**
+ * A command for users to create a function.
+ * The syntax of using this command in SQL is
+ * {{{
+ *    CREATE TEMPORARY FUNCTION function_name AS class_name;
+ *    CREATE FUNCTION [db_name.]function_name AS class_name
+ *      [USING JAR|FILE|ARCHIVE 'file_uri' [, JAR|FILE|ARCHIVE 'file_uri'] ];
+ * }}}
+ */
+case class CreateFunction(
+    functionName: String,
+    alias: String,
+    resources: Seq[(String, String)],
+    isTemp: Boolean)(sql: String) extends RunnableCommand {
+  override def run(sqlContext: SQLContext): Seq[Row] = {
+    val catalog = sqlContext.sessionState.catalog
+    val functionIdentifier = FunctionIdentifier(functionName, Some(catalog.getCurrentDatabase))
+    catalog.createFunction(CatalogFunction(functionIdentifier, alias))
+    Seq.empty[Row]
+  }
+
+  override val output: Seq[Attribute] = Seq.empty
+}
\ No newline at end of file
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 07c89afafb6b6..374cc04a84852 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -52,13 +52,6 @@ case class CreateDatabase(
     props: Map[String, String])(sql: String)
   extends NativeDDLCommand(sql) with Logging
 
-case class CreateFunction(
-    functionName: String,
-    alias: String,
-    resources: Seq[(String, String)],
-    isTemp: Boolean)(sql: String)
-  extends NativeDDLCommand(sql) with Logging
-
 case class AlterTableRename(
     oldName: TableIdentifier,
     newName: TableIdentifier)(sql: String)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 6f1eea273fafa..bdc192a61b2d6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -47,13 +47,13 @@ class DDLCommandSuite extends PlanTest {
   test("create function") {
     val sql1 =
       """
-        |CREATE TEMPORARY FUNCTION helloworld as
+        |CREATE TEMPORARY FUNCTION helloworld AS
         |'com.matthewrathbone.example.SimpleUDFExample' USING JAR '/path/to/jar1',
         |JAR '/path/to/jar2'
       """.stripMargin
     val sql2 =
       """
-        |CREATE FUNCTION hello.world as
+        |CREATE FUNCTION hello.world AS
         |'com.matthewrathbone.example.SimpleUDFExample' USING ARCHIVE '/path/to/archive',
         |FILE '/path/to/file'
       """.stripMargin
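
A quick usage sketch of the change above: CREATE FUNCTION is now planned as the
RunnableCommand in commands.scala instead of the removed NativeDDLCommand in
ddl.scala, so running the statement invokes run(), which registers a
CatalogFunction in the session catalog. Assuming a SQLContext named sqlContext
and a hypothetical UDF class com.example.HelloWorld on the classpath:

    // Parses to CreateFunction(...) and, through run(), registers the function
    // in the session catalog, qualified with the current database.
    sqlContext.sql("CREATE FUNCTION hello_world AS 'com.example.HelloWorld'")

Note that run() as written always qualifies the name with the current database
and does not consult isTemp or resources, so the TEMPORARY keyword and the
USING JAR|FILE|ARCHIVE clause are parsed but not yet acted on by this command.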