diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index db64a582be882..bc8048359751d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -364,7 +364,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
           string(script),
           attributes,
           withFilter,
-          withScriptIOSchema(inRowFormat, recordWriter, outRowFormat, recordReader, schemaLess))
+          withScriptIOSchema(ctx, inRowFormat, recordWriter, outRowFormat, recordReader, schemaLess)
+            .asInstanceOf[ScriptInputOutputSchema])

       case SqlBaseParser.SELECT =>
         // Regular select
@@ -411,11 +412,12 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    * Create a (Hive based) [[ScriptInputOutputSchema]].
    */
   protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
       inRowFormat: RowFormatContext,
       recordWriter: Token,
       outRowFormat: RowFormatContext,
       recordReader: Token,
-      schemaLess: Boolean): ScriptInputOutputSchema = null
+      schemaLess: Boolean): AnyRef = null

   /**
    * Create a logical plan for a given 'FROM' clause. Note that we support multiple (comma
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index ff3ab7746cc86..1b4531257d3e8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -18,6 +18,8 @@ package org.apache.spark.sql.execution

 import scala.collection.JavaConverters._

+import org.antlr.v4.runtime.Token
+
 import org.apache.spark.sql.SaveMode
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.parser.{AbstractSqlParser, AstBuilder, ParseException}
@@ -182,6 +184,20 @@ class SparkSqlAstBuilder extends AstBuilder {
     (visitTableIdentifier(ctx.tableIdentifier), temporary, ifNotExists, ctx.EXTERNAL != null)
   }

+  /**
+   * Unsupported operation in SQL Context.
+   */
+  override protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
+      inRowFormat: RowFormatContext,
+      recordWriter: Token,
+      outRowFormat: RowFormatContext,
+      recordReader: Token,
+      schemaLess: Boolean): AnyRef = {
+    throw new ParseException(
+      "Script Transform is not supported in SQLContext. Use a HiveContext instead", ctx)
+  }
+
   /**
    * Create a [[CreateTableUsing]] or a [[CreateTableUsingAsSelect]] logical plan.
    *
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index bbaa0984569f3..5bcc172ca7655 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -332,9 +332,6 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
     def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
       case r: RunnableCommand => ExecutedCommand(r) :: Nil

-      case _: logical.ScriptTransformation =>
-        sys.error("Script Transform is not supported in SQLContext. Use a HiveContext instead.")
-
       case logical.Distinct(child) =>
         throw new IllegalStateException(
           "logical distinct operator should have been replaced by aggregate in the optimizer")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index ba2458e411540..b727e88668370 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -658,19 +658,6 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     }
   }

-  test("script transform") {
-    val e = intercept[RuntimeException] {
-      sql(
-        """
-          |SELECT TRANSFORM (key, value)
-          |USING 'cat' AS (tKey, tValue)
-          |FROM testData
-        """.stripMargin).show()
-    }
-    assert(e.getMessage contains
-      "Script Transform is not supported in SQLContext. Use a HiveContext instead.")
-  }
-
   test("date row") {
     checkAnswer(sql(
       """select cast("2015-01-28" as date) from testData limit 1"""),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index 65eda53ed9ce9..fd9bd8b0f2436 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -774,7 +774,7 @@ class DDLCommandSuite extends PlanTest {
     comparePlans(parsed2, expected2)
   }

-  test("commands in HiveSqlParser") {
+  test("commands only available in HiveContext") {
     intercept[ParseException] {
       parser.parsePlan("DROP TABLE D1.T1")
     }
@@ -791,5 +791,8 @@ class DDLCommandSuite extends PlanTest {
           |TBLPROPERTIES('prop1Key '= "prop1Val", ' `prop2Key` '= "prop2Val")
         """.stripMargin)
     }
+    intercept[ParseException] {
+      parser.parsePlan("SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue) FROM testData")
+    }
   }
 }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
index a7e2b1d540776..7dfaa913add37 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
@@ -294,11 +294,12 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
    * Create a [[HiveScriptIOSchema]].
    */
   override protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
       inRowFormat: RowFormatContext,
       recordWriter: Token,
       outRowFormat: RowFormatContext,
       recordReader: Token,
-      schemaLess: Boolean): HiveScriptIOSchema = {
+      schemaLess: Boolean): AnyRef = {
     if (recordWriter != null || recordReader != null) {
       logWarning("Used defined record reader/writer classes are currently ignored.")
     }
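
The net effect of this diff is to surface the "Script Transform is not supported in SQLContext" error at parse time, through a template method that each parser overrides, instead of at planning time in SparkStrategies. Because catalyst's AstBuilder cannot depend on Hive's HiveScriptIOSchema, the base method's return type is widened to AnyRef and the one call site casts back with asInstanceOf[ScriptInputOutputSchema]. Below is a minimal, self-contained Scala sketch of that override pattern; every name in it is a hypothetical stand-in for the real AstBuilder / SparkSqlAstBuilder / HiveSqlAstBuilder types, not Spark's actual API.

object ScriptIOSchemaPatternSketch extends App {

  final class ParseException(msg: String) extends RuntimeException(msg)

  // Stand-in for catalyst's AstBuilder: returns AnyRef so that subclasses in
  // other modules can return types the catalyst module cannot reference.
  class BaseBuilder {
    def withScriptIOSchema(transformClause: String): AnyRef = null
  }

  // Stand-in for SparkSqlAstBuilder: reject script transforms while parsing.
  class SqlOnlyBuilder extends BaseBuilder {
    override def withScriptIOSchema(transformClause: String): AnyRef =
      throw new ParseException(
        "Script Transform is not supported in SQLContext. Use a HiveContext instead")
  }

  // Stand-in for HiveSqlAstBuilder: actually build an IO schema object.
  case class FakeHiveScriptIOSchema(clause: String)
  class HiveBuilder extends BaseBuilder {
    override def withScriptIOSchema(transformClause: String): AnyRef =
      FakeHiveScriptIOSchema(transformClause)
  }

  // The Hive path succeeds; the SQL-only path now fails during parsing.
  println(new HiveBuilder().withScriptIOSchema("USING 'cat'"))
  try new SqlOnlyBuilder().withScriptIOSchema("USING 'cat'")
  catch { case e: ParseException => println(s"rejected: ${e.getMessage}") }
}

Pushing the check into the parser this way also lets the old planner-side case and its RuntimeException-based test be deleted, with the behavior covered instead by the ParseException assertion added to DDLCommandSuite.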