Commit e69a7e8

address comments.

gatorsmile committed Apr 4, 2016
1 parent c7c9461 commit e69a7e8
Showing 6 changed files with 26 additions and 20 deletions.

@@ -364,7 +364,8 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
           string(script),
           attributes,
           withFilter,
-          withScriptIOSchema(inRowFormat, recordWriter, outRowFormat, recordReader, schemaLess))
+          withScriptIOSchema(ctx, inRowFormat, recordWriter, outRowFormat, recordReader, schemaLess)
+            .asInstanceOf[ScriptInputOutputSchema])
 
       case SqlBaseParser.SELECT =>
         // Regular select
@@ -411,11 +412,12 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
    * Create a (Hive based) [[ScriptInputOutputSchema]].
    */
   protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
       inRowFormat: RowFormatContext,
       recordWriter: Token,
       outRowFormat: RowFormatContext,
       recordReader: Token,
-      schemaLess: Boolean): ScriptInputOutputSchema = null
+      schemaLess: Boolean): AnyRef = null
 
   /**
    * Create a logical plan for a given 'FROM' clause. Note that we support multiple (comma
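
For orientation, the change above turns withScriptIOSchema into an overridable hook: the shared AstBuilder returns null, the SQL-only builder (next file) rejects the clause while parsing, and the Hive builder returns a concrete schema, with the single call site casting the result back to ScriptInputOutputSchema. A self-contained sketch of that shape, using illustrative names only (none of these classes are Spark code):

    class UnsupportedClauseException(msg: String) extends RuntimeException(msg)

    class BaseBuilder {
      // Default hook used by the shared parsing code: no script I/O schema support.
      protected def withScriptIOSchema(clause: String): AnyRef = null

      // Single call site: casts the hook's result back to the type it expects.
      def build(clause: String): String =
        withScriptIOSchema(clause).asInstanceOf[String]
    }

    class SqlOnlyBuilder extends BaseBuilder {
      // Reject the clause while parsing, before any planning happens.
      override protected def withScriptIOSchema(clause: String): AnyRef =
        throw new UnsupportedClauseException(s"Script transform not supported: $clause")
    }

    class HiveLikeBuilder extends BaseBuilder {
      // Produce a concrete schema object (a plain String stands in for it here).
      override protected def withScriptIOSchema(clause: String): AnyRef =
        s"schema-for($clause)"
    }

    object HookShapeDemo extends App {
      println(new HiveLikeBuilder().build("TRANSFORM (key, value)")) // schema-for(TRANSFORM (key, value))
      try new SqlOnlyBuilder().build("TRANSFORM (key, value)")
      catch { case e: UnsupportedClauseException => println(e.getMessage) }
    }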

@@ -18,6 +18,8 @@ package org.apache.spark.sql.execution

 import scala.collection.JavaConverters._
 
+import org.antlr.v4.runtime.Token
+
 import org.apache.spark.sql.SaveMode
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.parser.{AbstractSqlParser, AstBuilder, ParseException}
@@ -182,6 +184,20 @@ class SparkSqlAstBuilder extends AstBuilder {
     (visitTableIdentifier(ctx.tableIdentifier), temporary, ifNotExists, ctx.EXTERNAL != null)
   }
 
+  /**
+   * Unsupported operation in SQL Context.
+   */
+  override protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
+      inRowFormat: RowFormatContext,
+      recordWriter: Token,
+      outRowFormat: RowFormatContext,
+      recordReader: Token,
+      schemaLess: Boolean): AnyRef = {
+    throw new ParseException(
+      "Script Transform is not supported in SQLContext. Use a HiveContext instead", ctx)
+  }
+
   /**
    * Create a [[CreateTableUsing]] or a [[CreateTableUsingAsSelect]] logical plan.
    *
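
The practical effect of the override above: an unsupported TRANSFORM clause now fails while the statement is parsed, raising a ParseException, instead of reaching the planner. A hypothetical ScalaTest-style check sketching that behaviour (not part of this commit; it assumes the SQLContext SQL dialect is wired to this parser and reuses the query from the removed SQLQuerySuite test below):

    import org.apache.spark.sql.QueryTest
    import org.apache.spark.sql.catalyst.parser.ParseException
    import org.apache.spark.sql.test.SharedSQLContext

    // Hypothetical suite: the rejection happens at parse time, so no "testData"
    // table has to exist for the error to surface.
    class ScriptTransformRejectionSuite extends QueryTest with SharedSQLContext {
      test("TRANSFORM is rejected while parsing in SQLContext") {
        val e = intercept[ParseException] {
          sql("SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue) FROM testData")
        }
        assert(e.getMessage.contains(
          "Script Transform is not supported in SQLContext. Use a HiveContext instead"))
      }
    }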

@@ -332,9 +332,6 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
     def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
       case r: RunnableCommand => ExecutedCommand(r) :: Nil
 
-      case _: logical.ScriptTransformation =>
-        sys.error("Script Transform is not supported in SQLContext. Use a HiveContext instead.")
-
       case logical.Distinct(child) =>
         throw new IllegalStateException(
           "logical distinct operator should have been replaced by aggregate in the optimizer")

13 changes: 0 additions & 13 deletions sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -658,19 +658,6 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     }
   }
 
-  test("script transform") {
-    val e = intercept[RuntimeException] {
-      sql(
-        """
-          |SELECT TRANSFORM (key, value)
-          |USING 'cat' AS (tKey, tValue)
-          |FROM testData
-        """.stripMargin).show()
-    }
-    assert(e.getMessage contains
-      "Script Transform is not supported in SQLContext. Use a HiveContext instead.")
-  }
-
   test("date row") {
     checkAnswer(sql(
       """select cast("2015-01-28" as date) from testData limit 1"""),

@@ -774,7 +774,7 @@ class DDLCommandSuite extends PlanTest {
     comparePlans(parsed2, expected2)
   }
 
-  test("commands in HiveSqlParser") {
+  test("commands only available in HiveContext") {
     intercept[ParseException] {
       parser.parsePlan("DROP TABLE D1.T1")
     }
@@ -791,5 +791,8 @@
         |TBLPROPERTIES('prop1Key '= "prop1Val", ' `prop2Key` '= "prop2Val")
       """.stripMargin)
     }
+    intercept[ParseException] {
+      parser.parsePlan("SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue) FROM testData")
+    }
   }
 }

@@ -294,11 +294,12 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
    * Create a [[HiveScriptIOSchema]].
    */
   override protected def withScriptIOSchema(
+      ctx: QuerySpecificationContext,
       inRowFormat: RowFormatContext,
       recordWriter: Token,
       outRowFormat: RowFormatContext,
       recordReader: Token,
-      schemaLess: Boolean): HiveScriptIOSchema = {
+      schemaLess: Boolean): AnyRef = {
     if (recordWriter != null || recordReader != null) {
       logWarning("Used defined record reader/writer classes are currently ignored.")
     }
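
On the Hive side nothing user-visible changes: the same TRANSFORM statement still parses into a script transformation backed by a HiveScriptIOSchema. A rough, hypothetical usage sketch (not part of this commit), assuming a local Hive-enabled build, a Unix cat on the PATH, and that HiveContext.sql routes through this parser:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    // Hypothetical demo: with a Hive-enabled build the TRANSFORM clause keeps
    // working, piping each input row through the external 'cat' command.
    object TransformWithHiveContext {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("transform-demo").setMaster("local[2]"))
        val hive = new HiveContext(sc)
        hive.range(0, 10).selectExpr("id AS key", "CAST(id AS STRING) AS value")
          .registerTempTable("testData")
        hive.sql(
          "SELECT TRANSFORM (key, value) USING 'cat' AS (tKey, tValue) FROM testData").show()
        sc.stop()
      }
    }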
