From 9dd61b7a63ff90e666b554fb832e2a979937aa40 Mon Sep 17 00:00:00 2001
From: Gideon P
Date: Sun, 29 Sep 2024 13:26:57 -0400
Subject: [PATCH] Added COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.BAD_INPUTS. Tests
 pass.

---
 .../src/main/resources/error/error-conditions.json |  5 +++++
 .../sql/catalyst/expressions/aggregate/Mode.scala   | 14 ++++++------
 .../spark/sql/CollationSQLExpressionsSuite.scala    | 11 ++++++-----
 3 files changed, 19 insertions(+), 11 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index a316190214923..b591d59a3f938 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -634,6 +634,11 @@
       "message" : [
         "The collection of input data types must not be empty."
       ]
+    },
+    "BAD_INPUTS" : {
+      "message" : [
+        "The input data types to <functionName> must be valid, but found the input types <dataType>."
+      ]
     }
   },
   "sqlState" : "42K09"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Mode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Mode.scala
index a69de9e2e70e6..254a839a14868 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Mode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Mode.scala
@@ -19,14 +19,15 @@ package org.apache.spark.sql.catalyst.expressions.aggregate
 
 import org.apache.spark.SparkIllegalArgumentException
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult, UnresolvedWithinGroup}
-import org.apache.spark.sql.catalyst.expressions.{Ascending, Descending, Expression, ExpressionDescription, ImplicitCastInputTypes, SortOrder}
+import org.apache.spark.sql.catalyst.analysis.{UnresolvedWithinGroup, TypeCheckResult, ExpressionBuilder}
+import org.apache.spark.sql.catalyst.expressions.Cast.toSQLExpr
+import org.apache.spark.sql.catalyst.expressions.{ImplicitCastInputTypes, Expression, Ascending, ExpressionDescription, SortOrder, Descending}
 import org.apache.spark.sql.catalyst.trees.UnaryLike
 import org.apache.spark.sql.catalyst.types.PhysicalDataType
-import org.apache.spark.sql.catalyst.util.{ArrayData, CollationFactory, GenericArrayData, UnsafeRowUtils}
-import org.apache.spark.sql.errors.DataTypeErrors.{toSQLId, toSQLType}
+import org.apache.spark.sql.catalyst.util.{GenericArrayData, CollationFactory, UnsafeRowUtils, ArrayData}
+import org.apache.spark.sql.errors.DataTypeErrors.{toSQLType, toSQLId}
 import org.apache.spark.sql.errors.QueryCompilationErrors
-import org.apache.spark.sql.types.{AbstractDataType, AnyDataType, ArrayType, BooleanType, DataType, MapType, StringType, StructField, StructType}
+import org.apache.spark.sql.types.{StringType, ArrayType, StructType, BooleanType, DataType, StructField, MapType, AnyDataType, AbstractDataType}
 import org.apache.spark.unsafe.types.UTF8String
 import org.apache.spark.util.collection.OpenHashMap
 
@@ -118,8 +119,9 @@ case class Mode(
         CollationFactory.getCollationKey(data.asInstanceOf[UTF8String], st.collationId)
       case _ =>
         throw new SparkIllegalArgumentException(
-          errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUT",
+          errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.BAD_INPUTS",
           messageParameters = Map(
+            "expression" -> toSQLExpr(this),
             "functionName" -> toSQLType(prettyName),
             "dataType" -> toSQLType(child.dataType))
         )
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
index 2fbe920a30e71..ecce2da6b6211 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala
@@ -1957,7 +1957,7 @@ class CollationSQLExpressionsSuite
       ModeTestCase("unicode_ci", Map("a" -> 3L, "b" -> 2L, "B" -> 2L), "b")
     ).foreach { t1 =>
       checkError(
-        exception = intercept[SparkException] {
+        exception = intercept[SparkIllegalArgumentException] {
           Mode(
             child = Literal.create(null,
               MapType(StringType(t1.collationId), IntegerType)
@@ -1966,10 +1966,11 @@ class CollationSQLExpressionsSuite
             dataType = MapType(StringType(t1.collationId), IntegerType)
           )
         },
-        condition = "INTERNAL_ERROR",
-        parameters = Map("message" ->
-          "Cannot find sub error class 'COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUT'")
-        // Map("function" -> "mode(i)", "dataType" -> "MAP")
+        condition = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.BAD_INPUTS",
+        parameters = Map(
+          "expression" -> "\"mode(NULL)\"",
+          "functionName" -> "\"MODE\"",
+          "dataType" -> s"\"MAP\"")
       )
     }
   }
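
Note appended after the patch, not part of it: a minimal sketch of how the new
error condition can be exercised directly (for example from a scratch test in
sql/core), assuming the patched error-conditions.json is on the classpath. The
parameter values, including the MAP type string, are illustrative placeholders
rather than output captured from a real run.

    import org.apache.spark.SparkIllegalArgumentException

    // Build the error the same way Mode.scala now does, to confirm that the
    // BAD_INPUTS entry resolves and carries the three parameters the
    // aggregate supplies.
    val e = new SparkIllegalArgumentException(
      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.BAD_INPUTS",
      messageParameters = Map(
        "expression" -> "\"mode(NULL)\"",
        "functionName" -> "\"MODE\"",
        "dataType" -> "\"MAP<STRING COLLATE UNICODE_CI, INT>\""))  // illustrative type string

    assert(e.getErrorClass == "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.BAD_INPUTS")
    // Renders the message template for the error class with the placeholders
    // substituted from messageParameters.
    println(e.getMessage)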