From 5dcdae80f0f58016137dde75439875d586adcc05 Mon Sep 17 00:00:00 2001
From: Uros Stankovic
Date: Mon, 3 Jun 2024 11:20:28 +0200
Subject: [PATCH] Add user facing exception message

---
 .../resources/error/error-conditions.json     |  6 ++++++
 .../catalyst/plans/logical/statements.scala   | 14 +++++++++++---
 .../sql/connector/catalog/CatalogV2Util.scala | 19 +++++++++++++++----
 ...SourceV2DataFrameSessionCatalogSuite.scala | 14 +++++++++++++-
 .../sql/connector/DataSourceV2SQLSuite.scala  | 17 +++++++++++++++++
 5 files changed, 62 insertions(+), 8 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index c1c0cd6bfb39e..d3d3096586f41 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -1387,6 +1387,12 @@
     ],
     "sqlState" : "42623"
   },
+  "COLUMN_DEFAULT_VALUE_IS_NOT_FOLDABLE_OR_RESOLVED" : {
+    "message" : [
+      "The existence default value must be a simple SQL string that is resolved and foldable, but column <colName> has default value: (<defaultValue>)."
+    ],
+    "sqlState" : "42624"
+  },
   "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION" : {
     "message" : [
       "Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
index 9efc3b13bc261..94ac7d37df323 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst.plans.logical
 
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.{FieldName, FieldPosition}
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.trees.{LeafLike, UnaryLike}
@@ -142,9 +143,16 @@ case class QualifiedColType(
   def getV2Default: ColumnDefaultValue = {
     default.map { sql =>
       val e = ResolveDefaultColumns.analyze(colName, dataType, sql, "ALTER TABLE")
-      assert(e.resolved && e.foldable,
-        "The existence default value must be a simple SQL string that is resolved and foldable, " +
-        "but got: " + sql)
+      if (!e.resolved || !e.foldable) {
+        throw new AnalysisException(
+          errorClass = "COLUMN_DEFAULT_VALUE_IS_NOT_FOLDABLE_OR_RESOLVED",
+          messageParameters = Map(
+            "colName" -> colName,
+            "defaultValue" -> sql
+          )
+        )
+      }
+
       new ColumnDefaultValue(sql, LiteralValue(e.eval(), dataType))
     }.orNull
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
index 3fe25dd49fd05..4d9a0302e87f0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
@@ -525,10 +525,21 @@ private[sql] object CatalogV2Util {
     }
 
     if (isDefaultColumn) {
-      val e = analyze(f, statementType = "", metadataKey = EXISTS_DEFAULT_COLUMN_METADATA_KEY)
-      assert(e.resolved && e.foldable,
-        "The existence default value must be a simple SQL string that is resolved and foldable, " +
-        "but got: " + f.getExistenceDefaultValue().get)
+      val e = analyze(
+        f,
+        statementType = "",
+        metadataKey = EXISTS_DEFAULT_COLUMN_METADATA_KEY)
+
+      if (!e.resolved || !e.foldable) {
+        throw new AnalysisException(
+          errorClass = "COLUMN_DEFAULT_VALUE_IS_NOT_FOLDABLE_OR_RESOLVED",
+          messageParameters = Map(
+            "colName" -> f.name,
+            "defaultValue" -> f.getExistenceDefaultValue().get
+          )
+        )
+      }
+
       val defaultValue = new ColumnDefaultValue(
         f.getCurrentDefaultValue().get, LiteralValue(e.eval(), f.dataType))
       val cleanedMetadata = metadataWithKeysRemoved(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
index 5d5ea6499c49d..764324c360a15 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2DataFrameSessionCatalogSuite.scala
@@ -110,7 +110,19 @@ class InMemoryTableSessionCatalog extends TestV2SessionCatalogBase[InMemoryTable
     Option(tables.get(ident)) match {
       case Some(table) =>
         val properties = CatalogV2Util.applyPropertiesChanges(table.properties, changes)
-        val schema = CatalogV2Util.applySchemaChanges(table.schema, changes, None, "ALTER TABLE")
+        val provider =
+          if (properties.containsKey("provider")) {
+            Some(properties.get("provider"))
+          } else {
+            None
+          }
+
+        val schema = CatalogV2Util.applySchemaChanges(
+          table.schema,
+          changes,
+          provider,
+          "ALTER TABLE"
+        )
 
         // fail if the last column in the schema was dropped
         if (schema.fields.isEmpty) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index d89c0a2525fd9..8a434bb96cf85 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -3488,6 +3488,23 @@ class DataSourceV2SQLSuiteV1Filter
     }
   }
 
+  test("SPARK-48286: Add new column with default value which is not foldable") {
+    withSQLConf(
+        SQLConf.DEFAULT_COLUMN_ALLOWED_PROVIDERS.key -> v2Source) {
+      withTable("tab") {
+        spark.sql(s"CREATE TABLE tab (col1 INT DEFAULT 100) USING $v2Source")
+        val exception = intercept[AnalysisException] {
+          // Rand function is not foldable
+          spark.sql("ALTER TABLE tab ADD COLUMN col2 DOUBLE DEFAULT rand()")
+        }
+        assert(exception.getSqlState == "42624")
+        assert(exception.errorClass.get == "COLUMN_DEFAULT_VALUE_IS_NOT_FOLDABLE_OR_RESOLVED")
+        assert(exception.messageParameters("colName") == "col2")
+        assert(exception.messageParameters("defaultValue") == "rand()")
+      }
+    }
+  }
+
   private def testNotSupportedV2Command(
       sqlCommand: String,
       sqlParams: String,
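For reference only (not part of the patch): the new test checks the error with raw asserts on getSqlState, errorClass, and messageParameters. A minimal sketch of the same expectations written with the checkError helper from SparkFunSuite is shown below; it assumes the pre-Spark-4.0 parameter names (errorClass, parameters) are available on this branch.

    // Illustrative sketch, assuming SparkFunSuite.checkError with the
    // errorClass/parameters named arguments; equivalent to the asserts above.
    checkError(
      exception = intercept[AnalysisException] {
        sql("ALTER TABLE tab ADD COLUMN col2 DOUBLE DEFAULT rand()")
      },
      errorClass = "COLUMN_DEFAULT_VALUE_IS_NOT_FOLDABLE_OR_RESOLVED",
      parameters = Map("colName" -> "col2", "defaultValue" -> "rand()"))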