From 61eead9119b634a85001a2067909006057fa5149 Mon Sep 17 00:00:00 2001 From: allisonwang-db Date: Tue, 4 Apr 2023 16:21:25 +0800 Subject: [PATCH 1/4] SPARK-43028 --- core/src/main/resources/error/error-classes.json | 5 +++++ .../apache/spark/sql/errors/QueryExecutionErrors.scala | 6 ++++-- .../scala/org/apache/spark/sql/internal/SQLConf.scala | 8 ++++---- .../org/apache/spark/sql/internal/SQLConfSuite.scala | 9 ++++++++- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 8369c7c5666bb..08e0d093ae1bf 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1414,6 +1414,11 @@ "sortBy must be used together with bucketBy." ] }, + "SQL_CONF_NOT_FOUND" : { + "message" : [ + "The SQL config \"<key>\" cannot be found. Please verify that the config exists." + ] + }, "STAR_GROUP_BY_POS" : { "message" : [ "Star (*) is not allowed in a select list when GROUP BY an ordinal position is used."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 17c5b2f4f10ec..3c3f1d82d5962 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2134,8 +2134,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { cause = null) } - def noSuchElementExceptionError(key: String): Throwable = { - new NoSuchElementException(key) + def sqlConfigNotFoundError(key: String): SparkRuntimeException = { + new SparkRuntimeException( + errorClass = "SQL_CONF_NOT_FOUND", + messageParameters = Map("key" -> key)) } def cannotMutateReadOnlySQLConfError(): SparkUnsupportedOperationException = { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index ae69e4cf69856..1100139f9c5ee 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -17,7 +17,7 @@ package org.apache.spark.sql.internal -import java.util.{Locale, NoSuchElementException, Properties, TimeZone} +import java.util.{Locale, Properties, TimeZone} import java.util import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicReference @@ -31,7 +31,7 @@ import scala.util.matching.Regex import org.apache.hadoop.fs.Path -import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, TaskContext} +import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, SparkRuntimeException, TaskContext} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.internal.config.{IGNORE_MISSING_FILES => SPARK_IGNORE_MISSING_FILES} @@ -5056,14 +5056,14 @@ class SQLConf extends 
Serializable with Logging { } /** Return the value of Spark SQL configuration property for the given key. */ - @throws[NoSuchElementException]("if key is not set") + @throws[SparkRuntimeException]("if key is not set") def getConfString(key: String): String = { Option(settings.get(key)). orElse { // Try to use the default value Option(getConfigEntry(key)).map { e => e.stringConverter(e.readFrom(reader)) } }. - getOrElse(throw QueryExecutionErrors.noSuchElementExceptionError(key)) + getOrElse(throw QueryExecutionErrors.sqlConfigNotFoundError(key)) } /** diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 30f4fdfbbcff3..d79db1ca895ec 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -22,7 +22,7 @@ import java.util.TimeZone import org.apache.hadoop.fs.Path import org.apache.logging.log4j.Level -import org.apache.spark.SPARK_DOC_ROOT +import org.apache.spark.{SPARK_DOC_ROOT, SparkRuntimeException} import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT @@ -493,4 +493,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { |${nonInternalLegacyConfigs.map(_._1).mkString("\n")} |""".stripMargin) } + + test("SPARK-43028: config not found error") { + checkError( + exception = intercept[SparkRuntimeException](spark.conf.get("some.conf")), + errorClass = "SQL_CONF_NOT_FOUND", + parameters = Map("key" -> "some.conf")) + } } From 44102109dfa89885ed05dd0663385d3a6fd3407a Mon Sep 17 00:00:00 2001 From: allisonwang-db Date: Wed, 5 Apr 2023 15:46:04 +0800 Subject: [PATCH 2/4] address comments --- core/src/main/resources/error/error-classes.json | 2 +- .../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +- 
.../test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 08e0d093ae1bf..5ade2648f08a8 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1416,7 +1416,7 @@ }, "SQL_CONF_NOT_FOUND" : { "message" : [ - "The SQL config \"<key>\" cannot be found. Please verify that the config exists." + "The SQL config <sqlConf> cannot be found. Please verify that the config exists." ] }, "STAR_GROUP_BY_POS" : { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 3c3f1d82d5962..b1118b29dd7fb 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2137,7 +2137,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def sqlConfigNotFoundError(key: String): SparkRuntimeException = { new SparkRuntimeException( errorClass = "SQL_CONF_NOT_FOUND", - messageParameters = Map("key" -> key)) + messageParameters = Map("sqlConf" -> toSQLConf(key))) } def cannotMutateReadOnlySQLConfError(): SparkUnsupportedOperationException = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index d79db1ca895ec..2f49b7c962de4 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -498,6 +498,6 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { checkError( exception = intercept[SparkRuntimeException](spark.conf.get("some.conf")), errorClass = "SQL_CONF_NOT_FOUND", -
parameters = Map("key" -> "some.conf")) + parameters = Map("sqlConf" -> "\"some.conf\"")) } } From c4c4b1086b28091d6bc07021fc081a826d8c1972 Mon Sep 17 00:00:00 2001 From: allisonwang-db Date: Thu, 6 Apr 2023 09:41:42 +0800 Subject: [PATCH 3/4] update error --- .../org/apache/spark/SparkException.scala | 19 +++++++++++++++++++ .../sql/errors/QueryExecutionErrors.scala | 4 ++-- .../apache/spark/sql/internal/SQLConf.scala | 4 ++-- .../spark/sql/internal/SQLConfSuite.scala | 4 ++-- 4 files changed, 25 insertions(+), 6 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala index 2f05b2ad6a7fa..4e48e9c8d418a 100644 --- a/core/src/main/scala/org/apache/spark/SparkException.scala +++ b/core/src/main/scala/org/apache/spark/SparkException.scala @@ -293,6 +293,25 @@ private[spark] class SparkRuntimeException( override def getQueryContext: Array[QueryContext] = context } +/** + * No such element exception thrown from Spark with an error class. + */ +private[spark] class SparkNoSuchElementException( + errorClass: String, + messageParameters: Map[String, String], + context: Array[QueryContext] = Array.empty, + summary: String = "") + extends NoSuchElementException( + SparkThrowableHelper.getMessage(errorClass, messageParameters, summary)) + with SparkThrowable { + + override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava + + override def getErrorClass: String = errorClass + + override def getQueryContext: Array[QueryContext] = context +} + /** * Security exception thrown from Spark with an error class. 
*/ diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index b1118b29dd7fb..2bd45ef0a4981 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2134,8 +2134,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { cause = null) } - def sqlConfigNotFoundError(key: String): SparkRuntimeException = { - new SparkRuntimeException( + def sqlConfigNotFoundError(key: String): SparkNoSuchElementException = { + new SparkNoSuchElementException( errorClass = "SQL_CONF_NOT_FOUND", messageParameters = Map("sqlConf" -> toSQLConf(key))) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 1100139f9c5ee..1f33a4ca188e4 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -31,7 +31,7 @@ import scala.util.matching.Regex import org.apache.hadoop.fs.Path -import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, SparkRuntimeException, TaskContext} +import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, TaskContext} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.internal.config.{IGNORE_MISSING_FILES => SPARK_IGNORE_MISSING_FILES} @@ -5056,7 +5056,7 @@ class SQLConf extends Serializable with Logging { } /** Return the value of Spark SQL configuration property for the given key. */ - @throws[SparkRuntimeException]("if key is not set") + @throws[NoSuchElementException]("if key is not set") def getConfString(key: String): String = { Option(settings.get(key)). 
orElse { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 2f49b7c962de4..0a0bee2eabd29 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -22,7 +22,7 @@ import java.util.TimeZone import org.apache.hadoop.fs.Path import org.apache.logging.log4j.Level -import org.apache.spark.{SPARK_DOC_ROOT, SparkRuntimeException} +import org.apache.spark.{SPARK_DOC_ROOT, SparkNoSuchElementException} import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT @@ -496,7 +496,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { test("SPARK-43028: config not found error") { checkError( - exception = intercept[SparkRuntimeException](spark.conf.get("some.conf")), + exception = intercept[SparkNoSuchElementException](spark.conf.get("some.conf")), errorClass = "SQL_CONF_NOT_FOUND", parameters = Map("sqlConf" -> "\"some.conf\"")) } From ea30da684d94fa193be6bf6bacbed1fe05df850f Mon Sep 17 00:00:00 2001 From: allisonwang-db Date: Thu, 6 Apr 2023 16:52:43 +0800 Subject: [PATCH 4/4] try fix r test --- R/pkg/R/SQLContext.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R index 5adebade8b7eb..5f500c2995821 100644 --- a/R/pkg/R/SQLContext.R +++ b/R/pkg/R/SQLContext.R @@ -111,7 +111,7 @@ sparkR.conf <- function(key, defaultValue) { tryCatch(callJMethod(conf, "get", key), error = function(e) { estr <- as.character(e) - if (any(grepl("java.util.NoSuchElementException", estr, fixed = TRUE))) { + if (any(grepl("SQL_CONF_NOT_FOUND", estr, fixed = TRUE))) { stop("Config '", key, "' is not set") } else { stop("Unknown error: ", estr)