diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 5adebade8b7eb..5f500c2995821 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -111,7 +111,7 @@ sparkR.conf <- function(key, defaultValue) {
       tryCatch(callJMethod(conf, "get", key),
               error = function(e) {
                 estr <- as.character(e)
-                if (any(grepl("java.util.NoSuchElementException", estr, fixed = TRUE))) {
+                if (any(grepl("SQL_CONF_NOT_FOUND", estr, fixed = TRUE))) {
                   stop("Config '", key, "' is not set")
                 } else {
                   stop("Unknown error: ", estr)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 54cffa498cc85..ae73071a120d8 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1425,6 +1425,11 @@
       "sortBy must be used together with bucketBy."
     ]
   },
+  "SQL_CONF_NOT_FOUND" : {
+    "message" : [
+      "The SQL config <sqlConf> cannot be found. Please verify that the config exists."
+    ]
+  },
   "STAR_GROUP_BY_POS" : {
     "message" : [
       "Star (*) is not allowed in a select list when GROUP BY an ordinal position is used."
diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index 2f05b2ad6a7fa..4e48e9c8d418a 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -293,6 +293,25 @@ private[spark] class SparkRuntimeException(
   override def getQueryContext: Array[QueryContext] = context
 }
 
+/**
+ * No such element exception thrown from Spark with an error class.
+ */
+private[spark] class SparkNoSuchElementException(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext] = Array.empty,
+    summary: String = "")
+  extends NoSuchElementException(
+    SparkThrowableHelper.getMessage(errorClass, messageParameters, summary))
+  with SparkThrowable {
+
+  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
+
+  override def getErrorClass: String = errorClass
+
+  override def getQueryContext: Array[QueryContext] = context
+}
+
 /**
  * Security exception thrown from Spark with an error class.
  */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 2fccca839d5e0..d07dcec3693b2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2139,8 +2139,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       cause = null)
   }
 
-  def noSuchElementExceptionError(key: String): Throwable = {
-    new NoSuchElementException(key)
+  def sqlConfigNotFoundError(key: String): SparkNoSuchElementException = {
+    new SparkNoSuchElementException(
+      errorClass = "SQL_CONF_NOT_FOUND",
+      messageParameters = Map("sqlConf" -> toSQLConf(key)))
   }
 
   def cannotMutateReadOnlySQLConfError(): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index b0996dd6acce9..4986dc3661c06 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.internal
 
-import java.util.{Locale, NoSuchElementException, Properties, TimeZone}
+import java.util.{Locale, Properties, TimeZone}
 import java.util
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicReference
@@ -5072,7 +5072,7 @@
         // Try to use the default value
         Option(getConfigEntry(key)).map { e => e.stringConverter(e.readFrom(reader)) }
       }.
-      getOrElse(throw QueryExecutionErrors.noSuchElementExceptionError(key))
+      getOrElse(throw QueryExecutionErrors.sqlConfigNotFoundError(key))
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 30f4fdfbbcff3..0a0bee2eabd29 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -22,7 +22,7 @@ import java.util.TimeZone
 import org.apache.hadoop.fs.Path
 import org.apache.logging.log4j.Level
 
-import org.apache.spark.SPARK_DOC_ROOT
+import org.apache.spark.{SPARK_DOC_ROOT, SparkNoSuchElementException}
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
@@ -493,4 +493,11 @@
         |${nonInternalLegacyConfigs.map(_._1).mkString("\n")}
         |""".stripMargin)
   }
+
+  test("SPARK-43028: config not found error") {
+    checkError(
+      exception = intercept[SparkNoSuchElementException](spark.conf.get("some.conf")),
+      errorClass = "SQL_CONF_NOT_FOUND",
+      parameters = Map("sqlConf" -> "\"some.conf\""))
+  }
 }
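
Reviewer note (not part of the patch): a minimal sketch of how the new error surfaces to Scala callers after this change. The running SparkSession named `spark` and the unset key "some.conf" are assumptions for illustration.

  import org.apache.spark.SparkNoSuchElementException

  try {
    // Throws because "some.conf" was never set and has no registered default.
    spark.conf.get("some.conf")
  } catch {
    case e: SparkNoSuchElementException =>
      // Unlike the old bare java.util.NoSuchElementException(key), callers can
      // now match on a stable error class and structured message parameters.
      assert(e.getErrorClass == "SQL_CONF_NOT_FOUND")
      // toSQLConf(key) quotes the key, so the parameter value is "some.conf"
      // including the double quotes, matching the assertion in SQLConfSuite.
      assert(e.getMessageParameters.get("sqlConf") == "\"some.conf\"")
  }

Because SparkNoSuchElementException still extends java.util.NoSuchElementException, existing catch blocks keyed on that type keep working; only the message text changes, which is why the grep in SQLContext.R had to be updated to look for the error class instead of the exception name.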