Skip to content

Commit

Permalink
[SPARK-43028][SQL] Add error class SQL_CONF_NOT_FOUND
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This PR adds a new error class `SQL_CONF_NOT_FOUND`.

### Why are the changes needed?

To make the error message more user-friendly when retrieving a nonexistent SQL config. For example:
```
spark.conf.get("some.conf")
```
Before this PR, it throws this error:
```
java.util.NoSuchElementException: some.conf
```
After this PR:
```
[SQL_CONF_NOT_FOUND] The SQL config "some.conf" cannot be found. Please verify that the config exists.
```

### Does this PR introduce _any_ user-facing change?

Yes. The error message will be changed.

### How was this patch tested?

Added a new UT.

Closes apache#40660 from allisonwang-db/SPARK-43028-conf-error.

Authored-by: allisonwang-db <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
  • Loading branch information
allisonwang-db authored and MaxGekk committed Apr 11, 2023
1 parent 0a63a49 commit d4134a8
Show file tree
Hide file tree
Showing 6 changed files with 39 additions and 6 deletions.
2 changes: 1 addition & 1 deletion R/pkg/R/SQLContext.R
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ sparkR.conf <- function(key, defaultValue) {
tryCatch(callJMethod(conf, "get", key),
error = function(e) {
estr <- as.character(e)
if (any(grepl("java.util.NoSuchElementException", estr, fixed = TRUE))) {
if (any(grepl("SQL_CONF_NOT_FOUND", estr, fixed = TRUE))) {
stop("Config '", key, "' is not set")
} else {
stop("Unknown error: ", estr)
Expand Down
5 changes: 5 additions & 0 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -1425,6 +1425,11 @@
"sortBy must be used together with bucketBy."
]
},
"SQL_CONF_NOT_FOUND" : {
"message" : [
"The SQL config <sqlConf> cannot be found. Please verify that the config exists."
]
},
"STAR_GROUP_BY_POS" : {
"message" : [
"Star (*) is not allowed in a select list when GROUP BY an ordinal position is used."
Expand Down
19 changes: 19 additions & 0 deletions core/src/main/scala/org/apache/spark/SparkException.scala
Original file line number Diff line number Diff line change
Expand Up @@ -293,6 +293,25 @@ private[spark] class SparkRuntimeException(
override def getQueryContext: Array[QueryContext] = context
}

/**
 * No such element exception thrown from Spark with an error class.
 *
 * Subclasses `java.util.NoSuchElementException` so existing callers that catch that
 * JDK type keep working, while also mixing in [[SparkThrowable]] so the exception
 * carries a structured error class and message parameters.
 *
 * @param errorClass error class name, a key into `error-classes.json`
 *                   (e.g. "SQL_CONF_NOT_FOUND")
 * @param messageParameters values substituted into the error-class message template
 *                          (e.g. "sqlConf" -> the quoted config key)
 * @param context query contexts (fragments of the failing query), empty when not applicable
 * @param summary extra summary text appended to the formatted message
 */
private[spark] class SparkNoSuchElementException(
    errorClass: String,
    messageParameters: Map[String, String],
    context: Array[QueryContext] = Array.empty,
    summary: String = "")
  extends NoSuchElementException(
    // Render the human-readable message eagerly from the error-class template so
    // plain NoSuchElementException handlers still see a useful getMessage().
    SparkThrowableHelper.getMessage(errorClass, messageParameters, summary))
  with SparkThrowable {

  // Java-friendly view of the parameters, as required by the SparkThrowable API.
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

  override def getErrorClass: String = errorClass

  override def getQueryContext: Array[QueryContext] = context
}

/**
* Security exception thrown from Spark with an error class.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2139,8 +2139,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
cause = null)
}

def noSuchElementExceptionError(key: String): Throwable = {
new NoSuchElementException(key)
/**
 * Builds the exception raised when a requested SQL config key does not exist.
 * Uses error class SQL_CONF_NOT_FOUND with the key rendered via toSQLConf
 * (i.e. wrapped in double quotes).
 */
def sqlConfigNotFoundError(key: String): SparkNoSuchElementException =
  new SparkNoSuchElementException(
    errorClass = "SQL_CONF_NOT_FOUND",
    messageParameters = Map("sqlConf" -> toSQLConf(key)))

def cannotMutateReadOnlySQLConfError(): SparkUnsupportedOperationException = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

package org.apache.spark.sql.internal

import java.util.{Locale, NoSuchElementException, Properties, TimeZone}
import java.util.{Locale, Properties, TimeZone}
import java.util
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicReference
Expand Down Expand Up @@ -5072,7 +5072,7 @@ class SQLConf extends Serializable with Logging {
// Try to use the default value
Option(getConfigEntry(key)).map { e => e.stringConverter(e.readFrom(reader)) }
}.
getOrElse(throw QueryExecutionErrors.noSuchElementExceptionError(key))
getOrElse(throw QueryExecutionErrors.sqlConfigNotFoundError(key))
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ import java.util.TimeZone
import org.apache.hadoop.fs.Path
import org.apache.logging.log4j.Level

import org.apache.spark.SPARK_DOC_ROOT
import org.apache.spark.{SPARK_DOC_ROOT, SparkNoSuchElementException}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
Expand Down Expand Up @@ -493,4 +493,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
|${nonInternalLegacyConfigs.map(_._1).mkString("\n")}
|""".stripMargin)
}

test("SPARK-43028: config not found error") {
checkError(
exception = intercept[SparkNoSuchElementException](spark.conf.get("some.conf")),
errorClass = "SQL_CONF_NOT_FOUND",
parameters = Map("sqlConf" -> "\"some.conf\""))
}
}

0 comments on commit d4134a8

Please sign in to comment.