-
Notifications
You must be signed in to change notification settings - Fork 28.5k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[SPARK-36336][SQL] Add new exception of base exception used in QueryE…
…xecutionErrors ### What changes were proposed in this pull request? When we refactor the query execution errors to use error classes in QueryExecutionErrors, we need to define some exceptions that mix SparkThrowable into a base Exception type, following the example of [SparkArithmeticException](https://github.com/apache/spark/blob/f90eb6a5db0778fd18b0b544f93eac3103bbf03b/core/src/main/scala/org/apache/spark/SparkException.scala#L75) Add SparkXXXException as follows: - `SparkClassNotFoundException` - `SparkConcurrentModificationException` - `SparkDateTimeException` - `SparkFileAlreadyExistsException` - `SparkFileNotFoundException` - `SparkNoSuchMethodException` - `SparkIndexOutOfBoundsException` - `SparkIOException` - `SparkSecurityException` - `SparkSQLException` - `SparkSQLFeatureNotSupportedException` Refactor some exceptions in QueryExecutionErrors to use error classes and the new exceptions, in order to exercise the new exception types. Some were added by [PR](#33538) as follows: - `SparkUnsupportedOperationException` - `SparkIllegalStateException` - `SparkNumberFormatException` - `SparkIllegalArgumentException` - `SparkArrayIndexOutOfBoundsException` - `SparkNoSuchElementException` ### Why are the changes needed? [SPARK-36336](https://issues.apache.org/jira/browse/SPARK-36336) ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Existing unit tests Closes #33573 from Peng-Lei/SPARK-36336. Authored-by: PengLei <[email protected]> Signed-off-by: Hyukjin Kwon <[email protected]>
- Loading branch information
1 parent
de932f5
commit 3e32ea1
Showing
5 changed files
with
205 additions
and
24 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -32,7 +32,7 @@ import org.apache.hadoop.fs.permission.FsPermission | |
import org.codehaus.commons.compiler.CompileException | ||
import org.codehaus.janino.InternalCompilerException | ||
|
||
import org.apache.spark.{Partition, SparkArithmeticException, SparkException, SparkUpgradeException} | ||
import org.apache.spark.{Partition, SparkArithmeticException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIndexOutOfBoundsException, SparkNoSuchMethodException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFeatureNotSupportedException, SparkUpgradeException} | ||
import org.apache.spark.executor.CommitDeniedException | ||
import org.apache.spark.launcher.SparkLauncher | ||
import org.apache.spark.memory.SparkOutOfMemoryError | ||
|
@@ -157,7 +157,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def invalidFractionOfSecondError(): DateTimeException = { | ||
new DateTimeException("The fraction of sec must be zero. Valid range is [0, 60].") | ||
new SparkDateTimeException(errorClass = "INVALID_FRACTION_OF_SECOND", Array.empty) | ||
} | ||
|
||
def overflowInSumOfDecimalError(): ArithmeticException = { | ||
|
@@ -179,7 +179,8 @@ object QueryExecutionErrors { | |
} | ||
|
||
def literalTypeUnsupportedError(v: Any): RuntimeException = { | ||
new RuntimeException(s"Unsupported literal type ${v.getClass} $v") | ||
new SparkRuntimeException("UNSUPPORTED_LITERAL_TYPE", | ||
Array(v.getClass.toString, v.toString)) | ||
} | ||
|
||
def noDefaultForDataTypeError(dataType: DataType): RuntimeException = { | ||
|
@@ -261,8 +262,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def methodNotDeclaredError(name: String): Throwable = { | ||
new NoSuchMethodException(s"""A method named "$name" is not declared """ + | ||
"in any enclosing class nor any supertype") | ||
new SparkNoSuchMethodException(errorClass = "MISSING_METHOD", Array(name)) | ||
} | ||
|
||
def constructorNotFoundError(cls: String): Throwable = { | ||
|
@@ -449,11 +449,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def incompatibleDataSourceRegisterError(e: Throwable): Throwable = { | ||
new ClassNotFoundException( | ||
s""" | ||
|Detected an incompatible DataSourceRegister. Please remove the incompatible | ||
|library from classpath or upgrade it. Error: ${e.getMessage} | ||
""".stripMargin, e) | ||
new SparkClassNotFoundException("INCOMPATIBLE_DATASOURCE_REGISTER", Array(e.getMessage), e) | ||
} | ||
|
||
def unrecognizedFileFormatError(format: String): Throwable = { | ||
|
@@ -675,7 +671,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def unrecognizedSqlTypeError(sqlType: Int): Throwable = { | ||
new SQLException(s"Unrecognized SQL type $sqlType") | ||
new SparkSQLException(errorClass = "UNRECOGNIZED_SQL_TYPE", Array(sqlType.toString)) | ||
} | ||
|
||
def unsupportedJdbcTypeError(content: String): Throwable = { | ||
|
@@ -702,8 +698,8 @@ object QueryExecutionErrors { | |
} | ||
|
||
def transactionUnsupportedByJdbcServerError(): Throwable = { | ||
new SQLFeatureNotSupportedException("The target JDBC server does not support " + | ||
"transaction and can only support ALTER TABLE with a single action.") | ||
new SparkSQLFeatureNotSupportedException(errorClass = "UNSUPPORTED_TRANSACTION_BY_JDBC_SERVER", | ||
Array.empty) | ||
} | ||
|
||
def dataTypeUnsupportedYetError(dataType: DataType): Throwable = { | ||
|
@@ -952,8 +948,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def concurrentQueryInstanceError(): Throwable = { | ||
new ConcurrentModificationException( | ||
"Another instance of this query was just started by a concurrent session.") | ||
new SparkConcurrentModificationException("CONCURRENT_QUERY_ERROR", Array.empty) | ||
} | ||
|
||
def cannotParseJsonArraysAsStructsError(): Throwable = { | ||
|
@@ -1233,8 +1228,7 @@ object QueryExecutionErrors { | |
} | ||
|
||
def indexOutOfBoundsOfArrayDataError(idx: Int): Throwable = { | ||
new IndexOutOfBoundsException( | ||
s"Index $idx must be between 0 and the length of the ArrayData.") | ||
new SparkIndexOutOfBoundsException(errorClass = "INDEX_OUT_OF_BOUNDS", Array(idx.toString)) | ||
This comment has been minimized.
Sorry, something went wrong.
This comment has been minimized.
Sorry, something went wrong.
MaxGekk
Member
|
||
} | ||
|
||
def malformedRecordsDetectedInRecordParsingError(e: BadRecordException): Throwable = { | ||
|
@@ -1354,16 +1348,17 @@ object QueryExecutionErrors { | |
} | ||
|
||
def renamePathAsExistsPathError(srcPath: Path, dstPath: Path): Throwable = { | ||
new FileAlreadyExistsException( | ||
s"Failed to rename $srcPath to $dstPath as destination already exists") | ||
new SparkFileAlreadyExistsException(errorClass = "FAILED_RENAME_PATH", | ||
Array(srcPath.toString, dstPath.toString)) | ||
} | ||
|
||
def renameAsExistsPathError(dstPath: Path): Throwable = { | ||
new FileAlreadyExistsException(s"Failed to rename as $dstPath already exists") | ||
} | ||
|
||
def renameSrcPathNotFoundError(srcPath: Path): Throwable = { | ||
new FileNotFoundException(s"Failed to rename as $srcPath was not found") | ||
new SparkFileNotFoundException(errorClass = "RENAME_SRC_PATH_NOT_FOUND", | ||
Array(srcPath.toString)) | ||
} | ||
|
||
def failedRenameTempFileError(srcPath: Path, dstPath: Path): Throwable = { | ||
|
@@ -1560,8 +1555,8 @@ object QueryExecutionErrors { | |
permission: FsPermission, | ||
path: Path, | ||
e: Throwable): Throwable = { | ||
new SecurityException(s"Failed to set original permission $permission back to " + | ||
s"the created path: $path. Exception: ${e.getMessage}") | ||
new SparkSecurityException(errorClass = "FAILED_SET_ORIGINAL_PERMISSION_BACK", | ||
Array(permission.toString, path.toString, e.getMessage)) | ||
} | ||
|
||
def failToSetOriginalACLBackError(aclEntries: String, path: Path, e: Throwable): Throwable = { | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
@Peng-Lei @HyukjinKwon Any ideas on how to trigger the error from user space? If it is not possible, let's replace it with an internal error.