diff --git a/dev/deps/spark-deps-hadoop-2.7-hive-2.3 b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
index bac670ffff0cf..a73987a8d91a9 100644
--- a/dev/deps/spark-deps-hadoop-2.7-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-2.7-hive-2.3
@@ -37,7 +37,7 @@ commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.15//commons-codec-1.15.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.1.4//commons-compiler-3.1.4.jar
+commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.20//commons-compress-1.20.jar
commons-configuration/1.6//commons-configuration-1.6.jar
commons-crypto/1.1.0//commons-crypto-1.1.0.jar
@@ -122,7 +122,7 @@ jakarta.servlet-api/4.0.3//jakarta.servlet-api-4.0.3.jar
jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.4//janino-3.1.4.jar
+janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.inject/1//javax.inject-1.jar
javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
diff --git a/dev/deps/spark-deps-hadoop-3.2-hive-2.3 b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
index ea196c2c27042..764142a043fc9 100644
--- a/dev/deps/spark-deps-hadoop-3.2-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3.2-hive-2.3
@@ -31,7 +31,7 @@ chill_2.12/0.10.0//chill_2.12-0.10.0.jar
commons-cli/1.2//commons-cli-1.2.jar
commons-codec/1.15//commons-codec-1.15.jar
commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-compiler/3.1.4//commons-compiler-3.1.4.jar
+commons-compiler/3.0.16//commons-compiler-3.0.16.jar
commons-compress/1.20//commons-compress-1.20.jar
commons-crypto/1.1.0//commons-crypto-1.1.0.jar
commons-dbcp/1.4//commons-dbcp-1.4.jar
@@ -98,7 +98,7 @@ jakarta.servlet-api/4.0.3//jakarta.servlet-api-4.0.3.jar
jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.4//janino-3.1.4.jar
+janino/3.0.16//janino-3.0.16.jar
javassist/3.25.0-GA//javassist-3.25.0-GA.jar
javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
javolution/5.5.1//javolution-5.5.1.jar
diff --git a/pom.xml b/pom.xml
index 0c6a9146d11b9..06b60f5feaa2b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -182,7 +182,7 @@
    <commons-pool2.version>2.6.2</commons-pool2.version>
    <datanucleus-core.version>4.1.17</datanucleus-core.version>
    <guava.version>14.0.1</guava.version>
-    <janino.version>3.1.4</janino.version>
+    <janino.version>3.0.16</janino.version>
    <jersey.version>2.34</jersey.version>
    <joda.version>2.10.10</joda.version>
    <jodd.version>3.5.2</jodd.version>
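
The dependency manifests and this pom.xml property pin Janino and the matching commons-compiler artifact back to 3.0.16. A quick way to confirm which Janino actually ends up on the classpath at runtime is a throwaway check like the sketch below; the object name is made up here, and it assumes the Janino jar's manifest carries an Implementation-Version entry, which may not hold for every build.

```scala
import org.codehaus.janino.ClassBodyEvaluator

object JaninoVersionCheck {
  def main(args: Array[String]): Unit = {
    val clazz = classOf[ClassBodyEvaluator]

    // Implementation-Version comes from the jar manifest and can be absent.
    val version = Option(clazz.getPackage)
      .flatMap(p => Option(p.getImplementationVersion))
      .getOrElse("unknown")
    println(s"janino version reported by manifest: $version")

    // The code source shows which physical jar the class was loaded from,
    // which is often the quickest way to spot a stray 3.1.x jar.
    val jar = Option(clazz.getProtectionDomain.getCodeSource).map(_.getLocation)
    println(s"janino loaded from: ${jar.getOrElse("unknown")}")
  }
}
```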
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index 43ac2abaf7e82..b8b5a40905939 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.catalyst.expressions.codegen

import java.io.ByteArrayInputStream
+import java.util.{Map => JavaMap}

import scala.collection.JavaConverters._
import scala.collection.mutable
@@ -26,8 +27,8 @@ import scala.util.control.NonFatal

import com.google.common.cache.{CacheBuilder, CacheLoader}
import com.google.common.util.concurrent.{ExecutionError, UncheckedExecutionException}
-import org.codehaus.commons.compiler.{CompileException, InternalCompilerException}
-import org.codehaus.janino.ClassBodyEvaluator
+import org.codehaus.commons.compiler.CompileException
+import org.codehaus.janino.{ByteArrayClassLoader, ClassBodyEvaluator, InternalCompilerException, SimpleCompiler}
import org.codehaus.janino.util.ClassFile

import org.apache.spark.{TaskContext, TaskKilledException}
@@ -1519,7 +1520,14 @@ object CodeGenerator extends Logging {
   */
  private def updateAndGetCompilationStats(evaluator: ClassBodyEvaluator): ByteCodeStats = {
    // First retrieve the generated classes.
-    val classes = evaluator.getBytecodes.asScala
+    val classes = {
+      val resultField = classOf[SimpleCompiler].getDeclaredField("result")
+      resultField.setAccessible(true)
+      val loader = resultField.get(evaluator).asInstanceOf[ByteArrayClassLoader]
+      val classesField = loader.getClass.getDeclaredField("classes")
+      classesField.setAccessible(true)
+      classesField.get(loader).asInstanceOf[JavaMap[String, Array[Byte]]].asScala
+    }

    // Then walk the classes to get at the method bytecode.
    val codeAttr = Utils.classForName("org.codehaus.janino.util.ClassFile$CodeAttribute")
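
Janino 3.0.16 does not expose a public `getBytecodes` accessor, so the hunk above goes back to reading `SimpleCompiler`'s private `result` class loader and its `classes` map through reflection. The following standalone sketch illustrates the same trick outside Spark, assuming Janino 3.0.16 on the classpath; the `GeneratedDemo` class name and `answer` method are invented for the example.

```scala
import java.util.{Map => JavaMap}

import scala.collection.JavaConverters._

import org.codehaus.janino.{ByteArrayClassLoader, ClassBodyEvaluator, SimpleCompiler}

object JaninoBytecodeDemo {
  def main(args: Array[String]): Unit = {
    // Compile a trivial class body, much like CodeGenerator does for generated code.
    val evaluator = new ClassBodyEvaluator()
    evaluator.setClassName("GeneratedDemo")
    evaluator.cook("public int answer() { return 42; }")

    // Pull the private `result` class loader out of SimpleCompiler,
    // the superclass that ClassBodyEvaluator inherits it from.
    val resultField = classOf[SimpleCompiler].getDeclaredField("result")
    resultField.setAccessible(true)
    val loader = resultField.get(evaluator).asInstanceOf[ByteArrayClassLoader]

    // Its `classes` map holds class name -> generated bytecode.
    val classesField = loader.getClass.getDeclaredField("classes")
    classesField.setAccessible(true)
    val bytecodes =
      classesField.get(loader).asInstanceOf[JavaMap[String, Array[Byte]]].asScala

    bytecodes.foreach { case (name, bytes) =>
      println(s"$name: ${bytes.length} bytes of bytecode")
    }
  }
}
```

Because it reads private fields, this retrieval is tied to the 3.0.x internals, which is why it has to come back together with the version pin rather than coexist with 3.1.x.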
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index e761eb633c473..7f77243af8a88 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -29,7 +29,8 @@ import java.util.concurrent.TimeoutException
import com.fasterxml.jackson.core.JsonToken
import org.apache.hadoop.fs.{FileAlreadyExistsException, FileStatus, Path}
import org.apache.hadoop.fs.permission.FsPermission
-import org.codehaus.commons.compiler.{CompileException, InternalCompilerException}
+import org.codehaus.commons.compiler.CompileException
+import org.codehaus.janino.InternalCompilerException

import org.apache.spark.{Partition, SparkArithmeticException, SparkException, SparkUpgradeException}
import org.apache.spark.executor.CommitDeniedException
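
The only change in this file is the import: Janino 3.0.16 keeps `InternalCompilerException` in `org.codehaus.janino`, while 3.1.x had moved it under `org.codehaus.commons.compiler`. Below is a minimal sketch of how the two exception types are typically told apart when cooking code against 3.0.16; the deliberately malformed class body is only an illustration.

```scala
import org.codehaus.commons.compiler.CompileException
import org.codehaus.janino.{ClassBodyEvaluator, InternalCompilerException}

object JaninoErrorDemo {
  def main(args: Array[String]): Unit = {
    val evaluator = new ClassBodyEvaluator()
    try {
      // Malformed class body, so compilation is expected to fail.
      evaluator.cook("public int broken( { return 1; }")
      println("unexpectedly compiled")
    } catch {
      // Ordinary user-facing compilation errors (syntax, typing, ...).
      case e: CompileException =>
        println(s"compile error: ${e.getMessage}")
      // Compiler-internal failures; note the org.codehaus.janino package
      // in 3.0.16, unlike the commons-compiler location used by 3.1.x.
      case e: InternalCompilerException =>
        println(s"internal janino error: ${e.getMessage}")
    }
  }
}
```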
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
index 07e045cabd777..4ab50cc4a6af6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
@@ -563,8 +563,8 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
    assert(refTerm.contains("scala.math.LowPriorityOrderingImplicits$$anon$"))
  }

-  // TODO (SPARK-35579): Fix this bug in janino or work around it in Spark.
-  ignore("SPARK-35578: final local variable bug in janino") {
+  // TODO (SPARK-35579): Fix this bug in janino and upgrade janino in Spark.
+  test("SPARK-35578: final local variable bug in janino") {
    val code =
      """
        |public Object generate(Object[] references) {