diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 3a2a4d95fd5d0..978e5965d5ede 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1627,10 +1627,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
       partitions: Seq[Int],
       allowLocal: Boolean
     ): Array[U] = {
-    // We must clean `func` here before using it in another closure below
-    // Otherwise, the closure cleaner will only clean the outer closure but not `func`
-    val cleanedFunc = clean(func)
-    runJob(rdd, (ctx: TaskContext, iter: Iterator[T]) => cleanedFunc(iter), partitions, allowLocal)
+    runJob(rdd, (context: TaskContext, iter: Iterator[T]) => func(iter), partitions, allowLocal)
   }
 
   /**
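
For context, the removed comment describes the pattern this hunk touches: a user-supplied `func` is captured inside a second, outer closure before being handed to the other `runJob` overload, so cleaning only the outer closure may not reach `func` itself. The following is a minimal, Spark-free sketch of that wrapping pattern, not part of the patch; `clean` is a stand-in for `SparkContext.clean` / `ClosureCleaner.clean`, and `String` stands in for `TaskContext`.

    // Hypothetical sketch of the closure-wrapping pattern changed in this hunk.
    object ClosureWrappingSketch {
      // Stand-in for the user-supplied per-partition function.
      def userFunc(iter: Iterator[Int]): Int = iter.sum

      // Stand-in for SparkContext.clean: the real method checks serializability
      // and strips unneeded enclosing references; here it is an identity pass.
      def clean[F](f: F): F = f

      def main(args: Array[String]): Unit = {
        val func: Iterator[Int] => Int = userFunc

        // The removed code cleaned `func` first, then captured the cleaned copy
        // in the outer closure:
        val cleanedFunc = clean(func)
        val wrapped: (String, Iterator[Int]) => Int =
          (context: String, iter: Iterator[Int]) => cleanedFunc(iter)

        // The patched code instead captures `func` directly and relies on the
        // called overload to clean the outer closure it receives.
        println(wrapped("task-context", Iterator(1, 2, 3))) // prints 6
      }
    }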