diff --git a/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala b/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
index 93ec606f2de7d..66df1ebd4d5b0 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDDOperationScope.scala
@@ -43,7 +43,7 @@ import org.apache.spark.SparkContext
 @JsonPropertyOrder(Array("id", "name", "parent"))
 private[spark] class RDDOperationScope(
     val name: String,
-    val parent: Option[RDDOperationScope] = None) {
+    val parent: Option[RDDOperationScope] = None) extends Serializable {
 
   val id: Int = RDDOperationScope.nextScopeId()
 
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
index 9e905993032b6..0c110d79b0839 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
@@ -117,7 +117,7 @@ abstract class DStream[T: ClassTag] (
    * Instead, every time we call `compute` we instantiate a new scope using the same name as this
    * one. Otherwise, all RDDs ever created by this DStream will be in the same scope.
    */
-  @transient private val scope: Option[RDDOperationScope] = {
+  private val scope: Option[RDDOperationScope] = {
     Option(ssc.sc.getLocalProperty(SparkContext.RDD_SCOPE_KEY)).map(RDDOperationScope.fromJson)
   }
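
For context on why both hunks go together: dropping `@transient` means `scope` is now carried along whenever a `DStream` is serialized (for example during checkpointing), and Java serialization only allows that if `RDDOperationScope` itself is `Serializable`. Below is a minimal, self-contained sketch of that constraint using plain Java serialization; the `Scope` and `Holder` classes are hypothetical stand-ins, not Spark types.

```scala
import java.io._

// Hypothetical stand-ins: Scope plays the role of RDDOperationScope,
// Holder plays the role of a DStream with a non-transient `scope` field.
class Scope(val name: String) extends Serializable
class Holder(val scope: Option[Scope]) extends Serializable

object SerializationSketch {
  def main(args: Array[String]): Unit = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    // Writing Holder succeeds only because Scope extends Serializable;
    // without it, this call would throw java.io.NotSerializableException,
    // since the scope field is no longer marked @transient.
    out.writeObject(new Holder(Some(new Scope("map"))))
    out.close()

    val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    val restored = in.readObject().asInstanceOf[Holder]
    println(restored.scope.map(_.name)) // Some(map): the scope survives the round trip
  }
}
```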