diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 6095042de7f0c..15b00a4496da6 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -1919,9 +1919,8 @@ abstract class RDD[T: ClassTag](
       val persistence = if (storageLevel != StorageLevel.NONE) storageLevel.description else ""
       val storageInfo = rdd.context.getRDDStorageInfo(_.id == rdd.id).map(info =>
-        " CachedPartitions: %d; MemorySize: %s; ExternalBlockStoreSize: %s; DiskSize: %s".format(
-          info.numCachedPartitions, bytesToString(info.memSize),
-          bytesToString(info.externalBlockStoreSize), bytesToString(info.diskSize)))
+        " CachedPartitions: %d; MemorySize: %s; DiskSize: %s".format(
+          info.numCachedPartitions, bytesToString(info.memSize), bytesToString(info.diskSize)))

       s"$rdd [$persistence]" +: storageInfo
     }
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
index 569d7d32284bc..b8c5cbd121861 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
@@ -737,7 +737,6 @@ private[spark] class BlockManagerInfo(
     if (storageLevel.isValid) {
       /* isValid means it is either stored in-memory or on-disk.
        * The memSize here indicates the data size in or dropped from memory,
-       * externalBlockStoreSize here indicates the data size in or dropped from externalBlockStore,
        * and the diskSize here indicates the data size in or dropped to disk.
        * They can be both larger than 0, when a block is dropped from memory to disk.
        * Therefore, a safe way to set BlockStatus is to set its info in accurate modes. */
diff --git a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
index 27a4d4b64175e..f3575c4e43eb0 100644
--- a/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
+++ b/core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
@@ -38,7 +38,6 @@ class RDDInfo(
   var numCachedPartitions = 0
   var memSize = 0L
   var diskSize = 0L
-  var externalBlockStoreSize = 0L

   def isCached: Boolean = (memSize + diskSize > 0) && numCachedPartitions > 0
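For illustration only (not part of the patch): a minimal, self-contained Scala sketch of how the trimmed storage-info line from RDD.toDebugString is assembled after this change. The bytesToString helper below is a simplified stand-in for Spark's internal Utils.bytesToString, and the partition/size values are hypothetical.

// Minimal sketch, not Spark code: renders the post-change storage line for one cached RDD.
object StorageInfoSketch {
  // Simplified stand-in for Spark's internal Utils.bytesToString.
  private def bytesToString(size: Long): String = {
    val kib = 1L << 10
    val mib = 1L << 20
    if (size >= mib) f"${size.toDouble / mib}%.1f MiB"
    else if (size >= kib) f"${size.toDouble / kib}%.1f KiB"
    else s"$size B"
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical cached-RDD stats: 4 cached partitions, 8 MiB in memory, nothing on disk.
    val numCachedPartitions = 4
    val memSize = 8L * 1024 * 1024
    val diskSize = 0L

    // Same format string as the patched debug output: the ExternalBlockStoreSize field is gone.
    val line = " CachedPartitions: %d; MemorySize: %s; DiskSize: %s".format(
      numCachedPartitions, bytesToString(memSize), bytesToString(diskSize))
    println(line) // " CachedPartitions: 4; MemorySize: 8.0 MiB; DiskSize: 0 B"
  }
}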