diff --git a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
index deab798ee08d2..cac7dcfbde0bc 100644
--- a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
@@ -221,8 +221,8 @@ private[spark] class MemoryStore(blockManager: BlockManager, maxMemory: Long)
     var keepUnrolling = true
     // Initial per-thread memory to request for unrolling blocks (bytes). Exposed for testing.
     val initialMemoryThreshold = conf.getLong("spark.storage.unrollMemoryThreshold", 1024 * 1024)
-    // How often to check whether we need to request more memory. Exposed for testing.
-    val memoryCheckPeriod = conf.getLong("spark.storage.unrollCheckPeriod", 16)
+    // How often to check whether we need to request more memory
+    val memoryCheckPeriod = 16
     // Memory currently reserved by this thread (bytes)
     var memoryThreshold = initialMemoryThreshold
     // Memory to request as a multiple of current vector size
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 7f9878fbf669c..1e8dd83e6d803 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -76,7 +76,6 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfter
     conf.set("spark.driver.port", boundPort.toString)
     conf.set("spark.storage.unrollFraction", "0.4")
     conf.set("spark.storage.unrollMemoryThreshold", "512")
-    conf.set("spark.storage.unrollCheckPeriod", "1")
    SparkEnv.set(env)
     master = new BlockManagerMaster(
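
For context on what `memoryCheckPeriod` controls, below is a minimal sketch of the kind of unrolling loop the constant governs: elements are buffered one at a time, and only every `memoryCheckPeriod` appends is the buffer size re-estimated and more unroll memory requested. This is not the actual `MemoryStore` code; `estimateSize` and `reserveUnrollMemory` are hypothetical stand-ins for its internals, used only to illustrate why the check period can be a fixed constant rather than a configurable setting.

```scala
// Simplified sketch of a size-check-amortized unrolling loop (not Spark's real implementation).
object UnrollSketch {
  val memoryCheckPeriod = 16    // check every 16 elements, matching the hardcoded value in the patch
  val memoryGrowthFactor = 1.5  // request memory as a multiple of the current estimated size

  def unroll[T](values: Iterator[T], initialThreshold: Long)
               (estimateSize: Vector[T] => Long)            // stand-in for SizeEstimator-style sizing
               (reserveUnrollMemory: Long => Boolean): Either[Vector[T], Iterator[T]] = {
    var buffer = Vector.empty[T]
    var elementsUnrolled = 0L
    var memoryThreshold = initialThreshold
    var keepUnrolling = true

    while (values.hasNext && keepUnrolling) {
      buffer :+= values.next()
      elementsUnrolled += 1
      // Size estimation is expensive, so it is amortized: only every memoryCheckPeriod
      // appends do we re-estimate the buffer and possibly reserve more unroll memory.
      if (elementsUnrolled % memoryCheckPeriod == 0) {
        val currentSize = estimateSize(buffer)
        if (currentSize >= memoryThreshold) {
          val amountToRequest = (currentSize * memoryGrowthFactor - memoryThreshold).toLong
          keepUnrolling = reserveUnrollMemory(amountToRequest)
          if (keepUnrolling) memoryThreshold += amountToRequest
        }
      }
    }
    // Fully unrolled in memory, or give back an iterator over what was consumed plus the rest.
    if (keepUnrolling) Left(buffer) else Right(buffer.iterator ++ values)
  }
}
```

Under this reading, the check period only trades estimation overhead against how far the buffer can overshoot its reservation between checks, so exposing it as `spark.storage.unrollCheckPeriod` (and overriding it to 1 in the test suite) adds configuration surface without changing observable behavior, which is presumably why the patch folds it back into a constant.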