From fd84156d9baabc4bbba446248067a7d771e7ede6 Mon Sep 17 00:00:00 2001 From: RongGu Date: Thu, 27 Mar 2014 22:33:18 +0800 Subject: [PATCH] use randomUUID to generate Spark app directory name on Tachyon; minor code style fix --- .../apache/spark/network/netty/TachyonFilePathResolver.java | 2 +- core/src/main/scala/org/apache/spark/SparkContext.scala | 4 ++-- .../main/scala/org/apache/spark/storage/BlockManager.scala | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/apache/spark/network/netty/TachyonFilePathResolver.java b/core/src/main/java/org/apache/spark/network/netty/TachyonFilePathResolver.java index 483de58c4976c..49c550fff6e3a 100644 --- a/core/src/main/java/org/apache/spark/network/netty/TachyonFilePathResolver.java +++ b/core/src/main/java/org/apache/spark/network/netty/TachyonFilePathResolver.java @@ -21,6 +21,6 @@ import org.apache.spark.storage.TachyonFileSegment; public interface TachyonFilePathResolver { - /** Get the file segment in which the given block resides. */ + /** Get the file segment in which the given block resides. 
This is not a user-facing API*/ TachyonFileSegment getBlockLocation(BlockId blockId); } diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index c3f3b1ea56b36..7f763163dabb4 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -21,6 +21,7 @@ import java.io._ import java.net.URI import java.util.{Properties, UUID} import java.util.concurrent.atomic.AtomicInteger +import java.util.UUID.randomUUID import scala.collection.{Map, Set} import scala.collection.generic.Growable import scala.collection.mutable.{ArrayBuffer, HashMap} @@ -42,7 +43,6 @@ import org.apache.spark.scheduler.local.LocalBackend import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils} import org.apache.spark.ui.SparkUI import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils} -import java.util.Random /** * Main entry point for Spark functionality. 
A SparkContext represents the connection to a Spark @@ -128,7 +128,7 @@ class SparkContext( // Generate the random name for a temp folder in Tachyon // Add a timestamp as the suffix here to make it more safe - val tachyonFolderName = new Random().nextInt() + "_" + System.currentTimeMillis() + val tachyonFolderName = "spark-" + randomUUID.toString() conf.set("spark.tachyonstore.foldername", tachyonFolderName) val isLocal = (master == "local" || master.startsWith("local[")) diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala index e4b6142c223f1..aae061d4d93a2 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala @@ -58,7 +58,7 @@ private[spark] class BlockManager( private[storage] val memoryStore: BlockStore = new MemoryStore(this, maxMemory) private[storage] val diskStore = new DiskStore(this, diskBlockManager) var tachyonInitialized = false - private[storage] lazy val tachyonStore : TachyonStore = { + private[storage] lazy val tachyonStore: TachyonStore = { val storeDir = conf.get("spark.tachyonstore.dir", System.getProperty("java.io.tmpdir")) val appFolderName = conf.get("spark.tachyonstore.foldername") val tachyonStorePath = s"${storeDir}/${appFolderName}/${this.executorId}" @@ -1000,7 +1000,7 @@ private[spark] class BlockManager( blockInfo.clear() memoryStore.clear() diskStore.clear() - if(tachyonInitialized) { + if (tachyonInitialized) { tachyonStore.clear() } metadataCleaner.cancel()