From b2018a5996ad10ef5938bab1dfd9d0dfbc178a56 Mon Sep 17 00:00:00 2001
From: Sephiroth-Lin
Date: Mon, 9 Feb 2015 10:29:43 +0800
Subject: [PATCH] Check sparkFilesDir before deleting it

Change the condition from `if (SparkContext.DRIVER_IDENTIFIER == executorId)`
to `if (sparkFilesDir != ".")`, and add a comment at the point where
sparkFilesDir is created explaining why its value matters for cleanup.
---
 core/src/main/scala/org/apache/spark/SparkEnv.scala | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index a429bd4c82fd6..44cb9d9773eaa 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -98,7 +98,7 @@ class SparkEnv (
     // the tmp dir, if not, it will create too many tmp dirs.
     // We only need to delete the tmp dir create by driver, because sparkFilesDir is point to the
     // current working dir in executor which we do not need to delete.
-    if (SparkContext.DRIVER_IDENTIFIER == executorId) {
+    if (sparkFilesDir != ".") {
       try {
         Utils.deleteRecursively(new File(sparkFilesDir))
       } catch {
@@ -351,6 +351,8 @@ object SparkEnv extends Logging {
    // Set the sparkFiles directory, used when downloading dependencies. In local mode,
    // this is a temporary directory; in distributed mode, this is the executor's current working
    // directory.
+    // This value is used in stop() to decide whether the tmp dir needs to be deleted, so please
+    // be careful if you change this code.
    val sparkFilesDir: String = if (isDriver) {
      Utils.createTempDir(Utils.getLocalDir(conf), "userFiles").getAbsolutePath
    } else {