
Commit

rename the variable and update comment
Sephiroth-Lin committed Feb 10, 2015
1 parent 1339c96 commit 4edf394
Showing 1 changed file with 6 additions and 4 deletions.
core/src/main/scala/org/apache/spark/SparkEnv.scala (6 additions, 4 deletions)
@@ -76,7 +76,7 @@ class SparkEnv (
   // (e.g., HadoopFileRDD uses this to cache JobConfs and InputFormats).
   private[spark] val hadoopJobMetadata = new MapMaker().softValues().makeMap[String, Any]()
 
-  private var tmpFilesDir: Option[String] = None
+  private var driverTmpDirToDelete: Option[String] = None
 
   private[spark] def stop() {
     isStopped = true
@@ -100,7 +100,7 @@ class SparkEnv (
     // the tmp dir; if not, it will create too many tmp dirs.
     // We only need to delete the tmp dir created by the driver, because sparkFilesDir points to
     // the current working dir in the executor, which we do not need to delete.
-    tmpFilesDir match {
+    driverTmpDirToDelete match {
       case Some(path) => {
         try {
           Utils.deleteRecursively(new File(path))
@@ -386,9 +386,11 @@ object SparkEnv extends Logging {
       shuffleMemoryManager,
       conf)
 
-    // Add a reference to tmp dir created by driver
+    // Add a reference to the tmp dir created by the driver; we will delete this tmp dir when
+    // stop() is called, and only the driver needs to do this. The driver may run as a service,
+    // and if we did not delete the tmp dir when sc is stopped, we would create too many tmp dirs.
     if (isDriver) {
-      envInstance.tmpFilesDir = Some(sparkFilesDir)
+      envInstance.driverTmpDirToDelete = Some(sparkFilesDir)
     }
 
     envInstance
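For readers outside the diff context, here is a minimal, self-contained sketch of the pattern this commit implements: only the driver records the tmp dir it created, and stop() deletes it recursively. The TmpDirOwner class and its deleteRecursively helper below are hypothetical stand-ins for illustration; the real logic lives in SparkEnv and Utils.deleteRecursively.

import java.io.File
import java.nio.file.Files

// Hypothetical stand-in for the SparkEnv behavior touched by this commit.
class TmpDirOwner(isDriver: Boolean) {

  // Mirrors SparkEnv.driverTmpDirToDelete: only the driver ever sets this.
  private var driverTmpDirToDelete: Option[String] = None

  def start(): Unit = {
    if (isDriver) {
      // The driver creates its own tmp dir. Executors use their current
      // working directory, so they record nothing to delete.
      val dir = Files.createTempDirectory("spark-files-").toFile
      driverTmpDirToDelete = Some(dir.getAbsolutePath)
    }
  }

  def stop(): Unit = {
    // Delete the tmp dir only if one was recorded, i.e. only on the driver.
    driverTmpDirToDelete.foreach(path => deleteRecursively(new File(path)))
  }

  // Simplified stand-in for Spark's Utils.deleteRecursively.
  private def deleteRecursively(file: File): Unit = {
    if (file.isDirectory) {
      Option(file.listFiles()).foreach(_.foreach(deleteRecursively))
    }
    file.delete()
  }
}

With isDriver = true, start() followed by stop() creates and then removes the directory; with isDriver = false both calls are no-ops, matching the executor path where no cleanup is needed.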
