Commit

Remove seemingly unnecessary synchronization.
Marcelo Vanzin committed Dec 23, 2014
1 parent 50eb4b9 commit b430534
Showing 1 changed file with 6 additions and 9 deletions.
core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala (15 changes: 6 additions & 9 deletions)
@@ -342,15 +342,12 @@ private[spark] class Worker(
       // Create local dirs for the executor. These are passed to the executor via the
       // SPARK_LOCAL_DIRS environment variable, and deleted by the Worker when the
       // application finishes.
-      val appLocalDirs = appDirectories.synchronized {
-        val dirs = appDirectories.get(appId).getOrElse {
-          Utils.getOrCreateLocalRootDirs(conf).map { dir =>
-            Utils.createDirectory(dir).getAbsolutePath()
-          }.toSeq
-        }
-        appDirectories(appId) = dirs
-        dirs
+      val appLocalDirs = appDirectories.get(appId).getOrElse {
+        Utils.getOrCreateLocalRootDirs(conf).map { dir =>
+          Utils.createDirectory(dir).getAbsolutePath()
+        }.toSeq
       }
+      appDirectories(appId) = appLocalDirs

       val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
         self, workerId, host, sparkHome, executorDir, akkaUrl, conf, appLocalDirs,
@@ -472,7 +469,7 @@ private[spark] class Worker(
     registerWithMaster()
   }

-  private def maybeCleanupApplication(id: String): Unit = appDirectories.synchronized {
+  private def maybeCleanupApplication(id: String): Unit = {
     val shouldCleanup = finishedApps.contains(id) && !executors.values.exists(_.appId == id)
     if (shouldCleanup) {
       finishedApps -= id
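The removed `synchronized` blocks guarded `appDirectories`, a mutable map on the Worker. The pattern the new code keeps is look up, lazily create, store back, with no lock, which is only safe when every access happens on a single thread (presumably the Worker's message-handling loop here). Below is a minimal, self-contained sketch of that pattern; the names `AppDirsSketch` and `createLocalDirs` are hypothetical stand-ins for the Worker's `Utils` helpers, not code from the diff.

import scala.collection.mutable

object AppDirsSketch {
  // Mutable map assumed to be confined to a single thread, mirroring how the
  // Worker's message loop is the only caller of this code path after the change.
  private val appDirectories = new mutable.HashMap[String, Seq[String]]

  // Hypothetical stand-in for Utils.getOrCreateLocalRootDirs + Utils.createDirectory.
  private def createLocalDirs(appId: String): Seq[String] =
    Seq(s"/tmp/spark/$appId")

  // Same shape as the added code: look up, lazily create, store back, no lock.
  def dirsFor(appId: String): Seq[String] = {
    val appLocalDirs = appDirectories.get(appId).getOrElse(createLocalDirs(appId))
    appDirectories(appId) = appLocalDirs
    appLocalDirs
  }

  // Same shape as maybeCleanupApplication: drop the entry once the app is finished.
  def cleanup(appId: String): Unit = appDirectories.remove(appId)

  def main(args: Array[String]): Unit = {
    println(dirsFor("app-20141223"))  // creates and caches the dirs
    println(dirsFor("app-20141223"))  // returns the cached entry
    cleanup("app-20141223")
  }
}

If the map were ever touched from another thread (for example, a background cleanup task), the lock, or a concurrent map, would be needed again; the commit title's "seemingly" reflects exactly that caveat.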
