From b430534e5d47fd12ae7b59925eeac925c1e6d2ab Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Tue, 23 Dec 2014 10:48:24 -0800
Subject: [PATCH] Remove seemingly unnecessary synchronization.

---
 .../org/apache/spark/deploy/worker/Worker.scala    | 15 ++++++---------
 1 file changed, 6 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 3a3db0fb1e548..edcf0f4615ae1 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -342,15 +342,12 @@ private[spark] class Worker(
           // Create local dirs for the executor. These are passed to the executor via the
           // SPARK_LOCAL_DIRS environment variable, and deleted by the Worker when the
           // application finishes.
-          val appLocalDirs = appDirectories.synchronized {
-            val dirs = appDirectories.get(appId).getOrElse {
-              Utils.getOrCreateLocalRootDirs(conf).map { dir =>
-                Utils.createDirectory(dir).getAbsolutePath()
-              }.toSeq
-            }
-            appDirectories(appId) = dirs
-            dirs
+          val appLocalDirs = appDirectories.get(appId).getOrElse {
+            Utils.getOrCreateLocalRootDirs(conf).map { dir =>
+              Utils.createDirectory(dir).getAbsolutePath()
+            }.toSeq
           }
+          appDirectories(appId) = appLocalDirs
           val manager = new ExecutorRunner(appId, execId, appDesc, cores_, memory_,
             self, workerId, host, sparkHome, executorDir, akkaUrl, conf, appLocalDirs,
             ExecutorState.LOADING)
@@ -472,7 +469,7 @@ private[spark] class Worker(
     registerWithMaster()
   }
 
-  private def maybeCleanupApplication(id: String): Unit = appDirectories.synchronized {
+  private def maybeCleanupApplication(id: String): Unit = {
     val shouldCleanup = finishedApps.contains(id) && !executors.values.exists(_.appId == id)
     if (shouldCleanup) {
       finishedApps -= id