From 69bc898f8d3e349c873d203bb356a7f263d8d457 Mon Sep 17 00:00:00 2001
From: Andrew Or
Date: Tue, 8 Jul 2014 17:35:31 -0700
Subject: [PATCH] [SPARK-2392] Executors should not start their own HTTP servers

Executors currently start their own unused HTTP file servers. This is because
we use the same SparkEnv class for both executors and drivers, and we do not
distinguish this case. In the longer term, we should separate out SparkEnv for
the driver and SparkEnv for the executors.

Author: Andrew Or

Closes #1335 from andrewor14/executor-http-server and squashes the following commits:

46ef263 [Andrew Or] Start HTTP server only on the driver
---
 .../src/main/scala/org/apache/spark/SparkEnv.scala | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 2b636b085d73a..8f70744d804d9 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -79,7 +79,7 @@ class SparkEnv (
 
   private[spark] def stop() {
     pythonWorkers.foreach { case(key, worker) => worker.stop() }
-    httpFileServer.stop()
+    Option(httpFileServer).foreach(_.stop())
     mapOutputTracker.stop()
     shuffleManager.stop()
     broadcastManager.stop()
@@ -228,9 +228,15 @@ object SparkEnv extends Logging {
 
     val cacheManager = new CacheManager(blockManager)
 
-    val httpFileServer = new HttpFileServer(securityManager)
-    httpFileServer.initialize()
-    conf.set("spark.fileserver.uri", httpFileServer.serverUri)
+    val httpFileServer =
+      if (isDriver) {
+        val server = new HttpFileServer(securityManager)
+        server.initialize()
+        conf.set("spark.fileserver.uri", server.serverUri)
+        server
+      } else {
+        null
+      }
 
     val metricsSystem = if (isDriver) {
       MetricsSystem.createMetricsSystem("driver", conf, securityManager)