diff --git a/core/src/main/scala/org/apache/spark/network/netty/BlockClientFactory.scala b/core/src/main/scala/org/apache/spark/network/netty/BlockClientFactory.scala
index e264f91142ec1..6278e69c2200b 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/BlockClientFactory.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/BlockClientFactory.scala
@@ -31,7 +31,7 @@ import io.netty.channel.socket.nio.NioSocketChannel
 import io.netty.channel.socket.oio.OioSocketChannel
 import io.netty.util.internal.PlatformDependent

-import org.apache.spark.SparkConf
+import org.apache.spark.{Logging, SparkConf}
 import org.apache.spark.util.Utils

@@ -42,7 +42,7 @@ import org.apache.spark.util.Utils
  * for the same remote host. It also shares a single worker thread pool for all [[BlockClient]]s.
  */
 private[netty]
-class BlockClientFactory(val conf: NettyConfig) extends Closeable {
+class BlockClientFactory(val conf: NettyConfig) extends Logging with Closeable {

   def this(sparkConf: SparkConf) = this(new NettyConfig(sparkConf))

@@ -102,6 +102,8 @@ class BlockClientFactory(val conf: NettyConfig) extends Closeable {
       return cachedClient
     }

+    logInfo(s"Creating new connection to $remoteHost:$remotePort")
+
     // There is a chance two threads are creating two different clients connecting to the same host.
     // But that's probably ok ...
diff --git a/core/src/main/scala/org/apache/spark/network/netty/BlockServer.scala b/core/src/main/scala/org/apache/spark/network/netty/BlockServer.scala
index 9a8ffabd04c84..2611f2eacdb36 100644
--- a/core/src/main/scala/org/apache/spark/network/netty/BlockServer.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/BlockServer.scala
@@ -114,6 +114,8 @@ class BlockServer(conf: NettyConfig, dataProvider: BlockDataManager)
     _port = addr.getPort
     // _hostName = addr.getHostName
     _hostName = Utils.localHostName()
+
+    logInfo(s"Server started ${_hostName}:${_port}")
   }

   /** Shutdown the server. */