From d695a528bef6291e0e1657f4f3583a8371abd7c8 Mon Sep 17 00:00:00 2001 From: Hideaki Tanaka Date: Thu, 17 Aug 2017 22:02:13 +0800 Subject: [PATCH] [SPARK-21642][CORE] Use FQDN for DRIVER_HOST_ADDRESS instead of IP address ## What changes were proposed in this pull request? This patch lets the Spark web UI use the FQDN as its hostname instead of the IP address. In the current implementation, the IP address of the driver host is set to DRIVER_HOST_ADDRESS. This becomes a problem when we enable SSL using the "spark.ssl.enabled", "spark.ssl.trustStore" and "spark.ssl.keyStore" properties. When these properties are configured, the Spark web UI is launched with SSL enabled and the HTTPS server uses the custom SSL certificate specified by these properties. In this case, the client gets a javax.net.ssl.SSLPeerUnverifiedException when it accesses the Spark web UI, because it fails to verify the SSL certificate (the Common Name of the SSL certificate does not match DRIVER_HOST_ADDRESS). To avoid this exception, we should use the FQDN of the driver host for DRIVER_HOST_ADDRESS. Error message the client gets when it accesses the Spark web UI: javax.net.ssl.SSLPeerUnverifiedException: Certificate for <10.102.138.239> doesn't match any of the subject alternative names: [] ## How was this patch tested? manual tests Author: Hideaki Tanaka Closes #18846 from thideeeee/SPARK-21642. 
--- .../scala/org/apache/spark/internal/config/package.scala | 2 +- core/src/main/scala/org/apache/spark/util/Utils.scala | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala index ef28e2c48ad02..9495cd2835f97 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/package.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala @@ -222,7 +222,7 @@ package object config { private[spark] val DRIVER_HOST_ADDRESS = ConfigBuilder("spark.driver.host") .doc("Address of driver endpoints.") .stringConf - .createWithDefault(Utils.localHostName()) + .createWithDefault(Utils.localCanonicalHostName()) private[spark] val DRIVER_BIND_ADDRESS = ConfigBuilder("spark.driver.bindAddress") .doc("Address where to bind network listen sockets on the driver.") diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala index d661293e529f9..900a619421903 100644 --- a/core/src/main/scala/org/apache/spark/util/Utils.scala +++ b/core/src/main/scala/org/apache/spark/util/Utils.scala @@ -936,6 +936,13 @@ private[spark] object Utils extends Logging { customHostname = Some(hostname) } + /** + * Get the local machine's FQDN. + */ + def localCanonicalHostName(): String = { + customHostname.getOrElse(localIpAddress.getCanonicalHostName) + } + /** * Get the local machine's hostname. */