Skip to content

Commit

Permalink
Fix issue with DNS resolution (#118)
Browse files (browse the repository at this point in the history)
* Fix issue with DNS resolution

* Address comments
Loading branch information
foxish authored and ash211 committed Feb 16, 2017
1 parent 61cd34c commit 16c5620
Show file tree
Hide file tree
Showing 4 changed files with 6 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ import com.google.common.base.Charsets
import com.google.common.io.Files
import io.fabric8.kubernetes.client.{Config, ConfigBuilder, DefaultKubernetesClient}

import org.apache.spark.deploy.kubernetes.constants._

private[spark] object KubernetesClientBuilder {
private val API_SERVER_TOKEN = new File(Config.KUBERNETES_SERVICE_ACCOUNT_TOKEN_PATH)
private val CA_CERT_FILE = new File(Config.KUBERNETES_SERVICE_ACCOUNT_CA_CRT_PATH)
Expand All @@ -33,11 +35,10 @@ private[spark] object KubernetesClientBuilder {
* into the pod's disk space.
*/
def buildFromWithinPod(
kubernetesMaster: String,
kubernetesNamespace: String): DefaultKubernetesClient = {
var clientConfigBuilder = new ConfigBuilder()
.withApiVersion("v1")
.withMasterUrl(kubernetesMaster)
.withMasterUrl(KUBERNETES_MASTER_INTERNAL_URL)
.withNamespace(kubernetesNamespace)

if (CA_CERT_FILE.isFile) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,4 +67,5 @@ package object constants {
// Miscellaneous
private[spark] val DRIVER_CONTAINER_NAME = "spark-kubernetes-driver"
private[spark] val KUBERNETES_SUBMIT_SSL_NAMESPACE = "kubernetes.submit"
private[spark] val KUBERNETES_MASTER_INTERNAL_URL = "https://kubernetes.default.svc"
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ private[spark] class KubernetesClusterSchedulerBackend(
private val EXECUTOR_MODIFICATION_LOCK = new Object
private val runningExecutorPods = new scala.collection.mutable.HashMap[String, Pod]

private val kubernetesMaster = "https://kubernetes"
private val executorDockerImage = conf.get(EXECUTOR_DOCKER_IMAGE)
private val kubernetesNamespace = conf.get(KUBERNETES_NAMESPACE)
private val executorPort = conf.getInt("spark.executor.port", DEFAULT_STATIC_PORT)
Expand Down Expand Up @@ -77,7 +76,7 @@ private[spark] class KubernetesClusterSchedulerBackend(
ThreadUtils.newDaemonCachedThreadPool("kubernetes-executor-requests"))

private val kubernetesClient = KubernetesClientBuilder
.buildFromWithinPod(kubernetesMaster, kubernetesNamespace)
.buildFromWithinPod(kubernetesNamespace)

private val driverPod = try {
kubernetesClient.pods().inNamespace(kubernetesNamespace).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ private[spark] object Minikube extends Logging {

def getDockerEnv: Map[String, String] = synchronized {
assert(MINIKUBE_EXECUTABLE_DEST.exists(), EXPECTED_DOWNLOADED_MINIKUBE_MESSAGE)
executeMinikube("docker-env")
executeMinikube("docker-env", "--shell", "bash")
.filter(_.startsWith("export"))
.map(_.replaceFirst("export ", "").split('='))
.map(arr => (arr(0), arr(1).replaceAllLiterally("\"", "")))
Expand Down

0 comments on commit 16c5620

Please sign in to comment.