Skip to content

Commit

Permalink
WIP: uses FBS and env for network auth (apache#21)
Browse files Browse the repository at this point in the history
[SPARK-601] Use FBS and env for network auth
  • Loading branch information
Arthur Rand authored and susanxhuynh committed Jan 14, 2018
1 parent 2cd2e67 commit d79b7f8
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 2 deletions.
8 changes: 7 additions & 1 deletion core/src/main/scala/org/apache/spark/SecurityManager.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ package org.apache.spark

import java.lang.{Byte => JByte}
import java.net.{Authenticator, PasswordAuthentication}
import java.nio.file.{Files => jFiles, Paths => jPaths}
import java.security.{KeyStore, SecureRandom}
import java.security.cert.X509Certificate
import javax.net.ssl._
Expand Down Expand Up @@ -451,7 +452,12 @@ private[spark] class SecurityManager(
// For Master/Worker, auth secret is in conf; for Executors, it is in env variable
Option(sparkConf.getenv(SecurityManager.ENV_AUTH_SECRET))
.orElse(sparkConf.getOption(SecurityManager.SPARK_AUTH_SECRET_CONF)) match {
case Some(value) => value
case Some(value) =>
if (jFiles.exists(jPaths.get(value))) {
HashCodes.fromBytes(jFiles.readAllBytes(jPaths.get(value))).toString
} else {
value
}
case None =>
throw new IllegalArgumentException(
"Error: a secret key must be specified via the " +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,15 @@ package org.apache.spark.executor

import java.net.URL
import java.nio.ByteBuffer
import java.nio.file.{Files, Paths}
import java.util.Locale
import java.util.concurrent.atomic.AtomicBoolean

import scala.collection.mutable
import scala.util.{Failure, Success}
import scala.util.control.NonFatal

import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import com.google.common.hash.HashCodes

import org.apache.spark._
import org.apache.spark.TaskState.TaskState
Expand Down Expand Up @@ -197,6 +198,16 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {

// Bootstrap to fetch the driver's Spark properties.
val executorConf = new SparkConf

if (System.getenv(SecurityManager.ENV_AUTH_SECRET) != null) {
executorConf.set("spark.authenticate", "true")
val secret = System.getenv(SecurityManager.ENV_AUTH_SECRET)
if (Files.exists(Paths.get(secret))) {
val s = HashCodes.fromBytes(Files.readAllBytes(Paths.get(secret))).toString
executorConf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, s)
}
}

val port = executorConf.getInt("spark.executor.port", 0)
val fetcher = RpcEnv.create(
"driverPropsFetcher",
Expand Down

0 comments on commit d79b7f8

Please sign in to comment.