Skip to content

Commit

Permalink
SPARK-1384 - fix spark-shell on yarn access to secure HDFS
Browse files Browse the repository at this point in the history
  • Loading branch information
tgravescs committed Apr 1, 2014
1 parent a6c955a commit ae9162a
Showing 1 changed file with 13 additions and 8 deletions.
21 changes: 13 additions & 8 deletions repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
Original file line number Diff line number Diff line change
Expand Up @@ -880,6 +880,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,

def process(settings: Settings): Boolean = savingContextLoader {
this.settings = settings
if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
createInterpreter()

// sets in to some kind of reader depending on environmental cues
Expand Down Expand Up @@ -937,16 +938,9 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,

def createSparkContext(): SparkContext = {
val execUri = System.getenv("SPARK_EXECUTOR_URI")
val master = this.master match {
case Some(m) => m
case None => {
val prop = System.getenv("MASTER")
if (prop != null) prop else "local"
}
}
val jars = SparkILoop.getAddedJars.map(new java.io.File(_).getAbsolutePath)
val conf = new SparkConf()
.setMaster(master)
.setMaster(getMaster())
.setAppName("Spark shell")
.setJars(jars)
.set("spark.repl.class.uri", intp.classServer.uri)
Expand All @@ -961,6 +955,17 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
sparkContext
}

/**
 * Resolves the Spark master URL for this shell session.
 *
 * Precedence: the explicitly configured `master` field if set, then the
 * `MASTER` environment variable, falling back to `"local"` when neither
 * is available.
 *
 * @return the master URL to use when constructing the `SparkContext`
 */
private def getMaster(): String = {
  // Option(...) maps a null result from System.getenv to None, so no
  // explicit null check is needed for the Java interop call.
  this.master.getOrElse(Option(System.getenv("MASTER")).getOrElse("local"))
}

/** process command-line arguments and do as they request */
def process(args: Array[String]): Boolean = {
val command = new SparkCommandLine(args.toList, msg => echo(msg))
Expand Down

0 comments on commit ae9162a

Please sign in to comment.