Spark shell exits if it cannot create SparkContext
Mainly, this occurs if you provide a malformed MASTER URL (one that doesn't match
any of our regexes). Previously, we would default to Mesos, fail, and then start the shell
anyway, except that any Spark command would fail.
aarondav committed Oct 18, 2013
1 parent fc26e5b commit 7473726
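To make the behavioral change concrete, here is a minimal, self-contained sketch of the new dispatch pattern. It is not the actual Spark source: the MasterDispatch object name is invented, the MESOS_REGEX definition is an assumption (inferred from how the bound mesosUrl is used in the diff below), and a plain IllegalArgumentException stands in for Spark's SparkException.

// Hypothetical sketch of the fail-fast master-URL dispatch.
object MasterDispatch {
  // Assumed definition: capture everything after the mesos:// scheme.
  val MESOS_REGEX = """mesos://(.*)""".r

  def dispatch(master: String): String = master match {
    // The regex extractor both checks the format and binds the
    // captured group, so no manual prefix stripping is needed.
    case MESOS_REGEX(mesosUrl) => s"Mesos master at $mesosUrl"
    // Unrecognized masters now fail fast instead of being parsed as Mesos.
    case _ =>
      throw new IllegalArgumentException(s"Could not parse Master URL: '$master'")
  }

  def main(args: Array[String]): Unit = {
    println(dispatch("mesos://host:5050")) // Mesos master at host:5050
    dispatch("spar://typo:7077")           // throws, instead of defaulting to Mesos
  }
}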
Showing 2 changed files with 14 additions and 8 deletions.
core/src/main/scala/org/apache/spark/SparkContext.scala (13 changes: 6 additions & 7 deletions)
@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler
 
-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "") // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
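A design note on this hunk: because MESOS_REGEX is now used as an extractor pattern, the match itself binds the text captured by the regex's group to mesosUrl. That is why the masterWithoutProtocol line and its replaceFirst("^mesos://", "") call can be deleted; this presumes the regex's capture group covers the part of the URL after the mesos:// scheme.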
repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala (9 changes: 8 additions & 1 deletion)
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
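The shell-side change is a fail-fast guard: if SparkContext construction throws for any reason (including the new SparkException for unparseable master URLs), the REPL prints the stack trace and a short message, then exits with status 1 rather than presenting a prompt where every Spark command would fail. The nonzero exit code also makes the failure visible to whatever script launched the shell.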

