
Commit

Print "Spark context available as sc." only when SparkContext is crea…
Browse files Browse the repository at this point in the history
…ted successfully
  • Loading branch information
zsxwing committed Nov 18, 2014
1 parent c6e0c2a commit 4850093
Showing 2 changed files with 10 additions and 4 deletions.
@@ -121,11 +121,14 @@ trait SparkILoopInit {
   def initializeSpark() {
     intp.beQuietDuring {
       command("""
-        @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+        @transient val sc = {
+          val _sc = org.apache.spark.repl.Main.interp.createSparkContext()
+          println("Spark context available as sc.")
+          _sc
+        }
         """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }

// code to be executed only after the interpreter is initialized
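Why the new shape works: the confirmation println now lives inside the block expression that defines sc, so it can only run after createSparkContext() has returned; if creation throws, the message is never printed. The same change is applied to the second file below. Here is a minimal, self-contained sketch of that pattern, not the Spark REPL code itself; ConfirmOnSuccess and createContext are hypothetical stand-ins.

object ConfirmOnSuccess {
  final case class Context(appName: String)

  // Hypothetical stand-in for createSparkContext(); may throw on failure.
  def createContext(fail: Boolean): Context =
    if (fail) throw new RuntimeException("context creation failed")
    else Context("repl")

  def main(args: Array[String]): Unit = {
    val sc = {
      val _sc = createContext(fail = args.contains("--fail"))
      // Reached only after createContext has returned; an exception above
      // skips this line, so no misleading "available" message is printed.
      println("Context available as sc.")
      _sc
    }
    println(s"Using ${sc.appName}")
  }
}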
@@ -61,11 +61,14 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
   def initializeSpark() {
     intp.beQuietDuring {
       command( """
-        @transient val sc = org.apache.spark.repl.Main.createSparkContext();
+        @transient val sc = {
+          val _sc = org.apache.spark.repl.Main.createSparkContext()
+          println("Spark context available as sc.")
+          _sc
+        }
         """)
       command("import org.apache.spark.SparkContext._")
     }
-    echo("Spark context available as sc.")
   }

/** Print a welcome message */
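And the failure mode the removed echo caused: the old message was printed unconditionally after the beQuietDuring block, so it appeared even when the submitted command had failed. The sketch below illustrates that behaviour under the assumption that the REPL's command(...) reports an exception from submitted code rather than rethrowing it; EchoAfterFailure and runCommand are hypothetical stand-ins, not the real intp.command.

object EchoAfterFailure {
  // Hypothetical stand-in: run a snippet and report, but do not rethrow, errors.
  def runCommand(body: => Unit): Unit =
    try body
    catch { case e: Exception => println(s"error: ${e.getMessage}") }

  def main(args: Array[String]): Unit = {
    runCommand {
      throw new RuntimeException("cannot create SparkContext")
    }
    // The old code echoed this unconditionally, so users saw "available"
    // even when context creation had just failed.
    println("Spark context available as sc.")
  }
}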
