
Commit

#7375: auto import of org.apache.spark.sql.SparkSession in spark magic command (#7389)
jaroslawmalekcodete authored and scottdraves committed May 21, 2018
1 parent 8a93627 commit 4c23a58
Showing 4 changed files with 7 additions and 5 deletions.
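
The user-visible effect, shown in the notebook diffs below, is that a %%spark cell can now call SparkSession.builder() directly; the explicit import line it previously needed is gone. The first cell of doc/scala/SparkUI.ipynb, for example, now reads:

%%spark
SparkSession.builder()
  .appName("BeakerX Demo")
  .master("local[4]")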
doc/scala/SparkUI.ipynb (2 changes: 0 additions & 2 deletions)
@@ -53,7 +53,6 @@
 "outputs": [],
 "source": [
 "%%spark\n",
-"import org.apache.spark.sql.SparkSession\n",
 "SparkSession.builder()\n",
 " .appName(\"BeakerX Demo\")\n",
 " .master(\"local[4]\")"
@@ -73,7 +72,6 @@
 "outputs": [],
 "source": [
 "%%spark --connect\n",
-"import org.apache.spark.sql.SparkSession\n",
 "SparkSession.builder().master(\"local[100]\")"
 ]
 },
@@ -17,6 +17,7 @@
 
 import com.twosigma.beakerx.TryResult;
 import com.twosigma.beakerx.jvm.object.SimpleEvaluationObject;
+import com.twosigma.beakerx.kernel.ImportPath;
 import com.twosigma.beakerx.kernel.KernelFunctionality;
 import com.twosigma.beakerx.kernel.magic.command.CodeFactory;
 import com.twosigma.beakerx.kernel.magic.command.MagicCommandExecutionParam;
@@ -57,7 +58,7 @@ public MagicCommandOutcomeItem execute(MagicCommandExecutionParam param) {
       return new MagicCommandOutput(MagicCommandOutput.Status.ERROR, "Can not run spark support");
     }
     SparkDisplayers.register();
-
+    addDefaultImports();
     return new MagicCommandOutput(MagicCommandOutput.Status.OK, "Spark support enabled");
   }

@@ -74,4 +75,8 @@ private TryResult addImplicits() {
     return kernel.executeCode(codeToExecute, seo);
   }
 
+  private void addDefaultImports() {
+    kernel.addImport(new ImportPath("org.apache.spark.sql.SparkSession"));
+  }
+
 }
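
Enabling Spark support now also calls addDefaultImports(), which registers org.apache.spark.sql.SparkSession with the kernel through kernel.addImport(new ImportPath(...)). A minimal sketch of the intended effect, assuming the kernel applies its registered imports to each cell before evaluation (the evaluator side is outside this diff):

// Sketch only: what a later %%spark cell is effectively evaluated as
// once addDefaultImports() has registered the default import.
import org.apache.spark.sql.SparkSession  // injected by the kernel, not written by the user

SparkSession.builder().master("local[100]")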
@@ -93,7 +93,7 @@ private void enableSparkSupport() throws InterruptedException {
   private void runSparkDataset(String returnStatement) throws InterruptedException {
     //given
     String peoplePath = EnableSparkSupportTest.class.getClassLoader().getResource("people.json").getPath();
-    String code = "import org.apache.spark.sql.SparkSession\n" +
+    String code =
         "val spark = SparkSession\n" +
         " .builder\n" +
         " .appName(\"jupyter\")\n" +
test/ipynb/scala/SparkUI_example.ipynb (1 change: 0 additions & 1 deletion)
@@ -19,7 +19,6 @@
 "outputs": [],
 "source": [
 "%%spark\n",
-"import org.apache.spark.sql.SparkSession\n",
 "SparkSession.builder()\n",
 " .appName(\"Simple Application\")\n",
 " .master(\"local[4]\")"
