Do not wait for the pod to finish in integration tests. (#84)
The example jobs are patched to never finish, so the tests should not wait for the pod to complete.
lins05 authored and ash211 committed Feb 16, 2017
1 parent f1285e1 commit bc80ac4
Showing 1 changed file with 6 additions and 0 deletions.
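
All six additions set the same flag. As a rough, hypothetical sketch of what the flag controls (assuming, as the commit message suggests, that the submission client otherwise blocks until the application completes), a test submission configured this way no longer waits on the never-finishing example job:

    import org.apache.spark.SparkConf

    object WaitAppCompletionSketch {
      def main(args: Array[String]): Unit = {
        // Sketch only: with waitAppCompletion set to "false" the submission
        // client is expected to return once the driver pod is created instead
        // of polling a job that has been patched to never finish.
        val conf = new SparkConf()
          .set("spark.app.name", "spark-pi")
          .set("spark.kubernetes.submit.waitAppCompletion", "false")
        println(conf.get("spark.kubernetes.submit.waitAppCompletion"))
      }
    }

The object name WaitAppCompletionSketch is made up for illustration; the actual tests pass the same key either through SparkConf (first hunk) or as a --conf argument to SparkSubmit (remaining hunks).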
@@ -181,6 +181,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
.set("spark.app.name", "spark-pi")
.set("spark.ui.enabled", "true")
.set("spark.testing", "false")
.set("spark.kubernetes.submit.waitAppCompletion", "false")
val mainAppResource = s"file://$EXAMPLES_JAR"

new Client(
@@ -210,6 +211,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
"--conf", s"spark.kubernetes.submit.clientCertFile=${clientConfig.getClientCertFile}",
"--conf", "spark.kubernetes.executor.docker.image=spark-executor:latest",
"--conf", "spark.kubernetes.driver.docker.image=spark-driver:latest",
"--conf", "spark.kubernetes.submit.waitAppCompletion=false",
EXAMPLES_JAR)
SparkSubmit.main(args)
val sparkMetricsService = getSparkMetricsService("spark-pi")
@@ -231,6 +233,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
"--conf", s"spark.kubernetes.submit.clientCertFile=${clientConfig.getClientCertFile}",
"--conf", "spark.kubernetes.executor.docker.image=spark-executor:latest",
"--conf", "spark.kubernetes.driver.docker.image=spark-driver:latest",
"--conf", "spark.kubernetes.submit.waitAppCompletion=false",
s"container:///opt/spark/examples/jars/$EXAMPLES_JAR_FILE_NAME")
val allContainersSucceeded = SettableFuture.create[Boolean]
val watcher = new Watcher[Pod] {
@@ -292,6 +295,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
"--conf", "spark.kubernetes.executor.docker.image=spark-executor:latest",
"--conf", "spark.kubernetes.driver.docker.image=spark-driver:latest",
"--conf", "spark.kubernetes.driver.labels=label1=label1value,label2=label2value",
"--conf", "spark.kubernetes.submit.waitAppCompletion=false",
EXAMPLES_JAR)
SparkSubmit.main(args)
val driverPodLabels = minikubeKubernetesClient
@@ -337,6 +341,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
"--conf", "spark.ssl.kubernetes.submit.trustStore=" +
s"file://${trustStoreFile.getAbsolutePath}",
"--conf", s"spark.ssl.kubernetes.driverlaunch.trustStorePassword=changeit",
"--conf", "spark.kubernetes.submit.waitAppCompletion=false",
EXAMPLES_JAR)
SparkSubmit.main(args)
}
@@ -360,6 +365,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
"--conf", s"spark.kubernetes.submit.clientCertFile=${clientConfig.getClientCertFile}",
"--conf", "spark.kubernetes.executor.docker.image=spark-executor:latest",
"--conf", "spark.kubernetes.driver.docker.image=spark-driver:latest",
"--conf", "spark.kubernetes.submit.waitAppCompletion=false",
EXAMPLES_JAR,
TEST_EXISTENCE_FILE.getName,
TEST_EXISTENCE_FILE_CONTENTS)
