This repository has been archived by the owner on Jan 9, 2020. It is now read-only.

Use a list of environment variables for JVM options. #444

Merged: 3 commits, Aug 22, 2017
@@ -69,6 +69,7 @@ package object constants {
private[spark] val ENV_MOUNTED_FILES_DIR = "SPARK_MOUNTED_FILES_DIR"
private[spark] val ENV_PYSPARK_FILES = "PYSPARK_FILES"
private[spark] val ENV_PYSPARK_PRIMARY = "PYSPARK_PRIMARY"
private[spark] val ENV_JAVA_OPT_PREFIX = "SPARK_JAVA_OPT_"
private[spark] val ENV_MOUNTED_FILES_FROM_SECRET_DIR = "SPARK_MOUNTED_FILES_FROM_SECRET_DIR"

// Bootstrapping dependencies with the init-container
@@ -18,9 +18,10 @@ package org.apache.spark.deploy.kubernetes.submit

import java.util.{Collections, UUID}

import io.fabric8.kubernetes.api.model.{ContainerBuilder, OwnerReferenceBuilder, PodBuilder}
import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVar, EnvVarBuilder, OwnerReferenceBuilder, PodBuilder}
import io.fabric8.kubernetes.client.KubernetesClient
import scala.collection.mutable
import scala.collection.JavaConverters._

import org.apache.spark.SparkConf
import org.apache.spark.deploy.kubernetes.config._
@@ -92,18 +93,21 @@ private[spark] class Client(
currentDriverSpec = nextStep.configureDriver(currentDriverSpec)
}
val resolvedDriverJavaOpts = currentDriverSpec
.driverSparkConf
// We don't need this anymore since we just set the JVM options on the environment
.remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
.getAll
.map {
case (confKey, confValue) => s"-D$confKey=$confValue"
}.mkString(" ") + driverJavaOptions.map(" " + _).getOrElse("")
.driverSparkConf
// We don't need this anymore since we just set the JVM options on the environment
.remove(org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS)
.getAll
.map {
case (confKey, confValue) => s"-D$confKey=$confValue"
} ++ driverJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
val driverJavaOptsEnvs: Seq[EnvVar] = resolvedDriverJavaOpts.zipWithIndex.map {
case (option, index) => new EnvVarBuilder()
.withName(s"$ENV_JAVA_OPT_PREFIX$index")
.withValue(option)
.build()
}
val resolvedDriverContainer = new ContainerBuilder(currentDriverSpec.driverContainer)
.addNewEnv()
.withName(ENV_DRIVER_JAVA_OPTS)
.withValue(resolvedDriverJavaOpts)
.endEnv()
.addAllToEnv(driverJavaOptsEnvs.asJava)
.build()
val resolvedDriverPod = new PodBuilder(currentDriverSpec.driverPod)
.editSpec()
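The hunk above replaces the single concatenated SPARK_DRIVER_JAVA_OPTS string: spark.driver.extraJavaOptions is now split with Utils.splitCommandString, and every resolved option becomes its own SPARK_JAVA_OPT_<index> environment variable on the driver container. As a rough sketch (hypothetical output, not captured from a run; SparkConf-derived -D options would also claim indices), running env | grep SPARK_JAVA_OPT_ inside the driver container would show entries like:

SPARK_JAVA_OPT_0=-DsimpleDriverConf=simpleDriverConfValue
SPARK_JAVA_OPT_1=-Ddriverconfwithspaces=driver conf with spaces value

Since each option is a separate variable, a value containing spaces stays intact instead of being re-split when the container command is evaluated.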
@@ -135,7 +135,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
.set("spark.logConf", "true")
.set(
org.apache.spark.internal.config.DRIVER_JAVA_OPTIONS,
"-XX:+|-HeapDumpOnOutOfMemoryError")

Reviewer: What was this +|- for before? AFAIK that's not valid syntax.

Author: Not sure - probably just a badly written unit test.
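A hedged aside on the +|- form: HotSpot documentation writes boolean options as -XX:+|-SomeFlag, meaning "enable with +, disable with -", so the literal "+|-" is indeed not valid command-line syntax; only one of the concrete forms is accepted, for example:

java -XX:+HeapDumpOnOutOfMemoryError -version   # enables heap dumps on OutOfMemoryError
java -XX:-HeapDumpOnOutOfMemoryError -version   # explicitly disables them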

"-XX:+HeapDumpOnOutOfMemoryError -XX:+PrintGCDetails")
val submissionClient = new Client(
submissionSteps,
sparkConf,
@@ -147,15 +147,22 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter {
val createdPod = createdPodArgumentCaptor.getValue
val driverContainer = Iterables.getOnlyElement(createdPod.getSpec.getContainers)
assert(driverContainer.getName === SecondTestConfigurationStep.containerName)
val driverJvmOptsEnv = Iterables.getOnlyElement(driverContainer.getEnv)
assert(driverJvmOptsEnv.getName === ENV_DRIVER_JAVA_OPTS)
val driverJvmOpts = driverJvmOptsEnv.getValue.split(" ").toSet
assert(driverJvmOpts.contains("-Dspark.logConf=true"))
assert(driverJvmOpts.contains(
val driverJvmOptsEnvs = driverContainer.getEnv.asScala.filter { env =>
env.getName.startsWith(ENV_JAVA_OPT_PREFIX)
}.sortBy(_.getName)
assert(driverJvmOptsEnvs.size === 4)

val expectedJvmOptsValues = Seq(
"-Dspark.logConf=true",
s"-D${SecondTestConfigurationStep.sparkConfKey}=" +
SecondTestConfigurationStep.sparkConfValue))
assert(driverJvmOpts.contains(
"-XX:+|-HeapDumpOnOutOfMemoryError"))
s"${SecondTestConfigurationStep.sparkConfValue}",
s"-XX:+HeapDumpOnOutOfMemoryError",
s"-XX:+PrintGCDetails")
driverJvmOptsEnvs.zip(expectedJvmOptsValues).zipWithIndex.foreach {
case ((resolvedEnv, expectedJvmOpt), index) =>
assert(resolvedEnv.getName === s"$ENV_JAVA_OPT_PREFIX$index")
assert(resolvedEnv.getValue === expectedJvmOpt)
}
}

test("Waiting for app completion should stall on the watcher") {
@@ -211,8 +218,8 @@ private object SecondTestConfigurationStep extends DriverConfigurationStep {
override def configureDriver(driverSpec: KubernetesDriverSpec): KubernetesDriverSpec = {
val modifiedPod = new PodBuilder(driverSpec.driverPod)
.editMetadata()
.addToAnnotations(annotationKey, annotationValue)
.endMetadata()
.addToAnnotations(annotationKey, annotationValue)
.endMetadata()
.build()
val resolvedSparkConf = driverSpec.driverSparkConf.clone().set(sparkConfKey, sparkConfValue)
val modifiedContainer = new ContainerBuilder(driverSpec.driverContainer)
@@ -39,11 +39,11 @@ ENV PYSPARK_DRIVER_PYTHON python
ENV PYTHONPATH ${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.4-src.zip:${PYTHONPATH}

CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \

Author (@mccheah, Aug 19, 2017): This is pretty strange and is fairly dependent on the shell being Bash instead of sh. It would be good to make this compatible with /bin/sh, but I couldn't think of a great way to handle this sanely without using arrays.

Separately, the use of a temporary file is also sub-par. I found that when piping the result of env ... sed into the readarray command via the <<< operator, readarray treated the entire input string as a single element of the array, so the resulting array only had one large JVM option. I also tried piping the results of the sed command into readarray, but that isn't working for me in my testing either.

I would appreciate any feedback or suggestions from those who are experienced at shell scripting.

Reviewer: I'd also appreciate thoughts on how to avoid bouncing this off a temp file. What's there now is ugly but works, so if someone knows how to do this in a way that's pretty and works, that'd be even better.

Reviewer: cc @ifilonenko @erikerlandson @foxish for shell scripting suggestions

Member: readarray can be at the end of the pipe, which avoids the temp file. That's at least slightly cleaner.

Author: Piping to readarray didn't work for me; I believe the result was an empty array in the environment variable. But perhaps I was writing it incorrectly? I had this:

env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' | readarray -t SPARK_DRIVER_JAVA_OPTS

Member: Weird, I'd expect that to be equivalent to the code using the temp file.
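One plausible explanation for the empty array (a sketch, assuming default Bash with the lastpipe option unset): each stage of a pipeline runs in its own subshell, so an array that readarray populates at the end of a pipe is thrown away when that subshell exits. Process substitution keeps readarray in the current shell and would also avoid the temp file:

printf 'a\nb\n' | readarray -t OPTS
echo "${#OPTS[@]}"    # prints 0: OPTS was set in the pipeline's subshell and lost

readarray -t OPTS < <(printf 'a\nb\n')
echo "${#OPTS[@]}"    # prints 2: readarray ran in the current shell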

readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH \
-Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY \
$SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $PYSPARK_PRIMARY $PYSPARK_FILES $SPARK_DRIVER_ARGS
@@ -24,9 +24,11 @@ FROM spark-base
COPY examples /opt/spark/examples

CMD SPARK_CLASSPATH="${SPARK_HOME}/jars/*" && \
env | grep SPARK_JAVA_OPT_ | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt && \
readarray -t SPARK_DRIVER_JAVA_OPTS < /tmp/java_opts.txt && \
if ! [ -z ${SPARK_MOUNTED_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_MOUNTED_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_SUBMIT_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_SUBMIT_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_EXTRA_CLASSPATH+x} ]; then SPARK_CLASSPATH="$SPARK_EXTRA_CLASSPATH:$SPARK_CLASSPATH"; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_DIR+x} ]; then cp -R "$SPARK_MOUNTED_FILES_DIR/." .; fi && \
if ! [ -z ${SPARK_MOUNTED_FILES_FROM_SECRET_DIR} ]; then cp -R "$SPARK_MOUNTED_FILES_FROM_SECRET_DIR/." .; fi && \
${JAVA_HOME}/bin/java $SPARK_DRIVER_JAVA_OPTS -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
${JAVA_HOME}/bin/java "${SPARK_DRIVER_JAVA_OPTS[@]}" -cp $SPARK_CLASSPATH -Xms$SPARK_DRIVER_MEMORY -Xmx$SPARK_DRIVER_MEMORY $SPARK_DRIVER_CLASS $SPARK_DRIVER_ARGS
@@ -25,6 +25,8 @@ RUN apk upgrade --no-cache && \
mkdir -p /opt/spark && \
mkdir -p /opt/spark/work-dir \
touch /opt/spark/RELEASE && \
rm /bin/sh && \
ln -sv /bin/bash /bin/sh && \
chgrp root /etc/passwd && chmod ug+rw /etc/passwd

COPY jars /opt/spark/jars
@@ -0,0 +1,68 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.kubernetes.integrationtest.jobs

import java.io.{File, FileInputStream}
import java.util.Properties

import com.google.common.collect.Maps
import scala.collection.JavaConverters._

import org.apache.spark.sql.SparkSession
import org.apache.spark.util.Utils

private[spark] object JavaOptionsTest {

def main(args: Array[String]): Unit = {
// scalastyle:off println
if (args.length != 1) {
println(s"Invalid arguments: ${args.mkString(",")}." +
s"Usage: JavaOptionsTest <driver-java-options-list-file>")
System.exit(1)
}
val expectedDriverJavaOptions = loadPropertiesFromFile(args(0))
val nonMatchingDriverOptions = expectedDriverJavaOptions.filter {
case (optKey, optValue) => System.getProperty(optKey) != optValue
}
if (nonMatchingDriverOptions.nonEmpty) {
println(s"The driver's JVM options did not match. Expected $expectedDriverJavaOptions." +
s" But these options did not match: $nonMatchingDriverOptions.")
val sysProps = Maps.fromProperties(System.getProperties).asScala
println("System properties are:")
for (prop <- sysProps) {
println(s"Key: ${prop._1}, Value: ${prop._2}")
}
System.exit(1)
}

// TODO support spark.executor.extraJavaOptions and test here.
println(s"All expected JVM options were present on the driver and executors.")
// scalastyle:on println
}

private def loadPropertiesFromFile(filePath: String): Map[String, String] = {
val file = new File(filePath)
if (!file.isFile) {
throw new IllegalArgumentException(s"File not found at $filePath or is not a file.")
}
val properties = new Properties()
Utils.tryWithResource(new FileInputStream(file)) { is =>
properties.load(is)
}
Maps.fromProperties(properties).asScala.toMap
}
}
@@ -16,9 +16,9 @@
*/
package org.apache.spark.deploy.kubernetes.integrationtest

import java.io.File
import java.io.{File, FileOutputStream}
import java.nio.file.Paths
import java.util.UUID
import java.util.{Properties, UUID}

import com.google.common.base.Charsets
import com.google.common.io.Files
@@ -229,6 +229,26 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
Seq.empty[String])
}

test("Setting JVM options on the driver and executors with spaces.") {

Reviewer: Right now this PR only tests the driver; the executor side is coming in the follow-up PR.

Author: I'm ok with keeping the name as-is for now, given that the next PR is imminent.

assume(testBackend.name == MINIKUBE_TEST_BACKEND)
launchStagingServer(SSLOptions(), None)
val driverJvmOptionsFile = storeJvmOptionsInTempFile(
Map("simpleDriverConf" -> "simpleDriverConfValue",
"driverconfwithspaces" -> "driver conf with spaces value"),
"driver-jvm-options.properties",
"JVM options that should be set on the driver.")
sparkConf.set(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS,
"-DsimpleDriverConf=simpleDriverConfValue" +
" -Ddriverconfwithspaces='driver conf with spaces value'")
sparkConf.set("spark.files", driverJvmOptionsFile.getAbsolutePath)
runSparkApplicationAndVerifyCompletion(
JavaMainAppResource(SUBMITTER_LOCAL_MAIN_APP_RESOURCE),
JAVA_OPTIONS_MAIN_CLASS,
Seq(s"All expected JVM options were present on the driver and executors."),
Array(driverJvmOptionsFile.getName),
Seq.empty[String])
}

test("Submit small local files without the resource staging server.") {
assume(testBackend.name == MINIKUBE_TEST_BACKEND)
sparkConf.setJars(Seq(CONTAINER_LOCAL_HELPER_JAR_PATH))
@@ -360,6 +380,20 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
}
}
}

private def storeJvmOptionsInTempFile(
options: Map[String, String],
propertiesFileName: String,
comments: String): File = {
val tempDir = Utils.createTempDir()
val propertiesFile = new File(tempDir, propertiesFileName)
val properties = new Properties()
options.foreach { case (propKey, propValue) => properties.setProperty(propKey, propValue) }
Utils.tryWithResource(new FileOutputStream(propertiesFile)) { os =>
properties.store(os, comments)
}
propertiesFile
}
}

private[spark] object KubernetesSuite {
@@ -389,6 +423,8 @@ private[spark] object KubernetesSuite {
".integrationtest.jobs.FileExistenceTest"
val GROUP_BY_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" +
".integrationtest.jobs.GroupByTest"
val JAVA_OPTIONS_MAIN_CLASS = "org.apache.spark.deploy.kubernetes" +
".integrationtest.jobs.JavaOptionsTest"
val TEST_EXISTENCE_FILE_CONTENTS = "contents"

case object ShuffleNotReadyException extends Exception