Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-25957][K8S] Make building alternate language binding docker images optional #23053

Closed
wants to merge 6 commits into from
65 changes: 43 additions & 22 deletions bin/docker-image-tool.sh
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,18 @@ function image_ref {
echo "$image"
}

# Push a single Spark image variant to the configured repository.
# Arguments: $1 - image name suffix (e.g. "spark", "spark-py", "spark-r")
# Globals:   reads repo/tag indirectly via image_ref
# Outputs:   skip notice to stdout when the image does not exist locally
# Exits:     via error() if the push itself fails
function docker_push {
  local image_name="$1"
  local image
  image="$(image_ref "${image_name}")"
  # Only push images that were actually built; attempting to push a
  # missing tag would make `docker push` fail outright.
  # Note: quote the substitution and use -n — the original unquoted
  # `[ ! -z $(...) ]` is subject to word-splitting/globbing.
  if [ -n "$(docker images -q "${image}")" ]; then
    if ! docker push "${image}"; then
      error "Failed to push $image_name Docker image."
    fi
  else
    echo "${image} image not found. Skipping push for this image."
  fi
}

function build {
local BUILD_ARGS
local IMG_PATH
Expand Down Expand Up @@ -102,33 +114,37 @@ function build {
error "Failed to build Spark JVM Docker image, please refer to Docker build output for details."
fi

docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-py) \
-f "$PYDOCKERFILE" .
if [ $? -ne 0 ]; then
error "Failed to build PySpark Docker image, please refer to Docker build output for details."
if [ "${PYDOCKERFILE}" != "skip" ]; then
docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-py) \
-f "$PYDOCKERFILE" .
if [ $? -ne 0 ]; then
error "Failed to build PySpark Docker image, please refer to Docker build output for details."
ramaddepally marked this conversation as resolved.
Show resolved Hide resolved
fi
else
echo "Skipped building PySpark docker image."
fi

if [ "${RDOCKERFILE}" != "skip" ]; then
if [ -d "${SPARK_HOME}/R/lib" ]; then
docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-r) \
-f "$RDOCKERFILE" .
if [ $? -ne 0 ]; then
error "Failed to build SparkR Docker image, please refer to Docker build output for details."
ramaddepally marked this conversation as resolved.
Show resolved Hide resolved
fi
else
echo "SparkR artifacts not found. Skipped building SparkR docker image."
fi
docker build $NOCACHEARG "${BINDING_BUILD_ARGS[@]}" \
-t $(image_ref spark-r) \
-f "$RDOCKERFILE" .
if [ $? -ne 0 ]; then
error "Failed to build SparkR Docker image, please refer to Docker build output for details."
else
echo "Skipped building SparkR docker image."
ramaddepally marked this conversation as resolved.
Show resolved Hide resolved
fi
}

# Push all Spark image variants (JVM, PySpark, SparkR).
# Delegates to docker_push, which skips any variant that was not built
# (e.g. when -p/-R was given as 'skip') instead of failing the whole push.
function push {
  local image
  for image in spark spark-py spark-r; do
    docker_push "${image}"
  done
}

function usage {
Expand All @@ -144,7 +160,9 @@ Commands:
Options:
-f file Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
-p file Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
Specify 'skip' to skip building PySpark docker image.
-R file Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
Specify 'skip' to skip building SparkR docker image.
-r repo Repository address.
-t tag Tag to apply to the built image, or to identify the image to be pushed.
-m Use minikube's Docker daemon.
Expand All @@ -164,6 +182,9 @@ Examples:
- Build image in minikube with tag "testing"
$0 -m -t testing build

- Skip building SparkR docker image
$0 -r repo -t tag -R skip build

- Build and push image with tag "v2.3.0" to docker.io/myrepo
$0 -r docker.io/myrepo -t v2.3.0 build
$0 -r docker.io/myrepo -t v2.3.0 push
Expand Down