forked from apache/spark
-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
OP-667: adding Jenkinsfile for build and change pom.xml to push artifacts into artifactory
- Loading branch information
1 parent
4c6cf79
commit 11560c9
Showing
6 changed files
with
168 additions
and
46 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
3.0.0-1,1.0
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
# Guavus Spark runtime image: extends the base Spark image with extra
# integration jars (Elasticsearch-Hadoop, AWS SDK, Kafka, JMX exporter).

# Base image tag is parameterized so CI can build against other Spark releases.
ARG spark_image_tag=3.0.0-1-hadoop3.2-1.0

FROM artifacts.ggn.in.guavus.com:4244/spark:${spark_image_tag}

# UID the main Spark process runs as (matches the upstream Spark image user).
ARG spark_uid=185

# Root is required for apt-get and for writing into /opt/spark/jars.
USER root

# Download every jar directly into Spark's classpath directory.
# -f makes curl fail on HTTP errors so a bad download breaks the build
# instead of shipping a corrupt jar. apt package lists are removed in the
# same layer so they never persist in the image.
# NOTE(review): the artifactory URLs are plain http — presumably an
# internal registry; confirm TLS is not available before keeping http.
RUN apt-get update && \
    apt-get install -y --no-install-recommends curl && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-core/7.8.1_3.0.0/elasticsearch-hadoop-core-7.8.1_3.0.0.jar -o /opt/spark/jars/elasticsearch-hadoop-core-7.8.1_3.0.0.jar && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-mr/7.8.1_3.0.0/elasticsearch-hadoop-mr-7.8.1_3.0.0.jar -o /opt/spark/jars/elasticsearch-hadoop-mr-7.8.1_3.0.0.jar && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-sql/7.8.1_3.0.0/elasticsearch-hadoop-sql-7.8.1_3.0.0.jar -o /opt/spark/jars/elasticsearch-hadoop-sql-7.8.1_3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-s3/1.11.832/aws-java-sdk-s3-1.11.832.jar -o /opt/spark/jars/aws-java-sdk-s3-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk/1.11.832/aws-java-sdk-1.11.832.jar -o /opt/spark/jars/aws-java-sdk-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-core/1.11.832/aws-java-sdk-core-1.11.832.jar -o /opt/spark/jars/aws-java-sdk-core-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-dynamodb/1.11.832/aws-java-sdk-dynamodb-1.11.832.jar -o /opt/spark/jars/aws-java-sdk-dynamodb-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/commons/commons-pool2/2.8.0/commons-pool2-2.8.0.jar -o /opt/spark/jars/commons-pool2-2.8.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.2.0/hadoop-aws-3.2.0.jar -o /opt/spark/jars/hadoop-aws-3.2.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-avro_2.12/3.0.0/spark-avro_2.12-3.0.0.jar -o /opt/spark/jars/spark-avro_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-token-provider-kafka-0-10_2.12/3.0.0/spark-token-provider-kafka-0-10_2.12-3.0.0.jar -o /opt/spark/jars/spark-token-provider-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar -o /opt/spark/jars/jets3t-0.9.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-sql-kafka-0-10_2.12/3.0.0/spark-sql-kafka-0-10_2.12-3.0.0.jar -o /opt/spark/jars/spark-sql-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/kafka/kafka-clients/2.2.0/kafka-clients-2.2.0.jar -o /opt/spark/jars/kafka-clients-2.2.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-streaming-kafka-0-10_2.12/3.0.0/spark-streaming-kafka-0-10_2.12-3.0.0.jar -o /opt/spark/jars/spark-streaming-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.13.0/jmx_prometheus_javaagent-0.13.0.jar -o /opt/spark/jars/jmx_prometheus_javaagent-0.13.0.jar && \
    rm -rf /var/lib/apt/lists/*

ENTRYPOINT [ "/opt/entrypoint.sh" ]

# Specify the User that the actual main process will run as
USER ${spark_uid}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
// Jenkins declarative pipeline: versions the build, optionally deploys the
// Maven artifacts (release branches) or builds and pushes the Spark Docker
// image (PR branches). Relies on the shared library 'jenkins_lib' for
// versionDefine() and docker_push().
@Library('jenkins_lib')_
pipeline
{
    agent { label 'slave' }

    environment {
        project = "apache-spark";
        buildNum = currentBuild.getNumber();
        // Branch prefix, e.g. feat, release, fix
        buildType = BRANCH_NAME.split("/").first();
        // Branch suffix, e.g. OP-<User-Story ID>
        branchVersion = BRANCH_NAME.split("/").last().toUpperCase();
        // Define global environment variables in this section
    }

    stages {
        stage("Define Release version") {
            steps {
                script {
                    // Global lib call: populates VERSION ("<spark>,<docker>") and RELEASE.
                    versionDefine()
                    env.GUAVUS_SPARK_VERSION = "${VERSION}".split(",").first();
                    env.GUAVUS_DOCKER_VERSION = "${VERSION}".split(",").last();
                    env.dockerTag = "${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION}-${RELEASE}"
                    echo "GUAVUS_SPARK_VERSION : ${GUAVUS_SPARK_VERSION}"
                    echo "GUAVUS_DOCKER_VERSION : ${GUAVUS_DOCKER_VERSION}"
                    echo "DOCKER TAG : ${dockerTag}"
                }
            }
        }

        stage("Versioning") {
            steps {
                echo "GUAVUS_SPARK_VERSION : ${GUAVUS_SPARK_VERSION}"
                echo "GUAVUS_DOCKER_VERSION : ${GUAVUS_DOCKER_VERSION}"
                // Stamp the Maven POMs with the resolved Spark version.
                sh 'mvn versions:set -DnewVersion=${GUAVUS_SPARK_VERSION}'
            }
        }

        stage("Initialize Variable") {
            steps {
                script {
                    // Deliberately unscoped (no 'def') so later stages' `when`
                    // expressions can read these from the script binding.
                    PUSH_JAR = false;
                    PUSH_DOCKER = false;
                    DOCKER_IMAGE_NAME = "spark-opsiq";
                    longCommit = sh(returnStdout: true, script: "git rev-parse HEAD").trim()

                    // release/* branches publish jars; PR-* builds publish docker images.
                    if( env.buildType in ['release'] )
                    {
                        PUSH_JAR = true;
                    }
                    else if ( env.buildType ==~ /PR-.*/ ) {
                        PUSH_DOCKER = true
                    }
                }
            }
        }

        stage("Push JAR to Maven Artifactory") {
            when {
                expression { PUSH_JAR == true }
            }
            steps {
                script {
                    echo "Pushing JAR to Maven Artifactory"
                    // Skips checkstyle/enforcer/tests: this stage only publishes
                    // artifacts already validated earlier in the pipeline.
                    sh "mvn deploy -U -Dcheckstyle.skip=true -Denforcer.skip=true -DskipTests=true;"
                }
            }
        }

        stage("Build and Push Docker") {
            when {
                expression { PUSH_DOCKER == true }
            }
            stages {
                stage("Create Docker Image") {
                    steps {
                        script {
                            echo "Creating docker build..."
                            // Build a Spark distribution, then use Spark's own
                            // docker-image-tool to build/push the base image before
                            // layering the opsiq image on top.
                            sh "./dev/make-distribution.sh --name guavus_spark-${GUAVUS_SPARK_VERSION}-3.2.0 -Phive -Phive-thriftserver -Pkubernetes -Phadoop-3.2 -Dhadoop.version=3.2.0"
                            sh "./dist/bin/docker-image-tool.sh -r artifacts.ggn.in.guavus.com:4244 -t ${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION} build"
                            sh "./dist/bin/docker-image-tool.sh -r artifacts.ggn.in.guavus.com:4244 -t ${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION} push"
                            sh "docker build -t ${DOCKER_IMAGE_NAME} --build-arg GIT_HEAD=${longCommit} --build-arg GIT_BRANCH=${env.BRANCH_NAME} --build-arg VERSION=${dockerTag} --build-arg BUILD_NUMBER=${env.BUILD_NUMBER} ."
                        }
                    }
                }

                stage("PUSH Docker") {
                    steps {
                        script {
                            echo "Docker PUSH..."
                            // Shared-library helper: tags and pushes per branch type.
                            docker_push( buildType, DOCKER_IMAGE_NAME )
                        }
                    }
                }
            }
        }

    }

}
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters