Commit

OP-667: adding Jenkinsfile for build and change pom.xml to push artifacts into artifactory
jainshashank24 committed Oct 19, 2020
1 parent 4c6cf79 commit 11560c9
Showing 6 changed files with 168 additions and 46 deletions.
1 change: 1 addition & 0 deletions .VERSION
@@ -0,0 +1 @@
3.0.0-1,1.0
49 changes: 49 additions & 0 deletions Dockerfile
@@ -0,0 +1,49 @@
ARG spark_image_tag=3.0.0-1-hadoop3.2-1.0

FROM artifacts.ggn.in.guavus.com:4244/spark:${spark_image_tag}

ARG spark_uid=185

USER root

RUN apt-get update && \
    apt-get -y install curl && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-core/7.8.1_3.0.0/elasticsearch-hadoop-core-7.8.1_3.0.0.jar -o elasticsearch-hadoop-core-7.8.1_3.0.0.jar && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-mr/7.8.1_3.0.0/elasticsearch-hadoop-mr-7.8.1_3.0.0.jar -o elasticsearch-hadoop-mr-7.8.1_3.0.0.jar && \
    curl -fSL http://artifacts.ggn.in.guavus.com:8081/artifactory/libs-release-local/org/elasticsearch/elasticsearch-hadoop-sql/7.8.1_3.0.0/elasticsearch-hadoop-sql-7.8.1_3.0.0.jar -o elasticsearch-hadoop-sql-7.8.1_3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-s3/1.11.832/aws-java-sdk-s3-1.11.832.jar -o aws-java-sdk-s3-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk/1.11.832/aws-java-sdk-1.11.832.jar -o aws-java-sdk-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-core/1.11.832/aws-java-sdk-core-1.11.832.jar -o aws-java-sdk-core-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-dynamodb/1.11.832/aws-java-sdk-dynamodb-1.11.832.jar -o aws-java-sdk-dynamodb-1.11.832.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/commons/commons-pool2/2.8.0/commons-pool2-2.8.0.jar -o commons-pool2-2.8.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.2.0/hadoop-aws-3.2.0.jar -o hadoop-aws-3.2.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-avro_2.12/3.0.0/spark-avro_2.12-3.0.0.jar -o spark-avro_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-token-provider-kafka-0-10_2.12/3.0.0/spark-token-provider-kafka-0-10_2.12-3.0.0.jar -o spark-token-provider-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/net/java/dev/jets3t/jets3t/0.9.0/jets3t-0.9.0.jar -o jets3t-0.9.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-sql-kafka-0-10_2.12/3.0.0/spark-sql-kafka-0-10_2.12-3.0.0.jar -o spark-sql-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/kafka/kafka-clients/2.2.0/kafka-clients-2.2.0.jar -o kafka-clients-2.2.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/org/apache/spark/spark-streaming-kafka-0-10_2.12/3.0.0/spark-streaming-kafka-0-10_2.12-3.0.0.jar -o spark-streaming-kafka-0-10_2.12-3.0.0.jar && \
    curl -fSL https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.13.0/jmx_prometheus_javaagent-0.13.0.jar -o jmx_prometheus_javaagent-0.13.0.jar && \
    mv elasticsearch-hadoop-core-7.8.1_3.0.0.jar /opt/spark/jars/ && \
    mv elasticsearch-hadoop-mr-7.8.1_3.0.0.jar /opt/spark/jars/ && \
    mv elasticsearch-hadoop-sql-7.8.1_3.0.0.jar /opt/spark/jars/ && \
    mv aws-java-sdk-s3-1.11.832.jar /opt/spark/jars/ && \
    mv aws-java-sdk-1.11.832.jar /opt/spark/jars/ && \
    mv aws-java-sdk-core-1.11.832.jar /opt/spark/jars/ && \
    mv aws-java-sdk-dynamodb-1.11.832.jar /opt/spark/jars/ && \
    mv commons-pool2-2.8.0.jar /opt/spark/jars/ && \
    mv hadoop-aws-3.2.0.jar /opt/spark/jars/ && \
    mv spark-avro_2.12-3.0.0.jar /opt/spark/jars/ && \
    mv spark-token-provider-kafka-0-10_2.12-3.0.0.jar /opt/spark/jars/ && \
    mv jets3t-0.9.0.jar /opt/spark/jars/ && \
    mv spark-sql-kafka-0-10_2.12-3.0.0.jar /opt/spark/jars/ && \
    mv kafka-clients-2.2.0.jar /opt/spark/jars/ && \
    mv spark-streaming-kafka-0-10_2.12-3.0.0.jar /opt/spark/jars/ && \
    mv jmx_prometheus_javaagent-0.13.0.jar /opt/spark/jars/



ENTRYPOINT [ "/opt/entrypoint.sh" ]

# Specify the User that the actual main process will run as
USER ${spark_uid}
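
For context, a minimal sketch of building this image by hand, outside the pipeline below. It assumes the base spark image referenced by spark_image_tag has already been built and pushed by docker-image-tool.sh (see the Jenkinsfile); the image name spark-opsiq mirrors the one used there, and the tag shown is only an example derived from .VERSION.

    # Hypothetical manual build; the Jenkinsfile is the authoritative path.
    # spark_image_tag must point at an existing base image in the registry.
    docker build \
        --build-arg spark_image_tag=3.0.0-1-hadoop3.2-1.0 \
        -t spark-opsiq:3.0.0-1-hadoop3.2-1.0 .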
105 changes: 105 additions & 0 deletions Jenkinsfile
@@ -0,0 +1,105 @@
@Library('jenkins_lib')_
pipeline
{
    agent {label 'slave'}

    environment {

        project = "apache-spark";
        buildNum = currentBuild.getNumber();
        //ex. like feat, release, fix
        buildType = BRANCH_NAME.split("/").first();
        //ex. like OP-<User-Story ID>
        branchVersion = BRANCH_NAME.split("/").last().toUpperCase();
        // Define global environment variables in this section
    }

    stages {
        stage("Define Release version") {
            steps {
                script {
                    //Global Lib for Environment Versions Definition
                    versionDefine()
                    env.GUAVUS_SPARK_VERSION = "${VERSION}".split(",").first();
                    env.GUAVUS_DOCKER_VERSION = "${VERSION}".split(",").last();
                    env.dockerTag = "${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION}-${RELEASE}"
                    echo "GUAVUS_SPARK_VERSION : ${GUAVUS_SPARK_VERSION}"
                    echo "GUAVUS_DOCKER_VERSION : ${GUAVUS_DOCKER_VERSION}"
                    echo "DOCKER TAG : ${dockerTag}"
                }
            }
        }

        stage("Versioning") {
            steps {
                echo "GUAVUS_SPARK_VERSION : ${GUAVUS_SPARK_VERSION}"
                echo "GUAVUS_DOCKER_VERSION : ${GUAVUS_DOCKER_VERSION}"
                sh 'mvn versions:set -DnewVersion=${GUAVUS_SPARK_VERSION}'
            }
        }

        stage("Initialize Variable") {
            steps {
                script {
                    PUSH_JAR = false;
                    PUSH_DOCKER = false;
                    DOCKER_IMAGE_NAME = "spark-opsiq";
                    longCommit = sh(returnStdout: true, script: "git rev-parse HEAD").trim()

                    if( env.buildType in ['release'] )
                    {
                        PUSH_JAR = true;
                    }
                    else if ( env.buildType ==~ /PR-.*/ ) {
                        PUSH_DOCKER = true
                    }
                }
            }
        }

        stage("Push JAR to Maven Artifactory") {
            when {
                expression { PUSH_JAR == true }
            }
            steps {
                script {
                    echo "Pushing JAR to Maven Artifactory"
                    sh "mvn deploy -U -Dcheckstyle.skip=true -Denforcer.skip=true -DskipTests=true;"
                }
            }
        }

        stage("Build and Push Docker") {
            when {
                expression { PUSH_DOCKER == true }
            }
            stages {
                stage("Create Docker Image") {
                    steps {
                        script {
                            echo "Creating docker build..."
                            sh "./dev/make-distribution.sh --name guavus_spark-${GUAVUS_SPARK_VERSION}-3.2.0 -Phive -Phive-thriftserver -Pkubernetes -Phadoop-3.2 -Dhadoop.version=3.2.0"
                            sh "./dist/bin/docker-image-tool.sh -r artifacts.ggn.in.guavus.com:4244 -t ${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION} build"
                            sh "./dist/bin/docker-image-tool.sh -r artifacts.ggn.in.guavus.com:4244 -t ${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION} push"
                            sh "docker build -t ${DOCKER_IMAGE_NAME} --build-arg GIT_HEAD=${longCommit} --build-arg GIT_BRANCH=${env.BRANCH_NAME} --build-arg VERSION=${dockerTag} --build-arg BUILD_NUMBER=${env.BUILD_NUMBER} ."
                        }
                    }
                }

                stage("PUSH Docker") {
                    steps {
                        script {
                            echo "Docker PUSH..."
                            docker_push( buildType, DOCKER_IMAGE_NAME )
                        }
                    }
                }
            }
        }
    }
}
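
Roughly, the version handling above is driven by the .VERSION file added in this commit. A sketch of the equivalent shell follows, assuming the shared-library step versionDefine() exposes the file's contents as ${VERSION} and a release qualifier as ${RELEASE} (both come from jenkins_lib, so this mapping is an assumption):

    VERSION="$(cat .VERSION)"                  # 3.0.0-1,1.0
    GUAVUS_SPARK_VERSION="${VERSION%%,*}"      # 3.0.0-1
    GUAVUS_DOCKER_VERSION="${VERSION##*,}"     # 1.0
    dockerTag="${GUAVUS_SPARK_VERSION}-hadoop3.2-${GUAVUS_DOCKER_VERSION}-${RELEASE}"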

53 changes: 10 additions & 43 deletions pom.xml
@@ -37,49 +37,16 @@
      <distribution>repo</distribution>
    </license>
  </licenses>
  <scm>
    <connection>scm:git:git@github.com:apache/spark.git</connection>
    <developerConnection>scm:git:https://gitbox.apache.org/repos/asf/spark.git</developerConnection>
    <url>scm:git:git@github.com:apache/spark.git</url>
    <tag>HEAD</tag>
  </scm>
  <developers>
    <developer>
      <id>matei</id>
      <name>Matei Zaharia</name>
      <email>matei.zaharia@gmail.com</email>
      <url>http://www.cs.berkeley.edu/~matei</url>
      <organization>Apache Software Foundation</organization>
      <organizationUrl>http://spark.apache.org</organizationUrl>
    </developer>
  </developers>
  <issueManagement>
    <system>JIRA</system>
    <url>https://issues.apache.org/jira/browse/SPARK</url>
  </issueManagement>

  <mailingLists>
    <mailingList>
      <name>Dev Mailing List</name>
      <post>dev@spark.apache.org</post>
      <subscribe>dev-subscribe@spark.apache.org</subscribe>
      <unsubscribe>dev-unsubscribe@spark.apache.org</unsubscribe>
    </mailingList>

    <mailingList>
      <name>User Mailing List</name>
      <post>user@spark.apache.org</post>
      <subscribe>user-subscribe@spark.apache.org</subscribe>
      <unsubscribe>user-unsubscribe@spark.apache.org</unsubscribe>
    </mailingList>

    <mailingList>
      <name>Commits Mailing List</name>
      <post>commits@spark.apache.org</post>
      <subscribe>commits-subscribe@spark.apache.org</subscribe>
      <unsubscribe>commits-unsubscribe@spark.apache.org</unsubscribe>
    </mailingList>
  </mailingLists>
  <distributionManagement>
    <repository>
      <id>central</id>
      <url>http://artifacts.ggn.in.guavus.com/libs-release-local</url>
    </repository>
    <snapshotRepository>
      <id>snapshots</id>
      <url>http://artifacts.ggn.in.guavus.com/libs-snapshot-local</url>
    </snapshotRepository>
  </distributionManagement>

  <modules>
    <module>common/sketch</module>
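
The <distributionManagement> block added above is what the Jenkinsfile's "Push JAR to Maven Artifactory" stage relies on. On a release branch the pipeline effectively runs the commands below; credentials for the central/snapshots server ids are assumed to be configured in the build agent's ~/.m2/settings.xml (not part of this commit).

    # Sketch of the release-branch path (PUSH_JAR == true); the version comes from .VERSION.
    mvn versions:set -DnewVersion=3.0.0-1
    mvn deploy -U -Dcheckstyle.skip=true -Denforcer.skip=true -DskipTests=true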
2 changes: 1 addition & 1 deletion sql/hive/pom.xml
@@ -198,7 +198,7 @@
      <plugin>
        <groupId>org.codehaus.mojo</groupId>
        <artifactId>build-helper-maven-plugin</artifactId>
        <version>3.0.0-1</version>
        <version>3.0.0</version>
        <executions>
          <execution>
            <id>add-scala-test-sources</id>
4 changes: 2 additions & 2 deletions streaming/pom.xml
@@ -118,7 +118,7 @@
      <scope>test</scope>
    </dependency>
  </dependencies>
  <build>
  <!--<build>
    <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
    <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
    <plugins>
@@ -130,7 +130,7 @@
      </configuration>
    </plugin>
  </plugins>
  </build>
  </build>-->

  <profiles>
    <profile>
