Merge pull request #1 from markhamstra/assembly-deb
Build enhanced Debian package from assembly
mbautin committed Oct 30, 2013
2 parents 606f91d + 8ac6e39 commit 5040384
Showing 9 changed files with 151 additions and 417 deletions.
149 changes: 148 additions & 1 deletion assembly/pom.xml
@@ -30,6 +30,13 @@
<name>Spark Project Assembly</name>
<url>http://spark.incubator.apache.org/</url>

<properties>
<spark.jar>${project.build.directory}/scala-${scala.version}/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</spark.jar>
<deb.pkg.name>spark</deb.pkg.name>
<deb.install.path>/usr/share/spark</deb.install.path>
<deb.user>root</deb.user>
</properties>
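The new spark.jar property defines the assembly JAR path once, so both the shade plugin's outputFile and the Debian packaging data set below can reference it. With illustrative version values (the real ones come from the parent POM), it would resolve to something like:

    target/scala-2.9.3/spark-assembly-0.8.0-incubating-hadoop1.0.4.jar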

<repositories>
<!-- A repository in the local filesystem for the Py4J JAR, which is not in Maven central -->
<repository>
@@ -79,7 +86,7 @@
<artifactId>maven-shade-plugin</artifactId>
<configuration>
<shadedArtifactAttached>false</shadedArtifactAttached>
-<outputFile>${project.build.directory}/scala-${scala.version}/${project.artifactId}-${project.version}-hadoop${hadoop.version}.jar</outputFile>
+<outputFile>${spark.jar}</outputFile>
<artifactSet>
<includes>
<include>*:*</include>
@@ -161,5 +168,145 @@
</plugins>
</build>
</profile>
<profile>
<id>deb</id>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
<version>1.1</version>
<executions>
<execution>
<phase>validate</phase>
<goals>
<goal>create</goal>
</goals>
<configuration>
<shortRevisionLength>8</shortRevisionLength>
</configuration>
</execution>
</executions>
</plugin>
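The buildnumber-maven-plugin runs at the validate phase and exposes ${buildNumber} as the first 8 characters of the current git revision; the jdeb configuration below folds it into the package file name. With an illustrative version and revision, the artifact would be named something like:

    target/spark_0.8.0-incubating-5040384f_all.deb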
<plugin>
<groupId>org.vafer</groupId>
<artifactId>jdeb</artifactId>
<version>0.11</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<deb>${project.build.directory}/${deb.pkg.name}_${project.version}-${buildNumber}_all.deb</deb>
<attach>false</attach>
<compression>gzip</compression>
<dataSet>
<data>
<src>${spark.jar}</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/jars</prefix>
</mapper>
</data>
<data>
<src>${basedir}/../spark-class</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../spark-executor</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../spark-shell</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../pyspark</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/src/deb/RELEASE</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
</mapper>
</data>
<data>
<src>${basedir}/../conf</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/conf</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../bin</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/bin</prefix>
<filemode>744</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../python</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/python</prefix>
<filemode>744</filemode>
</mapper>
</data>
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
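A plausible way to exercise the new profile, assuming a standard Maven setup (the -DskipTests flag and module path are illustrative, and <version>/<buildNumber> stand in for the real values):

    # Build the assembly jar and the Debian package in one pass
    mvn -Pdeb -DskipTests package

    # Install it; per the dataSet above, files land under /usr/share/spark
    sudo dpkg -i assembly/target/spark_<version>-<buildNumber>_all.deb

Per the dataSet mappings, the installed layout comes out roughly as:

    /usr/share/spark/
        jars/               # the assembly jar
        spark-class         # launcher scripts, mode 744
        spark-executor
        spark-shell
        pyspark
        RELEASE             # marker file (see assembly/src/deb/RELEASE below)
        conf/
        bin/
        python/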
2 changes: 2 additions & 0 deletions assembly/src/deb/RELEASE
@@ -0,0 +1,2 @@
compute-classpath.sh uses the existence of this file to decide whether to put the assembly jar on the
classpath or instead to use classfiles in the source tree.
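A minimal sketch of how such a check could look in compute-classpath.sh; the variable names and the jar glob here are assumptions, not the script's actual code:

    # Packaged install: RELEASE exists, so use the assembly jar shipped in jars/.
    if [ -f "$FWDIR/RELEASE" ]; then
      ASSEMBLY_JAR=$(ls "$FWDIR"/jars/spark-assembly-*.jar)
      CLASSPATH="$CLASSPATH:$ASSEMBLY_JAR"
    else
      # Source tree: fall back to compiled class files.
      CLASSPATH="$CLASSPATH:$FWDIR/core/target/scala-$SCALA_VERSION/classes"
    fi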
File renamed without changes.
2 changes: 1 addition & 1 deletion docker/spark-test/README.md
@@ -3,7 +3,7 @@ Spark Docker files usable for testing and development purposes.
These images are intended to be run like so:

docker run -v $SPARK_HOME:/opt/spark spark-test-master
-docker run -v $SPARK_HOME:/opt/spark spark-test-worker <master_ip>
+docker run -v $SPARK_HOME:/opt/spark spark-test-worker spark://<master_ip>:7077

Using this configuration, the containers will have their Spark directories
mounted to your actual `SPARK_HOME`, allowing you to modify and recompile
184 changes: 0 additions & 184 deletions repl-bin/pom.xml

This file was deleted.
