Skip to content

Commit

Permalink
Switched to gmaven plus, it fixes random failures observed with its p…
Browse files Browse the repository at this point in the history
…redecessor gmaven.
  • Loading branch information
ScrapCodes committed Nov 10, 2014
1 parent 5272ce5 commit 8bd4e40
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 16 deletions.
10 changes: 0 additions & 10 deletions examples/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -184,16 +184,6 @@
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<plugins>
<plugin>
<groupId>org.codehaus.gmaven</groupId>
<artifactId>gmaven-plugin</artifactId>
<version>1.4</version>
<executions>
<execution>
<phase>none</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
Expand Down
26 changes: 20 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -267,16 +267,28 @@
</snapshots>
</pluginRepository>
</pluginRepositories>

<dependencies>
<!--
This is a dummy dependency that is used along with the shading plug-in
to create effective poms on publishing (see SPARK-3812).
-->
<dependencies>
<dependency>
<groupId>org.spark-project.spark</groupId>
<artifactId>unused</artifactId>
<version>1.0.0</version>
</dependency>
<!--
This dependency has been added to provided scope as it is needed for executing build
specific groovy scripts using gmaven+ and not required for downstream project building
with spark.
-->
<dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy-all</artifactId>
<version>2.3.7</version>
<scope>provided</scope>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
Expand Down Expand Up @@ -1048,20 +1060,22 @@

<!-- This plugin reads a file into maven property. And it lets us write groovy !! -->
<plugin>
<groupId>org.codehaus.gmaven</groupId>
<artifactId>gmaven-plugin</artifactId>
<version>1.4</version>
<groupId>org.codehaus.gmavenplus</groupId>
<artifactId>gmavenplus-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<phase>process-test-classes</phase>
<goals>
<goal>execute</goal>
</goals>
<configuration>
<source>
<scripts>
<script><![CDATA[
def file = new File(project.properties.test_classpath_file)
project.properties.test_classpath = file.getText().split().join(":")
</source>
]]></script>
</scripts>
</configuration>
</execution>
</executions>
Expand Down
1 change: 1 addition & 0 deletions project/SparkBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -361,6 +361,7 @@ object TestSettings {
.map { case (k,v) => s"-D$k=$v" }.toSeq,
javaOptions in Test ++= "-Xmx3g -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
.split(" ").toSeq,
// This places test scope jars on the classpath of executors during tests.
javaOptions in Test +=
"-Dspark.executor.extraClassPath=" + (fullClasspath in Test).value.files.
map(_.getAbsolutePath).mkString(":").stripSuffix(":"),
Expand Down

0 comments on commit 8bd4e40

Please sign in to comment.