From 472bbcfe5082920ac97bc2e29faeae78764141c7 Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Wed, 29 Oct 2014 13:05:48 +0530
Subject: [PATCH] Trying to resolve the timeout issue on jenkins...

---
 project/SparkBuild.scala | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 52226299d69dc..da1972dfa9685 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -37,9 +37,9 @@ object BuildCommons {
       "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
       "streaming-mqtt", "streaming-twitter", "streaming-zeromq").map(ProjectRef(buildLocation, _))
 
-  val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl, sparkKinesisAsl) =
-    Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests", "ganglia-lgpl", "kinesis-asl")
-      .map(ProjectRef(buildLocation, _))
+  val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests,
+    sparkGangliaLgpl, sparkKinesisAsl) = Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests",
+    "ganglia-lgpl", "kinesis-asl").map(ProjectRef(buildLocation, _))
 
   val assemblyProjects@Seq(assembly, examples) = Seq("assembly", "examples")
     .map(ProjectRef(buildLocation, _))
@@ -305,9 +305,11 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst,
+        streamingFlumeSink, yarn, yarnAlpha),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst,
+        streamingFlumeSink, yarn, yarnAlpha),
 
     // Skip class names containing $ and some internal packages in Javadocs
     unidocAllSources in (JavaUnidoc, unidoc) := {
@@ -348,6 +350,7 @@ object TestSettings {
 
   lazy val settings = Seq (
     // Fork new JVMs for tests and set Java options for those
+    fork in Test := true,
     fork := true,
     javaOptions in Test += "-Dspark.test.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
@@ -356,9 +359,9 @@ object TestSettings {
     javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
     javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")
      .map { case (k,v) => s"-D$k=$v" }.toSeq,
-    javaOptions in Test ++= "-Xmx3g -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
+    javaOptions in Test ++= "-Xmx4g -XX:PermSize=512M -XX:MaxNewSize=512m -XX:MaxPermSize=1g"
      .split(" ").toSeq,
-    javaOptions += "-Xmx3g",
+    javaOptions += "-Xmx4g",
    retrievePattern := "[conf]/[artifact](-[revision]).[ext]",
    // Show full stack trace and duration in test cases.
    testOptions in Test += Tests.Argument("-oDF"),
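
For context, a minimal sketch of what the patched TestSettings amount to in a standalone sbt 0.13-era build: tests run in a forked JVM with a 4g heap and larger PermGen, which is the knob this patch turns to chase the Jenkins timeouts. The project name "example" and the bare-bones layout are illustrative placeholders, not taken from SparkBuild.scala; the option values mirror the patch.

// build.sbt (sbt 0.13 syntax, matching the style used by SparkBuild.scala)
lazy val example = project
  .settings(
    // Fork a separate JVM for tests so the javaOptions below actually apply to them.
    fork in Test := true,
    fork := true,
    // Larger heap and PermGen so slow, GC-bound suites are less likely to hit CI timeouts.
    javaOptions in Test ++=
      "-Xmx4g -XX:PermSize=512M -XX:MaxNewSize=512m -XX:MaxPermSize=1g".split(" ").toSeq,
    javaOptions += "-Xmx4g",
    // Show full stack traces and per-test durations in ScalaTest output.
    testOptions in Test += Tests.Argument("-oDF")
  )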