
Commit 472bbcf

Trying to resolve the timeout issue on jenkins...
ScrapCodes committed Oct 29, 2014
1 parent df2b19e commit 472bbcf
Showing 1 changed file with 10 additions and 7 deletions.
project/SparkBuild.scala (10 additions, 7 deletions)
@@ -37,9 +37,9 @@ object BuildCommons {
     "sql", "streaming", "streaming-flume-sink", "streaming-flume", "streaming-kafka",
     "streaming-mqtt", "streaming-twitter", "streaming-zeromq").map(ProjectRef(buildLocation, _))
 
-  val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl, sparkKinesisAsl) =
-    Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests", "ganglia-lgpl", "kinesis-asl")
-      .map(ProjectRef(buildLocation, _))
+  val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests,
+    sparkGangliaLgpl, sparkKinesisAsl) = Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests",
+    "ganglia-lgpl", "kinesis-asl").map(ProjectRef(buildLocation, _))
 
   val assemblyProjects@Seq(assembly, examples) = Seq("assembly", "examples")
     .map(ProjectRef(buildLocation, _))
@@ -305,9 +305,11 @@ object Unidoc {
     publish := {},
 
     unidocProjectFilter in(ScalaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, examples, tools, catalyst,
+        streamingFlumeSink, yarn, yarnAlpha),
     unidocProjectFilter in(JavaUnidoc, unidoc) :=
-      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst, streamingFlumeSink, yarn, yarnAlpha),
+      inAnyProject -- inProjects(OldDeps.project, repl, bagel, graphx, examples, tools, catalyst,
+        streamingFlumeSink, yarn, yarnAlpha),
 
     // Skip class names containing $ and some internal packages in Javadocs
     unidocAllSources in (JavaUnidoc, unidoc) := {
@@ -348,6 +350,7 @@ object TestSettings {
 
   lazy val settings = Seq (
     // Fork new JVMs for tests and set Java options for those
+    fork in Test := true,
     fork := true,
     javaOptions in Test += "-Dspark.test.home=" + sparkHome,
     javaOptions in Test += "-Dspark.testing=1",
@@ -356,9 +359,9 @@ object TestSettings {
     javaOptions in Test += "-Dsun.io.serialization.extendedDebugInfo=true",
     javaOptions in Test ++= System.getProperties.filter(_._1 startsWith "spark")
       .map { case (k,v) => s"-D$k=$v" }.toSeq,
-    javaOptions in Test ++= "-Xmx3g -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
+    javaOptions in Test ++= "-Xmx4g -XX:PermSize=512M -XX:MaxNewSize=512m -XX:MaxPermSize=1g"
       .split(" ").toSeq,
-    javaOptions += "-Xmx3g",
+    javaOptions += "-Xmx4g",
     retrievePattern := "[conf]/[artifact](-[revision]).[ext]",
     // Show full stack trace and duration in test cases.
     testOptions in Test += Tests.Argument("-oDF"),
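The substantive change for the Jenkins timeout is the bigger forked-test JVM: heap goes from 3g to 4g, initial PermGen from 128M to 512M, and MaxNewSize from 256m to 512m, alongside the extra fork setting. A quick, informal way to confirm that the forked test JVMs actually pick up the larger heap is a throwaway ScalaTest suite along the lines of the sketch below; the suite name and threshold are made up for illustration and are not part of this commit.

import org.scalatest.FunSuite

// Hypothetical sanity check, not part of the commit: with forking enabled and
// "-Xmx4g" in `javaOptions in Test`, the forked test JVM should report a max
// heap close to 4 GB (the JVM reports slightly less than the -Xmx value).
class JvmOptionsSanitySuite extends FunSuite {

  test("forked test JVM picked up the configured max heap") {
    val maxHeapGb = Runtime.getRuntime.maxMemory.toDouble / (1L << 30)
    assert(maxHeapGb > 3.0, s"expected roughly 4 GB of max heap, saw $maxHeapGb GB")
  }
}

On the fork settings themselves: in sbt 0.13, `fork := true` at project scope already makes test (and run) tasks fork, while `fork in Test := true` pins the Test configuration down explicitly, so keeping both is redundant but harmless.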
