[SNAPPYDATA] upgrade gradle to v5.6.4, the latest 5.x release
- also updated the scalatest and shadow plugins, adjusting targets accordingly
- fixed scalaStyle errors
- updated jackson dependencies
- added synchronization in MetricsSystem (a race exposed occasionally by spark-metrics)
- fixed test failures (due to the updated hadoop version and a few other changes)
- added license headers for modified files
sumwale committed Jun 27, 2021
1 parent bbc94e1 commit 4dcecd7
Showing 26 changed files with 326 additions and 60 deletions.
45 changes: 31 additions & 14 deletions build.gradle
@@ -27,9 +27,9 @@ buildscript {
mavenCentral()
}
dependencies {
classpath 'io.snappydata:gradle-scalatest:0.23'
classpath 'io.snappydata:gradle-scalatest:0.25'
classpath 'org.github.ngbinh.scalastyle:gradle-scalastyle-plugin_2.11:0.9.0'
classpath 'com.github.jengelman.gradle.plugins:shadow:4.0.3'
classpath 'com.github.jengelman.gradle.plugins:shadow:5.2.0'
classpath 'com.commercehub.gradle.plugin:gradle-avro-plugin:0.8.0'
}
}
@@ -43,22 +43,22 @@ allprojects {

repositories {
mavenCentral()
maven { url 'http://repository.apache.org/snapshots' }
maven { url 'https://repo.hortonworks.com/content/repositories/releases/' }
}

apply plugin: 'java'
apply plugin: 'com.github.johnrengelman.shadow'
apply plugin: 'idea'

group = 'io.snappydata'
version = snappySparkVersion
productName = productName
version = '2.1.1.9'

ext {
productName = 'SnappyData'
vendorName = 'TIBCO Software Inc.'
scalaBinaryVersion = '2.11'
scalaVersion = scalaBinaryVersion + '.8'
hadoopVersion = '2.7.7'
hadoopVersion = '3.2.0'
protobufVersion = '3.6.1'
jerseyVersion = '2.22.2'
sunJerseyVersion = '1.19.4'
@@ -81,7 +81,6 @@ allprojects {
levelDbJniVersion = '1.8'
jackson1Version = '1.9.13'
jacksonVersion = '2.9.9'
jacksonBindVersion = '2.9.9'
snappyJavaVersion = '1.1.7.2'
lz4Version = '1.5.0'
lzfVersion = '1.0.4'
@@ -170,6 +169,22 @@ allprojects {
}
}

// set python2 for pyspark if python3 version is an unsupported one
String sparkPython = 'python'
def checkResult = exec {
ignoreExitValue = true
commandLine 'sh', '-c', 'python --version 2>/dev/null | grep -Eq "( 3\\.[0-7])|( 2\\.)"'
}
if (checkResult.exitValue != 0) {
checkResult = exec {
ignoreExitValue = true
commandLine 'sh', '-c', 'python2 --version >/dev/null 2>&1'
}
if (checkResult.exitValue == 0) {
sparkPython = 'python2'
}
}
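
The same interpreter check, extracted as a standalone Scala sketch (illustration only; the build performs it in Groovy above, and the shell pipeline is assumed to run under a POSIX `sh`): keep `python` when it reports a supported version (2.x, or 3.0 through 3.7), otherwise fall back to `python2` if one exists.

    import scala.sys.process._
    import scala.util.Try

    object PickPython {
      // Capture "<cmd> --version" output; Python 2 prints it on stderr, hence 2>&1.
      // None when the interpreter is missing or exits non-zero.
      private def version(cmd: String): Option[String] =
        Try(Seq("sh", "-c", s"$cmd --version 2>&1").!!).toOption

      def sparkPython: String = version("python") match {
        case Some(v) if v.matches("""(?s).*( 3\.[0-7]| 2\.).*""") => "python"
        case _ if version("python2").isDefined => "python2"
        case _ => "python"
      }
    }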

def getStackTrace(def t) {
java.io.StringWriter sw = new java.io.StringWriter()
java.io.PrintWriter pw = new java.io.PrintWriter(sw)
@@ -193,6 +208,10 @@ subprojects {
apply plugin: 'maven'
apply plugin: 'scalaStyle'

int maxWorkers = project.hasProperty('org.gradle.workers.max') ?
project.property('org.gradle.workers.max') as int :
Runtime.getRuntime().availableProcessors()

// apply compiler options
compileJava.options.encoding = 'UTF-8'
compileJava.options.compilerArgs << '-Xlint:all,-serial,-path,-deprecation'
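
The `maxWorkers` value above honors an explicit `org.gradle.workers.max` setting (e.g. `-Porg.gradle.workers.max=4` on the command line, or the same key in gradle.properties) and feeds `maxParallelForks` in the test block further down, so one knob bounds both Gradle workers and test JVM forks. The fallback idiom in a Scala sketch (illustration only; it reads a JVM system property here for simplicity, where Gradle resolves a project property):

    // Prefer an explicitly configured worker cap, else use every available core.
    val maxWorkers: Int = sys.props.get("org.gradle.workers.max")
      .map(_.toInt)
      .getOrElse(Runtime.getRuntime.availableProcessors)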
@@ -279,8 +298,7 @@ subprojects {

if (rootProject.name == 'snappy-spark') {
task scalaTest(type: Test) {
def factory = new com.github.maiflai.BackwardsCompatibleJavaExecActionFactory(gradle.gradleVersion)
actions = [ new com.github.maiflai.ScalaTestAction(factory) ]
actions = [ new com.github.maiflai.ScalaTestAction() ]

testLogging.exceptionFormat = TestExceptionFormat.FULL
testLogging.events = TestLogEvent.values() as Set
@@ -326,7 +344,7 @@ subprojects {
}
test {
jvmArgs '-Xss4096k'
maxParallelForks = Runtime.getRuntime().availableProcessors()
maxParallelForks = maxWorkers
systemProperties 'spark.master.rest.enabled': 'false',
'test.src.tables': 'src'

@@ -369,6 +387,8 @@ gradle.taskGraph.whenReady { graph ->
'SPARK_PREPEND_CLASSES': '1',
'SPARK_SCALA_VERSION': scalaBinaryVersion,
'SPARK_TESTING': '1',
'PYSPARK_PYTHON': sparkPython,
'PYSPARK_DRIVER_PYTHON': sparkPython,
'JAVA_HOME': System.getProperty('java.home')
systemProperties 'log4j.configuration': "file:${projectDir}/src/test/resources/log4j.properties",
'derby.system.durability': 'test',
@@ -425,10 +445,7 @@ if (rootProject.name == 'snappy-spark') {
task scalaStyle {
dependsOn subprojects.scalaStyle
}
task check {
dependsOn subprojects.check
}
} else {
scalaStyle.dependsOn subprojects.scalaStyle
check.dependsOn subprojects.check
}
check.dependsOn subprojects.check
2 changes: 1 addition & 1 deletion common/network-common/build.gradle
@@ -24,7 +24,7 @@ dependencies {
compile group: 'com.google.code.findbugs', name: 'jsr305', version: jsr305Version
compile group: 'com.google.guava', name: 'guava', version: guavaVersion
compile group: 'org.fusesource.leveldbjni', name: 'leveldbjni-all', version: levelDbJniVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
compile group: 'org.apache.commons', name: 'commons-lang3', version: commonsLang3Version
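
This is the pattern repeated across the build files below: with the separate `jacksonBindVersion` constant removed, `jackson-databind` now tracks the same `jacksonVersion` as `jackson-core` and `jackson-annotations`, keeping the three artifacts in lockstep (mismatched Jackson modules are a classic source of runtime NoSuchMethodError failures). An sbt-flavored Scala sketch of the same pinning idea (assumption: illustrative only, since this build uses Gradle):

    // One shared constant pins every Jackson artifact to the same release.
    val jacksonVersion = "2.9.9"
    libraryDependencies ++= Seq(
      "com.fasterxml.jackson.core" % "jackson-core"        % jacksonVersion,
      "com.fasterxml.jackson.core" % "jackson-databind"    % jacksonVersion,
      "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion
    )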

2 changes: 1 addition & 1 deletion common/network-shuffle/build.gradle
@@ -22,7 +22,7 @@ dependencies {
compile project(subprojectBase + 'snappy-spark-tags_' + scalaBinaryVersion)

compile group: 'org.fusesource.leveldbjni', name: 'leveldbjni-all', version: levelDbJniVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
compile group: 'com.google.guava', name: 'guava', version: guavaVersion
compile(group: 'io.dropwizard.metrics', name: 'metrics-core', version: metricsVersion) {
2 changes: 1 addition & 1 deletion common/network-yarn/build.gradle
@@ -47,7 +47,7 @@ dependencies {
runtimeJar project(subprojectBase + 'snappy-spark-network-common_' + scalaBinaryVersion)
runtimeJar project(subprojectBase + 'snappy-spark-network-shuffle_' + scalaBinaryVersion)
runtimeJar group: 'io.netty', name: 'netty-all', version: nettyAllVersion
runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
runtimeJar group: 'com.fasterxml.jackson.core', name: 'jackson-annotations', version: jacksonVersion
*/
}
2 changes: 1 addition & 1 deletion core/build.gradle
@@ -142,7 +142,7 @@ dependencies {
exclude(group: 'org.slf4j', module: 'slf4j-api')
exclude(group: 'org.slf4j', module: 'slf4j-log4j12')
}
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonBindVersion
compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: jacksonVersion
compile(group: 'com.fasterxml.jackson.module', name: 'jackson-module-scala_' + scalaBinaryVersion, version: jacksonVersion) {
exclude(group: 'org.scala-lang', module: 'scala-library')
exclude(group: 'org.scala-lang', module: 'scala-reflect')
25 changes: 22 additions & 3 deletions core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -14,6 +14,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes for TIBCO Project SnappyData data platform.
*
* Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/

package org.apache.spark.metrics

@@ -159,10 +177,11 @@ private[spark] class MetricsSystem private (
} else { defaultName }
}

def getSourcesByName(sourceName: String): Seq[Source] =
def getSourcesByName(sourceName: String): Seq[Source] = synchronized {
sources.filter(_.sourceName == sourceName)
}

def registerSource(source: Source) {
def registerSource(source: Source): Unit = synchronized {
sources += source
try {
val regName = buildRegistryName(source)
Expand All @@ -172,7 +191,7 @@ private[spark] class MetricsSystem private (
}
}

def removeSource(source: Source) {
def removeSource(source: Source): Unit = synchronized {
sources -= source
val regName = buildRegistryName(source)
registry.removeMatching(new MetricFilter {
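
Why the added synchronization matters: `sources` is a plain `mutable.ArrayBuffer`, so a `getSourcesByName` filter racing with `registerSource` or `removeSource` on another thread (which spark-metrics occasionally triggers) can observe a partially updated buffer. A minimal self-contained sketch of the locking pattern, assuming a simplified registry in place of Spark's `MetricsSystem`:

    import scala.collection.mutable

    class SourceRegistry {
      private val sources = new mutable.ArrayBuffer[String]

      // Every read and write of the shared buffer takes the same monitor lock,
      // since ArrayBuffer itself is not thread-safe.
      def register(name: String): Unit = synchronized { sources += name }
      def remove(name: String): Unit = synchronized { sources -= name }

      // Return an immutable snapshot so callers never touch the buffer
      // outside the lock.
      def byName(name: String): Seq[String] =
        synchronized { sources.filter(_ == name).toList }
    }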
@@ -14,6 +14,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes for TIBCO Project SnappyData data platform.
*
* Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/

package org.apache.spark.scheduler

@@ -22,6 +40,7 @@ import java.util.concurrent.{ExecutorService, RejectedExecutionException}

import scala.language.existentials
import scala.util.control.NonFatal

import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.internal.Logging
@@ -17,7 +17,7 @@
/*
* Changes for TIBCO Project SnappyData data platform.
*
* Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
* Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
@@ -43,6 +43,7 @@ import java.util.concurrent.atomic.AtomicLong
import scala.collection.Set
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.util.Random

import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.internal.Logging
@@ -195,8 +196,9 @@ private[spark] class TaskSchedulerImpl(
val tasks = taskSet.tasks
logInfo("Adding task set " + taskSet.id + " with " + tasks.length + " tasks")
this.synchronized {
val maxRetryAttemptsForWrite = taskSet.properties.
getProperty(SNAPPY_WRITE_RETRY_PROP)
val maxRetryAttemptsForWrite =
if (taskSet.properties ne null) taskSet.properties.getProperty(SNAPPY_WRITE_RETRY_PROP)
else null

logInfo("The maxRetryAttemptsForWrite is set to " + maxRetryAttemptsForWrite +
"maxTaskFailure " + maxTaskFailures)
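
The new null check exists because `taskSet.properties` can legitimately be null for a TaskSet submitted without job-local properties; the old code dereferenced it unconditionally. The same guard in a hedged standalone sketch (names hypothetical, not Spark API):

    import java.util.Properties

    // Null-safe lookup: Option(...) absorbs both a null Properties object
    // and a missing key.
    def retryAttempts(props: Properties, key: String): Option[Int] =
      Option(props).flatMap(p => Option(p.getProperty(key))).map(_.toInt)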
@@ -17,7 +17,7 @@
/*
* Changes for TIBCO Project SnappyData data platform.
*
* Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
* Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
@@ -43,11 +43,12 @@ import java.util.concurrent.ConcurrentLinkedQueue
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.math.{max, min}
import scala.util.control.NonFatal

import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.SchedulingMode._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.serializer.SerializerInstance
import org.apache.spark.TaskState.TaskState
import org.apache.spark.util.{AccumulatorV2, Clock, SystemClock, Utils}

/**
@@ -97,10 +98,13 @@ private[spark] class TaskSetManager(
sched.backend.getClass.getName.contains("SnappyCoarseGrainedSchedulerBackend")

// keep the configured value for spark.task.cpus preferring local job setting if present
val confCpusPerTask: Int = taskSet.properties.getProperty(CPUS_PER_TASK) match {
case s if (s ne null) && supportsDynamicCpusPerTask => max(s.toInt, sched.CPUS_PER_TASK)
case _ => sched.CPUS_PER_TASK
}
private[spark] val confCpusPerTask: Int = if (taskSet.properties ne null) {
taskSet.properties.getProperty(CPUS_PER_TASK) match {
case s if (s ne null) && supportsDynamicCpusPerTask => max(s.toInt, sched.CPUS_PER_TASK)
case _ => sched.CPUS_PER_TASK
}
} else sched.CPUS_PER_TASK

// tracks the max of spark.task.cpus across all tasks in this task set
// when they are dynamically incremented for OOME/LME failures
private[spark] var maxCpusPerTask: Int = confCpusPerTask
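
Here the null guard is combined with a clamp: a job-local `spark.task.cpus` may only raise the per-task CPU count above the scheduler-wide default, never lower it. A simplified sketch (assuming `CPUS_PER_TASK` names the `spark.task.cpus` key, as in upstream Spark):

    import java.util.Properties

    def cpusPerTask(props: Properties, dynamicCpusOk: Boolean, schedCpus: Int): Int =
      Option(props).flatMap(p => Option(p.getProperty("spark.task.cpus"))) match {
        // Never drop below the scheduler-wide default.
        case Some(s) if dynamicCpusOk => math.max(s.toInt, schedCpus)
        case _ => schedCpus
      }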
@@ -513,7 +517,8 @@ private[spark] class TaskSetManager(
// Serialize and return the task
val serializedTask: ByteBuffer = try {
// Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars, ser)
Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars, getSerializer(task))
Task.serializeWithDependencies(task, sched.sc.addedFiles, sched.sc.addedJars,
getSerializer(task))
} catch {
// If the task cannot be serialized, then there's no point to re-attempt the task,
// as it will always fail. So just abort the whole task-set.
25 changes: 22 additions & 3 deletions core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -14,6 +14,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Changes for TIBCO Project SnappyData data platform.
*
* Portions Copyright (c) 2017-2021 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/

package org.apache.spark.ui

@@ -277,7 +295,8 @@ private[spark] object UIUtils extends Logging {
</a>
</div>
<div class="brand" style="line-height: 2.5;">
<a class="brand" href="https://github.com/TIBCOSoftware/snappydata" target="_blank">
<a class="brand"
href="https://github.com/TIBCOSoftware/snappydata" target="_blank">
<img src={prependBaseUri("/static/snappydata/snappydata-175X28.png")}
style="cursor: pointer;" />
</a>
@@ -633,8 +652,8 @@
</p>
<p>
For assistance, get started at: <br />
<a href="https://www.snappydata.io/community" target="_blank">
https://www.snappydata.io/community</a> <br />
<a href="https://community.tibco.com/products/tibco-computedb" target="_blank">
https://community.tibco.com/products/tibco-computedb</a> <br />
<a href="https://www.tibco.com/" target="_blank">https://www.tibco.com/</a> <br />
<a href="http://tibcosoftware.github.io/snappydata/" target="_blank">
Product Documentation