Commit

Bump mimaPreviousVersion to 3.5.0 in dbr-branch-3.5

JoshRosen committed Jul 12, 2024
1 parent 67047cd commit c470ffe

Showing 4 changed files with 5 additions and 147 deletions.

2 changes: 1 addition & 1 deletion dev/mima

@@ -42,7 +42,7 @@ $JAVA_CMD \
   -cp "$TOOLS_CLASSPATH:$OLD_DEPS_CLASSPATH" \
   org.apache.spark.tools.GenerateMIMAIgnore
 
-echo -e "q\n" | build/sbt -mem 5120 -DcopyDependencies=false "$@" mimaReportBinaryIssues | grep -v -e "info.*Resolving"
+echo -e "q\n" | build/sbt -mem 5632 -DcopyDependencies=false "$@" mimaReportBinaryIssues | grep -v -e "info.*Resolving"
 ret_val=$?
 
 if [ $ret_val != 0 ]; then
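
(The only functional change in this file is the sbt heap flag: -mem takes megabytes, so the MiMa run now gets 5632 MB instead of 5120 MB, presumably extra headroom for resolving and diffing against the larger 3.5.0 baseline.)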

2 changes: 1 addition & 1 deletion project/MimaBuild.scala

@@ -86,7 +86,7 @@ object MimaBuild {
 
   def mimaSettings(sparkHome: File, projectRef: ProjectRef): Seq[Setting[_]] = {
     val organization = "org.apache.spark"
-    val previousSparkVersion = "3.4.0"
+    val previousSparkVersion = "3.5.0"
     val project = projectRef.project
     val id = "spark-" + project
 
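
previousSparkVersion is the baseline release whose published artifacts MiMa resolves and compares the current binaries against. A minimal sketch of how such a value typically feeds sbt-mima-plugin; the module shown and the exact wiring are illustrative, not the real code in MimaBuild.scala:

import sbt._
import com.typesafe.tools.mima.plugin.MimaKeys.mimaPreviousArtifacts

object PreviousArtifactsSketch {
  val previousSparkVersion = "3.5.0"

  // Resolve the published 3.5.0 spark-core artifact and report any binary
  // incompatibilities in the current build relative to it.
  val settings: Seq[Setting[_]] = Seq(
    mimaPreviousArtifacts := Set(
      "org.apache.spark" %% "spark-core" % previousSparkVersion
    )
  )
}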

145 changes: 2 additions & 143 deletions project/MimaExcludes.scala

@@ -34,40 +34,8 @@ import com.typesafe.tools.mima.core.ProblemFilters._
  */
 object MimaExcludes {
 
-  // Exclude rules for 3.5.x from 3.4.0
+  // Exclude rules for 3.5.x from 3.5.0
   lazy val v35excludes = defaultExcludes ++ Seq(
-    // [SPARK-44531][CONNECT][SQL] Move encoder inference to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DataTypes"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.SQLUserDefinedType"),
-    // [SPARK-43165][SQL] Move canWrite to DataTypeUtils
-    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.types.DataType.canWrite"),
-    // [SPARK-43792][SQL][PYTHON][CONNECT] Add optional pattern for Catalog.listCatalogs
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.listCatalogs"),
-    // [SPARK-43881][SQL][PYTHON][CONNECT] Add optional pattern for Catalog.listDatabases
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.listDatabases"),
-    // [SPARK-43961][SQL][PYTHON][CONNECT] Add optional pattern for Catalog.listTables
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.listTables"),
-    // [SPARK-43992][SQL][PYTHON][CONNECT] Add optional pattern for Catalog.listFunctions
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.catalog.Catalog.listFunctions"),
-    // [SPARK-43952][CORE][CONNECT][SQL] Add SparkContext APIs for query cancellation by tag
-    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.status.api.v1.JobData.this"),
-    // [SPARK-44205][SQL] Extract Catalyst Code from DecimalType
-    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.types.DecimalType.unapply"),
-    // [SPARK-44507][SQL][CONNECT] Move AnalysisException to sql/api.
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.AnalysisException"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.AnalysisException$"),
-    // [SPARK-44686][CONNECT][SQL] Add the ability to create a RowEncoder in Encoders
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.RowFactory"),
-    // [SPARK-44535][CONNECT][SQL] Move required Streaming API to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.GroupStateTimeout"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.OutputMode"),
-    // [SPARK-44692][CONNECT][SQL] Move Trigger(s) to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.Trigger"),
-    // [SPARK-44713][CONNECT][SQL] Move shared classes to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.FlatMapGroupsWithStateFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.MapGroupsWithStateFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SaveMode"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.GroupState"),
-    // [SPARK-46480][CORE][SQL] Fix NPE when table cache task attempt
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.TaskContext.isFailed")
   )
@@ -98,118 +66,9 @@ object MimaExcludes {
     ProblemFilters.exclude[Problem]("org.sparkproject.spark_core.protobuf.*"),
     ProblemFilters.exclude[Problem]("org.apache.spark.status.protobuf.StoreTypes*"),
 
-    // SPARK-43265: Move Error framework to a common utils module
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.QueryContext"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.SparkException"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.SparkException$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.SparkThrowable"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.ErrorInfo$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.ErrorSubInfo$"),
 
-    // SPARK-44104: shaded protobuf code and Apis with parameters relocated
-    ProblemFilters.exclude[Problem]("org.sparkproject.spark_protobuf.protobuf.*"),
-    ProblemFilters.exclude[Problem]("org.apache.spark.sql.protobuf.utils.SchemaConverters.*"),
 
-    // SPARK-44255: Relocate StorageLevel to common/utils
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.storage.StorageLevel"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.storage.StorageLevel$"),
 
-    // SPARK-44475: Relocate DataType and Parser to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ArrayType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ArrayType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.BinaryType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.BinaryType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.BooleanType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.BooleanType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ByteType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ByteType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.CalendarIntervalType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.CalendarIntervalType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.CharType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.CharType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DataType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DataType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DateType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DateType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DayTimeIntervalType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DayTimeIntervalType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.Decimal"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.Decimal$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ShortType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ShortType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StringType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StringType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StructField"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StructField$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StructType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.StructType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.TimestampNTZType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.TimestampNTZType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.TimestampType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.TimestampType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.UDTRegistration"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.UDTRegistration$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.VarcharType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.VarcharType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.YearMonthIntervalType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.YearMonthIntervalType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DecimalType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DecimalType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DoubleType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DoubleType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DoubleType$DoubleAsIfIntegral"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DoubleType$DoubleAsIfIntegral$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.DoubleType$DoubleIsConflicted"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.FloatType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.FloatType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.FloatType$FloatAsIfIntegral"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.FloatType$FloatAsIfIntegral$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.FloatType$FloatIsConflicted"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.IntegerType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.IntegerType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.LongType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.LongType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.MapType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.MapType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.Metadata"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.Metadata$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.MetadataBuilder"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.NullType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.NullType$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ObjectType"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.types.ObjectType$"),
 
-    // SPARK-44496: Move Interfaces needed by SCSC to sql/api.
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Encoder"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Row"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.Row$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.package"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.package$"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.CoGroupFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.DoubleFlatMapFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.DoubleFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.FilterFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.FlatMapFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.FlatMapFunction2"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.FlatMapGroupsFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.ForeachFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.ForeachPartitionFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.Function"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.Function0"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.Function2"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.Function3"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.Function4"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.MapFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.MapGroupsFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.MapPartitionsFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.PairFlatMapFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.PairFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.ReduceFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.VoidFunction"),
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.api.java.function.VoidFunction2"),
 
-    // SPARK-43997: UDF* classes needed by SCSC and moved to sql/api
-    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.api.java.UDF*"),
 
     (problem: Problem) => problem match {
       case MissingClassProblem(cls) => !cls.fullName.startsWith("org.sparkproject.jpmml") &&

@@ -218,7 +77,7 @@ object MimaExcludes {
     }
   )

-  def excludes(version: String) = version match {
+  def excludes(version: String): Seq[Problem => Boolean] = version match {
     case v if v.startsWith("3.5") => v35excludes
     case _ => Seq()
   }
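
Every dropped entry above has one of two shapes, both of which reduce to Problem => Boolean (the return type now spelled out on excludes): a name-based ProblemFilters.exclude, or a raw predicate like the jpmml one kept in defaultExcludes. A small self-contained sketch of both styles, assuming only mima-core on the classpath; the names are taken from the entries above:

import com.typesafe.tools.mima.core._

object ExcludeStylesSketch {
  // Name-based: suppress MissingClassProblem for one moved class; trailing
  // wildcards (as in "org.apache.spark.sql.api.java.UDF*") match whole families.
  val byName: Problem => Boolean =
    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.SaveMode")

  // Predicate-based: returning false suppresses the report, true keeps it.
  val byPredicate: Problem => Boolean = {
    case MissingClassProblem(cls) => !cls.fullName.startsWith("org.sparkproject.jpmml")
    case _ => true
  }
}

With the per-change rules reset, excludes for any 3.5.x version now yield only defaultExcludes, which is what a fresh 3.5.0 baseline calls for.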

3 changes: 1 addition & 2 deletions project/SparkBuild.scala

@@ -418,8 +418,7 @@ object SparkBuild extends PomBuild {
     val mimaProjects = allProjects.filterNot { x =>
       Seq(
         spark, hive, hiveThriftServer, repl, networkCommon, networkShuffle, networkYarn,
-        unsafe, tags, tokenProviderKafka010, sqlKafka010, connectCommon, connect, connectClient,
-        commonUtils, sqlApi
+        unsafe, tags, tokenProviderKafka010, sqlKafka010, connectCommon, connect, connectClient
      ).contains(x)
     }
 
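commonUtils and sqlApi leave the skip list, presumably because those modules first shipped in 3.5.0: with the baseline bumped, MiMa finally has released artifacts to compare them against, while the connect modules stay exempt. The list itself is consumed by a plain filterNot; an illustrative reduction with strings standing in for the ProjectRef values:

object MimaProjectsSketch {
  // Strings stand in for SparkBuild's ProjectRef values.
  val allProjects = Seq("core", "common-utils", "sql-api", "connect-client")
  val exempt = Seq("connect-client") // still no comparable released artifact

  // Same shape as the filterNot in the diff above.
  val mimaProjects = allProjects.filterNot(exempt.contains)
  // mimaProjects == Seq("core", "common-utils", "sql-api"): now MiMa-checked.
}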
