Merge pull request #1527 from apache/master
GulajavaMinistudio authored Jul 20, 2023
2 parents fc3ce94 + 24bf76a commit c86b5b6
Showing 199 changed files with 7,149 additions and 2,951 deletions.
3 changes: 2 additions & 1 deletion .github/labeler.yml
@@ -76,7 +76,7 @@ CORE:
- "common/kvstore/**/*"
- "common/network-common/**/*"
- "common/network-shuffle/**/*"
- "python/pyspark/**/*.py"
- "python/pyspark/*.py"
- "python/pyspark/tests/**/*.py"
SPARK SUBMIT:
- "bin/spark-submit*"
@@ -155,6 +155,7 @@ CONNECT:
- "connector/connect/**/*"
- "**/sql/sparkconnect/**/*"
- "python/pyspark/sql/**/connect/**/*"
- "python/pyspark/ml/**/connect/**/*"
PROTOBUF:
- "connector/protobuf/**/*"
- "python/pyspark/sql/protobuf/**/*"
18 changes: 14 additions & 4 deletions .github/workflows/build_and_test.yml
@@ -30,7 +30,7 @@ on:
description: Branch to run the build against
required: false
type: string
- # Change 'master' to 'branch-3.5' in branch-3.5 branch after cutting it.
+ # Change 'master' to 'branch-4.0' in branch-4.0 branch after cutting it.
default: master
hadoop:
description: Hadoop version to run with. HADOOP_PROFILE environment variable should accept it.
@@ -517,8 +517,8 @@ jobs:
breaking-changes-buf:
needs: [precondition]
if: always() && fromJson(needs.precondition.outputs.required).breaking-changes-buf == 'true'
- # Change 'branch-3.4' to 'branch-3.5' in master branch after cutting branch-3.5 branch.
- name: Breaking change detection with Buf (branch-3.4)
+ # Change 'branch-3.5' to 'branch-4.0' in master branch after cutting branch-4.0 branch.
+ name: Breaking change detection with Buf (branch-3.5)
runs-on: ubuntu-22.04
steps:
- name: Checkout Spark repository
@@ -541,7 +541,7 @@ jobs:
uses: bufbuild/buf-breaking-action@v1
with:
input: connector/connect/common/src/main
- against: 'https://github.com/apache/spark.git#branch=branch-3.4,subdir=connector/connect/common/src/main'
+ against: 'https://github.com/apache/spark.git#branch=branch-3.5,subdir=connector/connect/common/src/main'


# Static analysis, and documentation build
@@ -686,6 +686,16 @@ jobs:
fi
cd docs
bundle exec jekyll build
+ - name: Tar documentation
+   if: github.repository != 'apache/spark'
+   run: tar cjf site.tar.bz2 docs/_site
+ - name: Upload documentation
+   if: github.repository != 'apache/spark'
+   uses: actions/upload-artifact@v3
+   with:
+     name: site
+     path: site.tar.bz2
+     retention-days: 1

java-11-17:
needs: precondition
49 changes: 49 additions & 0 deletions .github/workflows/build_branch35.yml
@@ -0,0 +1,49 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

name: "Build (branch-3.5, Scala 2.13, Hadoop 3, JDK 8)"

on:
schedule:
- cron: '0 11 * * *'

jobs:
run-build:
permissions:
packages: write
name: Run
uses: ./.github/workflows/build_and_test.yml
if: github.repository == 'apache/spark'
with:
java: 8
branch: branch-3.5
hadoop: hadoop3
envs: >-
{
"SCALA_PROFILE": "scala2.13"
}
jobs: >-
{
"build": "true",
"pyspark": "true",
"sparkr": "true",
"tpcds-1g": "true",
"docker-integration-tests": "true",
"lint" : "true"
}
1 change: 1 addition & 0 deletions .github/workflows/publish_snapshot.yml
@@ -32,6 +32,7 @@ jobs:
matrix:
branch:
- master
+ - branch-3.5
- branch-3.4
- branch-3.3
steps:
2 changes: 1 addition & 1 deletion R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
Package: SparkR
Type: Package
- Version: 3.5.0
+ Version: 4.0.0
Title: R Front End for 'Apache Spark'
Description: Provides an R Front end for 'Apache Spark' <https://spark.apache.org>.
Authors@R:
2 changes: 1 addition & 1 deletion assembly/pom.xml
@@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/kvstore/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/network-common/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/sketch/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/tags/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/unsafe/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

2 changes: 1 addition & 1 deletion common/utils/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

5 changes: 5 additions & 0 deletions common/utils/src/main/resources/error/error-classes.json
@@ -2446,6 +2446,11 @@
],
"sqlState" : "42P01"
},
"TABLE_VALUED_FUNCTION_FAILED_TO_ANALYZE_IN_PYTHON" : {
"message" : [
"Failed to analyze the Python user defined table function: <msg>"
]
},
"TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS" : {
"message" : [
"There are too many table arguments for table-valued function. It allows one table argument, but got: <num>. If you want to allow it, please set \"spark.sql.allowMultipleTableArguments.enabled\" to \"true\""
2 changes: 1 addition & 1 deletion connector/avro/pom.xml
@@ -21,7 +21,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../pom.xml</relativePath>
</parent>

21 changes: 20 additions & 1 deletion connector/connect/client/jvm/pom.xml
@@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../../../../pom.xml</relativePath>
</parent>

@@ -140,6 +140,7 @@
</dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<plugins>
<!-- Shade all Guava / Protobuf / Netty dependencies of this build -->
@@ -224,6 +225,24 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<executions>
<execution>
<id>add-sources</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>src/main/scala-${scala.binary.version}</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
38 changes: 38 additions & 0 deletions connector/connect/client/jvm/src/main/scala-2.12/org/apache/spark/sql/connect/client/arrow/ScalaCollectionUtils.scala
@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.connect.client.arrow

import scala.collection.generic.{GenericCompanion, GenMapFactory}
import scala.collection.mutable
import scala.reflect.ClassTag

import org.apache.spark.sql.connect.client.arrow.ArrowDeserializers.resolveCompanion

/**
* A couple of scala version specific collection utility functions.
*/
private[arrow] object ScalaCollectionUtils {
def getIterableCompanion(tag: ClassTag[_]): GenericCompanion[Iterable] = {
ArrowDeserializers.resolveCompanion[GenericCompanion[Iterable]](tag)
}
def getMapCompanion(tag: ClassTag[_]): GenMapFactory[Map] = {
resolveCompanion[GenMapFactory[Map]](tag)
}
def wrap[T](array: AnyRef): mutable.WrappedArray[T] = {
mutable.WrappedArray.make(array)
}
}
37 changes: 37 additions & 0 deletions connector/connect/client/jvm/src/main/scala-2.13/org/apache/spark/sql/connect/client/arrow/ScalaCollectionUtils.scala
@@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.connect.client.arrow

import scala.collection.{mutable, IterableFactory, MapFactory}
import scala.reflect.ClassTag

import org.apache.spark.sql.connect.client.arrow.ArrowDeserializers.resolveCompanion

/**
* A couple of scala version specific collection utility functions.
*/
private[arrow] object ScalaCollectionUtils {
def getIterableCompanion(tag: ClassTag[_]): IterableFactory[Iterable] = {
ArrowDeserializers.resolveCompanion[IterableFactory[Iterable]](tag)
}
def getMapCompanion(tag: ClassTag[_]): MapFactory[Map] = {
resolveCompanion[MapFactory[Map]](tag)
}
def wrap[T](array: AnyRef): mutable.WrappedArray[T] = {
mutable.WrappedArray.make(array.asInstanceOf[Array[T]])
}
}
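
The two ScalaCollectionUtils variants above pair with the build-helper-maven-plugin change in the Connect client pom: only the src/main/scala-${scala.binary.version} tree matching the active Scala profile is compiled, so exactly one implementation is ever on the classpath. Both expose identical signatures, which keeps callers Scala-version-agnostic. A minimal usage sketch follows; it is a hypothetical caller, not part of this commit, and it sits in the org.apache.spark.sql.connect.client.arrow package only because the object is private[arrow].

package org.apache.spark.sql.connect.client.arrow

import scala.reflect.classTag

// Hypothetical example -- not part of this commit. It compiles against either
// variant above, since both expose the same method signatures.
object ScalaCollectionUtilsExample {
  def main(args: Array[String]): Unit = {
    // Resolve the companion for the runtime collection class and build an
    // Iterable without naming 2.12-only or 2.13-only factory types.
    val companion = ScalaCollectionUtils.getIterableCompanion(classTag[Seq[Int]])
    val builder = companion.newBuilder[Int]
    builder += 1
    builder += 2
    println(builder.result()) // an Iterable[Int] containing 1 and 2

    // Wrap a primitive array as a Scala collection; only the 2.13 variant
    // needs the asInstanceOf cast, because ArraySeq.make takes Array[T].
    val wrapped = ScalaCollectionUtils.wrap[Int](Array(1, 2, 3))
    println(wrapped.sum) // prints 6
  }
}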