diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index eadcaaedc5829..03acc2efff4dc 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -285,7 +285,7 @@ jobs:
     - name: Install Python packages (Python 3.11)
       if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-')) || contains(matrix.modules, 'connect')
       run: |
-        python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+        python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'lxml==4.9.4' 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==4.25.1'
         python3.11 -m pip list
     # Run the tests.
     - name: Run tests
@@ -745,7 +745,7 @@ jobs:
         python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
           ipython ipython_genutils sphinx_plotly_directive 'numpy==1.26.4' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
           'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-          'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+          'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
           'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
         python3.9 -m pip list
     - name: Python linter
diff --git a/.github/workflows/maven_test.yml b/.github/workflows/maven_test.yml
index dd089d665d6e3..fd36b0dde121b 100644
--- a/.github/workflows/maven_test.yml
+++ b/.github/workflows/maven_test.yml
@@ -178,7 +178,7 @@ jobs:
     - name: Install Python packages (Python 3.11)
       if: (contains(matrix.modules, 'sql#core')) || contains(matrix.modules, 'connect')
       run: |
-        python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.62.0' 'grpcio-status==1.62.0' 'protobuf==4.25.1'
+        python3.11 -m pip install 'numpy>=1.20.0' pyarrow pandas scipy unittest-xml-reporting 'grpcio==1.67.0' 'grpcio-status==1.67.0' 'protobuf==4.25.1'
         python3.11 -m pip list
     # Run the tests.
     - name: Run tests
diff --git a/dev/create-release/spark-rm/Dockerfile b/dev/create-release/spark-rm/Dockerfile
index 3cba72d042ed6..bb53f5905e115 100644
--- a/dev/create-release/spark-rm/Dockerfile
+++ b/dev/create-release/spark-rm/Dockerfile
@@ -102,7 +102,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.2' scipy coverage matp
 ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.2 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2 twine==3.4.1"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==4.25.1 googleapis-common-protos==1.56.4"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
@@ -131,7 +131,7 @@ RUN python3.9 -m pip install --force $BASIC_PIP_PKGS unittest-xml-reporting $CON
 RUN python3.9 -m pip install 'sphinx==4.5.0' mkdocs 'pydata_sphinx_theme>=0.13' sphinx-copybutton nbsphinx numpydoc jinja2 markupsafe 'pyzmq<24.0.0' \
 ipython ipython_genutils sphinx_plotly_directive 'numpy>=1.20.0' pyarrow pandas 'plotly>=4.8' 'docutils<0.18.0' \
 'flake8==3.9.0' 'mypy==1.8.0' 'pytest==7.1.3' 'pytest-mypy-plugins==1.9.3' 'black==23.9.1' \
-'pandas-stubs==1.2.0.53' 'grpcio==1.62.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
+'pandas-stubs==1.2.0.53' 'grpcio==1.67.0' 'grpc-stubs==1.24.11' 'googleapis-common-protos-stubs==2.2.0' \
 'sphinxcontrib-applehelp==1.0.4' 'sphinxcontrib-devhelp==1.0.2' 'sphinxcontrib-htmlhelp==2.0.1' 'sphinxcontrib-qthelp==1.0.3' 'sphinxcontrib-serializinghtml==1.1.5'
 RUN python3.9 -m pip list
diff --git a/dev/infra/base/Dockerfile b/dev/infra/base/Dockerfile
index 1edeed775880b..60c43288ee0b1 100644
--- a/dev/infra/base/Dockerfile
+++ b/dev/infra/base/Dockerfile
@@ -96,7 +96,7 @@ RUN pypy3 -m pip install numpy 'six==1.16.0' 'pandas==2.2.3' scipy coverage matp
 ARG BASIC_PIP_PKGS="numpy pyarrow>=15.0.0 six==1.16.0 pandas==2.2.3 scipy plotly>=4.8 mlflow>=2.8.1 coverage matplotlib openpyxl memory-profiler>=0.61.0 scikit-learn>=1.3.2"
 # Python deps for Spark Connect
-ARG CONNECT_PIP_PKGS="grpcio==1.62.0 grpcio-status==1.62.0 protobuf==4.25.1 googleapis-common-protos==1.56.4 graphviz==0.20.3"
+ARG CONNECT_PIP_PKGS="grpcio==1.67.0 grpcio-status==1.67.0 protobuf==4.25.1 googleapis-common-protos==1.56.4 graphviz==0.20.3"
 
 # Install Python 3.10 packages
 RUN curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
diff --git a/dev/requirements.txt b/dev/requirements.txt
index cafc73405aaa8..520baeee0da45 100644
--- a/dev/requirements.txt
+++ b/dev/requirements.txt
@@ -58,8 +58,8 @@ black==23.9.1
 py
 
 # Spark Connect (required)
-grpcio>=1.62.0
-grpcio-status>=1.62.0
+grpcio>=1.67.0
+grpcio-status>=1.67.0
 googleapis-common-protos>=1.56.4
 
 # Spark Connect python proto generation plugin (optional)
diff --git a/pom.xml b/pom.xml
index fe49568d744a0..4cecce79176a0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -294,7 +294,7 @@
     33.2.1-jre
     1.0.2
-    1.62.2
+    1.67.1
     1.1.4
     6.0.53
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e7f7d68e98483..7b9dbec339cae 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -91,7 +91,7 @@ object BuildCommons {
   // SPARK-41247: needs to be consistent with `protobuf.version` in `pom.xml`.
   val protoVersion = "3.25.5"
   // GRPC version used for Spark Connect.
-  val grpcVersion = "1.62.2"
+  val grpcVersion = "1.67.1"
 }
 
 object SparkBuild extends PomBuild {
diff --git a/python/docs/source/getting_started/install.rst b/python/docs/source/getting_started/install.rst
index 88c0a8c26cc94..2443b8141dcef 100644
--- a/python/docs/source/getting_started/install.rst
+++ b/python/docs/source/getting_started/install.rst
@@ -208,8 +208,8 @@ Package                    Supported version Note
 ========================== ================= ==========================
 `pandas`                   >=2.0.0           Required for Spark Connect
 `pyarrow`                  >=10.0.0          Required for Spark Connect
-`grpcio`                   >=1.62.0          Required for Spark Connect
-`grpcio-status`            >=1.62.0          Required for Spark Connect
+`grpcio`                   >=1.67.0          Required for Spark Connect
+`grpcio-status`            >=1.67.0          Required for Spark Connect
 `googleapis-common-protos` >=1.56.4          Required for Spark Connect
 `graphviz`                 >=0.20            Optional for Spark Connect
 ========================== ================= ==========================
diff --git a/python/packaging/classic/setup.py b/python/packaging/classic/setup.py
index 76fd638c4aa03..b8b486bcd3f11 100755
--- a/python/packaging/classic/setup.py
+++ b/python/packaging/classic/setup.py
@@ -153,7 +153,7 @@ def _supports_symlinks():
 _minimum_pandas_version = "2.0.0"
 _minimum_numpy_version = "1.21"
 _minimum_pyarrow_version = "10.0.0"
-_minimum_grpc_version = "1.62.0"
+_minimum_grpc_version = "1.67.0"
 _minimum_googleapis_common_protos_version = "1.56.4"
diff --git a/sql/connect/common/src/main/buf.gen.yaml b/sql/connect/common/src/main/buf.gen.yaml
index 9b0b07932eae8..a68bc880b8315 100644
--- a/sql/connect/common/src/main/buf.gen.yaml
+++ b/sql/connect/common/src/main/buf.gen.yaml
@@ -22,14 +22,14 @@ plugins:
     out: gen/proto/csharp
   - plugin: buf.build/protocolbuffers/java:v21.7
     out: gen/proto/java
-  - plugin: buf.build/grpc/ruby:v1.62.0
+  - plugin: buf.build/grpc/ruby:v1.67.0
     out: gen/proto/ruby
   - plugin: buf.build/protocolbuffers/ruby:v21.7
     out: gen/proto/ruby
   # Building the Python build and building the mypy interfaces.
   - plugin: buf.build/protocolbuffers/python:v21.7
     out: gen/proto/python
-  - plugin: buf.build/grpc/python:v1.62.0
+  - plugin: buf.build/grpc/python:v1.67.0
     out: gen/proto/python
   - name: mypy
     out: gen/proto/python
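Reviewer note (not part of the patch): the change raises the Python-side floors to grpcio/grpcio-status >= 1.67.0 while keeping googleapis-common-protos >= 1.56.4, as declared in dev/requirements.txt and python/packaging/classic/setup.py above. The sketch below is a hypothetical convenience check of a local environment against those new minimums; it assumes the `packaging` distribution is importable (it usually accompanies modern pip installs) and is not something this patch adds.

# check_connect_deps.py -- illustrative only; version floors mirror this patch.
from importlib.metadata import PackageNotFoundError, version

from packaging.version import Version  # assumption: `packaging` is installed

# Minimums taken from dev/requirements.txt and setup.py in the diff above.
MINIMUMS = {
    "grpcio": "1.67.0",
    "grpcio-status": "1.67.0",
    "googleapis-common-protos": "1.56.4",
}

for dist, minimum in MINIMUMS.items():
    try:
        installed = version(dist)  # reads the installed distribution's metadata
    except PackageNotFoundError:
        print(f"{dist}: NOT INSTALLED (need >= {minimum})")
        continue
    ok = Version(installed) >= Version(minimum)
    print(f"{dist}: {installed} ({'ok' if ok else f'too old, need >= {minimum}'})")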