From 4423ea7fceb2346294c70293e71dc9fca2e0d48d Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Tue, 27 Apr 2021 09:53:11 +0200 Subject: [PATCH] Use Pip 21.* to install airflow officially (#15513) * Use Pip 21.* to install airflow officially The PIP 20.2.4 was so far the only officially supported installation mechanism for Airflow as there were some problems with conflicting dependencies (which were ignored by previous version of PIP). This change attempts to solve this by removing a [gcp] extra from `apache-beam` which turns out to be the major source of the problem - as it contains requirements to the old version of google client libraries (but apparently only used for tests). The "apache-beam" provider might however need the [gcp] extra for other components so in order to not break the backwards compatibility, another approach is used. Instead of adding [gcp] as extra in the apache-beam extra, the apache.beam provider's [google] extra is extended with 'apache-beam[gcp]' additional requirement so that whenever the provider is installed, the apache-beam with [gcp] extra is installed as well. 
* Update airflow/providers/apache/beam/CHANGELOG.rst Co-authored-by: Tzu-ping Chung * Update airflow/providers/apache/beam/CHANGELOG.rst Co-authored-by: Tzu-ping Chung * Update airflow/providers/google/CHANGELOG.rst Co-authored-by: Tzu-ping Chung * Update airflow/providers/google/CHANGELOG.rst Co-authored-by: Tzu-ping Chung Co-authored-by: Tzu-ping Chung (cherry picked from commit e229f3541dd764db54785625875a7c5e94225736) --- CONTRIBUTING.rst | 20 ++------------ CONTRIBUTORS_QUICK_START.rst | 10 +------ Dockerfile | 2 +- Dockerfile.ci | 2 +- IMAGES.rst | 12 ++------- INSTALL | 8 ------ LOCAL_VIRTUALENV.rst | 20 ++------------ README.md | 9 +------ UPDATING.md | 7 ----- airflow/provider.yaml.schema.json | 4 +++ airflow/providers/apache/beam/README.md | 8 ------ .../apache/hive/transfers/mssql_to_hive.py | 2 +- .../providers/microsoft/mssql/hooks/mssql.py | 1 + .../PROVIDER_INDEX_TEMPLATE.rst.jinja2 | 9 ------- .../PROVIDER_README_TEMPLATE.rst.jinja2 | 8 ------ .../SETUP_TEMPLATE.py.jinja2 | 2 +- .../prepare_provider_packages.py | 24 +++++++++++++++-- docs/apache-airflow/installation.rst | 7 +---- docs/apache-airflow/start/local.rst | 10 +------ docs/apache-airflow/upgrade-check.rst | 10 +------ docs/docker-stack/build-arg-ref.rst | 2 +- scripts/ci/libraries/_initialization.sh | 3 +-- scripts/in_container/_in_container_utils.sh | 6 ----- .../run_install_and_test_provider_packages.sh | 1 - .../run_prepare_provider_documentation.sh | 5 ---- .../run_prepare_provider_packages.sh | 1 - setup.py | 26 +++---------------- .../hive/transfers/test_mssql_to_hive.py | 2 +- 28 files changed, 48 insertions(+), 173 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7ac115cc39d36..f04a7833ca82a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -553,15 +553,7 @@ Airflow dependencies .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. 
This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as @@ -788,15 +780,7 @@ Pinned constraint files .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is officially supported. 
While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst index 9a8398e0862b8..c3dfc6cb54936 100644 --- a/CONTRIBUTORS_QUICK_START.rst +++ b/CONTRIBUTORS_QUICK_START.rst @@ -167,15 +167,7 @@ Setup Airflow with Breeze and PyCharm .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/Dockerfile b/Dockerfile index b86a3c84ff2a1..a75cfb28bee57 100644 --- a/Dockerfile +++ b/Dockerfile @@ -44,7 +44,7 @@ ARG AIRFLOW_GID="50000" ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster" -ARG AIRFLOW_PIP_VERSION=20.2.4 +ARG AIRFLOW_PIP_VERSION=21.1 # By default PIP has progress bar but you can disable it. 
ARG PIP_PROGRESS_BAR="on" diff --git a/Dockerfile.ci b/Dockerfile.ci index 7c9dc0b1cc140..081f4ee46e861 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -212,7 +212,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true" # By default in the image, we are installing all providers when installing from sources ARG INSTALL_PROVIDERS_FROM_SOURCES="true" ARG INSTALL_FROM_PYPI="true" -ARG AIRFLOW_PIP_VERSION=20.2.4 +ARG AIRFLOW_PIP_VERSION=21.1 # Setup PIP # By default PIP install run without cache to make image smaller ARG PIP_NO_CACHE_DIR="true" diff --git a/IMAGES.rst b/IMAGES.rst index 20837d4882009..1908f224423d8 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -172,15 +172,7 @@ This will build the image using command similar to: .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. 
While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as @@ -632,7 +624,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u | ``ADDITIONAL_RUNTIME_APT_ENV`` | | Additional env variables defined | | | | when installing runtime deps | +------------------------------------------+------------------------------------------+------------------------------------------+ -| ``AIRFLOW_PIP_VERSION`` | ``20.2.4`` | PIP version used. | +| ``AIRFLOW_PIP_VERSION`` | ``21.1`` | PIP version used. | +------------------------------------------+------------------------------------------+------------------------------------------+ | ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | +------------------------------------------+------------------------------------------+------------------------------------------+ diff --git a/INSTALL b/INSTALL index 46d15f62aa87b..919c4f540d83a 100644 --- a/INSTALL +++ b/INSTALL @@ -28,14 +28,6 @@ java -jar apache-rat.jar -E ./.rat-excludes -d . python3 -m venv PATH_TO_YOUR_VENV source PATH_TO_YOUR_VENV/bin/activate -NOTE!! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option -``--use-deprecated legacy-resolver`` to your pip install command. - # [required] building and installing by pip (preferred) pip install . diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst index 1e96080f825fe..2f46bca2466e7 100644 --- a/LOCAL_VIRTUALENV.rst +++ b/LOCAL_VIRTUALENV.rst @@ -63,15 +63,7 @@ Extra Packages .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. 
This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as @@ -137,15 +129,7 @@ To create and initialize the local virtualenv: .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. 
While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/README.md b/README.md index 0b7c50401c17f..01a02fa507e92 100644 --- a/README.md +++ b/README.md @@ -149,14 +149,7 @@ correct Airflow tag/version/branch and Python versions in the URL. NOTE!!! -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might -depend on your choice of extras. In order to install Airflow reliably, you might need to either downgrade -pip to version 20.2.4 `pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, -you might need to add option] `--use-deprecated legacy-resolver` to your pip install command. -While `pip 20.3.3` solved most of the `teething` problems of 20.3, this note will remain here until we -set `pip 20.3` as official version in our CI pipeline where we are testing the installation as well. -Due to those constraints, only `pip` installation is currently officially supported. +Only `pip` installation is currently officially supported. While they are some successes with using other tools like [poetry](https://python-poetry.org) or [pip-tools](https://pypi.org/project/pip-tools), they do not share the same workflow as diff --git a/UPDATING.md b/UPDATING.md index 8735b520029d7..02cf344e000c0 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -1727,13 +1727,6 @@ you should use `pip install apache-airflow[apache.atlas]`. NOTE! -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. 
In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - - If you want to install integration for Microsoft Azure, then instead of ``` diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json index bdec41dc3cc0b..458ee2ef2f168 100644 --- a/airflow/provider.yaml.schema.json +++ b/airflow/provider.yaml.schema.json @@ -191,6 +191,10 @@ "items": { "type": "string" } + }, + "additional-extras": { + "type": "object", + "description": "Additional extras that the provider should have" } }, "additionalProperties": false, diff --git a/airflow/providers/apache/beam/README.md b/airflow/providers/apache/beam/README.md index 34f2863b55985..d9294d2c1efdb 100644 --- a/airflow/providers/apache/beam/README.md +++ b/airflow/providers/apache/beam/README.md @@ -41,14 +41,6 @@ are in `airflow.providers.apache.beam` python package. ## Installation -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - You can install this package on top of an existing airflow 2.* installation via `pip install apache-airflow-providers-apache-beam` diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 5e5af8c5dbe86..090a70285af77 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -15,7 +15,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. - +# pylint: disable=no-member """This module contains operator to move data from MSSQL to Hive.""" from collections import OrderedDict diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/airflow/providers/microsoft/mssql/hooks/mssql.py index 4acdb52d18642..928d0c45d3fec 100644 --- a/airflow/providers/microsoft/mssql/hooks/mssql.py +++ b/airflow/providers/microsoft/mssql/hooks/mssql.py @@ -15,6 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# pylint: disable=no-member """Microsoft SQLServer hook module""" import pymssql diff --git a/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 index 21e379931bba6..a95376ba4cd5b 100644 --- a/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 +++ b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 @@ -47,15 +47,6 @@ are in ``{{FULL_PACKAGE_NAME}}`` python package. Installation ------------ -.. note:: - - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice - of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 - ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option - ``--use-deprecated legacy-resolver`` to your pip install command. 
- - You can install this package on top of an existing airflow 2.* installation via ``pip install {{PACKAGE_PIP_NAME}}`` {%- if PIP_REQUIREMENTS %} diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 index 9975925481511..7847d797a2a77 100644 --- a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 +++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 @@ -44,14 +44,6 @@ in the `documentation Dict[str, List[str]]: if cross_provider_dependencies.get(provider_package_id) else {} ) + provider_yaml_dict = get_provider_yaml(provider_package_id) + additional_extras = provider_yaml_dict.get('additional-extras') + if additional_extras: + for key in additional_extras: + if key in extras_dict: + extras_dict[key].append(additional_extras[key]) + else: + extras_dict[key] = additional_extras[key] return extras_dict @@ -1295,11 +1303,11 @@ def convert_to_provider_info(provider_info: Dict[str, Any]) -> Dict[str, Any]: This method converts the full provider.yaml schema into the limited version needed at runtime. """ updated_provider_info = deepcopy(provider_info) - expression = jsonpath_ng.parse("[hooks,operators,integrations,sensors,transfers]") + expression = jsonpath_ng.parse("[hooks,operators,integrations,sensors,transfers,additional-extras]") return expression.filter(lambda x: True, updated_provider_info) -def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, Any]: +def get_provider_yaml(provider_package_id: str) -> Dict[str, Any]: """ Retrieves provider info from the provider yaml file. The provider yaml file contains more information than provider_info that is used at runtime. 
This method converts the full provider yaml file into @@ -1312,6 +1320,18 @@ def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, raise Exception(f"The provider.yaml file is missing: {provider_yaml_file_name}") with open(provider_yaml_file_name) as provider_file: provider_yaml_dict = yaml.load(provider_file, SafeLoader) # noqa + return provider_yaml_dict + + +def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, Any]: + """ + Retrieves provider info from the provider yaml file. The provider yaml file contains more information + than provider_info that is used at runtime. This method converts the full provider yaml file into + stripped-down provider info and validates it against deprecated 2.0.0 schema and runtime schema. + :param provider_package_id: package id to retrieve provider.yaml from + :return: provider_info dictionary + """ + provider_yaml_dict = get_provider_yaml(provider_package_id=provider_package_id) provider_info = convert_to_provider_info(provider_yaml_dict) validate_provider_info_with_2_0_0_schema(provider_info) validate_provider_info_with_runtime_schema(provider_info) diff --git a/docs/apache-airflow/installation.rst b/docs/apache-airflow/installation.rst index 16917a9eca4cd..f297a348fecbb 100644 --- a/docs/apache-airflow/installation.rst +++ b/docs/apache-airflow/installation.rst @@ -56,12 +56,7 @@ tested with Python 3.6, 3.7, and 3.8, but does not yet support Python 3.9. Installation tools '''''''''''''''''' -The official way of installing Airflow is with the ``pip`` tool. -There was a recent (November 2020) change in resolver, so currently only 20.2.4 version is officially -supported, although you might have a success with 20.3.3+ version (to be confirmed if all initial -issues from ``pip`` 20.3.0 release have been fixed in 20.3.3). 
In order to install Airflow you need to -either downgrade pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option -``--use-deprecated legacy-resolver`` to your pip install command. +Only ``pip`` installation is currently officially supported. While there are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/docs/apache-airflow/start/local.rst b/docs/apache-airflow/start/local.rst index 407f18f081215..c59a05a31ddb9 100644 --- a/docs/apache-airflow/start/local.rst +++ b/docs/apache-airflow/start/local.rst @@ -24,15 +24,7 @@ This quick start guide will help you bootstrap a Airflow standalone instance on .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. 
While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/docs/apache-airflow/upgrade-check.rst b/docs/apache-airflow/upgrade-check.rst index 18bbec0094536..0c94f367abb07 100644 --- a/docs/apache-airflow/upgrade-check.rst +++ b/docs/apache-airflow/upgrade-check.rst @@ -38,15 +38,7 @@ time of writing: 1.10.15) and then to download this package and run the script a .. note:: - On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver - might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might - depend on your choice of extras. In order to install Airflow you might need to either downgrade - pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, - you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. - - While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we - set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well. - Due to those constraints, only ``pip`` installation is currently officially supported. + Only ``pip`` installation is currently officially supported. While they are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst index d3d3540072905..20aa77a43911a 100644 --- a/docs/docker-stack/build-arg-ref.rst +++ b/docs/docker-stack/build-arg-ref.rst @@ -45,7 +45,7 @@ Those are the most common arguments that you use when you want to build a custom +------------------------------------------+------------------------------------------+------------------------------------------+ | ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. 
| +------------------------------------------+------------------------------------------+------------------------------------------+ -| ``AIRFLOW_PIP_VERSION`` | ``20.2.4`` | PIP version used. | +| ``AIRFLOW_PIP_VERSION`` | ``21.1`` | PIP version used. | +------------------------------------------+------------------------------------------+------------------------------------------+ | ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | +------------------------------------------+------------------------------------------+------------------------------------------+ diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index 33dd2a97c7ce3..d41a92e7add05 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -408,8 +408,7 @@ function initialization::initialize_image_build_variables() { export INSTALLED_PROVIDERS export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv" - # default version of PIP USED (This has to be < 20.3 until https://github.com/apache/airflow/issues/12838 is solved) - AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="20.2.4"} + AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="21.1"} export AIRFLOW_PIP_VERSION # We also pin version of wheel used to get consistent builds diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh index 1ce84da597aaf..74131affb6026 100644 --- a/scripts/in_container/_in_container_utils.sh +++ b/scripts/in_container/_in_container_utils.sh @@ -268,12 +268,6 @@ function install_airflow_from_sdist() { pip install "${airflow_package}${extras}" } -function install_remaining_dependencies() { - group_start "Installs all remaining dependencies that are not installed by '${AIRFLOW_EXTRAS}' " - pip install apache-beam[gcp] - group_end -} - function 
uninstall_airflow() { pip uninstall -y apache-airflow || true find /root/airflow/ -type f -print0 | xargs -0 rm -f -- diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh index c010df7e1f340..d7c08a88d9bf3 100755 --- a/scripts/in_container/run_install_and_test_provider_packages.sh +++ b/scripts/in_container/run_install_and_test_provider_packages.sh @@ -196,7 +196,6 @@ function discover_all_field_behaviours() { setup_provider_packages verify_parameters install_airflow_as_specified -install_remaining_dependencies install_provider_packages import_all_provider_classes diff --git a/scripts/in_container/run_prepare_provider_documentation.sh b/scripts/in_container/run_prepare_provider_documentation.sh index 71a22917dc8b1..34d22ad268ed9 100755 --- a/scripts/in_container/run_prepare_provider_documentation.sh +++ b/scripts/in_container/run_prepare_provider_documentation.sh @@ -99,11 +99,6 @@ export PYTHONPATH="${AIRFLOW_SOURCES}" verify_suffix_versions_for_package_preparation install_supported_pip_version - -# install extra packages missing in devel_ci -# TODO: remove it when devel_all == devel_ci -install_remaining_dependencies - import_all_provider_classes verify_provider_packages_named_properly diff --git a/scripts/in_container/run_prepare_provider_packages.sh b/scripts/in_container/run_prepare_provider_packages.sh index 7ddcd3a328f83..4cb8d507470cb 100755 --- a/scripts/in_container/run_prepare_provider_packages.sh +++ b/scripts/in_container/run_prepare_provider_packages.sh @@ -169,7 +169,6 @@ function rename_packages_if_needed() { popd >/dev/null } -install_remaining_dependencies setup_provider_packages cd "${PROVIDER_PACKAGES_DIR}" || exit 1 diff --git a/setup.py b/setup.py index 0ae7bd080af5b..0e8573fe9228e 100644 --- a/setup.py +++ b/setup.py @@ -198,7 +198,7 @@ def get_sphinx_theme_version() -> str: 'watchtower~=0.7.3', ] apache_beam = [ - 'apache-beam[gcp]', + 
'apache-beam>=2.20.0', ] async_packages = [ 'eventlet>= 0.9.7', @@ -502,7 +502,7 @@ def get_sphinx_theme_version() -> str: 'paramiko', 'pipdeptree', 'pre-commit', - 'pylint>=2.7.0', + 'pylint~=2.7.4', 'pysftp', 'pytest~=6.0', 'pytest-cov', @@ -749,20 +749,6 @@ def add_extras_for_all_deprecated_aliases() -> None: ] ) -# Those packages are excluded because they break tests and they are not needed to run our test suite. -# This can be removed as soon as we get non-conflicting -# requirements for the apache-beam as well. -# -# Currently Apache Beam has very narrow and old dependencies for 'mock' package which -# are required only for our tests. -# once https://github.com/apache/beam/pull/14328 is solved and new version of apache-beam is released -# we will be able to remove this exclusion and get rid of `install_remaining_dependencies` -# function in `scripts/in_container`. -# -PACKAGES_EXCLUDED_FOR_CI = [ - 'apache-beam', -] - def is_package_excluded(package: str, exclusion_list: List[str]): """ @@ -781,13 +767,7 @@ def is_package_excluded(package: str, exclusion_list: List[str]): if not is_package_excluded(package=package, exclusion_list=PACKAGES_EXCLUDED_FOR_ALL) ] -devel_ci = [ - package - for package in devel_all - if not is_package_excluded( - package=package, exclusion_list=PACKAGES_EXCLUDED_FOR_CI + PACKAGES_EXCLUDED_FOR_ALL - ) -] +devel_ci = devel_all # Those are extras that we have to add for development purposes diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py index 671cc8b855bae..953affd7cba8a 100644 --- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - +# pylint: disable=no-member import unittest from collections import OrderedDict