diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index d926e14641959..351e17eb2c550 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -28,7 +28,7 @@ runs: - name: "Setup python" uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 cache: 'pip' cache-dependency-path: ./dev/breeze/setup* - name: Cache breeze diff --git a/.github/actions/build-prod-images/action.yml b/.github/actions/build-prod-images/action.yml index b4cec13ddc1a1..6d0ca1b550896 100644 --- a/.github/actions/build-prod-images/action.yml +++ b/.github/actions/build-prod-images/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: breeze ci-image pull --tag-as-latest env: - PYTHON_MAJOR_MINOR_VERSION: "3.7" + PYTHON_MAJOR_MINOR_VERSION: "3.8" - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml index fa8686890d0fe..74f36ab61e672 100644 --- a/.github/workflows/build-images.yml +++ b/.github/workflows/build-images.yml @@ -108,7 +108,7 @@ jobs: - name: "Setup python" uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: "Retrieve defaults from branch_defaults.py" # We cannot "execute" the branch_defaults.py python code here because that would be # a security problem (we cannot run any code that comes from the sources coming from the PR. diff --git a/.readthedocs.yml b/.readthedocs.yml index 592c9da64e1cf..aa16e3a8e3d57 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: docs/rtd-deprecation/conf.py python: - version: "3.7" + version: "3.8" install: - method: pip path: . diff --git a/BREEZE.rst b/BREEZE.rst index acc1240138be4..9cce6d0d20468 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -360,12 +360,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. -For example, you can choose to run Python 3.7 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.8 tests with MySQL as backend and with mysql version 8 as follows: .. code-block:: bash - breeze --python 3.7 --backend mysql --mysql-version 8 + breeze --python 3.8 --backend mysql --mysql-version 8 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -499,7 +499,7 @@ When you are starting airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.7 --backend mysql start-airflow + breeze --python 3.8 --backend mysql start-airflow You can also use it to start any released version of Airflow from ``PyPI`` with the @@ -507,7 +507,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. code-block:: bash - breeze start-airflow --python 3.7 --backend mysql --use-airflow-version 2.2.5 + breeze start-airflow --python 3.8 --backend mysql --use-airflow-version 2.2.5 Those are all available flags of ``start-airflow`` command: @@ -1395,10 +1395,10 @@ suffix and they need to also be paired with corresponding runtime dependency add .. 
code-block:: bash - breeze prod-image build --python 3.7 --additional-dev-deps "libasound2-dev" \ + breeze prod-image build --python 3.8 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" -Same as above but uses python 3.7. +Same as above but uses Python 3.8. Building PROD image ................... diff --git a/CI.rst b/CI.rst index b0b5240423243..132add315e5ac 100644 --- a/CI.rst +++ b/CI.rst @@ -59,7 +59,7 @@ Container Registry used as cache We are using GitHub Container Registry to store the results of the ``Build Images`` workflow which is used in the ``Tests`` workflow. -Currently in main version of Airflow we run tests in 4 different versions of Python (3.7, 3.8, 3.9, 3.10) -which means that we have to build 8 images (4 CI ones and 4 PROD ones). +Currently in main version of Airflow we run tests in 3 different versions of Python (3.8, 3.9, 3.10) +which means that we have to build 6 images (3 CI ones and 3 PROD ones). Yet we run around 12 jobs with each of the CI images. That is a lot of time to just build the environment to run. Therefore we are utilising ``pull_request_target`` feature of GitHub Actions. @@ -145,7 +145,7 @@ have to be percent-encoded when you access them via UI (/ = %2F) +--------------+----------------------------------------------------------+----------------------------------------------------------+ * <BRANCH> might be either "main" or "v2-*-test" -* <X.Y> - Python version (Major + Minor).Should be one of ["3.7", "3.8", "3.9"]. +* <X.Y> - Python version (Major + Minor). Should be one of ["3.8", "3.9", "3.10"]. * <COMMIT_SHA> - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or commit from the tip of the branch used for the PR. @@ -523,9 +523,9 @@ For example knowing that the CI job was for commit ``cd27124534b46c9688a1d89e75f .. code-block:: bash - docker pull ghcr.io/apache/airflow/main/ci/python3.7:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker pull ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 - docker run -it ghcr.io/apache/airflow/main/ci/python3.7:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker run -it ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 But you usually need to pass more variables and complex setup if you want to connect to a database or diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f88a12979a441..67a0bbb904191 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -414,7 +414,7 @@ these guidelines: - Run tests locally before opening PR. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.7 currently). In rare cases + if it works for the oldest supported version (Python 3.8 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. @@ -830,7 +830,7 @@ from the PyPI package: .. code-block:: bash pip install apache-airflow[google,amazon,async]==2.2.5 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.8.txt" The last one can be used to install Airflow in "minimal" mode - i.e when bare Airflow is installed without extras. @@ -842,7 +842,7 @@ requirements). .. code-block:: bash pip install -e . 
\ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This works also with extras - for example: @@ -850,7 +850,7 @@ This works also with extras - for example: .. code-block:: bash pip install ".[ssh]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" There are different set of fixed constraint files for different python major/minor versions and you should @@ -862,7 +862,7 @@ If you want to update just airflow dependencies, without paying attention to pro .. code-block:: bash pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" The ``constraints-.txt`` and ``constraints-no-providers-.txt`` diff --git a/Dockerfile b/Dockerfile index 58b19ae25d367..de82d249b67d0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -46,7 +46,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here ARG AIRFLOW_VERSION="2.6.0" -ARG PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" +ARG PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" ARG AIRFLOW_PIP_VERSION=23.1.2 ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" diff --git a/Dockerfile.ci b/Dockerfile.ci index 6169b6066f1e3..6c7fa13fd5736 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,7 +16,7 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. # -ARG PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" +ARG PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" ############################################################################################## # This is the script image where we keep all inlined bash scripts needed in other segments @@ -615,7 +615,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.7} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/IMAGES.rst b/IMAGES.rst index d28db9927843b..fabc58ccebd01 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -99,18 +99,18 @@ image version for the chosen Python version. The images are build with default extras - different extras for CI and production image and you can change the extras via the ``--extras`` parameters and add new ones with ``--additional-extras``. -For example if you want to build Python 3.7 version of production image with +For example if you want to build Python 3.8 version of production image with "all" extras installed you should run this command: .. code-block:: bash - breeze prod-image build --python 3.7 --extras "all" + breeze prod-image build --python 3.8 --extras "all" If you just want to add new extras you can add them like that: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras "all" + breeze prod-image build --python 3.8 --additional-extras "all" The command that builds the CI image is optimized to minimize the time needed to rebuild the image when the source code of Airflow evolves. 
This means that if you already have the image locally downloaded and @@ -128,7 +128,7 @@ parameter to Breeze: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras=trino --install-airflow-version=2.0.0 + breeze prod-image build --python 3.8 --additional-extras=trino --install-airflow-version=2.0.0 This will build the image using command similar to: @@ -136,7 +136,7 @@ This will build the image using command similar to: pip install \ apache-airflow[async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.8.txt" .. note:: @@ -158,14 +158,14 @@ HEAD of development for constraints): .. code-block:: bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" You can also skip installing airflow and install it from locally provided files by using ``--install-packages-from-context`` parameter to Breeze: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras=trino --install-packages-from-context + breeze prod-image build --python 3.8 --additional-extras=trino --install-packages-from-context In this case you airflow and all packages (.whl files) should be placed in ``docker-context-files`` folder. @@ -192,21 +192,21 @@ or ``disabled`` flags when you run Breeze commands. For example: .. code-block:: bash - breeze ci-image build --python 3.7 --docker-cache local + breeze ci-image build --python 3.8 --docker-cache local Will build the CI image using local build cache (note that it will take quite a long time the first time you run it). .. code-block:: bash - breeze prod-image build --python 3.7 --docker-cache registry + breeze prod-image build --python 3.8 --docker-cache registry Will build the production image with cache used from registry. .. code-block:: bash - breeze prod-image build --python 3.7 --docker-cache disabled + breeze prod-image build --python 3.8 --docker-cache disabled Will build the production image from the scratch. @@ -304,7 +304,7 @@ in the `<#ci-image-build-arguments>`_ chapter below. Here just a few examples are presented which should give you general understanding of what you can customize. -This builds the production image in version 3.7 with additional airflow extras from 2.0.0 PyPI package and +This builds the production image in version 3.8 with additional airflow extras from 2.0.0 PyPI package and additional apt dev and runtime dependencies. As of Airflow 2.3.0, it is required to build images with ``DOCKER_BUILDKIT=1`` variable @@ -315,7 +315,7 @@ you have ``buildx`` plugin installed. DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ @@ -326,7 +326,7 @@ the same image can be built using ``breeze`` (it supports auto-completion of the .. 
code-block:: bash - breeze ci-image build --python 3.7 --additional-extras=jdbc --additional-python-deps="pandas" \ + breeze ci-image build --python 3.8 --additional-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" You can customize more aspects of the image - such as additional commands executed before apt dependencies @@ -338,7 +338,7 @@ based on example in `this comment ]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This will install Airflow in 'editable' mode - where sources of Airflow are taken directly from the source code rather than moved to the installation directory. During the installation airflow will install - but then @@ -163,9 +163,9 @@ You can also install Airflow in non-editable mode: .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, 3.9, or 3.10 + # use the same version of python as you are working with, 3.8, 3.9, or 3.10 pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This will copy the sources to directory where usually python packages are installed. You can see the list of directories via ``python -m site`` command. In this case the providers are installed from PyPI, not from @@ -173,9 +173,9 @@ sources, unless you set ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, 3.9, or 3.10 + # use the same version of python as you are working with, 3.8, 3.9, or 3.10 INSTALL_PROVIDERS_FROM_SOURCES="true" pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" Note: when you first initialize database (the next step), you may encounter some problems. @@ -234,7 +234,7 @@ before running ``pip install`` command: .. code-block:: bash INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -U -e ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" This way no providers packages will be installed and they will always be imported from the "airflow/providers" folder. 
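Every constraint link touched in these docs follows one URL scheme: a git ref (``constraints-main`` or ``constraints-<AIRFLOW_VERSION>``), an optional flavor (``no-providers-`` or ``source-providers-``), and the Python major.minor version, which is why this whole change is a mechanical ``3.7`` to ``3.8`` bump. A minimal sketch of deriving the right URL for the interpreter you are running (the helper name is illustrative, not part of the Airflow codebase):

```python
import sys


def constraints_url(ref: str = "constraints-main", flavor: str = "") -> str:
    # ref: e.g. "constraints-2.6.0"; flavor: "", "no-providers-" or "source-providers-".
    py = f"{sys.version_info.major}.{sys.version_info.minor}"
    return (
        "https://raw.githubusercontent.com/apache/airflow/"
        f"{ref}/constraints-{flavor}{py}.txt"
    )


# On a Python 3.8 interpreter this prints:
# https://raw.githubusercontent.com/apache/airflow/constraints-2.6.0/constraints-3.8.txt
print(constraints_url("constraints-2.6.0"))
```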
diff --git a/README.md b/README.md index 942dd5241a2e6..4322b342ce061 100644 --- a/README.md +++ b/README.md @@ -86,15 +86,15 @@ Airflow is not a streaming solution, but it is often used to process real-time d Apache Airflow is tested with: -| | Main version (dev) | Stable version (2.6.0) | -|------------|------------------------|------------------------| -| Python | 3.7, 3.8, 3.9, 3.10 | 3.7, 3.8, 3.9, 3.10 | -| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | -| Kubernetes | 1.23, 1.24, 1.25, 1.26 | 1.23, 1.24, 1.25, 1.26 | -| PostgreSQL | 11, 12, 13, 14, 15 | 11, 12, 13, 14, 15 | -| MySQL | 5.7, 8 | 5.7, 8 | -| SQLite | 3.15.0+ | 3.15.0+ | -| MSSQL | 2017(\*), 2019(\*) | 2017(\*), 2019(\*) | +| | Main version (dev) | Stable version (2.6.0) | +|-------------|------------------------|------------------------| +| Python | 3.8, 3.9, 3.10 | 3.8, 3.9, 3.10 | +| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | +| Kubernetes | 1.23, 1.24, 1.25, 1.26 | 1.23, 1.24, 1.25, 1.26 | +| PostgreSQL | 11, 12, 13, 14, 15 | 11, 12, 13, 14, 15 | +| MySQL | 5.7, 8 | 5.7, 8 | +| SQLite | 3.15.0+ | 3.15.0+ | +| MSSQL | 2017(\*), 2019(\*) | 2017(\*), 2019(\*) | \* Experimental @@ -159,14 +159,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==2.6.0' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.0/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.0/constraints-3.8.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==2.6.0' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.0/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.0/constraints-3.8.txt" ``` For information on installing provider packages, check @@ -298,16 +298,16 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.7 it - means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of + the first new MINOR (or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.8 it + means that we will drop support in main right after its end of life in October 2024, and the first MAJOR or MINOR version of Airflow released after will not have it. 2. The "oldest" supported version of Python/Kubernetes is the default one until we decide to switch to later version. "Default" is only meaningful in terms of "smoke tests" in CI PRs, which are run using this default version and the default reference image available. Currently `apache/airflow:latest` - and `apache/airflow:2.6.0` images are Python 3.7 images. This means that default reference image will - become the default at the time when we start preparing for dropping 3.7 support which is few months - before the end of life for Python 3.7. + and `apache/airflow:2.6.0` images are Python 3.8 images. This means that the next Python version will + become the default reference image only when we start preparing for dropping 3.8 support, which is a few months + before the end of life for Python 3.8. 3. 
We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 0c81ba75d4755..86336b07d3aab 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -41,7 +41,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit <https://pre-commit.com>`__ framework -in our development workflow. To install and use it, you need at least Python 3.7 locally. +in our development workflow. To install and use it, you need at least Python 3.8 locally. Installing pre-commit hooks ........................... diff --git a/TESTING.rst b/TESTING.rst index e137d1486c452..8ae8aa445a3b9 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -711,7 +711,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.7-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.8-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -879,7 +879,7 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml: # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -915,7 +915,7 @@ Should result in KinD creating the K8S cluster. - Creating cluster "airflow-python-3.7-v1.24.2" ... + Creating cluster "airflow-python-3.8-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -923,10 +923,10 @@ Should result in KinD creating the K8S cluster. ✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.7-v1.24.2" + Set kubectl context to "kind-airflow-python-3.8-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.7-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.8-v1.24.2 Not sure what to do next? 😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -934,9 +934,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.7 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.7-v1.24.2 created! + KinD cluster airflow-python-3.8-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -950,20 +950,20 @@ Should result in KinD creating the K8S cluster. .. 
code-block:: text - Configuring airflow-python-3.7-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.7-v1.24.2 + Configuring airflow-python-3.8-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.8-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.7-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.8-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.7-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.8-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.7-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.8-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -981,45 +981,45 @@ Should show the status of current KinD cluster. .. code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.7-v1.24.2 + Cluster: airflow-python-3.8-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.7-v1.24.2 + Cluster info: airflow-python-3.8-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. - Storage class for airflow-python-3.7-v1.24.2 + Storage class for airflow-python-3.8-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.7-v1.24.2 + Running pods for airflow-python-3.8-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. 
Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.7 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.7-v1.24.2 + Cluster healthy: airflow-python-3.8-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not - either by running the build separately or passing ``--rebuild-base-image`` flag @@ -1030,15 +1030,15 @@ Should show the status of current KinD cluster. .. code-block:: text - Building the K8S image for Python 3.7 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.7:latest + Building the K8S image for Python 3.8 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.8:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.7:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.7:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -1046,7 +1046,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.7-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.8-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -1066,9 +1066,9 @@ Should show the status of current KinD cluster. Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.7-kubernetes to cluster airflow-python-3.7-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.7-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.7-v1.24.2-worker", loading... - Image: "ghcr.io/apache/airflow/main/prod/python3.7-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.7-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.8-kubernetes to cluster airflow-python-3.8-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-worker", loading... 
+ Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy airflow by: @@ -1083,8 +1083,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.7-v1.24.2 - Deploying kind-airflow-python-3.7-v1.24.2 with airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.8-v1.24.2 + Deploying kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -1126,12 +1126,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#webserver-secret-key - Deployed kind-airflow-python-3.7-v1.24.2 with airflow Helm Chart. + Deployed kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. - Airflow for Python 3.7 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.8 and K8S version v1.24.2 has been successfully deployed. - The KinD cluster name: airflow-python-3.7-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.7-v1.24.2. + The KinD cluster name: airflow-python-3.8-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.8-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -1165,7 +1165,7 @@ The virtualenv required will be created automatically when the scripts are run. .. code-block:: text - Running tests with kind-airflow-python-3.7-v1.24.2 cluster. + Running tests with kind-airflow-python-3.8-v1.24.2 cluster. Command to run: pytest kubernetes_tests ========================================================================================= test session starts ========================================================================================== platform darwin -- Python 3.9.9, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin/python @@ -1194,7 +1194,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. - (kind-airflow-python-3.7-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.8-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -1304,9 +1304,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.7-v1.24.2! - Deleting cluster "airflow-python-3.7-v1.24.2" ... - KinD cluster airflow-python-3.7-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.8-v1.24.2! + Deleting cluster "airflow-python-3.8-v1.24.2" ... + KinD cluster airflow-python-3.8-v1.24.2 deleted! Running complete k8s tests @@ -1465,13 +1465,13 @@ Here is the typical session that you need to do to run system tests: .. code-block:: bash breeze stop - breeze --python 3.7 --db-reset --forward-credentials + breeze --python 3.8 --db-reset --forward-credentials This will: * stop the whole environment (i.e. recreates metadata database from the scratch) * run Breeze with: - * python 3.7 version + * python 3.8 version * resetting the Airflow database * forward your local credentials to Breeze @@ -1521,7 +1521,7 @@ Breeze session. They are usually expensive to run. .. 
code-block:: bash breeze stop - breeze --python 3.7 --db-reset --forward-credentials + breeze --python 3.8 --db-reset --forward-credentials 2. Run create action in helper (to create slowly created resources): diff --git a/airflow/api_internal/endpoints/rpc_api_endpoint.py b/airflow/api_internal/endpoints/rpc_api_endpoint.py index 50e72c614c1e7..b6ac604c05393 100644 --- a/airflow/api_internal/endpoints/rpc_api_endpoint.py +++ b/airflow/api_internal/endpoints/rpc_api_endpoint.py @@ -30,7 +30,7 @@ log = logging.getLogger(__name__) -@functools.lru_cache() +@functools.lru_cache def _initialize_map() -> dict[str, Callable]: from airflow.dag_processing.manager import DagFileProcessorManager from airflow.dag_processing.processor import DagFileProcessor diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py index dc0c520b796c0..989e6b018fa98 100644 --- a/airflow/compat/functools.py +++ b/airflow/compat/functools.py @@ -19,11 +19,6 @@ import sys -if sys.version_info >= (3, 8): - from functools import cached_property -else: - from cached_property import cached_property - if sys.version_info >= (3, 9): from functools import cache else: @@ -32,4 +27,4 @@ cache = lru_cache(maxsize=None) -__all__ = ["cache", "cached_property"] +__all__ = ["cache"] diff --git a/airflow/compat/functools.pyi b/airflow/compat/functools.pyi index 32cbbaa431e1e..7d1bef5939e6e 100644 --- a/airflow/compat/functools.pyi +++ b/airflow/compat/functools.pyi @@ -20,9 +20,8 @@ # TODO: Remove this file after the upstream fix is available in our toolchain. from __future__ import annotations -from typing import Callable, TypeVar +from typing import TypeVar T = TypeVar("T") -def cached_property(f: Callable[..., T]) -> T: ... def cache(f: T) -> T: ... diff --git a/airflow/configuration.py b/airflow/configuration.py index d01d70d6e9a32..c0e66731d894f 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -42,7 +42,6 @@ from typing_extensions import overload -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowConfigException from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend from airflow.utils import yaml @@ -258,11 +257,11 @@ class AirflowConfigParser(ConfigParser): # Now build the inverse so we can go from old_section/old_key to new_section/new_key # if someone tries to retrieve it based on old_section/old_key - @cached_property + @functools.cached_property def inversed_deprecated_options(self): return {(sec, name): key for key, (sec, name, ver) in self.deprecated_options.items()} - @cached_property + @functools.cached_property def inversed_deprecated_sections(self): return { old_section: new_section for new_section, (old_section, ver) in self.deprecated_sections.items() diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py index c5d17122502ac..3b9145ea6bfd5 100644 --- a/airflow/decorators/base.py +++ b/airflow/decorators/base.py @@ -19,6 +19,7 @@ import inspect import re import warnings +from functools import cached_property from itertools import chain from textwrap import dedent from typing import ( @@ -41,7 +42,6 @@ from sqlalchemy.orm import Session from airflow import Dataset -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models.abstractoperator import DEFAULT_RETRIES, DEFAULT_RETRY_DELAY from airflow.models.baseoperator import ( diff --git a/airflow/jobs/job.py b/airflow/jobs/job.py index 9867efa9ecb51..eca7eb7966c46 100644 --- a/airflow/jobs/job.py +++ 
b/airflow/jobs/job.py @@ -17,6 +17,7 @@ # under the License. from __future__ import annotations +from functools import cached_property from time import sleep from typing import Callable, NoReturn @@ -25,7 +26,6 @@ from sqlalchemy.orm import backref, foreign, relationship from sqlalchemy.orm.session import Session, make_transient -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.executors.executor_loader import ExecutorLoader diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py index c8584a9f4a567..b0a3747f03b3e 100644 --- a/airflow/models/abstractoperator.py +++ b/airflow/models/abstractoperator.py @@ -19,9 +19,10 @@ import datetime import inspect +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, ClassVar, Collection, Iterable, Iterator, Sequence -from airflow.compat.functools import cache, cached_property +from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.models.expandinput import NotFullyPopulated diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 9b227db8a5782..9088d002bc3f9 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -64,7 +64,6 @@ import airflow.templates from airflow import settings, utils from airflow.api_internal.internal_api_call import internal_api_call -from airflow.compat.functools import cached_property from airflow.configuration import conf, secrets_backend_list from airflow.exceptions import ( AirflowDagInconsistent, @@ -961,7 +960,7 @@ def next_dagrun_after_date(self, date_last_automated_dagrun: pendulum.DateTime | return None return info.run_after - @cached_property + @functools.cached_property def _time_restriction(self) -> TimeRestriction: start_dates = [t.start_date for t in self.tasks if t.start_date] if self.start_date is not None: @@ -2413,7 +2412,7 @@ def has_task(self, task_id: str): def has_task_group(self, task_group_id: str) -> bool: return task_group_id in self.task_group_dict - @cached_property + @functools.cached_property def task_group_dict(self): return {k: v for k, v in self._task_group.get_task_group_dict().items() if k is not None} diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py index f9e5abc77466b..fde28bb7a6e64 100644 --- a/airflow/models/xcom.py +++ b/airflow/models/xcom.py @@ -26,7 +26,7 @@ import logging import pickle import warnings -from functools import wraps +from functools import cached_property, wraps from typing import TYPE_CHECKING, Any, Generator, Iterable, cast, overload import attr @@ -47,7 +47,6 @@ from airflow import settings from airflow.api_internal.internal_api_call import internal_api_call -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.models.base import COLLATION_ARGS, ID_LEN, Base diff --git a/airflow/operators/bash.py b/airflow/operators/bash.py index e474bd8d68754..6ec1b0e80d6fc 100644 --- a/airflow/operators/bash.py +++ b/airflow/operators/bash.py @@ -20,9 +20,9 @@ import os import shutil import warnings +from functools import cached_property from typing import Container, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowSkipException from airflow.hooks.subprocess import SubprocessHook from airflow.models.baseoperator import BaseOperator 
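The Python hunks above are one mechanical migration enabled by the new 3.8 floor: ``cached_property`` now comes straight from the standard library ``functools`` (so ``airflow.compat.functools`` drops its backport), and ``lru_cache`` may be applied as a bare decorator without call parentheses. A minimal sketch of both 3.8 features, using an illustrative class rather than real Airflow code:

```python
import functools


class Resource:
    """Illustrative example, not an Airflow class."""

    def __init__(self, uri: str) -> None:
        self.uri = uri

    @functools.cached_property
    def normalized_uri(self) -> str:
        # Computed on first access and stored in the instance __dict__;
        # later accesses never re-enter this method.
        print("normalizing...")
        return self.uri.strip().lower()


@functools.lru_cache  # bare decorator form is valid since Python 3.8
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)


r = Resource("  s3://Bucket/Key ")
assert r.normalized_uri == r.normalized_uri  # "normalizing..." prints once
assert fib(30) == 832040
```

Since ``functools.cached_property`` caches in the instance ``__dict__`` just like the old ``cached_property`` backport, the migrated modules keep the same runtime behavior.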
diff --git a/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst b/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst index a6b5a01247ea6..9fa649052d7f7 100644 --- a/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst +++ b/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst @@ -32,7 +32,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail because suspension of the provider might cause changes in the dependencies, so if you see errors about missing dependencies imports, non-usable classes etc., you will need to build the CI image locally -via ``breeze build-image --python 3.7 --upgrade-to-newer-dependencies`` after the first pre-commit run +via ``breeze ci-image build --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run and then run the static checks again. If you want to be absolutely sure to run all static checks you can always do this via @@ -73,7 +73,7 @@ Example failing collection after ``google`` provider has been suspended: ImportError while importing test module '/opt/airflow/tests/providers/apache/beam/operators/test_beam.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: - /usr/local/lib/python3.7/importlib/__init__.py:127: in import_module + /usr/local/lib/python3.8/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) tests/providers/apache/beam/operators/test_beam.py:25: in <module> from airflow.providers.apache.beam.operators.beam import ( @@ -101,7 +101,7 @@ The fix is to add this line at the top of the ``tests/providers/apache/beam/oper Traceback (most recent call last): File "/opt/airflow/scripts/in_container/verify_providers.py", line 266, in import_all_classes _module = importlib.import_module(modinfo.name) - File "/usr/local/lib/python3.7/importlib/__init__.py", line 127, in import_module + File "/usr/local/lib/python3.8/importlib/__init__.py", line 127, in import_module return _bootstrap._gcd_import(name, package, level) File "<frozen importlib._bootstrap>", line 1006, in _gcd_import File "<frozen importlib._bootstrap>", line 983, in _find_and_load @@ -109,7 +109,7 @@ The fix is to add this line at the top of the ``tests/providers/apache/beam/oper File "<frozen importlib._bootstrap>", line 677, in _load_unlocked File "<frozen importlib._bootstrap_external>", line 728, in exec_module File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed - File "/usr/local/lib/python3.7/site-packages/airflow/providers/mysql/transfers/s3_to_mysql.py", line 23, in <module> + File "/usr/local/lib/python3.8/site-packages/airflow/providers/mysql/transfers/s3_to_mysql.py", line 23, in <module> from airflow.providers.amazon.aws.hooks.s3 import S3Hook ModuleNotFoundError: No module named 'airflow.providers.amazon' diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/airflow/providers/alibaba/cloud/log/oss_task_handler.py index 512eda90c69e1..f70c6378cb5d9 100644 --- a/airflow/providers/alibaba/cloud/log/oss_task_handler.py +++ b/airflow/providers/alibaba/cloud/log/oss_task_handler.py @@ -21,10 +21,10 @@ import os import pathlib import shutil +from functools import cached_property from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.alibaba.cloud.hooks.oss import OSSHook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/airflow/providers/alibaba/cloud/sensors/oss_key.py index 
98b2c25beaee1..1395c76de3c3a 100644 --- a/airflow/providers/alibaba/cloud/sensors/oss_key.py +++ b/airflow/providers/alibaba/cloud/sensors/oss_key.py @@ -17,10 +17,10 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from urllib.parse import urlsplit -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.alibaba.cloud.hooks.oss import OSSHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/airflow/providers/amazon/aws/hooks/appflow.py index 14dee2ef10206..aaaf1cc5ace18 100644 --- a/airflow/providers/amazon/aws/hooks/appflow.py +++ b/airflow/providers/amazon/aws/hooks/appflow.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook if TYPE_CHECKING: diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py index f138497f3a3dd..d80d2097ee760 100644 --- a/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/airflow/providers/amazon/aws/hooks/base_aws.py @@ -31,7 +31,7 @@ import os import uuid from copy import deepcopy -from functools import wraps +from functools import cached_property, wraps from os import PathLike from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, Union @@ -49,7 +49,6 @@ from dateutil.tz import tzlocal from slugify import slugify -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import ( AirflowException, diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py b/airflow/providers/amazon/aws/hooks/glue_crawler.py index 83ca6b9167011..0393cadc9757a 100644 --- a/airflow/providers/amazon/aws/hooks/glue_crawler.py +++ b/airflow/providers/amazon/aws/hooks/glue_crawler.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from time import sleep -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/airflow/providers/amazon/aws/hooks/quicksight.py index 11ea728e5ef8c..74c77652a5ed9 100644 --- a/airflow/providers/amazon/aws/hooks/quicksight.py +++ b/airflow/providers/amazon/aws/hooks/quicksight.py @@ -18,11 +18,11 @@ from __future__ import annotations import time +from functools import cached_property from botocore.exceptions import ClientError from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/hooks/redshift_sql.py b/airflow/providers/amazon/aws/hooks/redshift_sql.py index e9c2b7fecc78b..11c7dbce26c0c 100644 --- a/airflow/providers/amazon/aws/hooks/redshift_sql.py +++ b/airflow/providers/amazon/aws/hooks/redshift_sql.py @@ -16,6 +16,7 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING import redshift_connector @@ -23,7 +24,6 @@ from sqlalchemy import create_engine from sqlalchemy.engine.url import URL -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.common.sql.hooks.sql import DbApiHook diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index d2f54cc780da3..5d1074b8402a6 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -18,10 +18,10 @@ from __future__ import annotations from datetime import datetime +from functools import cached_property import watchtower -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 20754075a2dac..a45c228ac0328 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -20,10 +20,10 @@ import os import pathlib import shutil +from functools import cached_property from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/airflow/providers/amazon/aws/operators/appflow.py index ec55ebbda4d11..f2fe75f39501e 100644 --- a/airflow/providers/amazon/aws/operators/appflow.py +++ b/airflow/providers/amazon/aws/operators/appflow.py @@ -17,10 +17,10 @@ from __future__ import annotations from datetime import datetime, timedelta +from functools import cached_property from time import sleep from typing import TYPE_CHECKING, cast -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.operators.python import ShortCircuitOperator diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py index 1d544985693eb..a60e27e0fe822 100644 --- a/airflow/providers/amazon/aws/operators/athena.py +++ b/airflow/providers/amazon/aws/operators/athena.py @@ -17,9 +17,9 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.athena import AthenaHook diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py index 6acd2fd3a93de..338b625527bf1 100644 --- a/airflow/providers/amazon/aws/operators/batch.py +++ b/airflow/providers/amazon/aws/operators/batch.py @@ -26,9 +26,9 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index eb031d24148f6..d4f0bdf045c76 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -20,11 +20,11 @@ import re import sys from datetime import timedelta +from functools import cached_property from typing import TYPE_CHECKING, Sequence import boto3 -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator, XCom from airflow.providers.amazon.aws.exceptions import EcsOperatorError, EcsTaskFailToStart diff --git a/airflow/providers/amazon/aws/operators/emr.py b/airflow/providers/amazon/aws/operators/emr.py index b0bdfc0041d8d..e94252bd26f0e 100644 --- a/airflow/providers/amazon/aws/operators/emr.py +++ b/airflow/providers/amazon/aws/operators/emr.py @@ -19,6 +19,7 @@ import ast import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from uuid import uuid4 @@ -33,8 +34,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property - class EmrAddStepsOperator(BaseOperator): """ diff --git a/airflow/providers/amazon/aws/operators/glue_crawler.py b/airflow/providers/amazon/aws/operators/glue_crawler.py index 59ba2031fdd7e..426ca2f084d04 100644 --- a/airflow/providers/amazon/aws/operators/glue_crawler.py +++ b/airflow/providers/amazon/aws/operators/glue_crawler.py @@ -17,12 +17,12 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook diff --git a/airflow/providers/amazon/aws/operators/lambda_function.py b/airflow/providers/amazon/aws/operators/lambda_function.py index 4e3e475976ba8..d99651343680b 100644 --- a/airflow/providers/amazon/aws/operators/lambda_function.py +++ b/airflow/providers/amazon/aws/operators/lambda_function.py @@ -18,9 +18,9 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook diff --git a/airflow/providers/amazon/aws/operators/redshift_data.py b/airflow/providers/amazon/aws/operators/redshift_data.py index b0fad66faedfb..6d6ef9d103b77 100644 --- a/airflow/providers/amazon/aws/operators/redshift_data.py +++ b/airflow/providers/amazon/aws/operators/redshift_data.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py b/airflow/providers/amazon/aws/operators/sagemaker.py index 587bf5b0a16ac..c7ac2616c93fb 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker.py +++ b/airflow/providers/amazon/aws/operators/sagemaker.py @@ -19,11 +19,11 @@ import json import time import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Sequence from botocore.exceptions import ClientError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/airflow/providers/amazon/aws/secrets/secrets_manager.py index f075f6e5e9376..1ccc95a21b4be 100644 --- a/airflow/providers/amazon/aws/secrets/secrets_manager.py +++ b/airflow/providers/amazon/aws/secrets/secrets_manager.py @@ -21,10 +21,10 @@ import json import re import warnings +from functools import cached_property from typing import Any from urllib.parse import unquote -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.amazon.aws.utils import trim_none_values from airflow.secrets import BaseSecretsBackend diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/airflow/providers/amazon/aws/secrets/systems_manager.py index e4ec9a391f9bf..f15ee384aaacb 100644 --- a/airflow/providers/amazon/aws/secrets/systems_manager.py +++ b/airflow/providers/amazon/aws/secrets/systems_manager.py @@ -19,8 +19,8 @@ from __future__ import annotations import re +from functools import cached_property -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.utils import trim_none_values from airflow.secrets import BaseSecretsBackend from 
airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/amazon/aws/sensors/athena.py b/airflow/providers/amazon/aws/sensors/athena.py index 1954d15a8e004..40dc80a924bfb 100644 --- a/airflow/providers/amazon/aws/sensors/athena.py +++ b/airflow/providers/amazon/aws/sensors/athena.py @@ -17,12 +17,12 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.athena import AthenaHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/batch.py b/airflow/providers/amazon/aws/sensors/batch.py index 26a5e910a7e1a..594be1e55ed93 100644 --- a/airflow/providers/amazon/aws/sensors/batch.py +++ b/airflow/providers/amazon/aws/sensors/batch.py @@ -16,11 +16,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/airflow/providers/amazon/aws/sensors/cloud_formation.py index d2bd45592654f..df383cf2407a1 100644 --- a/airflow/providers/amazon/aws/sensors/cloud_formation.py +++ b/airflow/providers/amazon/aws/sensors/cloud_formation.py @@ -18,12 +18,12 @@ """This module contains sensors for AWS CloudFormation.""" from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/dms.py b/airflow/providers/amazon/aws/sensors/dms.py index 9e2e9ea63cc56..35563d4346292 100644 --- a/airflow/providers/amazon/aws/sensors/dms.py +++ b/airflow/providers/amazon/aws/sensors/dms.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Iterable, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.dms import DmsHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/dynamodb.py b/airflow/providers/amazon/aws/sensors/dynamodb.py index bcc8f5f54615a..da244f4b0ff12 100644 --- a/airflow/providers/amazon/aws/sensors/dynamodb.py +++ b/airflow/providers/amazon/aws/sensors/dynamodb.py @@ -16,9 +16,9 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/ec2.py b/airflow/providers/amazon/aws/sensors/ec2.py index 4377a2644440b..912fc3751e5a9 100644 --- a/airflow/providers/amazon/aws/sensors/ec2.py +++ b/airflow/providers/amazon/aws/sensors/ec2.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/ecs.py b/airflow/providers/amazon/aws/sensors/ecs.py index d3cfacbd414a5..a150a8e87a739 100644 --- a/airflow/providers/amazon/aws/sensors/ecs.py +++ b/airflow/providers/amazon/aws/sensors/ecs.py @@ -16,11 +16,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence import boto3 -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.ecs import ( EcsClusterStates, diff --git a/airflow/providers/amazon/aws/sensors/eks.py b/airflow/providers/amazon/aws/sensors/eks.py index d01a21f3a30d8..e5be3340f7de4 100644 --- a/airflow/providers/amazon/aws/sensors/eks.py +++ b/airflow/providers/amazon/aws/sensors/eks.py @@ -18,9 +18,9 @@ from __future__ import annotations from abc import abstractmethod +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.eks import ( ClusterStates, diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/airflow/providers/amazon/aws/sensors/emr.py index 8bb97f0ac46ae..ff134dc33d867 100644 --- a/airflow/providers/amazon/aws/sensors/emr.py +++ b/airflow/providers/amazon/aws/sensors/emr.py @@ -17,6 +17,7 @@ # under the License. 
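Nearly every hunk in this change is the same two-line move: with Python 3.7 support dropped, `cached_property` can come straight from the standard library (available since Python 3.8) instead of the `airflow.compat.functools` shim. A minimal, self-contained sketch of what `functools.cached_property` gives these hooks and sensors; `HypotheticalHook` and its `conn` attribute are illustrative, not real Airflow classes:

```python
from functools import cached_property  # stdlib since Python 3.8


class HypotheticalHook:
    """Illustrative stand-in for the provider hooks touched in this change."""

    def __init__(self, conn_id: str) -> None:
        self.conn_id = conn_id

    @cached_property
    def conn(self) -> object:
        # Evaluated once on first access; the result is stored in the
        # instance __dict__ and returned directly on later accesses.
        print(f"creating client for {self.conn_id}")
        return object()


hook = HypotheticalHook("aws_default")
hook.conn  # prints "creating client for aws_default"
hook.conn  # cached: nothing printed the second time
```

Unlike a plain `property`, the cached value lives in the instance `__dict__`, so `del hook.conn` can be used to force recomputation.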
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Iterable, Sequence from deprecated import deprecated @@ -29,8 +30,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property - class EmrBaseSensor(BaseSensorOperator): """ diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/airflow/providers/amazon/aws/sensors/glacier.py index 222027b2792be..0ae22fbce61c2 100644 --- a/airflow/providers/amazon/aws/sensors/glacier.py +++ b/airflow/providers/amazon/aws/sensors/glacier.py @@ -18,9 +18,9 @@ from __future__ import annotations from enum import Enum +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glacier import GlacierHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/airflow/providers/amazon/aws/sensors/glue.py index 761a51609bedc..30e44a9f8e585 100644 --- a/airflow/providers/amazon/aws/sensors/glue.py +++ b/airflow/providers/amazon/aws/sensors/glue.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glue import GlueJobHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py index d86136746687c..6a4856b6c950b 100644 --- a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +++ b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue_crawler.py b/airflow/providers/amazon/aws/sensors/glue_crawler.py index 6b8b4fcaea26d..b830fbffa16ec 100644 --- a/airflow/providers/amazon/aws/sensors/glue_crawler.py +++ b/airflow/providers/amazon/aws/sensors/glue_crawler.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/lambda_function.py b/airflow/providers/amazon/aws/sensors/lambda_function.py index 44cb487352e85..2febaba7a6cac 100644 --- a/airflow/providers/amazon/aws/sensors/lambda_function.py +++ b/airflow/providers/amazon/aws/sensors/lambda_function.py @@ -17,6 +17,7 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook @@ -25,7 +26,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/quicksight.py b/airflow/providers/amazon/aws/sensors/quicksight.py index 9145e886bf267..fed0faf3dadce 100644 --- a/airflow/providers/amazon/aws/sensors/quicksight.py +++ b/airflow/providers/amazon/aws/sensors/quicksight.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/airflow/providers/amazon/aws/sensors/rds.py index 50f197ef0c48d..45a48e965a35b 100644 --- a/airflow/providers/amazon/aws/sensors/rds.py +++ b/airflow/providers/amazon/aws/sensors/rds.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowNotFoundException from airflow.providers.amazon.aws.hooks.rds import RdsHook from airflow.providers.amazon.aws.utils.rds import RdsDbType diff --git a/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/airflow/providers/amazon/aws/sensors/redshift_cluster.py index 653ccaf0015ef..9734e98bc75b3 100644 --- a/airflow/providers/amazon/aws/sensors/redshift_cluster.py +++ b/airflow/providers/amazon/aws/sensors/redshift_cluster.py @@ -16,11 +16,11 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/airflow/providers/amazon/aws/sensors/s3.py index 407a054184748..5d18b4619baee 100644 --- a/airflow/providers/amazon/aws/sensors/s3.py +++ b/airflow/providers/amazon/aws/sensors/s3.py @@ -21,6 +21,7 @@ import os import re from datetime import datetime +from functools import cached_property from typing import TYPE_CHECKING, Callable, Sequence from deprecated import deprecated @@ -28,7 +29,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.sensors.base import BaseSensorOperator, poke_mode_only diff --git a/airflow/providers/amazon/aws/sensors/sagemaker.py b/airflow/providers/amazon/aws/sensors/sagemaker.py index b02ea8902b136..7e8340ed90e3c 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker.py @@ -17,11 +17,11 @@ from __future__ import annotations import time +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/airflow/providers/amazon/aws/sensors/sqs.py index 6dc032c3fe95b..be6698dcac334 100644 --- a/airflow/providers/amazon/aws/sensors/sqs.py +++ b/airflow/providers/amazon/aws/sensors/sqs.py @@ -19,13 +19,12 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Any, Collection, Sequence +from functools import cached_property +from typing import TYPE_CHECKING, Any, Collection, Literal, Sequence from deprecated import deprecated from jsonpath_ng import parse -from typing_extensions import Literal -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection from airflow.providers.amazon.aws.hooks.sqs import SqsHook diff --git a/airflow/providers/amazon/aws/sensors/step_function.py b/airflow/providers/amazon/aws/sensors/step_function.py index 2a0c8b10db993..e3345b4337312 100644 --- a/airflow/providers/amazon/aws/sensors/step_function.py +++ b/airflow/providers/amazon/aws/sensors/step_function.py @@ -17,11 +17,11 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/airflow/providers/amazon/aws/transfers/s3_to_sql.py index 99d4fab6bd4ed..e52ebbaa1bc68 100644 --- a/airflow/providers/amazon/aws/transfers/s3_to_sql.py +++ 
b/airflow/providers/amazon/aws/transfers/s3_to_sql.py @@ -16,10 +16,10 @@ # under the License. from __future__ import annotations +from functools import cached_property from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Callable, Iterable, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/airflow/providers/amazon/aws/transfers/sql_to_s3.py index 8cee9b6cffb15..04aafdd67c0f5 100644 --- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py @@ -20,9 +20,7 @@ import enum from collections import namedtuple from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, Iterable, Mapping, Sequence - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Iterable, Literal, Mapping, Sequence from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook @@ -157,11 +155,15 @@ def _fix_dtypes(df: DataFrame, file_format: FILE_FORMAT) -> None: notna_series = df[col].dropna().values if np.equal(notna_series, notna_series.astype(int)).all(): # set to dtype that retains integers and supports NaNs - df[col] = np.where(df[col].isnull(), None, df[col]) + # The type ignore can be removed here if https://github.com/numpy/numpy/pull/23690 + # is merged and released as currently NumPy does not consider None as valid for x/y. + df[col] = np.where(df[col].isnull(), None, df[col]) # type: ignore[call-overload] df[col] = df[col].astype(Int64Dtype()) elif np.isclose(notna_series, notna_series.astype(int)).all(): # set to float dtype that retains floats and supports NaNs - df[col] = np.where(df[col].isnull(), None, df[col]) + # The type ignore can be removed here if https://github.com/numpy/numpy/pull/23690 + # is merged and released + df[col] = np.where(df[col].isnull(), None, df[col]) # type: ignore[call-overload] df[col] = df[col].astype(Float64Dtype()) def execute(self, context: Context) -> None: diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/airflow/providers/amazon/aws/triggers/redshift_cluster.py index ef19d0b5a1d66..6d9a657e0ce16 100644 --- a/airflow/providers/amazon/aws/triggers/redshift_cluster.py +++ b/airflow/providers/amazon/aws/triggers/redshift_cluster.py @@ -16,9 +16,9 @@ # under the License. 
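The `sql_to_s3.py` hunk above is one of the few places this change touches more than imports: `_fix_dtypes` replaces NaN with `None` before casting a column to pandas' nullable dtypes, and the added `# type: ignore[call-overload]` works around NumPy stubs that (pending the linked upstream PR) reject `None` as a `np.where` branch. A condensed sketch of what the integer branch does; the column name and values are illustrative:

```python
import numpy as np
import pandas as pd

df = pd.DataFrame({"col": [1.0, 2.0, np.nan]})  # floats only because of NaN

notna = df["col"].dropna().values
if np.equal(notna, notna.astype(int)).all():
    # Replace NaN with None so the nullable-integer cast below succeeds.
    df["col"] = np.where(df["col"].isnull(), None, df["col"])  # type: ignore[call-overload]
    df["col"] = df["col"].astype(pd.Int64Dtype())

print(df["col"].dtype)  # Int64 -- integers preserved, missing value is <NA>
```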
from __future__ import annotations +from functools import cached_property from typing import Any, AsyncIterator -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftAsyncHook, RedshiftHook from airflow.triggers.base import BaseTrigger, TriggerEvent diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/airflow/providers/amazon/aws/utils/connection_wrapper.py index 9a62dc2c84ef4..3fbc479abfe14 100644 --- a/airflow/providers/amazon/aws/utils/connection_wrapper.py +++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py @@ -20,11 +20,11 @@ import warnings from copy import deepcopy from dataclasses import MISSING, InitVar, dataclass, field, fields +from functools import cached_property from typing import TYPE_CHECKING, Any from botocore.config import Config -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.amazon.aws.utils import trim_none_values from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/apache/flink/operators/flink_kubernetes.py b/airflow/providers/apache/flink/operators/flink_kubernetes.py index 3d365f40be57b..245964a730f4d 100644 --- a/airflow/providers/apache/flink/operators/flink_kubernetes.py +++ b/airflow/providers/apache/flink/operators/flink_kubernetes.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from kubernetes.client import CoreV1Api -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook diff --git a/airflow/providers/apache/kafka/hooks/base.py b/airflow/providers/apache/kafka/hooks/base.py index eb5429a6a56b7..777beb116e136 100644 --- a/airflow/providers/apache/kafka/hooks/base.py +++ b/airflow/providers/apache/kafka/hooks/base.py @@ -16,11 +16,11 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import Any from confluent_kafka.admin import AdminClient -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook diff --git a/airflow/providers/arangodb/hooks/arangodb.py b/airflow/providers/arangodb/hooks/arangodb.py index 2f51ac061806a..23b3aa41245ed 100644 --- a/airflow/providers/arangodb/hooks/arangodb.py +++ b/airflow/providers/arangodb/hooks/arangodb.py @@ -18,12 +18,12 @@ """This module allows connecting to a ArangoDB.""" from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any from arango import AQLQueryExecuteError, ArangoClient as ArangoDBClient from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook if TYPE_CHECKING: diff --git a/airflow/providers/asana/hooks/asana.py b/airflow/providers/asana/hooks/asana.py index 544a5afb59961..9a8a83f5b682b 100644 --- a/airflow/providers/asana/hooks/asana.py +++ b/airflow/providers/asana/hooks/asana.py @@ -18,13 +18,12 @@ """Connect to Asana.""" from __future__ import annotations -from functools import wraps +from functools import cached_property, wraps from typing import Any from asana import Client # type: ignore[attr-defined] from asana.error import NotFoundError # type: ignore[attr-defined] -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py index 045a7aafb286a..27b6b9b0eb333 100644 --- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py @@ -20,6 +20,7 @@ import json import tempfile import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Generator from asgiref.sync import sync_to_async @@ -29,7 +30,6 @@ from kubernetes_asyncio import client as async_client, config as async_config from urllib3.exceptions import HTTPError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/airflow/providers/cncf/kubernetes/operators/pod.py index 1bfa2ba03479d..f80e20efa5dfc 100644 --- a/airflow/providers/cncf/kubernetes/operators/pod.py +++ b/airflow/providers/cncf/kubernetes/operators/pod.py @@ -27,13 +27,13 @@ import warnings from collections.abc import Container from contextlib import AbstractContextManager +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from kubernetes.client import CoreV1Api, models as k8s from slugify import slugify from urllib3.exceptions import HTTPError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException from airflow.kubernetes import pod_generator from airflow.kubernetes.pod_generator import PodGenerator diff --git a/airflow/providers/common/sql/hooks/sql.py b/airflow/providers/common/sql/hooks/sql.py index 48e1a996ff438..32bd42caed523 100644 --- a/airflow/providers/common/sql/hooks/sql.py +++ b/airflow/providers/common/sql/hooks/sql.py @@ -18,12 +18,11 @@ from contextlib import closing from datetime 
import datetime -from typing import Any, Callable, Iterable, Mapping, Sequence, cast +from typing import Any, Callable, Iterable, Mapping, Protocol, Sequence, cast import sqlparse from packaging.version import Version from sqlalchemy import create_engine -from typing_extensions import Protocol from airflow import AirflowException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/common/sql/operators/sql.py b/airflow/providers/common/sql/operators/sql.py index eef4ba4e679c3..55c7f358bf948 100644 --- a/airflow/providers/common/sql/operators/sql.py +++ b/airflow/providers/common/sql/operators/sql.py @@ -19,9 +19,9 @@ import ast import re +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, NoReturn, Sequence, SupportsAbs -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowFailException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator, SkipMixin diff --git a/airflow/providers/databricks/hooks/databricks_base.py b/airflow/providers/databricks/hooks/databricks_base.py index 50ab2eff6a251..0d347eee4c02d 100644 --- a/airflow/providers/databricks/hooks/databricks_base.py +++ b/airflow/providers/databricks/hooks/databricks_base.py @@ -27,6 +27,7 @@ import copy import platform import time +from functools import cached_property from typing import Any from urllib.parse import urlsplit @@ -45,7 +46,6 @@ ) from airflow import __version__ -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import Connection diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index bfeb1c42ba207..3ff1233622cc2 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -20,10 +20,10 @@ import time import warnings +from functools import cached_property from logging import Logger from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator, BaseOperatorLink, XCom from airflow.providers.databricks.hooks.databricks import DatabricksHook, RunState diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/airflow/providers/databricks/operators/databricks_repos.py index f42114d474267..6ca348ffe142a 100644 --- a/airflow/providers/databricks/operators/databricks_repos.py +++ b/airflow/providers/databricks/operators/databricks_repos.py @@ -19,10 +19,10 @@ from __future__ import annotations import re +from functools import cached_property from typing import TYPE_CHECKING, Sequence from urllib.parse import urlsplit -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.databricks.hooks.databricks import DatabricksHook diff --git a/airflow/providers/databricks/sensors/databricks_sql.py b/airflow/providers/databricks/sensors/databricks_sql.py index 0cb6f2a88d431..8381f75d19024 100644 --- a/airflow/providers/databricks/sensors/databricks_sql.py +++ b/airflow/providers/databricks/sensors/databricks_sql.py @@ -20,9 +20,9 @@ from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, 
Iterable, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.common.sql.hooks.sql import fetch_all_handler from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/airflow/providers/dbt/cloud/hooks/dbt.py index f0f3e6d0dbdd2..e648f5a59564d 100644 --- a/airflow/providers/dbt/cloud/hooks/dbt.py +++ b/airflow/providers/dbt/cloud/hooks/dbt.py @@ -19,7 +19,7 @@ import json import time from enum import Enum -from functools import wraps +from functools import cached_property, wraps from inspect import signature from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast @@ -29,7 +29,6 @@ from requests.auth import AuthBase from requests.sessions import Session -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.http.hooks.http import HttpHook from airflow.typing_compat import TypedDict diff --git a/airflow/providers/docker/hooks/docker.py b/airflow/providers/docker/hooks/docker.py index d67846e9ec040..35a8077250e31 100644 --- a/airflow/providers/docker/hooks/docker.py +++ b/airflow/providers/docker/hooks/docker.py @@ -18,13 +18,13 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Any from docker import APIClient, TLSConfig from docker.constants import DEFAULT_TIMEOUT_SECONDS from docker.errors import APIError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index 4443956f2f47d..877f07e0e5fd6 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -23,6 +23,7 @@ import tarfile import warnings from collections.abc import Container +from functools import cached_property from io import BytesIO, StringIO from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Iterable, Sequence @@ -32,7 +33,6 @@ from docker.types import LogConfig, Mount from dotenv import dotenv_values -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException from airflow.models import BaseOperator from airflow.providers.docker.hooks.docker import DockerHook diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/airflow/providers/elasticsearch/hooks/elasticsearch.py index ae54db037db2d..63463a178216c 100644 --- a/airflow/providers/elasticsearch/hooks/elasticsearch.py +++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py @@ -18,12 +18,12 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import Any from elasticsearch import Elasticsearch from es.elastic.api import Connection as ESConnection, connect -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.models.connection import Connection as AirflowConnection diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py index 86048712c965b..e0a6e67778aa1 100644 --- a/airflow/providers/facebook/ads/hooks/ads.py +++ 
b/airflow/providers/facebook/ads/hooks/ads.py @@ -20,6 +20,7 @@ import time from enum import Enum +from functools import cached_property from typing import Any from facebook_business.adobjects.adaccount import AdAccount @@ -27,7 +28,6 @@ from facebook_business.adobjects.adsinsights import AdsInsights from facebook_business.api import FacebookAdsApi -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/ftp/operators/ftp.py b/airflow/providers/ftp/operators/ftp.py index 41b913a64f495..baec63373b6fa 100644 --- a/airflow/providers/ftp/operators/ftp.py +++ b/airflow/providers/ftp/operators/ftp.py @@ -19,10 +19,10 @@ from __future__ import annotations import os +from functools import cached_property from pathlib import Path from typing import Any, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.ftp.hooks.ftp import FTPHook, FTPSHook diff --git a/airflow/providers/google/ads/hooks/ads.py b/airflow/providers/google/ads/hooks/ads.py index eadc9cacbab3c..8c5cdc85ab16a 100644 --- a/airflow/providers/google/ads/hooks/ads.py +++ b/airflow/providers/google/ads/hooks/ads.py @@ -18,13 +18,13 @@ """This module contains Google Ad hook.""" from __future__ import annotations +from functools import cached_property from tempfile import NamedTemporaryFile from typing import IO, Any from google.auth.exceptions import GoogleAuthError from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook from airflow.providers.google.common.hooks.base_google import get_field from airflow.providers.google_vendor.googleads.client import GoogleAdsClient diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py index 9ea72e63bbba9..cc511d74576c9 100644 --- a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py +++ b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py @@ -17,12 +17,12 @@ from __future__ import annotations import re +from functools import cached_property import google from google.api_core.exceptions import InvalidArgument, NotFound, PermissionDenied from google.cloud.secretmanager_v1 import SecretManagerServiceClient -from airflow.compat.functools import cached_property from airflow.providers.google.common.consts import CLIENT_INFO from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py index 6e7e5aa005746..9b52cd98b5f1d 100644 --- a/airflow/providers/google/cloud/hooks/automl.py +++ b/airflow/providers/google/cloud/hooks/automl.py @@ -24,6 +24,7 @@ """ from __future__ import annotations +from functools import cached_property from typing import Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -48,7 +49,6 @@ ) from google.protobuf.field_mask_pb2 import FieldMask -from airflow.compat.functools import cached_property from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py index 7b474070aee37..118e33d37f2a5 100644 --- 
a/airflow/providers/google/cloud/hooks/compute_ssh.py +++ b/airflow/providers/google/cloud/hooks/compute_ssh.py @@ -18,13 +18,13 @@ import shlex import time +from functools import cached_property from io import StringIO from typing import Any from google.api_core.retry import exponential_sleep_generator from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.os_login import OSLoginHook from airflow.providers.ssh.hooks.ssh import SSHHook diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py index f5be0f5d189a2..a54f6098ce312 100644 --- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py +++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py @@ -29,6 +29,7 @@ import json import time import warnings +from functools import cached_property from typing import Sequence import google.auth.credentials @@ -49,7 +50,6 @@ from urllib3.exceptions import HTTPError from airflow import version -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.kubernetes.pod_generator_deprecated import PodDefaults from airflow.providers.google.common.consts import CLIENT_INFO diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index 88c9a0262d884..a12c76d547a12 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -27,6 +27,7 @@ import warnings from base64 import b64decode +from functools import cached_property from typing import Sequence from uuid import uuid4 @@ -46,7 +47,6 @@ ) from googleapiclient.errors import HttpError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index dace3619cff4f..cefc4b7299e4b 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -19,6 +19,7 @@ from __future__ import annotations from copy import deepcopy +from functools import cached_property from typing import Any, Callable, Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -34,7 +35,6 @@ ) from google.protobuf.json_format import MessageToDict -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index 303145310f10a..c8852a6e8ca3f 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -20,6 +20,7 @@ import logging import os import shutil +from functools import cached_property from pathlib import Path from typing import Collection @@ -27,7 +28,6 @@ from google.cloud import storage # type: ignore[attr-defined] from packaging.version import Version -from 
airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import AirflowNotFoundException from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index 5190fbad760b5..40a7da19b0a77 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -19,6 +19,7 @@ import logging from contextvars import ContextVar +from functools import cached_property from typing import Collection from urllib.parse import urlencode @@ -29,7 +30,6 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse -from airflow.compat.functools import cached_property from airflow.models import TaskInstance from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id from airflow.providers.google.common.consts import CLIENT_INFO diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py index d230e6c157aff..d9e013afa68c3 100644 --- a/airflow/providers/google/cloud/operators/bigquery_dts.py +++ b/airflow/providers/google/cloud/operators/bigquery_dts.py @@ -19,6 +19,7 @@ from __future__ import annotations import time +from functools import cached_property from typing import TYPE_CHECKING, Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -31,7 +32,6 @@ ) from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id from airflow.providers.google.cloud.links.bigquery_dts import BigQueryDataTransferConfigLink from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index eeec715edfc35..e6ada99c3f734 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -24,10 +24,10 @@ import warnings from contextlib import ExitStack from enum import Enum +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType from airflow.providers.google.cloud.hooks.dataflow import ( diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py index c1c277e8a0c9f..7d52f7f2d9bee 100644 --- a/airflow/providers/google/cloud/operators/kubernetes_engine.py +++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py @@ -19,13 +19,13 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Sequence from google.api_core.exceptions import AlreadyExists from google.cloud.container_v1.types import Cluster from kubernetes.client.models import V1Pod -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning try: diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/airflow/providers/hashicorp/_internal_client/vault_client.py index ea8aaf0071230..8a463cf1b9188 100644 --- a/airflow/providers/hashicorp/_internal_client/vault_client.py +++ b/airflow/providers/hashicorp/_internal_client/vault_client.py @@ -16,12 +16,13 @@ # under the License. from __future__ import annotations +from functools import cached_property + import hvac from hvac.api.auth_methods import Kubernetes from hvac.exceptions import InvalidPath, VaultError from requests import Response -from airflow.compat.functools import cached_property from airflow.utils.log.logging_mixin import LoggingMixin DEFAULT_KUBERNETES_JWT_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token" diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index aab84a04c5499..cb5d0cfcc909c 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -19,13 +19,13 @@ import os import shutil +from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any from azure.core.exceptions import HttpResponseError from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.utils.log.file_task_handler import FileTaskHandler from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/microsoft/azure/secrets/key_vault.py b/airflow/providers/microsoft/azure/secrets/key_vault.py index 3ab8a663555ae..34b78b5031446 100644 --- a/airflow/providers/microsoft/azure/secrets/key_vault.py +++ b/airflow/providers/microsoft/azure/secrets/key_vault.py @@ -18,12 +18,12 @@ import re import warnings +from functools import cached_property from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.keyvault.secrets import SecretClient -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.secrets import BaseSecretsBackend from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py b/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py index 7f8aa7eb5bb00..37d642036c520 100644 --- a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +++ b/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py @@ -20,13 +20,13 @@ import os from collections import namedtuple +from functools import cached_property from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.microsoft.azure.hooks.wasb import WasbHook diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py index 6d2ccd50277c4..7bd2620909d68 100644 --- a/airflow/providers/salesforce/hooks/salesforce.py +++ b/airflow/providers/salesforce/hooks/salesforce.py @@ -26,13 +26,13 @@ import logging import time +from functools import cached_property from typing import Any, Iterable import pandas as pd from requests import Session from simple_salesforce import Salesforce, api 
-from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook log = logging.getLogger(__name__) diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py index 2ba9d786dc71a..9d0bc588a377f 100644 --- a/airflow/providers/slack/hooks/slack.py +++ b/airflow/providers/slack/hooks/slack.py @@ -19,14 +19,13 @@ import json import warnings -from functools import wraps +from functools import cached_property, wraps from pathlib import Path from typing import TYPE_CHECKING, Any, Sequence from slack_sdk import WebClient from slack_sdk.errors import SlackApiError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.providers.slack.utils import ConnectionExtraConfig diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py index ba2356f59cf23..aef5f29e715a9 100644 --- a/airflow/providers/slack/hooks/slack_webhook.py +++ b/airflow/providers/slack/hooks/slack_webhook.py @@ -19,13 +19,12 @@ import json import warnings -from functools import wraps +from functools import cached_property, wraps from typing import TYPE_CHECKING, Any, Callable from urllib.parse import urlsplit from slack_sdk import WebhookClient -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.models import Connection diff --git a/airflow/providers/slack/notifications/slack_notifier.py b/airflow/providers/slack/notifications/slack_notifier.py index b48ed3670b06f..d1c0f256f101f 100644 --- a/airflow/providers/slack/notifications/slack_notifier.py +++ b/airflow/providers/slack/notifications/slack_notifier.py @@ -18,9 +18,9 @@ from __future__ import annotations import json +from functools import cached_property from typing import Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowOptionalProviderFeatureException try: diff --git a/airflow/providers/slack/operators/slack.py b/airflow/providers/slack/operators/slack.py index bd6ea4c1a3930..15348494e5c6f 100644 --- a/airflow/providers/slack/operators/slack.py +++ b/airflow/providers/slack/operators/slack.py @@ -19,9 +19,9 @@ import json import warnings +from functools import cached_property from typing import Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.slack.hooks.slack import SlackHook diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py index cd11db7ae4a8e..4eac057af3e5c 100644 --- a/airflow/providers/slack/operators/slack_webhook.py +++ b/airflow/providers/slack/operators/slack_webhook.py @@ -18,9 +18,9 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py index 6d914c6eab691..21f743dd6ebea 100644 --- 
a/airflow/providers/ssh/hooks/ssh.py +++ b/airflow/providers/ssh/hooks/ssh.py @@ -21,6 +21,7 @@ import os import warnings from base64 import decodebytes +from functools import cached_property from io import StringIO from select import select from typing import Any, Sequence @@ -30,7 +31,6 @@ from sshtunnel import SSHTunnelForwarder from tenacity import Retrying, stop_after_attempt, wait_fixed, wait_random -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.utils.platform import getuser diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index 3c11050bfc3e1..b107fac4d8a07 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -130,7 +130,7 @@ def _evaluate_trigger_rule( upstream_tasks = {t.task_id: t for t in task.upstream_list} trigger_rule = task.trigger_rule - @functools.lru_cache() + @functools.lru_cache def _get_expanded_ti_count() -> int: """Get how many tis the current task is supposed to be expanded into. @@ -139,7 +139,7 @@ def _get_expanded_ti_count() -> int: """ return task.get_mapped_ti_count(ti.run_id, session=session) - @functools.lru_cache() + @functools.lru_cache def _get_relevant_upstream_map_indexes(upstream_id: str) -> int | range | None: """Get the given task's map indexes relevant to the current ti. diff --git a/airflow/timetables/_cron.py b/airflow/timetables/_cron.py index b1e315a7d14f1..49f5771966adc 100644 --- a/airflow/timetables/_cron.py +++ b/airflow/timetables/_cron.py @@ -17,6 +17,7 @@ from __future__ import annotations import datetime +from functools import cached_property from typing import Any from cron_descriptor import CasingTypeEnum, ExpressionDescriptor, FormatException, MissingFieldException @@ -24,7 +25,6 @@ from pendulum import DateTime from pendulum.tz.timezone import Timezone -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowTimetableInvalid from airflow.utils.dates import cron_presets from airflow.utils.timezone import convert_to_utc, make_aware, make_naive diff --git a/airflow/typing_compat.py b/airflow/typing_compat.py index e7bc6de304e65..a17c6b621ec6f 100644 --- a/airflow/typing_compat.py +++ b/airflow/typing_compat.py @@ -28,17 +28,13 @@ ] import sys - -if sys.version_info >= (3, 8): - from typing import Protocol, TypedDict, runtime_checkable -else: - from typing_extensions import Protocol, TypedDict, runtime_checkable +from typing import Protocol, TypedDict, runtime_checkable # Literal in 3.8 is limited to one single argument, not e.g. "Literal[1, 2]". if sys.version_info >= (3, 9): from typing import Literal else: from typing_extensions import Literal if sys.version_info >= (3, 10): from typing import ParamSpec, TypeGuard diff --git a/airflow/utils/context.py b/airflow/utils/context.py index 4543a8066a970..3eb4b2b234c46 100644 --- a/airflow/utils/context.py +++ b/airflow/utils/context.py @@ -31,6 +31,7 @@ KeysView, Mapping, MutableMapping, + SupportsIndex, ValuesView, ) @@ -186,7 +187,7 @@ def __init__(self, context: MutableMapping[str, Any] | None = None, **kwargs: An def __repr__(self) -> str: return repr(self._context) - def __reduce_ex__(self, protocol: int) -> tuple[Any, ...]: + def __reduce_ex__(self, protocol: SupportsIndex) -> tuple[Any, ...]: """Pickle the context as a dict.
We are intentionally going through ``__getitem__`` in this function, diff --git a/airflow/utils/file.py b/airflow/utils/file.py index 81089e06d4c26..e7db5c126f740 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -25,10 +25,9 @@ import zipfile from collections import OrderedDict from pathlib import Path -from typing import TYPE_CHECKING, Generator, NamedTuple, Pattern, overload +from typing import TYPE_CHECKING, Generator, NamedTuple, Pattern, Protocol, overload from pathspec.patterns import GitWildMatchPattern -from typing_extensions import Protocol from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index dcd22f557bcb7..eda9ce551abc8 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -23,13 +23,13 @@ import warnings from contextlib import suppress from enum import Enum +from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Iterable from urllib.parse import urljoin import pendulum -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.executors.executor_loader import ExecutorLoader diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py index 63529e71e2924..2ed475c1e2975 100644 --- a/airflow/utils/log/log_reader.py +++ b/airflow/utils/log/log_reader.py @@ -18,11 +18,11 @@ import logging import time +from functools import cached_property from typing import Iterator from sqlalchemy.orm.session import Session -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.models.taskinstance import TaskInstance from airflow.utils.helpers import render_log_filename diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index cac82d0dd12e2..042671d4a91de 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -21,6 +21,7 @@ import logging import re import sys +from functools import cached_property from typing import ( TYPE_CHECKING, Any, @@ -37,7 +38,7 @@ ) from airflow import settings -from airflow.compat.functools import cache, cached_property +from airflow.compat.functools import cache from airflow.typing_compat import TypeGuard if TYPE_CHECKING: diff --git a/airflow/www/extensions/init_views.py b/airflow/www/extensions/init_views.py index d3dc3b7c62324..0639c8c805937 100644 --- a/airflow/www/extensions/init_views.py +++ b/airflow/www/extensions/init_views.py @@ -18,6 +18,7 @@ import logging import warnings +from functools import cached_property from os import path from connexion import FlaskApi, ProblemException, Resolver @@ -26,7 +27,6 @@ from flask import Flask, request from airflow.api_connexion.exceptions import common_error_handler -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.security import permissions diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py index 8f0363186bfa0..555d9ac99fbc2 100644 --- a/airflow/www/fab_security/manager.py +++ b/airflow/www/fab_security/manager.py @@ -23,6 +23,7 @@ import json import logging import re +from functools import cached_property from typing import Any from uuid import uuid4 @@ -72,7 +73,6 @@ from 
flask_login import AnonymousUserMixin, LoginManager, current_user from werkzeug.security import check_password_hash, generate_password_hash -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.www.fab_security.sqla.models import Action, Permission, RegisterUser, Resource, Role, User diff --git a/airflow/www/views.py b/airflow/www/views.py index ed2990890bf80..3b7d6c1088fe8 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -30,7 +30,7 @@ import warnings from bisect import insort_left from collections import defaultdict -from functools import wraps +from functools import cached_property, wraps from json import JSONDecodeError from typing import Any, Callable, Collection, Iterator, Mapping, MutableMapping, Sequence from urllib.parse import unquote, urljoin, urlsplit @@ -83,7 +83,6 @@ set_dag_run_state_to_success, set_state, ) -from airflow.compat.functools import cached_property from airflow.configuration import AIRFLOW_CONFIG, conf from airflow.datasets import Dataset from airflow.exceptions import AirflowException, ParamValidationError, RemovedInAirflow3Warning diff --git a/constraints/README.md b/constraints/README.md index 1ddfc1a3dfafc..791450d1bd7c9 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt)) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.7 --airflow-constraints-location constraints/constraints-3.7txt +breeze ci-image build --python 3.8 --airflow-constraints-location constraints/constraints-3.8.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.7.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.8.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index e248468f3b5ac..e9a5ff3710342 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -571,7 +571,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==<VERSION>rc<X> \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.7.txt"` + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-<VERSION>/constraints-3.8.txt" ``` Note that the constraints contain python version that you are installing it with.
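Two smaller points from the Python hunks earlier are easy to miss. First, `trigger_rule_dep.py` drops the parentheses from `@functools.lru_cache`, which is valid from Python 3.8 onwards because `lru_cache` can be applied directly as a decorator. Second, `secrets_masker.py` keeps importing `cache` from `airflow.compat.functools` even as `cached_property` moves to the stdlib: `functools.cache` only exists from Python 3.9, so a 3.8 floor still needs a shim for it. A minimal sketch of such a shim, assuming (as the compat module presumably does) that an unbounded `lru_cache` serves as the fallback:

```python
import sys
from functools import lru_cache

if sys.version_info >= (3, 9):
    from functools import cache  # stdlib from 3.9 onwards
else:
    # Assumed 3.8 fallback: an unbounded lru_cache behaves like functools.cache.
    cache = lru_cache(maxsize=None)


@cache
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)


print(fib(30))  # 832040, memoized so each value is computed only once
```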
@@ -583,7 +583,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version <VERSION>rc<X> --python 3.7 --backend postgres +breeze start-airflow --use-airflow-version <VERSION>rc<X> --python 3.8 --backend postgres ``` Once you install and run Airflow, you should perform any verification you see as necessary to check @@ -667,7 +667,7 @@ the older branches, you should set the "skip" field to true. ## Verify production images ```shell script -for PYTHON in 3.7 3.8 3.9 3.10 +for PYTHON in 3.8 3.9 3.10 do docker pull apache/airflow:${VERSION}-python${PYTHON} breeze prod-image verify --image-name apache/airflow:${VERSION}-python${PYTHON} diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 98ebc39a8224d..000d8e66fdba8 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -720,7 +720,7 @@ pip install apache-airflow-providers-<provider>==<VERSION>rc<X> ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.7 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.8 --backend postgres \ --load-example-dags --load-default-connections ``` @@ -851,7 +851,7 @@ do svn mv "${file}" "${base_file//rc[0-9]/}" done -# Check which old packages will be removed (you need Python 3.7+ and dev/requirements.txt installed) +# Check which old packages will be removed (you need Python 3.8+ and dev/requirements.txt installed) python ${AIRFLOW_REPO_ROOT}/dev/provider_packages/remove_old_releases.py --directory . # Remove those packages diff --git a/dev/breeze/README.md b/dev/breeze/README.md index ad0f24771d291..69bdaf4cede6a 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: f799f7c7b42d8bb6dcee79f989d4af33b96b531bcc5c9a46002e444d8f4c4b316e7ab77f9dfd6a6b396cba8226a3cbfeb450128bd55ffa443d15139a4cc74d22 +Package config hash: 5a58d062de9c220a74215d4beedb98228f93c5b9fc1ef590b3176605ab50197e446e6e38e5e889e1c4e46ba720314c54a0b1efb87964b55f3856bdceaa9852ce --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/SELECTIVE_CHECKS.md b/dev/breeze/SELECTIVE_CHECKS.md index ada5f359bb9e0..f2d2ab72b6725 100644 --- a/dev/breeze/SELECTIVE_CHECKS.md +++ b/dev/breeze/SELECTIVE_CHECKS.md @@ -122,8 +122,8 @@ Github Actions to pass the list of parameters to a command to execute | Output | Meaning of the output | Example value | List as string | |------------------------------------|---------------------------------------------------------------------------------------------------------|------------------------------------------------------------|----------------| | affected-providers-list-as-string | List of providers affected when they are selectively affected.
| airbyte http | * | -| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.7', '3.8', '3.9', '3.10'] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.7 3.8 3.9 3.10 | * | +| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.8', '3.9', '3.10'] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.8 3.9 3.10 | * | | basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | | cache-directive | Which cache should be be used for images ("registry", "local" , "disabled") | registry | | | debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | @@ -135,14 +135,14 @@ | default-mssql-version | Which MsSQL version to use as default | 2017-latest | | | default-mysql-version | Which MySQL version to use as default | 5.7 | | | default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.7 | | +| default-python-version | Which Python version to use as default | 3.8 | | | docs-build | Whether to build documentation ("true"/"false") | true | | | docs-filter-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | --package-filter apache-airflow -package-filter-helm-chart | | | full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) | false | | | helm-version | Which Helm version to use for tests | v3.9.4 | | | image-build | Whether CI image build is needed | true | | | kind-version | Which Kind version to use for tests | v0.16.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.7-v1.25.2 3.8-v1.26.4 | * | +| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.8-v1.25.2 3.9-v1.26.4 | * | | kubernetes-versions | All Kubernetes versions to use for tests as JSON array | ['v1.25.2'] | | | kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | | mssql-exclude | Which versions of MsSQL to exclude for tests as JSON array | [] | | @@ -157,8 +157,8 @@ | parallel-test-types-list-as-string | Which test types should be run for unit tests | API Always Providers\[amazon\] Providers\[-amazon\] | * | | postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | | postgres-versions | Which versions of Postgres to use for tests as JSON array | ['10'] | | -| python-versions | Which versions of Python to use for tests as JSON array | ['3.7'] | | -| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.7 | * | +| python-versions | Which versions of Python to use for tests as JSON array | ['3.8'] | | +| python-versions-list-as-string | Which versions of Python to use for tests as space-separated string | 3.8 | * | | run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") |
true | | | run-tests | Whether unit tests should be run ("true"/"false") | true | | | run-www-tests | Whether WWW tests should be run ("true"/"false") | true | | diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md index 8aab0db417534..d0818ad2bb913 100644 --- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md +++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md @@ -138,9 +138,9 @@ There are a few properties of Breeze/CI scripts that should be maintained though run a command and get everything done with the least number of prerequisites * The prerequisites for Breeze and CI are: - * Python 3.7+ (Python 3.7 end of life is Jun 2023) - * Docker (TBD which minimum version supported) - * Docker Compose (TBD which minimum version supported) + * Python 3.8+ (Python 3.8 end of life is October 2024) + * Docker (23.0+) + * Docker Compose (2.16.0+) * No other tools and CLI commands should be needed * The python requirements should be automatically installed when missing in a "Breeze" venv and updated automatically when needed. The number of Python dependencies needed to run Breeze and CI scripts diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg index 4e59505678969..c1993ffe8ecef 100644 --- a/dev/breeze/setup.cfg +++ b/dev/breeze/setup.cfg @@ -33,7 +33,6 @@ classifiers = Environment :: Console Intended Audience :: Developers License :: OSI Approved :: Apache Software License - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 @@ -48,16 +47,14 @@ project_urls = [options] zip_safe = False include_package_data = True -python_requires = ~=3.7 +python_requires = ~=3.8 package_dir= =src packages = find: install_requires = - cached_property>=1.5.0;python_version<="3.7" click filelock inputimeout - importlib-metadata>=4.4; python_version < "3.8" jinja2 packaging pendulum diff --git a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py index 76ff59c34c84f..3df2936257aa9 100644 --- a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py +++ b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py @@ -173,7 +173,7 @@ def create_minor_version_branch(version_branch): create_branch(version_branch) # Build ci image if confirm_action("Build latest breeze image?"): - run_command(["breeze", "ci-image", "build", "--python", "3.7"], dry_run_override=DRY_RUN, check=True) + run_command(["breeze", "ci-image", "build", "--python", "3.8"], dry_run_override=DRY_RUN, check=True) # Update default branches update_default_branch(version_branch) # Commit changes diff --git a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py index c05b067eefed3..023e82b9d6b69 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py +++ b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py @@ -345,7 +345,7 @@ def publish_release_candidate(version, previous_version, github_token): git_clean() # Build the latest image if confirm_action("Build latest breeze image?"): - run_command(["breeze", "ci-image", "build", "--python", "3.7"], dry_run_override=DRY_RUN, check=True) + run_command(["breeze", "ci-image", "build", "--python", "3.8"], dry_run_override=DRY_RUN, check=True) # Create the tarball 
tarball_release(version, version_without_rc) # Create the artifacts diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 3ce73ebe91615..9d5ea9084d182 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -36,7 +36,7 @@ APACHE_AIRFLOW_GITHUB_REPOSITORY = "apache/airflow" # Checked before putting in build cache -ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.8", "3.9", "3.10"] DEFAULT_PYTHON_MAJOR_MINOR_VERSION = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0] ALLOWED_ARCHITECTURES = [Architecture.X86_64, Architecture.ARM] ALLOWED_BACKENDS = ["sqlite", "mysql", "postgres", "mssql"] @@ -176,7 +176,7 @@ def get_default_platform_machine() -> str: PYTHONDONTWRITEBYTECODE = True PRODUCTION_IMAGE = False -ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.8", "3.9", "3.10"] CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ALL_PYTHON_MAJOR_MINOR_VERSIONS CURRENT_POSTGRES_VERSIONS = ["11", "12", "13", "14", "15"] DEFAULT_POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0] diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index 220ef183f3970..9c3b56c24ef46 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -61,7 +61,7 @@ class CommonBuildParams: prepare_buildx_cache: bool = False python_image: str | None = None push: bool = False - python: str = "3.7" + python: str = "3.8" tag_as_latest: bool = False upgrade_to_newer_dependencies: bool = False upgrade_on_failure: bool = False diff --git a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py index 2b84090008152..65458e6208642 100644 --- a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py @@ -170,11 +170,7 @@ def _download_tool_if_needed( get_console().print(f"[info]Downloading from:[/] {url}") if get_dry_run(): return - try: - # we can add missing_ok when we drop Python 3.7 - path.unlink() - except OSError: - pass + path.unlink(missing_ok=True) path.parent.mkdir(parents=True, exist_ok=True) num_tries = 4 if not uncompress_file: diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index 66bc3e1a406e1..90eeaa7dcf8f6 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -378,7 +378,7 @@ def check_if_image_exists(image: str) -> bool: def get_ci_image_for_pre_commits() -> str: github_repository = os.environ.get("GITHUB_REPOSITORY", APACHE_AIRFLOW_GITHUB_REPOSITORY) - python_version = "3.7" + python_version = "3.8" airflow_image = f"ghcr.io/{github_repository}/{AIRFLOW_BRANCH}/ci/python{python_version}" skip_image_pre_commits = os.environ.get("SKIP_IMAGE_PRE_COMMITS", "false") if skip_image_pre_commits[0].lower() == "t": diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 3b15c5b39af93..743ed6aee98a5 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -20,29 +20,9 @@ import os import sys from enum import Enum - -from 
airflow_breeze.utils.exclude_from_matrix import excluded_combos -from airflow_breeze.utils.github_actions import get_ga_output -from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos -from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, - AIRFLOW_SOURCES_ROOT, - DOCS_DIR, - SYSTEM_TESTS_PROVIDERS_ROOT, - TESTS_PROVIDERS_ROOT, -) - -if sys.version_info >= (3, 8): - from functools import cached_property -else: - # noinspection PyUnresolvedReferences - from cached_property import cached_property - -from functools import lru_cache +from functools import cached_property, lru_cache from re import match -from typing import Any, Dict, List, TypeVar - -from typing_extensions import Literal +from typing import Any, Dict, List, Literal, TypeVar from airflow_breeze.global_constants import ( ALL_PYTHON_MAJOR_MINOR_VERSIONS, @@ -64,6 +44,16 @@ all_selective_test_types, ) from airflow_breeze.utils.console import get_console +from airflow_breeze.utils.exclude_from_matrix import excluded_combos +from airflow_breeze.utils.github_actions import get_ga_output +from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos +from airflow_breeze.utils.path_utils import ( + AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_SOURCES_ROOT, + DOCS_DIR, + SYSTEM_TESTS_PROVIDERS_ROOT, + TESTS_PROVIDERS_ROOT, +) FULL_TESTS_NEEDED_LABEL = "full tests needed" DEBUG_CI_RESOURCES_LABEL = "debug ci resources" diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py index a37a91165b4fc..69bee3f7d9a83 100644 --- a/dev/breeze/tests/test_cache.py +++ b/dev/breeze/tests/test_cache.py @@ -36,8 +36,8 @@ [ ("backend", "mysql", (True, ["sqlite", "mysql", "postgres", "mssql"]), None), ("backend", "xxx", (False, ["sqlite", "mysql", "postgres", "mssql"]), None), - ("python_major_minor_version", "3.8", (True, ["3.7", "3.8", "3.9", "3.10"]), None), - ("python_major_minor_version", "3.5", (False, ["3.7", "3.8", "3.9", "3.10"]), None), + ("python_major_minor_version", "3.8", (True, ["3.8", "3.9", "3.10"]), None), + ("python_major_minor_version", "3.7", (False, ["3.8", "3.9", "3.10"]), None), ("missing", "value", None, AttributeError), ], ) diff --git a/dev/breeze/tests/test_exclude_from_matrix.py b/dev/breeze/tests/test_exclude_from_matrix.py index 49bb0e57345d9..7280e0dd550ac 100644 --- a/dev/breeze/tests/test_exclude_from_matrix.py +++ b/dev/breeze/tests/test_exclude_from_matrix.py @@ -24,13 +24,13 @@ @pytest.mark.parametrize( "list_1, list_2, expected_representative_list", [ - (["3.6", "3.7"], ["1", "2"], [("3.6", "1"), ("3.7", "2")]), - (["3.6", "3.7"], ["1", "2", "3"], [("3.6", "1"), ("3.7", "2"), ("3.6", "3")]), - (["3.6", "3.7"], ["1", "2", "3", "4"], [("3.6", "1"), ("3.7", "2"), ("3.6", "3"), ("3.7", "4")]), + (["3.8", "3.9"], ["1", "2"], [("3.8", "1"), ("3.9", "2")]), + (["3.8", "3.9"], ["1", "2", "3"], [("3.8", "1"), ("3.9", "2"), ("3.8", "3")]), + (["3.8", "3.9"], ["1", "2", "3", "4"], [("3.8", "1"), ("3.9", "2"), ("3.8", "3"), ("3.9", "4")]), ( - ["3.6", "3.7", "3.8"], + ["3.8", "3.9", "3.10"], ["1", "2", "3", "4"], - [("3.6", "1"), ("3.7", "2"), ("3.8", "3"), ("3.6", "4")], + [("3.8", "1"), ("3.9", "2"), ("3.10", "3"), ("3.8", "4")], ), ], ) diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py index 4ac7adb34d286..56aecd36afe76 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -55,10 +55,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: 
str): ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -75,10 +75,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("airflow/api/file.py",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -98,10 +98,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "amazon common.sql google postgres", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -119,10 +119,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("tests/providers/apache/beam/file.py",), { "affected-providers-list-as-string": "apache.beam google", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -140,10 +140,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("docs/file.rst",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "false", @@ -164,10 +164,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "amazon common.sql google postgres", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -192,10 +192,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): { "affected-providers-list-as-string": "airbyte apache.livy " "dbt.cloud dingding discord http", - "all-python-versions": "['3.7']", - 
"all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -219,10 +219,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "airbyte http", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -245,10 +245,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -266,10 +266,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("setup.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + "python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -289,10 +289,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("generated/provider_dependencies.json",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + "python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -312,10 +312,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.sql exasol ftp google imap " "mongo mysql postgres salesforce ssh", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -333,10 +333,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, 
str], stderr: str): ("tests/providers/airbyte/__init__.py",), { "affected-providers-list-as-string": "airbyte http", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -354,10 +354,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.sql exasol ftp google imap " "mongo mysql postgres salesforce ssh", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -397,10 +397,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + "python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -424,10 +424,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + "python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -449,10 +449,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + "python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -474,10 +474,10 @@ def test_expected_output_pull_request_main( "v2-3-stable", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", - "python-versions": "['3.7', '3.8', '3.9', '3.10']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", + 
"python-versions": "['3.8', '3.9', '3.10']", + "python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -515,8 +515,8 @@ def test_expected_output_full_tests_needed( ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -538,8 +538,8 @@ def test_expected_output_full_tests_needed( "affected-providers-list-as-string": "amazon apache.beam apache.cassandra cncf.kubernetes " "common.sql facebook google hashicorp microsoft.azure microsoft.mssql " "mysql oracle postgres presto salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "needs-helm-tests": "false", "image-build": "true", "run-tests": "true", @@ -564,8 +564,8 @@ def test_expected_output_full_tests_needed( "cncf.kubernetes common.sql facebook google " "hashicorp microsoft.azure microsoft.mssql mysql oracle postgres presto " "salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -586,8 +586,8 @@ def test_expected_output_full_tests_needed( ), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -624,8 +624,8 @@ def test_expected_output_pull_request_v2_3( ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -641,8 +641,8 @@ def test_expected_output_pull_request_v2_3( ("tests/system/any_file.py",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -665,8 +665,8 @@ def test_expected_output_pull_request_v2_3( "cncf.kubernetes common.sql " "facebook google hashicorp microsoft.azure microsoft.mssql mysql " "oracle postgres presto salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -708,8 +708,8 @@ def test_expected_output_pull_request_v2_3( ), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -726,8 +726,8 @@ def test_expected_output_pull_request_v2_3( ("airflow/models/test.py",), { 
"affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -746,8 +746,8 @@ def test_expected_output_pull_request_v2_3( ("airflow/file.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -787,8 +787,8 @@ def test_expected_output_pull_request_target( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -807,8 +807,8 @@ def test_expected_output_pull_request_target( "v2-3-stable", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -826,8 +826,8 @@ def test_expected_output_pull_request_target( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -878,8 +878,8 @@ def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event): ) assert_outputs_are_printed( { - "all-python-versions": "['3.7', '3.8', '3.9', '3.10']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10", + "all-python-versions": "['3.8', '3.9', '3.10']", + "all-python-versions-list-as-string": "3.8 3.9 3.10", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", diff --git a/dev/check_files.py b/dev/check_files.py index 158ac5c046a38..c8085c806670b 100644 --- a/dev/check_files.py +++ b/dev/check_files.py @@ -33,7 +33,7 @@ """ AIRFLOW_DOCKER = """\ -FROM python:3.7 +FROM python:3.8 # Upgrade RUN pip install "apache-airflow=={}" diff --git a/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 b/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 index a4023b675876a..5fb6a9c74d321 100644 --- a/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 +++ b/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 @@ -63,7 +63,7 @@ python_tag=py3 [options] zip_safe = False include_package_data = True -python_requires = ~=3.7 +python_requires = ~=3.8 packages = find: setup_requires = {{ SETUP_REQUIREMENTS }} install_requires = {{ INSTALL_REQUIREMENTS }} diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index 5f994e943292b..774fc3650a817 100755 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -52,7 +52,7 @@ from rich.syntax import Syntax from yaml import safe_load 
-ALL_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_PYTHON_VERSIONS = ["3.8", "3.9", "3.10"] MIN_AIRFLOW_VERSION = "2.4.0" # In case you have some providers that you want to have different min-airflow version for, @@ -1112,7 +1112,7 @@ def get_provider_jinja_context( supported_python_versions = [ p for p in ALL_PYTHON_VERSIONS if p not in provider_details.excluded_python_versions ] - python_requires = "~=3.7" + python_requires = "~=3.8" for p in provider_details.excluded_python_versions: python_requires += f", !={p}" min_airflow_version = MIN_AIRFLOW_VERSION diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index 8c9f6eb78d386..f364e5bdba947 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -31,7 +31,7 @@ import rich_click as click -PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +PYTHON_VERSIONS = ["3.8", "3.9", "3.10"] GHCR_IO_PREFIX = "ghcr.io" diff --git a/dev/stats/get_important_pr_candidates.py b/dev/stats/get_important_pr_candidates.py index 28a8081e01f7a..760e9ced0e2ec 100755 --- a/dev/stats/get_important_pr_candidates.py +++ b/dev/stats/get_important_pr_candidates.py @@ -21,9 +21,9 @@ import math import pickle import re -import sys import textwrap from datetime import datetime +from functools import cached_property import pendulum import rich_click as click @@ -31,11 +31,6 @@ from github.PullRequest import PullRequest from rich.console import Console -if sys.version_info >= (3, 8): - from functools import cached_property -else: - from cached_property import cached_property - logger = logging.getLogger(__name__) diff --git a/dev/validate_version_added_fields_in_config.py b/dev/validate_version_added_fields_in_config.py index 40679331b7ff1..d68ce36a0de3a 100755 --- a/dev/validate_version_added_fields_in_config.py +++ b/dev/validate_version_added_fields_in_config.py @@ -83,7 +83,7 @@ def parse_config_template_old_format(config_content: str) -> set[tuple[str, str, } -@functools.lru_cache() +@functools.lru_cache def fetch_config_options_for_version(version_str: str) -> set[tuple[str, str]]: r = requests.get( f"https://raw.githubusercontent.com/apache/airflow/{version_str}/airflow/config_templates/config.yml" diff --git a/docs/apache-airflow-providers/installing-from-pypi.rst b/docs/apache-airflow-providers/installing-from-pypi.rst index 227398d6111a3..1eb9c6f90b80b 100644 --- a/docs/apache-airflow-providers/installing-from-pypi.rst +++ b/docs/apache-airflow-providers/installing-from-pypi.rst @@ -37,6 +37,6 @@ Typical command to install airflow from PyPI looks like below (you need to use t .. code-block:: - pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.3.0/constraints-3.7.txt" + pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.3.0/constraints-3.8.txt" This is an example, see :doc:`apache-airflow:installation/installing-from-pypi` for more examples, including how to upgrade the providers. 
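The `get_important_pr_candidates.py` and `validate_version_added_fields_in_config.py` hunks above lean on the same pair of 3.8 features: `functools.cached_property` replaces the third-party backport, and `functools.lru_cache` works as a bare decorator. A minimal sketch of those stdlib idioms, plus the `Path.unlink(missing_ok=True)` call adopted in the `kubernetes_utils.py` hunk earlier in this diff (the `Circle` class, `fib` function, and temp path are illustrative stand-ins, not code from this change):

```python
from functools import cached_property, lru_cache
from pathlib import Path


class Circle:
    def __init__(self, radius: float) -> None:
        self.radius = radius

    @cached_property  # in the stdlib since Python 3.8; no backport dependency needed
    def area(self) -> float:
        print("computing area once")
        return 3.141592653589793 * self.radius**2


@lru_cache  # the bare, parenthesis-free decorator form requires Python 3.8+
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)


circle = Circle(2.0)
print(circle.area)  # computed once and cached on the instance
print(circle.area)  # served from the cache
print(fib(30))

# Path.unlink(missing_ok=True) is also 3.8+: it replaces the
# try/except OSError guard that the 3.7-compatible code needed.
Path("/tmp/example-download.tmp").unlink(missing_ok=True)  # hypothetical path
```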
diff --git a/docs/apache-airflow/administration-and-deployment/modules_management.rst b/docs/apache-airflow/administration-and-deployment/modules_management.rst index e3cf953e46043..ec1a9b11e4195 100644 --- a/docs/apache-airflow/administration-and-deployment/modules_management.rst +++ b/docs/apache-airflow/administration-and-deployment/modules_management.rst @@ -56,10 +56,10 @@ by running an interactive terminal as in the example below: >>> from pprint import pprint >>> pprint(sys.path) ['', - '/home/arch/.pyenv/versions/3.7.4/lib/python37.zip', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7/lib-dynload', - '/home/arch/venvs/airflow/lib/python3.7/site-packages'] + '/home/arch/.pyenv/versions/3.8.4/lib/python38.zip', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8/lib-dynload', + '/home/arch/venvs/airflow/lib/python3.8/site-packages'] ``sys.path`` is initialized during program startup. The first precedence is given to the current directory, i.e, ``path[0]`` is the directory containing @@ -290,10 +290,10 @@ The ``sys.path`` variable will look like below: >>> pprint(sys.path) ['', '/home/arch/projects/airflow_operators' - '/home/arch/.pyenv/versions/3.7.4/lib/python37.zip', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7/lib-dynload', - '/home/arch/venvs/airflow/lib/python3.7/site-packages'] + '/home/arch/.pyenv/versions/3.8.4/lib/python38.zip', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8/lib-dynload', + '/home/arch/venvs/airflow/lib/python3.8/site-packages'] As we can see that our provided directory is now added to the path, let's try to import the package now: diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst index 4477afdc1ae59..a4e95cd78ffac 100644 --- a/docs/apache-airflow/extra-packages-ref.rst +++ b/docs/apache-airflow/extra-packages-ref.rst @@ -100,7 +100,7 @@ with a consistent set of dependencies based on constraint files provided by Airf :substitutions: pip install apache-airflow[google,amazon,apache.spark]==|version| \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt" Note, that this will install providers in the versions that were released at the time of Airflow |version| release. You can later upgrade those providers manually if you want to use latest versions of the providers. diff --git a/docs/apache-airflow/installation/installing-from-pypi.rst b/docs/apache-airflow/installation/installing-from-pypi.rst index e6cf162bc3017..9e190a14b7563 100644 --- a/docs/apache-airflow/installation/installing-from-pypi.rst +++ b/docs/apache-airflow/installation/installing-from-pypi.rst @@ -37,7 +37,7 @@ Typical command to install airflow from PyPI looks like below: .. code-block:: - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt" This is an example, see further for more explanation.
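A common thread in the documentation hunks above is that the interpreter version leaks into user-visible strings: the `sys.path` entries in `modules_management.rst` and the constraints URLs both embed the Python major.minor. A small sketch of where those interpreter-derived pieces come from (the `airflow_operators` directory is the docs' own hypothetical example):

```python
import sys
import sysconfig

# The site-packages entry on sys.path embeds the interpreter version,
# e.g. .../lib/python3.8/site-packages on a 3.8 interpreter.
print(sysconfig.get_path("purelib"))

# Appending a directory makes its modules importable, which is the
# technique the modules-management docs demonstrate.
sys.path.append("/home/arch/projects/airflow_operators")  # hypothetical path
print(sys.path[-1])
```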
@@ -70,7 +70,7 @@ You can create the URL to the file substituting the variables in the template be where: - ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version -- ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.7`` +- ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.9`` There is also a ``constraints-no-providers`` constraint file, which contains just constraints required to install Airflow core. This allows to install and upgrade airflow separately and independently from providers. @@ -86,7 +86,7 @@ constraints always points to the "latest" released Airflow version constraints: .. code-block:: - https://raw.githubusercontent.com/apache/airflow/constraints-latest/constraints-3.7.txt + https://raw.githubusercontent.com/apache/airflow/constraints-latest/constraints-3.8.txt Fixing Constraint files at release time @@ -192,9 +192,9 @@ If you don't want to install any extra providers, initially you can use the comm AIRFLOW_VERSION=|version| PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" - # For example: 3.7 + # For example: 3.8 CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt" - # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.7.txt + # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.8.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" @@ -219,7 +219,7 @@ Symbol not found: ``_Py_GetArgcArgv`` ===================================== If you see ``Symbol not found: _Py_GetArgcArgv`` while starting or importing Airflow, this may mean that you are using an incompatible version of Python. -For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.7``: ``/usr/local/opt/python@3.7/Frameworks/Python.framework/Versions/3.7``). +For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.8``: ``/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8``). The crux of the issue is that a library Airflow depends on, ``setproctitle``, uses a non-public Python API which is not available from the standard installation ``/usr/local/opt/`` (which symlinks to a path under ``/usr/local/Cellar``). @@ -228,9 +228,9 @@ An easy fix is just to ensure you use a version of Python that has a dylib of th .. 
code-block:: bash - # Note: these instructions are for python3.7 but can be loosely modified for other versions - brew install python@3.7 - virtualenv -p /usr/local/opt/python@3.7/Frameworks/Python.framework/Versions/3.7/bin/python3 .toy-venv + # Note: these instructions are for python3.8 but can be loosely modified for other versions + brew install python@3.8 + virtualenv -p /usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/bin/python3 .toy-venv source .toy-venv/bin/activate pip install apache-airflow python diff --git a/docs/apache-airflow/installation/prerequisites.rst b/docs/apache-airflow/installation/prerequisites.rst index 82a0310721e3f..a2506c1beffbd 100644 --- a/docs/apache-airflow/installation/prerequisites.rst +++ b/docs/apache-airflow/installation/prerequisites.rst @@ -20,7 +20,7 @@ Prerequisites Starting with Airflow 2.3.0, Airflow is tested with:. -* Python: 3.7, 3.8, 3.9, 3.10 +* Python: 3.8, 3.9, 3.10 * Databases: diff --git a/docs/apache-airflow/installation/supported-versions.rst b/docs/apache-airflow/installation/supported-versions.rst index 9d398fb5d69fe..b582e35fe9f90 100644 --- a/docs/apache-airflow/installation/supported-versions.rst +++ b/docs/apache-airflow/installation/supported-versions.rst @@ -61,8 +61,8 @@ They are based on the official release schedule of Python and Kubernetes, nicely 2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful in terms of "smoke tests" in CI PRs which are run using this default version and default reference image available in DockerHub. Currently the ``apache/airflow:latest`` and ``apache/airflow:2.5.2`` images - are Python 3.7 images, however, in the first MINOR/MAJOR release of Airflow released after 27.06.2023, they will - become Python 3.8 images. + are Python 3.8 images, however, in the first MINOR/MAJOR release of Airflow released after 14.10.2024, + they will become Python 3.9 images. 3. We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with diff --git a/docs/apache-airflow/start.rst b/docs/apache-airflow/start.rst index 6f198dfded563..96dd6ff9cf692 100644 --- a/docs/apache-airflow/start.rst +++ b/docs/apache-airflow/start.rst @@ -24,8 +24,8 @@ This quick start guide will help you bootstrap an Airflow standalone instance on .. note:: - Successful installation requires a Python 3 environment. Starting with Airflow 2.3.0, Airflow is tested with Python 3.7, 3.8, 3.9, 3.10. Note that - Python 3.11 is not yet supported. + Successful installation requires a Python 3 environment. Starting with Airflow 2.3.0, Airflow is tested with Python 3.8, 3.9, 3.10. + Note that Python 3.11 is not yet supported. Only ``pip`` installation is currently officially supported. @@ -60,7 +60,7 @@ constraint files to enable reproducible installation, so using ``pip`` and const PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "."
-f 1-2)" CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - # For example this would install |version| with python 3.7: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt + # For example this would install |version| with python 3.8: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" diff --git a/docs/docker-stack/README.md b/docs/docker-stack/README.md index 98776d4b29d48..ce877622cbfe9 100644 --- a/docs/docker-stack/README.md +++ b/docs/docker-stack/README.md @@ -33,9 +33,9 @@ for all the supported Python versions. You can find the following images there (Assuming Airflow version `2.7.0.dev0`): -* `apache/airflow:latest` - the latest released Airflow image with default Python version (3.7 currently) +* `apache/airflow:latest` - the latest released Airflow image with default Python version (3.8 currently) * `apache/airflow:latest-pythonX.Y` - the latest released Airflow image with specific Python version -* `apache/airflow:2.7.0.dev0` - the versioned Airflow image with default Python version (3.7 currently) +* `apache/airflow:2.7.0.dev0` - the versioned Airflow image with default Python version (3.8 currently) * `apache/airflow:2.7.0.dev0-pythonX.Y` - the versioned Airflow image with specific Python version Those are "reference" regular images. They contain the most common set of extras, dependencies and providers that are @@ -45,9 +45,9 @@ You can also use "slim" images that contain only core airflow and are about half but you need to add all the [Reference for package extras](https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html) and providers that you need separately via [Building the image](https://airflow.apache.org/docs/docker-stack/build.html#build-build-image). -* `apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.7 currently) +* `apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.8 currently) * `apache/airflow:slim-latest-pythonX.Y` - the latest released Airflow image with specific Python version -* `apache/airflow:slim-2.7.0.dev0` - the versioned Airflow image with default Python version (3.7 currently) +* `apache/airflow:slim-2.7.0.dev0` - the versioned Airflow image with default Python version (3.8 currently) * `apache/airflow:slim-2.7.0.dev0-pythonX.Y` - the versioned Airflow image with specific Python version The Apache Airflow image provided as convenience package is optimized for size, and diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst index 8d5c8224dea5d..6da9cdf145e2e 100644 --- a/docs/docker-stack/build-arg-ref.rst +++ b/docs/docker-stack/build-arg-ref.rst @@ -30,7 +30,7 @@ Those are the most common arguments that you use when you want to build a custom +------------------------------------------+------------------------------------------+---------------------------------------------+ | Build argument | Default value | Description | +==========================================+==========================================+=============================================+ -| ``PYTHON_BASE_IMAGE`` | ``python:3.7-slim-bullseye`` | Base python image. | +| ``PYTHON_BASE_IMAGE`` | ``python:3.8-slim-bullseye`` | Base python image. 
| +------------------------------------------+------------------------------------------+---------------------------------------------+ | ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | +------------------------------------------+------------------------------------------+---------------------------------------------+ diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst index 7beed0afed6b0..a49eb8a64b6d7 100644 --- a/docs/docker-stack/build.rst +++ b/docs/docker-stack/build.rst @@ -288,14 +288,14 @@ There are two types of images you can extend your image from: Naming conventions for the images: -+----------------+------------------+---------------------------------+--------------------------------------+ -| Image | Python | Standard image | Slim image | -+================+==================+=================================+======================================+ -| Latest default | 3.7 | apache/airflow:latest | apache/airflow:slim-latest | -| Default | 3.7 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | -| Latest | 3.7,3.8,3.9,3.10 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | -| Specific | 3.7,3.8,3.9,3.10 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | -+----------------+------------------+---------------------------------+--------------------------------------+ ++----------------+--------------+---------------------------------+--------------------------------------+ +| Image | Python | Standard image | Slim image | ++================+==============+=================================+======================================+ +| Latest default | 3.8 | apache/airflow:latest | apache/airflow:slim-latest | +| Default | 3.8 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | +| Latest | 3.8,3.9,3.10 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | +| Specific | 3.8,3.9,3.10 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | ++----------------+--------------+---------------------------------+--------------------------------------+ * The "latest" image is always the latest released stable version available. @@ -665,7 +665,7 @@ Building from PyPI packages This is the basic way of building the custom images from sources. -The following example builds the production image in version ``3.7`` with latest PyPI-released Airflow, +The following example builds the production image in version ``3.8`` with latest PyPI-released Airflow, with default set of Airflow extras and dependencies. The latest PyPI-released Airflow constraints are used automatically. .. exampleinclude:: docker-examples/customizing/stable-airflow.sh @@ -673,7 +673,7 @@ with default set of Airflow extras and dependencies. The latest PyPI-released Ai :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.7`` with default extras from ``2.3.0`` Airflow +The following example builds the production image in version ``3.8`` with default extras from ``2.3.0`` Airflow package. The ``2.3.0`` constraints are used automatically. .. exampleinclude:: docker-examples/customizing/pypi-selected-version.sh @@ -708,7 +708,7 @@ have more complex dependencies to build. Building optimized images ......................... 
-The following example the production image in version ``3.7`` with additional airflow extras from ``2.0.2`` +The following example builds the production image in version ``3.8`` with additional airflow extras from ``2.0.2`` PyPI package but it includes additional apt dev and runtime dependencies. The dev dependencies are those that require ``build-essential`` and usually need to involve recompiling @@ -736,7 +736,7 @@ a branch or tag in your repository and use the tag or branch in the URL that you In case of GitHub builds you need to pass the constraints reference manually in case you want to use specific constraints, otherwise the default ``constraints-main`` is used. -The following example builds the production image in version ``3.7`` with default extras from the latest main version and +The following example builds the production image in version ``3.8`` with default extras from the latest main version and constraints are taken from latest version of the constraints-main branch in GitHub. .. exampleinclude:: docker-examples/customizing/github-main.sh diff --git a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh index 5e264a758f36e..29e8762cf1c24 100755 --- a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh +++ b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \ --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \ diff --git a/docs/docker-stack/docker-examples/customizing/custom-sources.sh b/docs/docker-stack/docker-examples/customizing/custom-sources.sh index 50f06545323b4..7299dce829fb3 100755 --- a/docs/docker-stack/docker-examples/customizing/custom-sources.sh +++ b/docs/docker-stack/docker-examples/customizing/custom-sources.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . -f Dockerfile \ --pull \ --platform 'linux/amd64' \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \ --build-arg ADDITIONAL_PYTHON_DEPS=" \ diff --git a/docs/docker-stack/docker-examples/customizing/github-main.sh b/docs/docker-stack/docker-examples/customizing/github-main.sh index c78c91c35be2e..3a7e1ec073b7e 100755 --- a/docs/docker-stack/docker-examples/customizing/github-main.sh +++ b/docs/docker-stack/docker-examples/customizing/github-main.sh @@ -30,7 +30,7 @@ export DOCKER_BUILDKIT=1 docker build .
\ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --tag "my-github-main:0.0.1" diff --git a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh index f8823fa570d61..0323a5103166d 100755 --- a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh +++ b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ diff --git a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh index 0bb943599e30c..d0c62c11b8363 100755 --- a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh +++ b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh @@ -30,7 +30,7 @@ export AIRFLOW_VERSION=2.3.4 export DOCKER_BUILDKIT=1 docker build . \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --tag "my-pypi-selected-version:0.0.1" # [END build] diff --git a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh index f811a268b6e47..012841bffed50 100755 --- a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh +++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh @@ -26,11 +26,11 @@ cp "${AIRFLOW_SOURCES}/Dockerfile" "${TEMP_DOCKER_DIR}" # [START download] mkdir -p docker-context-files -export AIRFLOW_VERSION="2.2.4" +export AIRFLOW_VERSION="2.5.3" rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true -curl -Lo "docker-context-files/constraints-3.7.txt" \ - "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.7.txt" +curl -Lo "docker-context-files/constraints-3.8.txt" \ + "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.8.txt" echo echo "Make sure you use the right python version here (should be same as in constraints)!" @@ -38,7 +38,7 @@ echo python --version pip download --dest docker-context-files \ - --constraint docker-context-files/constraints-3.7.txt \ + --constraint docker-context-files/constraints-3.8.txt \ "apache-airflow[async,celery,elasticsearch,kubernetes,postgres,redis,ssh,statsd,virtualenv]==${AIRFLOW_VERSION}" # [END download] @@ -47,7 +47,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg INSTALL_MYSQL_CLIENT="false" \ @@ -56,7 +56,7 @@ docker build . 
diff --git a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
index f811a268b6e47..012841bffed50 100755
--- a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
+++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh
@@ -26,11 +26,11 @@ cp "${AIRFLOW_SOURCES}/Dockerfile" "${TEMP_DOCKER_DIR}"

 # [START download]
 mkdir -p docker-context-files
-export AIRFLOW_VERSION="2.2.4"
+export AIRFLOW_VERSION="2.5.3"
 rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true
-curl -Lo "docker-context-files/constraints-3.7.txt" \
-    "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.7.txt"
+curl -Lo "docker-context-files/constraints-3.8.txt" \
+    "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.8.txt"

 echo
 echo "Make sure you use the right python version here (should be same as in constraints)!"
@@ -38,7 +38,7 @@ echo
 python --version

 pip download --dest docker-context-files \
-    --constraint docker-context-files/constraints-3.7.txt \
+    --constraint docker-context-files/constraints-3.8.txt \
     "apache-airflow[async,celery,elasticsearch,kubernetes,postgres,redis,ssh,statsd,virtualenv]==${AIRFLOW_VERSION}"
 # [END download]

@@ -47,7 +47,7 @@ export DOCKER_BUILDKIT=1

 docker build . \
     --pull \
-    --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \
+    --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \
     --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
     --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
     --build-arg INSTALL_MYSQL_CLIENT="false" \
@@ -56,7 +56,7 @@ docker build . \
     --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \
     --build-arg DOCKER_CONTEXT_FILES="docker-context-files" \
     --build-arg INSTALL_PACKAGES_FROM_CONTEXT="true" \
-    --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.7.txt" \
+    --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.8.txt" \
     --tag airflow-my-restricted-environment:0.0.1
 # [END build]
diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index 2b9aa64b1bb2f..8b741da1b7fc4 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -132,7 +132,7 @@ if you specify extra arguments. For example:

 .. code-block:: bash

-  docker run -it apache/airflow:2.7.0.dev0-python3.7 bash -c "ls -la"
+  docker run -it apache/airflow:2.7.0.dev0-python3.8 bash -c "ls -la"
   total 16
   drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 .
   drwxr-xr-x 1 root root 4096 Jun 5 18:12 ..
@@ -144,7 +144,7 @@ you pass extra parameters. For example:

 .. code-block:: bash

-  > docker run -it apache/airflow:2.7.0.dev0-python3.7 python -c "print('test')"
+  > docker run -it apache/airflow:2.7.0.dev0-python3.8 python -c "print('test')"
   test

 If first argument equals to "airflow" - the rest of the arguments is treated as an airflow command
@@ -152,13 +152,13 @@ to execute. Example:

 .. code-block:: bash

-   docker run -it apache/airflow:2.7.0.dev0-python3.7 airflow webserver
+   docker run -it apache/airflow:2.7.0.dev0-python3.8 airflow webserver

 If there are any other arguments - they are simply passed to the "airflow" command

 .. code-block:: bash

-  > docker run -it apache/airflow:2.7.0.dev0-python3.7 help
+  > docker run -it apache/airflow:2.7.0.dev0-python3.8 help
   usage: airflow [-h] GROUP_OR_COMMAND ...

 positional arguments:
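The entrypoint rules documented above can be exercised together; a minimal sketch using the same image tag as the examples (``cheat-sheet`` is just an arbitrary airflow subcommand chosen to demonstrate the pass-through rule):

.. code-block:: bash

    # First argument is "airflow": the rest is treated as an airflow command.
    docker run -it apache/airflow:2.7.0.dev0-python3.8 airflow version
    # Any other argument is passed straight to "airflow", so these two are equivalent:
    docker run -it apache/airflow:2.7.0.dev0-python3.8 cheat-sheet
    docker run -it apache/airflow:2.7.0.dev0-python3.8 airflow cheat-sheet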
diff --git a/docs/docker-stack/index.rst b/docs/docker-stack/index.rst
index d54c374b702a7..47d54591db4c8 100644
--- a/docs/docker-stack/index.rst
+++ b/docs/docker-stack/index.rst
@@ -50,9 +50,9 @@ for all the supported Python versions. You can find the following images there

 (Assuming Airflow version :subst-code:`|airflow-version|`):

-* :subst-code:`apache/airflow:latest` - the latest released Airflow image with default Python version (3.7 currently)
+* :subst-code:`apache/airflow:latest` - the latest released Airflow image with default Python version (3.8 currently)
 * :subst-code:`apache/airflow:latest-pythonX.Y` - the latest released Airflow image with specific Python version
-* :subst-code:`apache/airflow:|airflow-version|` - the versioned Airflow image with default Python version (3.7 currently)
+* :subst-code:`apache/airflow:|airflow-version|` - the versioned Airflow image with default Python version (3.8 currently)
 * :subst-code:`apache/airflow:|airflow-version|-pythonX.Y` - the versioned Airflow image with specific Python version

 Those are "reference" regular images. They contain the most common set of extras, dependencies and providers that are
@@ -62,9 +62,9 @@ You can also use "slim" images that contain only core airflow and are about half
 but you need to add all the :doc:`apache-airflow:extra-packages-ref` and providers that you need separately
 via :ref:`Building the image `.

-* :subst-code:`apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.7 currently)
+* :subst-code:`apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.8 currently)
 * :subst-code:`apache/airflow:slim-latest-pythonX.Y` - the latest released Airflow image with specific Python version
-* :subst-code:`apache/airflow:slim-|airflow-version|` - the versioned Airflow image with default Python version (3.7 currently)
+* :subst-code:`apache/airflow:slim-|airflow-version|` - the versioned Airflow image with default Python version (3.8 currently)
 * :subst-code:`apache/airflow:slim-|airflow-version|-pythonX.Y` - the versioned Airflow image with specific Python version

 The Apache Airflow image provided as convenience package is optimized for size, and
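Putting the naming scheme above into practice, a minimal sketch with ``3.8`` substituted for the ``pythonX.Y`` placeholder (any other supported version works the same way):

.. code-block:: bash

    # Reference image, latest release, explicit Python version
    docker pull apache/airflow:latest-python3.8
    # Slim image, latest release, default Python version (3.8 currently)
    docker pull apache/airflow:slim-latest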
diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt
index 5a3625662baa8..7fd452d489d66 100644
--- a/images/breeze/output-commands-hash.txt
+++ b/images/breeze/output-commands-hash.txt
@@ -1,7 +1,7 @@
 # This file is automatically generated by pre-commit. If you have a conflict with this file
 # Please do not solve it but run `breeze setup regenerate-command-images`.
 # This command should fix the conflict and regenerate help images that you have conflict with.
-main:3b0efd589fb61236e9fb1e4422de78c9
+main:3b7952d54fba1b4e82e04f35f088a94a
 build-docs:3b89efaf5551b1782227cd382c019990
 ci:fix-ownership:fee2c9ec9ef19686792002ae054fecdd
 ci:free-space:47234aa0a60b0efd84972e6e797379f8
@@ -9,56 +9,56 @@ ci:get-workflow-info:01ee34c33ad62fa5dc33e0ac8773223f
 ci:resource-check:1d4fe47dff9fc64ac1648ec4beb2d85c
 ci:selective-check:3a085894f24cb909812fbc8253a21e13
 ci:e51cbc38a202b92b7dc6288f6344c412
-ci-image:build:3ffe4dd24ae7090415543e27d8504955
-ci-image:pull:c16c6e57c748bfe9b365b4ffafb18472
-ci-image:verify:aee88f55e8837028d19316356e29b009
-ci-image:d53ee5067c46a6d8f3b21d0098c56ca2
+ci-image:build:41f65d78d2b63246ef393ea89c6752fb
+ci-image:pull:ea24a14ffda5ed03e9cb0613ae1ff16b
+ci-image:verify:44b15248e7b9495844946c8b2a1cd1e1
+ci-image:d45b2c4d93a5fc12544c6f5af6fcc792
 cleanup:231de69d5f47ba29c883164e4575e310
 compile-www-assets:c8a8c4f002f7246d0541897fc7c70313
 exec:42bbd3c1659128b0341ae118c3482da2
-k8s:build-k8s-image:bf48ee8742757dc19a4c3fad23f060a7
-k8s:configure-cluster:fe86080387f6369bd0230f709ae18ef4
-k8s:create-cluster:66a9d9ecc83784c9992eca5c57589508
-k8s:delete-cluster:afeead233fd7c3b0b5575a94a0c3d09d
-k8s:deploy-airflow:0b080c9411835120ebacceca3d6bd230
-k8s:k9s:6612ff843519c9f4b283baf3febbbc4b
-k8s:logs:b24a8b57f7793bc5338e693bc1a6abc5
-k8s:run-complete-tests:c72e037afca75d5cb52d1231fd2b9157
+k8s:build-k8s-image:2e52be72f44122cb4fe653adcad6a57e
+k8s:configure-cluster:b84a85f7423e336c49239dd93414f905
+k8s:create-cluster:be587ff9093e0d787a97449514725a09
+k8s:delete-cluster:36043808574c6b82abc4a86ec7291531
+k8s:deploy-airflow:66677370e851e17a04095d6cb83f39d7
+k8s:k9s:2f46bd6b73572e7c55d905ecde0989f9
+k8s:logs:db2be891ac429e808f3eb8c395ad6bf1
+k8s:run-complete-tests:211854e1048e0a3ac3a197b108839733
 k8s:setup-env:b0ea72ef1819f831b1f80e8bd4d299ce
-k8s:shell:4cfa8447477f91a7a96e9ed816def23c
-k8s:status:207ac2ab5daa839080b4a4814d18c137
-k8s:tests:376685f88b26051970e4f23511b08679
-k8s:upload-k8s-image:c457e022410c31a11f16d8f69f93ac57
-k8s:e279615efbe1f7f040f6329598f5e35f
-prod-image:build:a1608071b5a80b3cc442da9d00e1525b
-prod-image:pull:e3c89dd908fc44adf6e159c2950ebdd0
-prod-image:verify:31bc5efada1d70a0a31990025db1a093
-prod-image:79bd4cc9de03ab7e1d75f025d75eee46
+k8s:shell:11019888d48a0316bb634d39a3311c63
+k8s:status:026ce4ca2096a10ac6cd9d0f503d6c08
+k8s:tests:e60f58da7074d0c4df41f49b68ae0229
+k8s:upload-k8s-image:c12deebea7ed4abefcd8fabe9e7270e7
+k8s:1edd9914acef1a551dd79afd8903f2e9
+prod-image:build:c5fac534900c609681d79ea61437ff82
+prod-image:pull:45ecbd5581296f4d6a343207e008a64b
+prod-image:verify:6a9c820077c44c4588daa95af5c52412
+prod-image:294767b6f00a02d237251402dddb76c0
 release-management:create-minor-branch:6a01066dce15e09fb269a8385626657c
-release-management:generate-constraints:ae30d6ad49a1b2c15b61cb29080fd957
+release-management:generate-constraints:8c4be39342fd595e0ad66d06879dc41e
 release-management:generate-issue-content-providers:421c1b186818a6251c16f7f3b7807292
 release-management:install-provider-packages:5838b06b78e3c5c6e8380024867a1a8d
 release-management:prepare-airflow-package:3ac14ea6d2b09614959c0ec4fd564789
 release-management:prepare-provider-documentation:13f5fa922825a922c3525b5a57b2a80c
 release-management:prepare-provider-packages:cf41c33c6d6121efef1f1d97333e8710
-release-management:release-prod-images:c9bc40938e0efad49e51ef66e83f9527
+release-management:release-prod-images:b803726bcf3610299e09431fc445857f
 release-management:start-rc-process:6aafbaceabd7b67b9a1af4c2f59abc4c
 release-management:start-release:acb384d86e02ff5fde1bf971897be17c
 release-management:verify-provider-packages:566c60fb1bfdc5ed7c4be590736891b2
-release-management:c0ecc3e5cd4a9f1aac18a9d6eb1ddfb6
+release-management:dbbd6e1e2ba12b2b4c277559961afbe9
 setup:autocomplete:03343478bf1d0cf9c101d454cdb63b68
 setup:check-all-params-in-groups:c3aca085350fc09451a6d502be9ee821
-setup:config:3ffcd35dd24b486ddf1d08b797e3d017
+setup:config:8ddcefd867b15008763e1a9e2de602cf
 setup:regenerate-command-images:aaf263095a037d2271640513d8c156fe
 setup:self-upgrade:d02f70c7a230eae3463ceec2056b63fa
 setup:version:123b462a421884dc2320ffc5e54b2478
-setup:26f37743534e14f5aad5300aad920301
-shell:bd3e004a92ebcec8feb40fc5cd95872d
-start-airflow:ee5066f1420a489864b48bc4e5e472da
+setup:45257f124934cd30c4adf3b100cf4a92
+shell:49ec44efdb4347a93651c496ef96e63c
+start-airflow:6f45e90c582361cd1d9fbc1665046ae4
 static-checks:c22e24fc35d2f82135b30998e4f352c4
 stop:e5aa686b4e53707ced4039d8414d5cd6
-testing:docker-compose-tests:b86c044b24138af0659a05ed6331576c
+testing:docker-compose-tests:e5d943a1b9bba403a8aeadf344b417cf
 testing:helm-tests:936cf28fd84ce4ff5113795fdae9624b
-testing:integration-tests:6b302f81c1038215bbc9f22f9490fcfc
-testing:tests:9a8e3645ff5341788b5cd683b1c4bdcd
-testing:405296e5ab9be9aa9e1af3f162bd296a
+testing:integration-tests:fb8d5c220701d9c279d497eb024a51a3
+testing:tests:f469fe228cb78cd8f31f86feccc974eb
+testing:d040ad39b8902c70105f3dd8fa40e11e
diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg
index d1ce7944df5bd..38f349d29dadd 100644
--- a/images/breeze/output-commands.svg
+++ b/images/breeze/output-commands.svg
    [auto-generated help screenshot of `breeze`: --python choices now (>3.8< | 3.9 | 3.10), default 3.8]
diff --git a/images/breeze/output_ci-image.svg b/images/breeze/output_ci-image.svg
index bf1df9347a162..3025e6e470c36 100644
--- a/images/breeze/output_ci-image.svg
+++ b/images/breeze/output_ci-image.svg
    [auto-generated help screenshot of `breeze ci-image` (build / pull / verify), regenerated]
diff --git a/images/breeze/output_ci-image_build.svg b/images/breeze/output_ci-image_build.svg
index 8319e04d12496..85226fd9b4dfc 100644
--- a/images/breeze/output_ci-image_build.svg
+++ b/images/breeze/output_ci-image_build.svg
    [auto-generated help screenshot of `breeze ci-image build`: --python default 3.8, --python-versions default "3.8 3.9 3.10"]
diff --git a/images/breeze/output_ci-image_pull.svg b/images/breeze/output_ci-image_pull.svg
index d0e5c8d141651..5282ee09a7e74 100644
--- a/images/breeze/output_ci-image_pull.svg
+++ b/images/breeze/output_ci-image_pull.svg
    [auto-generated help screenshot of `breeze ci-image pull`: --python default 3.8, --python-versions default "3.8 3.9 3.10"]
diff --git a/images/breeze/output_ci-image_verify.svg b/images/breeze/output_ci-image_verify.svg
index 6c18349eed7f2..2d772a8e52d21 100644
--- a/images/breeze/output_ci-image_verify.svg
+++ b/images/breeze/output_ci-image_verify.svg
    [auto-generated help screenshot of `breeze ci-image verify`: --python default 3.8]
diff --git a/images/breeze/output_k8s.svg b/images/breeze/output_k8s.svg
index 5ba36f4e52999..b5d8fb3fda19a 100644
--- a/images/breeze/output_k8s.svg
+++ b/images/breeze/output_k8s.svg
    [auto-generated help screenshot of `breeze k8s` (cluster management, inspection and testing commands), regenerated]
diff --git a/images/breeze/output_k8s_build-k8s-image.svg b/images/breeze/output_k8s_build-k8s-image.svg
index c607491c2c8b0..6070f4df19fac 100644
--- a/images/breeze/output_k8s_build-k8s-image.svg
+++ b/images/breeze/output_k8s_build-k8s-image.svg
    [auto-generated help screenshot of `breeze k8s build-k8s-image`: --python default 3.8, --python-versions default "3.8 3.9 3.10"]
diff --git a/images/breeze/output_k8s_configure-cluster.svg b/images/breeze/output_k8s_configure-cluster.svg
index 2591f45897fda..19998f6142978 100644
--- a/images/breeze/output_k8s_configure-cluster.svg
+++ b/images/breeze/output_k8s_configure-cluster.svg
    [auto-generated help screenshot of `breeze k8s configure-cluster`: --python default 3.8, Kubernetes versions unchanged (v1.23.17 default)]
diff --git a/images/breeze/output_k8s_create-cluster.svg b/images/breeze/output_k8s_create-cluster.svg
index 8a91e6f6f2acb..68c73454bf8ca 100644
--- a/images/breeze/output_k8s_create-cluster.svg
+++ b/images/breeze/output_k8s_create-cluster.svg
    [auto-generated help screenshot of `breeze k8s create-cluster`, regenerated with updated dimensions: --python default 3.8, Kubernetes versions unchanged (v1.23.17 default)]
+(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.8 3.9 3.10]                                                        +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_delete-cluster.svg b/images/breeze/output_k8s_delete-cluster.svg index a2e7c8ba62e2c..397e8cab119f4 100644 --- a/images/breeze/output_k8s_delete-cluster.svg +++ b/images/breeze/output_k8s_delete-cluster.svg @@ -35,8 +35,8 @@ .breeze-k8s-delete-cluster-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-delete-cluster-r2 { fill: #c5c8c6 } .breeze-k8s-delete-cluster-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-delete-cluster-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-delete-cluster-r5 { fill: #868887 } +.breeze-k8s-delete-cluster-r4 { fill: #868887 } +.breeze-k8s-delete-cluster-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-delete-cluster-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-delete-cluster-r7 { fill: #8d7b39 } @@ -109,23 +109,23 @@ -Usage: breeze k8s delete-cluster [OPTIONS] +Usage: breeze k8s delete-cluster [OPTIONS] Delete the current KinD Cluster (optionally all clusters). -╭─ K8S cluster delete flags ───────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---allApply it to all created clusters -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S cluster delete flags ───────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--allApply it to all created clusters +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_deploy-airflow.svg b/images/breeze/output_k8s_deploy-airflow.svg index 807caede03860..775ef96dec3e8 100644 --- a/images/breeze/output_k8s_deploy-airflow.svg +++ b/images/breeze/output_k8s_deploy-airflow.svg @@ -35,8 +35,8 @@ .breeze-k8s-deploy-airflow-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-deploy-airflow-r2 { fill: #c5c8c6 } .breeze-k8s-deploy-airflow-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-deploy-airflow-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-deploy-airflow-r5 { fill: #868887 } +.breeze-k8s-deploy-airflow-r4 { fill: #868887 } +.breeze-k8s-deploy-airflow-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-deploy-airflow-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-deploy-airflow-r7 { fill: #8d7b39 } @@ -163,41 +163,41 @@ -Usage: breeze k8s deploy-airflow [OPTIONS] [EXTRA_OPTIONS]... +Usage: breeze k8s deploy-airflow [OPTIONS] [EXTRA_OPTIONS]... Deploy airflow image to the current KinD cluster (or all clusters). -╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---executorExecutor to use for a kubernetes cluster.                                          -(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) -[default: KubernetesExecutor]                                                      ---upgradeUpgrade Helm Chart rather than installing it. ---wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel for cluster  -operations.                                                   
                          -(INTEGER RANGE)                                                                         -[default: 2; 1<=x<=4]                                                                   ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) -[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--executorExecutor to use for a kubernetes cluster.                                          +(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) +[default: KubernetesExecutor]                                                      +--upgradeUpgrade Helm Chart rather than installing it. +--wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel for cluster  +operations.                                                                             +(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.8 3.9 3.10]                                                        +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_k9s.svg b/images/breeze/output_k8s_k9s.svg index e523a94540f81..6c117318df440 100644 --- a/images/breeze/output_k8s_k9s.svg +++ b/images/breeze/output_k8s_k9s.svg @@ -35,8 +35,8 @@ .breeze-k8s-k9s-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-k9s-r2 { fill: #c5c8c6 } .breeze-k8s-k9s-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-k9s-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-k9s-r5 { fill: #868887 } +.breeze-k8s-k9s-r4 { fill: #868887 } +.breeze-k8s-k9s-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-k9s-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-k9s-r7 { fill: #8d7b39 } @@ -106,22 +106,22 @@ -Usage: breeze k8s k9s [OPTIONS] [K9S_ARGS]... +Usage: breeze k8s k9s [OPTIONS] [K9S_ARGS]... Run k9s tool. You can pass any k9s args as extra args. -╭─ K8S k9s flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S k9s flags ──────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
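The ``k8s k9s`` screen above keeps its two flags and only changes the Python default. A minimal sketch of the corresponding invocation, assuming a cluster created with the defaults shown:

.. code-block:: bash

    # Open the k9s console against the KinD cluster for the new default
    # Python 3.8; anything after the flags is passed through to k9s itself.
    breeze k8s k9s --python 3.8 --kubernetes-version v1.23.17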
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_logs.svg b/images/breeze/output_k8s_logs.svg index db1bb9f3b6f1b..a9bdaafb029e9 100644 --- a/images/breeze/output_k8s_logs.svg +++ b/images/breeze/output_k8s_logs.svg @@ -35,8 +35,8 @@ .breeze-k8s-logs-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-logs-r2 { fill: #c5c8c6 } .breeze-k8s-logs-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-logs-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-logs-r5 { fill: #868887 } +.breeze-k8s-logs-r4 { fill: #868887 } +.breeze-k8s-logs-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-logs-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-logs-r7 { fill: #8d7b39 } @@ -109,23 +109,23 @@ -Usage: breeze k8s logs [OPTIONS] +Usage: breeze k8s logs [OPTIONS] -Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters). +Dump k8s logs to ${TMP_DIR}/kind_logs_<cluster_name> directory (optionally all clusters). -╭─ K8S logs flags ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---allApply it to all created clusters -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S logs flags ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--allApply it to all created clusters +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
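Per the ``k8s logs`` help above, ``--all`` applies the dump to every created cluster, which after this change covers the Python 3.8-3.10 matrix only:

.. code-block:: bash

    # Dump KinD logs for the current cluster, or for all created clusters.
    breeze k8s logs
    breeze k8s logs --all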
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_run-complete-tests.svg b/images/breeze/output_k8s_run-complete-tests.svg index 09e2fa75eccfc..b15bd8fefaf50 100644 --- a/images/breeze/output_k8s_run-complete-tests.svg +++ b/images/breeze/output_k8s_run-complete-tests.svg @@ -35,8 +35,8 @@ .breeze-k8s-run-complete-tests-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-run-complete-tests-r2 { fill: #c5c8c6 } .breeze-k8s-run-complete-tests-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-run-complete-tests-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-run-complete-tests-r5 { fill: #868887 } +.breeze-k8s-run-complete-tests-r4 { fill: #868887 } +.breeze-k8s-run-complete-tests-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-run-complete-tests-r6 { fill: #8d7b39 } .breeze-k8s-run-complete-tests-r7 { fill: #98a84b;font-weight: bold } @@ -196,52 +196,52 @@ -Usage: breeze k8s run-complete-tests [OPTIONS] [TEST_ARGS]... +Usage: breeze k8s run-complete-tests [OPTIONS] [TEST_ARGS]... Run complete k8s tests consisting of: creating cluster, building and uploading image, deploying airflow, running tests and deleting clusters (optionally for all clusters in parallel). -╭─ K8S cluster creation flags ─────────────────────────────────────────────────────────────────────────────────────────╮ ---force-recreate-clusterForce recreation of the cluster even if it is already created. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮ ---upgradeUpgrade Helm Chart rather than installing it. ---wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Build image flags ──────────────────────────────────────────────────────────────────────────────────────────────────╮ ---rebuild-base-imageRebuilds base Airflow image before building K8S image. ---image-tag-tImage tag used to build K8S image from.(TEXT)[default: latest] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ K8S tests flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---executorExecutor to use for a kubernetes cluster.                                          -(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) -[default: KubernetesExecutor]                                                      ---force-venv-setupForce recreation of the virtualenv. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. 
---parallelismMaximum number of processes to use while running the operation in parallel for cluster  -operations.                                                                             -(INTEGER RANGE)                                                                         -[default: 2; 1<=x<=4]                                                                   ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) -[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S cluster creation flags ─────────────────────────────────────────────────────────────────────────────────────────╮ +--force-recreate-clusterForce recreation of the cluster even if it is already created. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Airflow deploy flags ───────────────────────────────────────────────────────────────────────────────────────────────╮ +--upgradeUpgrade Helm Chart rather than installing it. +--wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Build image flags ──────────────────────────────────────────────────────────────────────────────────────────────────╮ +--rebuild-base-imageRebuilds base Airflow image before building K8S image. +--image-tag-tImage tag used to build K8S image from.(TEXT)[default: latest] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S tests flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--executorExecutor to use for a kubernetes cluster.                                          +(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) +[default: KubernetesExecutor]                                                      +--force-venv-setupForce recreation of the virtualenv. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel for cluster  +operations.                                                                             +(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.8 3.9 3.10]                                                        +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_shell.svg b/images/breeze/output_k8s_shell.svg index 9dc704655ccd2..9011e6ec31b68 100644 --- a/images/breeze/output_k8s_shell.svg +++ b/images/breeze/output_k8s_shell.svg @@ -35,8 +35,8 @@ .breeze-k8s-shell-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-shell-r2 { fill: #c5c8c6 } .breeze-k8s-shell-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-shell-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-shell-r5 { fill: #868887 } +.breeze-k8s-shell-r4 { fill: #868887 } +.breeze-k8s-shell-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-shell-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-shell-r7 { fill: #8d7b39 } @@ -118,26 +118,26 @@ -Usage: breeze k8s shell [OPTIONS] [SHELL_ARGS]... +Usage: breeze k8s shell [OPTIONS] [SHELL_ARGS]... Run shell environment for the current KinD cluster. -╭─ K8S shell flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---executorExecutor to use for a kubernetes cluster.                                          -(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) -[default: KubernetesExecutor]                                                      ---force-venv-setupForce recreation of the virtualenv. 
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S shell flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--executorExecutor to use for a kubernetes cluster.                                          +(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) +[default: KubernetesExecutor]                                                      +--force-venv-setupForce recreation of the virtualenv. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_status.svg b/images/breeze/output_k8s_status.svg index 29134bd49b9ea..546fbc933c010 100644 --- a/images/breeze/output_k8s_status.svg +++ b/images/breeze/output_k8s_status.svg @@ -35,8 +35,8 @@ .breeze-k8s-status-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-status-r2 { fill: #c5c8c6 } .breeze-k8s-status-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-status-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-status-r5 { fill: #868887 } +.breeze-k8s-status-r4 { fill: #868887 } +.breeze-k8s-status-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-status-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-status-r7 { fill: #8d7b39 } @@ -112,24 +112,24 @@ -Usage: breeze k8s status [OPTIONS] +Usage: breeze k8s status [OPTIONS] Check status of the current cluster and airflow deployed to it (optionally all clusters). -╭─ K8S cluster status flags ───────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. 
-(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) ---allApply it to all created clusters -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S cluster status flags ───────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--wait-time-in-secondsWait for Airflow webserver for specified number of seconds.(INTEGER RANGE) +--allApply it to all created clusters +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_tests.svg b/images/breeze/output_k8s_tests.svg index 47ab97ccd96c2..c743559a9e912 100644 --- a/images/breeze/output_k8s_tests.svg +++ b/images/breeze/output_k8s_tests.svg @@ -35,8 +35,8 @@ .breeze-k8s-tests-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-tests-r2 { fill: #c5c8c6 } .breeze-k8s-tests-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-tests-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-tests-r5 { fill: #868887 } +.breeze-k8s-tests-r4 { fill: #868887 } +.breeze-k8s-tests-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-tests-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-tests-r7 { fill: #8d7b39 } @@ -160,40 +160,40 @@ -Usage: breeze k8s tests [OPTIONS] [TEST_ARGS]... +Usage: breeze k8s tests [OPTIONS] [TEST_ARGS]... Run tests against the current KinD cluster (optionally for all clusters in parallel). -╭─ K8S tests flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    ---executorExecutor to use for a kubernetes cluster.                                          
-(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) -[default: KubernetesExecutor]                                                      ---force-venv-setupForce recreation of the virtualenv. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel for cluster  -operations.                                                                             -(INTEGER RANGE)                                                                         -[default: 2; 1<=x<=4]                                                                   ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) -[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ K8S tests flags ────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +--executorExecutor to use for a kubernetes cluster.                                          +(>KubernetesExecutor< | CeleryExecutor | LocalExecutor | CeleryKubernetesExecutor) +[default: KubernetesExecutor]                                                      +--force-venv-setupForce recreation of the virtualenv. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel for cluster  +operations.                                                                             
+(INTEGER RANGE)                                                                         +[default: 2; 1<=x<=4]                                                                   +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.8 3.9 3.10]                                                        +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_k8s_upload-k8s-image.svg b/images/breeze/output_k8s_upload-k8s-image.svg index cfd3ae2b5807e..f0cf67eb4c2c3 100644 --- a/images/breeze/output_k8s_upload-k8s-image.svg +++ b/images/breeze/output_k8s_upload-k8s-image.svg @@ -35,8 +35,8 @@ .breeze-k8s-upload-k8s-image-r1 { fill: #c5c8c6;font-weight: bold } .breeze-k8s-upload-k8s-image-r2 { fill: #c5c8c6 } .breeze-k8s-upload-k8s-image-r3 { fill: #d0b344;font-weight: bold } -.breeze-k8s-upload-k8s-image-r4 { fill: #68a0b3;font-weight: bold } -.breeze-k8s-upload-k8s-image-r5 { fill: #868887 } +.breeze-k8s-upload-k8s-image-r4 { fill: #868887 } +.breeze-k8s-upload-k8s-image-r5 { fill: #68a0b3;font-weight: bold } .breeze-k8s-upload-k8s-image-r6 { fill: #98a84b;font-weight: bold } .breeze-k8s-upload-k8s-image-r7 { fill: #8d7b39 } @@ -145,35 +145,35 @@ -Usage: breeze k8s upload-k8s-image [OPTIONS] +Usage: breeze k8s upload-k8s-image [OPTIONS] Upload k8s-ready airflow image to the KinD cluster (optionally to all clusters in parallel) -╭─ Upload image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---kubernetes-versionKubernetes version used to create the KinD cluster of. -(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            -[default: v1.23.17]                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ ---run-in-parallelRun the operation in parallel on all or selected subset of Python versions. ---parallelismMaximum number of processes to use while running the operation in parallel. 
-(INTEGER RANGE)                                                             -[default: 4; 1<=x<=8]                                                       ---python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) -[default: 3.7 3.8 3.9 3.10]                                                    ---kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) -[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   ---skip-cleanupSkip cleanup of temporary files created during parallel run. ---debug-resourcesWhether to show resource information while running in parallel. ---include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Upload image flags ─────────────────────────────────────────────────────────────────────────────────────────────────╮ +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--kubernetes-versionKubernetes version used to create the KinD cluster of. +(>v1.23.17< | v1.24.12 | v1.25.8 | v1.26.3)            +[default: v1.23.17]                                    +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Parallel options ───────────────────────────────────────────────────────────────────────────────────────────────────╮ +--run-in-parallelRun the operation in parallel on all or selected subset of Python versions. +--parallelismMaximum number of processes to use while running the operation in parallel. +(INTEGER RANGE)                                                             +[default: 4; 1<=x<=8]                                                       +--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT) +[default: 3.8 3.9 3.10]                                                        +--kubernetes-versionsKubernetes versions used to run in parallel (space separated).(TEXT) +[default: v1.23.17 v1.24.12 v1.25.8 v1.26.3]                   +--skip-cleanupSkip cleanup of temporary files created during parallel run. +--debug-resourcesWhether to show resource information while running in parallel. +--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default). +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
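The parallel options in the ``upload-k8s-image`` screen above show the trimmed default version list; a sketch of uploading to all per-version clusters at once:

.. code-block:: bash

    # Upload the k8s-ready image to every cluster in parallel, matching
    # the new default matrix from the help text (3.8 3.9 3.10).
    breeze k8s upload-k8s-image --run-in-parallel --python-versions "3.8 3.9 3.10"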
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_prod-image.svg b/images/breeze/output_prod-image.svg index 35bf4ae86bb6e..cd179a225efee 100644 --- a/images/breeze/output_prod-image.svg +++ b/images/breeze/output_prod-image.svg @@ -35,8 +35,8 @@ .breeze-prod-image-r1 { fill: #c5c8c6;font-weight: bold } .breeze-prod-image-r2 { fill: #c5c8c6 } .breeze-prod-image-r3 { fill: #d0b344;font-weight: bold } -.breeze-prod-image-r4 { fill: #68a0b3;font-weight: bold } -.breeze-prod-image-r5 { fill: #868887 } +.breeze-prod-image-r4 { fill: #868887 } +.breeze-prod-image-r5 { fill: #68a0b3;font-weight: bold } .breeze-prod-image-r6 { fill: #98a84b;font-weight: bold } @@ -93,18 +93,18 @@ -Usage: breeze prod-image [OPTIONSCOMMAND [ARGS]... +Usage: breeze prod-image [OPTIONS] COMMAND [ARGS]... -Tools that developers can use to manually manage PROD images +Tools that developers can use to manually manage PROD images -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Production Image tools ─────────────────────────────────────────────────────────────────────────────────────────────╮ -build  Build Production image. Include building multiple images for all or selected Python versions sequentially.  -pull   Pull and optionally verify Production images - possibly in parallel for all Python versions.                -verify Verify Production image.                                                                                    -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--help-hShow this message and exit. +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Production Image tools ─────────────────────────────────────────────────────────────────────────────────────────────╮ +build  Build Production image. Include building multiple images for all or selected Python versions sequentially.  +pull   Pull and optionally verify Production images - possibly in parallel for all Python versions.                +verify Verify Production image.                                                                                    
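The ``prod-image`` group keeps its three subcommands (``build``, ``pull``, ``verify``); only the rendering and the defaults change. A sketch of the group in use, limited to flags visible in the hunks here:

.. code-block:: bash

    # Build a PROD image with the new default interpreter, then verify it.
    breeze prod-image build --python 3.8
    breeze prod-image verify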
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/images/breeze/output_prod-image_build.svg b/images/breeze/output_prod-image_build.svg
index 0358c680836f0..63bdbd1888d71 100644
[regenerated help-screen image for `breeze prod-image build`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8, and the --python-versions default for parallel builds is now "3.8 3.9 3.10"; the remaining option text is unchanged apart from colour-class reordering in the generated SVG]
diff --git a/images/breeze/output_prod-image_pull.svg b/images/breeze/output_prod-image_pull.svg
index a4a8028029481..24d4bdc8131ff 100644
[regenerated help-screen image for `breeze prod-image pull`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8, and the --python-versions default is now "3.8 3.9 3.10"]
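For illustration, pulling and verifying the images for every still-supported Python version in one parallel run might look like the following sketch (all flags are taken from the help screen summarized above):

.. code-block:: bash

    breeze prod-image pull --verify --run-in-parallel --python-versions "3.8 3.9 3.10"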
diff --git a/images/breeze/output_prod-image_verify.svg b/images/breeze/output_prod-image_verify.svg
index 9c01cdbdb6fb5..e3647cd1513a6 100644
[regenerated help-screen image for `breeze prod-image verify`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8]
diff --git a/images/breeze/output_release-management.svg b/images/breeze/output_release-management.svg
index 9ea0d2537c31d..0621ddfce1807 100644
[regenerated help-screen image for `breeze release-management`: no option changes; the re-render fixes run-together text such as "[OPTIONSCOMMAND" to "[OPTIONS] COMMAND" and "CHANGELOGREADME" to "CHANGELOG, README"]
diff --git a/images/breeze/output_release-management_generate-constraints.svg b/images/breeze/output_release-management_generate-constraints.svg
index f5b6bd70235c4..4208b1d50897e 100644
[regenerated help-screen image for `breeze release-management generate-constraints`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8, and the --python-versions default is now "3.8 3.9 3.10"]
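Regenerating the pinned constraint files for all supported versions in parallel would then be, as a minimal sketch (flags as listed in the help screen above):

.. code-block:: bash

    breeze release-management generate-constraints --run-in-parallel --python-versions "3.8 3.9 3.10"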
diff --git a/images/breeze/output_release-management_release-prod-images.svg b/images/breeze/output_release-management_release-prod-images.svg
index dba5274a626d1..3701e36950f86 100644
[regenerated help-screen image for `breeze release-management release-prod-images`: --limit-python choices are now (3.8 | 3.9 | 3.10); the re-render also fixes "(2.3.02.3.0rc1 etc.)" to "(2.3.0, 2.3.0rc1 etc.)"]
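A hedged example of releasing a release-candidate image for a single Python version (the version numbers are illustrative; as the help text notes, --skip-latest is set automatically for rc/alpha/beta builds):

.. code-block:: bash

    breeze release-management release-prod-images --airflow-version 2.3.0rc1 --limit-python 3.8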
diff --git a/images/breeze/output_setup.svg b/images/breeze/output_setup.svg
index c313db50bab0c..65185a603f97a 100644
[regenerated help-screen image for `breeze setup`: no option changes; the re-render fixes "[OPTIONSCOMMAND" to "[OPTIONS] COMMAND"]
diff --git a/images/breeze/output_setup_config.svg b/images/breeze/output_setup_config.svg
index d97ad0f3ab1da..87214f91523b1 100644
[regenerated help-screen image for `breeze setup config`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8]
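To persist the new default in the Breeze configuration, a minimal sketch (the flag values are illustrative, chosen from the choices shown above):

.. code-block:: bash

    breeze setup config --python 3.8 --backend postgres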
diff --git a/images/breeze/output_shell.svg b/images/breeze/output_shell.svg
index dc84cf0b2194c..6cc8711a2815b 100644
[regenerated help-screen image for `breeze shell`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8]
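Entering the environment with the new default Python and a fresh database might then look like this sketch (flags from the help screen above):

.. code-block:: bash

    breeze shell --python 3.8 --backend postgres --db-reset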
diff --git a/images/breeze/output_start-airflow.svg b/images/breeze/output_start-airflow.svg
index 9a562e9eb99e1..6c5b98ff7e824 100644
[regenerated help-screen image for `breeze start-airflow`: Python version choices are now (>3.8< | 3.9 | 3.10) with default 3.8]
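As a sketch, starting all Airflow components under the new default Python with example DAGs loaded and assets recompiled on change (flags from the help screen above):

.. code-block:: bash

    breeze start-airflow --python 3.8 --load-example-dags --dev-mode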
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_testing_docker-compose-tests.svg b/images/breeze/output_testing_docker-compose-tests.svg index 61fc891ab7404..458dba447e330 100644 --- a/images/breeze/output_testing_docker-compose-tests.svg +++ b/images/breeze/output_testing_docker-compose-tests.svg @@ -35,8 +35,8 @@ .breeze-testing-docker-compose-tests-r1 { fill: #c5c8c6;font-weight: bold } .breeze-testing-docker-compose-tests-r2 { fill: #c5c8c6 } .breeze-testing-docker-compose-tests-r3 { fill: #d0b344;font-weight: bold } -.breeze-testing-docker-compose-tests-r4 { fill: #68a0b3;font-weight: bold } -.breeze-testing-docker-compose-tests-r5 { fill: #868887 } +.breeze-testing-docker-compose-tests-r4 { fill: #868887 } +.breeze-testing-docker-compose-tests-r5 { fill: #68a0b3;font-weight: bold } .breeze-testing-docker-compose-tests-r6 { fill: #98a84b;font-weight: bold } .breeze-testing-docker-compose-tests-r7 { fill: #8d7b39 } @@ -109,23 +109,23 @@ -Usage: breeze testing docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... +Usage: breeze testing docker-compose-tests [OPTIONS] [EXTRA_PYTEST_ARGS]... Run docker-compose tests. -╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ ---image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) ---image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip).(TEXT) -[default: latest]                                                               ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               ---github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ ---verbose-vPrint verbose information about performed steps. ---dry-run-DIf dry-run is set, commands are only printed, not executed. ---help-hShow this message and exit. -╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Docker-compose tests flag ──────────────────────────────────────────────────────────────────────────────────────────╮ +--image-name-nName of the image to verify (overrides --python and --image-tag).(TEXT) +--image-tag-tTag of the image which is used to run the image (implies --mount-sources=skip).(TEXT) +[default: latest]                                                               +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               +--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow] +╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮ +--verbose-vPrint verbose information about performed steps. +--dry-run-DIf dry-run is set, commands are only printed, not executed. +--help-hShow this message and exit. 
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯ diff --git a/images/breeze/output_testing_integration-tests.svg b/images/breeze/output_testing_integration-tests.svg index 2eb9adde6b72e..6060d28653f9d 100644 --- a/images/breeze/output_testing_integration-tests.svg +++ b/images/breeze/output_testing_integration-tests.svg @@ -173,8 +173,8 @@ [default: 60; x>=0]                                                               --db-reset-dReset DB when entering the container. --backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               --postgres-version-PVersion of Postgres used.(>11< | 12 | 13 | 14 | 15)[default: 11] --mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] --mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] diff --git a/images/breeze/output_testing_tests.svg b/images/breeze/output_testing_tests.svg index e044d6468f371..98a42ac25def2 100644 --- a/images/breeze/output_testing_tests.svg +++ b/images/breeze/output_testing_tests.svg @@ -230,8 +230,8 @@ --collect-onlyCollect tests only, do not run them. --db-reset-dReset DB when entering the container. --backend-bDatabase backend to use.(>sqlite< | mysql | postgres | mssql)[default: sqlite] ---python-pPython major/minor version used in Airflow image for images.(>3.7< | 3.8 | 3.9 | 3.10) -[default: 3.7]                                               +--python-pPython major/minor version used in Airflow image for images.(>3.8< | 3.9 | 3.10) +[default: 3.8]                                               --postgres-version-PVersion of Postgres used.(>11< | 12 | 13 | 14 | 15)[default: 11] --mysql-version-MVersion of MySQL used.(>5.7< | 8)[default: 5.7] --mssql-version-SVersion of MsSQL used.(>2017-latest< | 2019-latest)[default: 2017-latest] diff --git a/pyproject.toml b/pyproject.toml index 0568ed62f9674..0048f4d7d976d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,9 @@ target-version = ['py37', 'py38', 'py39', 'py310'] requires = ['setuptools==67.2.0'] build-backend = "setuptools.build_meta" +[project] +requires-python = ">=3.8" + [tool.ruff] typing-modules = ["airflow.typing_compat"] line-length = 110 @@ -38,9 +41,6 @@ extend-exclude = [ "airflow/providers/common/sql/*/*.pyi" ] -# TODO: Bump to Python 3.8 when support for Python 3.7 is dropped in Airflow. -target-version = "py37" - extend-select = [ "I", # Missing required import (auto-fixable) "UP", # Pyupgrade diff --git a/scripts/ci/docker-compose/devcontainer.env b/scripts/ci/docker-compose/devcontainer.env index d2ca64922b6f7..d66767fa02c92 100644 --- a/scripts/ci/docker-compose/devcontainer.env +++ b/scripts/ci/docker-compose/devcontainer.env @@ -15,10 +15,10 @@ # specific language governing permissions and limitations # under the License. 
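Aside: the ``requires-python = ">=3.8"`` pin added to ``pyproject.toml`` above (together with the ``python_requires = ~=3.8`` bump in ``setup.cfg`` further down) is what makes pip refuse to install the package on Python 3.7. A minimal sketch of the equivalent runtime guard, illustrative and not taken from the patch:

.. code-block:: python

    # Minimal sketch (not from the patch): fail fast on an unsupported
    # interpreter, mirroring the requires-python / python_requires pins.
    import sys

    if sys.version_info < (3, 8):
        raise RuntimeError(
            "Python 3.8+ is required, found "
            f"{sys.version_info.major}.{sys.version_info.minor}"
        )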
HOME= -AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.7:latest" +AIRFLOW_CI_IMAGE="ghcr.io/apache/airflow/main/ci/python3.8:latest" ANSWER= AIRFLOW_ENABLE_AIP_44="true" -PYTHON_MAJOR_MINOR_VERSION="3.7" +PYTHON_MAJOR_MINOR_VERSION="3.8" AIRFLOW_EXTRAS= BASE_BRANCH="main" BREEZE="true" diff --git a/scripts/ci/docker-compose/devcontainer.yml b/scripts/ci/docker-compose/devcontainer.yml index e7f013684c375..a9fc3d6fa788c 100644 --- a/scripts/ci/docker-compose/devcontainer.yml +++ b/scripts/ci/docker-compose/devcontainer.yml @@ -20,7 +20,7 @@ services: airflow: stdin_open: true # docker run -i tty: true # docker run -t - image: ghcr.io/apache/airflow/main/ci/python3.7 + image: ghcr.io/apache/airflow/main/ci/python3.8 env_file: devcontainer.env ports: - "22:22" diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py index 97b66a04f0786..995ff18ba0407 100755 --- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py +++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py @@ -62,6 +62,6 @@ if cmd_result.returncode != 0: get_console().print( "[warning]If you see strange stacktraces above, " - "run `breeze ci-image build --python 3.7` and try again." + "run `breeze ci-image build --python 3.8` and try again." ) sys.exit(cmd_result.returncode) diff --git a/scripts/ci/pre_commit/pre_commit_migration_reference.py b/scripts/ci/pre_commit/pre_commit_migration_reference.py index 6117de057af59..2f508aaf3a252 100755 --- a/scripts/ci/pre_commit/pre_commit_migration_reference.py +++ b/scripts/ci/pre_commit/pre_commit_migration_reference.py @@ -58,6 +58,6 @@ if cmd_result.returncode != 0: get_console().print( "[warning]If you see strange stacktraces above, " - "run `breeze ci-image build --python 3.7` and try again." + "run `breeze ci-image build --python 3.8` and try again." ) sys.exit(cmd_result.returncode) diff --git a/scripts/ci/pre_commit/pre_commit_mypy.py b/scripts/ci/pre_commit/pre_commit_mypy.py index 2c6ac05bfe98a..0f0fb47710163 100755 --- a/scripts/ci/pre_commit/pre_commit_mypy.py +++ b/scripts/ci/pre_commit/pre_commit_mypy.py @@ -78,7 +78,7 @@ if cmd_result.returncode != 0: get_console().print( "[warning]If you see strange stacktraces above, " - "run `breeze ci-image build --python 3.7` and try again. " + "run `breeze ci-image build --python 3.8` and try again. " "You can also run `breeze stop --cleanup-mypy-cache` to clean up the cache used." ) sys.exit(cmd_result.returncode) diff --git a/scripts/ci/pre_commit/pre_commit_update_er_diagram.py b/scripts/ci/pre_commit/pre_commit_update_er_diagram.py index feaef34c7b855..d333f008e057c 100755 --- a/scripts/ci/pre_commit/pre_commit_update_er_diagram.py +++ b/scripts/ci/pre_commit/pre_commit_update_er_diagram.py @@ -66,7 +66,7 @@ if cmd_result.returncode != 0: get_console().print( "[warning]If you see strange stacktraces above, " - "run `breeze ci-image build --python 3.7` and try again." + "run `breeze ci-image build --python 3.8` and try again." ) sys.exit(cmd_result.returncode) diff --git a/scripts/docker/entrypoint_ci.sh b/scripts/docker/entrypoint_ci.sh index ca853e74c90b3..8e4eab6ff2a47 100755 --- a/scripts/docker/entrypoint_ci.sh +++ b/scripts/docker/entrypoint_ci.sh @@ -40,7 +40,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." 
|| exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.7} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/setup.cfg b/setup.cfg index 39e0c4450dffa..7b10c82538098 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,7 +36,6 @@ classifiers = Intended Audience :: Developers Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 @@ -54,7 +53,7 @@ project_urls = [options] zip_safe = False include_package_data = True -python_requires = ~=3.7 +python_requires = ~=3.8 packages = find: setup_requires = gitpython @@ -72,7 +71,6 @@ install_requires = asgiref attrs>=22.1.0 blinker - cached_property>=1.5.0;python_version<="3.7" cattrs>=22.1.0 # Colorlog 6.x merges TTYColoredFormatter into ColoredFormatter, breaking backwards compatibility with 4.x # Update CustomTTYColoredFormatter to remove @@ -205,11 +203,11 @@ disable_error_code = var-annotated [mypy-airflow.migrations.*] ignore_errors = True -# Let's assume all google.cloud packages have no implicit optional +# Let's assume all google.cloud packages have implicit optionals # Most of them don't but even if they do, it does not matter [mypy-google.cloud.*] no_implicit_optional = False -# Let's assume all azure packages have no implicit optional +# Let's assume all azure packages have implicit optionals [mypy-azure.*] no_implicit_optional = False diff --git a/setup.py b/setup.py index b6174e2e2c290..81e2ed4aa3d2f 100644 --- a/setup.py +++ b/setup.py @@ -256,9 +256,6 @@ def write_version(filename: str = str(AIRFLOW_SOURCES_ROOT / "airflow" / "git_ve #
tags for sections "docutils<0.17.0", "eralchemy2", - # Without this, Sphinx goes in to a _very_ large backtrack on Python 3.7, - # even though Sphinx 4.4.0 has this but with python_version<3.10. - 'importlib-metadata>=4.4; python_version < "3.8"', "sphinx-airflow-theme", "sphinx-argparse>=0.1.13", "sphinx-autoapi>=2.0.0", diff --git a/tests/executors/test_debug_executor.py b/tests/executors/test_debug_executor.py index d4b40f637a2ec..0cc82d3021e7b 100644 --- a/tests/executors/test_debug_executor.py +++ b/tests/executors/test_debug_executor.py @@ -67,9 +67,9 @@ def test_queue_task_instance(self): } def test_trigger_tasks(self): - execute_async_mock = MagicMock() + execute_mock = MagicMock() executor = DebugExecutor() - executor.execute_async = execute_async_mock + executor.execute_async = execute_mock executor.queued_tasks = { "t1": (None, 1, None, MagicMock(key="t1")), @@ -80,7 +80,7 @@ def test_trigger_tasks(self): assert not executor.queued_tasks assert len(executor.running) == 2 assert len(executor.tasks_to_run) == 2 - assert not execute_async_mock.called + assert not execute_mock.called def test_end(self): ti = MagicMock(key="ti_key") diff --git a/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py b/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py index 969729efa30cd..340564de8b5d1 100644 --- a/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py +++ b/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py @@ -16,13 +16,13 @@ # under the License. from __future__ import annotations +from unittest import mock from unittest.mock import ANY import pytest from airflow.models.connection import Connection from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseAsyncHook -from tests.providers.amazon.aws.utils.compat import async_mock pytest.importorskip("aiobotocore") @@ -56,8 +56,8 @@ def __eq__(self, other): return self.compare(self.obj, other) @pytest.mark.asyncio - @async_mock.patch("airflow.hooks.base.BaseHook.get_connection") - @async_mock.patch("aiobotocore.session.AioClientCreator.create_client") + @mock.patch("airflow.hooks.base.BaseHook.get_connection") + @mock.patch("aiobotocore.session.AioClientCreator.create_client") async def test_get_client_async(self, mock_client, mock_get_connection): """Check the connection credential passed while creating client""" mock_get_connection.return_value = Connection( diff --git a/tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py b/tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py index 4388c48b72a5d..2693524231681 100644 --- a/tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py +++ b/tests/providers/amazon/aws/deferrable/hooks/test_redshift_cluster.py @@ -17,19 +17,19 @@ from __future__ import annotations import asyncio +from unittest import mock import pytest from botocore.exceptions import ClientError from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftAsyncHook -from tests.providers.amazon.aws.utils.compat import async_mock pytest.importorskip("aiobotocore") class TestRedshiftAsyncHook: @pytest.mark.asyncio - @async_mock.patch("aiobotocore.client.AioBaseClient._make_api_call") + @mock.patch("aiobotocore.client.AioBaseClient._make_api_call") async def test_cluster_status(self, mock_make_api_call): """Test that describe_clusters get called with correct param""" hook = RedshiftAsyncHook(aws_conn_id="aws_default", client_type="redshift", resource_type="redshift") @@ -39,7 +39,7 @@ async def test_cluster_status(self, 
mock_make_api_call): ) @pytest.mark.asyncio - @async_mock.patch("aiobotocore.client.AioBaseClient._make_api_call") + @mock.patch("aiobotocore.client.AioBaseClient._make_api_call") async def test_pause_cluster(self, mock_make_api_call): """Test that pause_cluster get called with correct param""" hook = RedshiftAsyncHook(aws_conn_id="aws_default", client_type="redshift", resource_type="redshift") @@ -49,10 +49,8 @@ async def test_pause_cluster(self, mock_make_api_call): ) @pytest.mark.asyncio - @async_mock.patch( - "airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.get_client_async" - ) - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.cluster_status") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.get_client_async") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.cluster_status") async def test_get_cluster_status(self, cluster_status, mock_client): """Test get_cluster_status async function with success response""" flag = asyncio.Event() @@ -62,7 +60,7 @@ async def test_get_cluster_status(self, cluster_status, mock_client): assert result == {"status": "success", "cluster_state": "available"} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.cluster_status") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.cluster_status") async def test_get_cluster_status_exception(self, cluster_status): """Test get_cluster_status async function with exception response""" flag = asyncio.Event() @@ -84,7 +82,7 @@ async def test_get_cluster_status_exception(self, cluster_status): } @pytest.mark.asyncio - @async_mock.patch("aiobotocore.client.AioBaseClient._make_api_call") + @mock.patch("aiobotocore.client.AioBaseClient._make_api_call") async def test_resume_cluster(self, mock_make_api_call): """Test Resume cluster async hook function by mocking return value of resume_cluster""" @@ -95,9 +93,7 @@ async def test_resume_cluster(self, mock_make_api_call): ) @pytest.mark.asyncio - @async_mock.patch( - "airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.get_client_async" - ) + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.get_client_async") async def test_resume_cluster_exception(self, mock_client): """Test Resume cluster async hook function with exception by mocking return value of resume_cluster""" mock_client.return_value.__aenter__.return_value.resume_cluster.side_effect = ClientError( diff --git a/tests/providers/amazon/aws/deferrable/triggers/test_redshift_cluster.py b/tests/providers/amazon/aws/deferrable/triggers/test_redshift_cluster.py index 8f685f8c9f64d..f2430f6d032f1 100644 --- a/tests/providers/amazon/aws/deferrable/triggers/test_redshift_cluster.py +++ b/tests/providers/amazon/aws/deferrable/triggers/test_redshift_cluster.py @@ -16,13 +16,14 @@ # under the License. 
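Aside: the conversions above work because ``unittest.mock.AsyncMock`` (standard library since Python 3.8) produces awaitable mocks, replacing ``asynctest.CoroutineMock`` that the compat shims deleted by this patch wrapped. A hedged sketch with illustrative names, not code from the patch:

.. code-block:: python

    import asyncio
    from unittest import mock


    async def cluster_state(hook) -> str:
        # code under test awaits the hook method
        return await hook.cluster_status("test-cluster")


    async def main() -> None:
        hook = mock.MagicMock()
        # AsyncMock returns a coroutine when called, so it can be awaited
        hook.cluster_status = mock.AsyncMock(return_value="available")
        assert await cluster_state(hook) == "available"
        hook.cluster_status.assert_awaited_once_with("test-cluster")


    asyncio.run(main())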
from __future__ import annotations +from unittest import mock + import pytest from airflow.providers.amazon.aws.triggers.redshift_cluster import ( RedshiftClusterTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.amazon.aws.utils.compat import async_mock pytest.importorskip("aiobotocore") @@ -56,7 +57,7 @@ def test_pause_serialization(self): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.pause_cluster") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.pause_cluster") async def test_pause_trigger_run(self, mock_pause_cluster): """ Test trigger event for the pause_cluster response @@ -76,7 +77,7 @@ async def test_pause_trigger_run(self, mock_pause_cluster): ) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.pause_cluster") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.pause_cluster") async def test_pause_trigger_failure(self, mock_pause_cluster): """Test trigger event when pause cluster raise exception""" mock_pause_cluster.side_effect = Exception("Test exception") @@ -102,7 +103,7 @@ async def test_pause_trigger_failure(self, mock_pause_cluster): ), ], ) - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") async def test_resume_trigger_run_error( self, mock_resume_cluster, operation_type, return_value, response ): @@ -131,7 +132,7 @@ async def test_resume_trigger_run_error( ), ], ) - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") async def test_resume_trigger_run_success( self, mock_resume_cluster, operation_type, return_value, response ): @@ -150,7 +151,7 @@ async def test_resume_trigger_run_success( assert response == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftAsyncHook.resume_cluster") async def test_resume_trigger_failure(self, mock_resume_cluster): """Test RedshiftClusterTrigger resume cluster with failure status""" mock_resume_cluster.side_effect = Exception("Test exception") diff --git a/tests/providers/amazon/aws/triggers/test_redshift_cluster.py b/tests/providers/amazon/aws/triggers/test_redshift_cluster.py index 941258659e9ae..769199a4031e1 100644 --- a/tests/providers/amazon/aws/triggers/test_redshift_cluster.py +++ b/tests/providers/amazon/aws/triggers/test_redshift_cluster.py @@ -16,20 +16,14 @@ # under the License. 
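Aside: the decorator rewrites from ``@async_mock.patch(...)`` to plain ``@mock.patch(...)`` rely on a Python 3.8 behavior of ``unittest.mock``: ``patch`` and ``patch.object`` substitute an ``AsyncMock`` automatically when the patched target is an ``async def``. A hedged sketch (the hook class below is a hypothetical stand-in, not from the patch):

.. code-block:: python

    import asyncio
    from unittest import mock


    class LivyishHook:  # hypothetical stand-in for a real async hook
        async def get_batch_state(self, batch_id: int) -> str:
            raise NotImplementedError("real code would call the Livy API")


    async def check(hook: LivyishHook) -> str:
        return await hook.get_batch_state(1)


    with mock.patch.object(LivyishHook, "get_batch_state", return_value="running") as m:
        # patch.object sees the async def target and installs an AsyncMock
        assert asyncio.run(check(LivyishHook())) == "running"
        m.assert_awaited_once_with(1)  # AsyncMock-only assertion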
from __future__ import annotations -import sys +from unittest import mock +from unittest.mock import AsyncMock import pytest from airflow.providers.amazon.aws.triggers.redshift_cluster import RedshiftCreateClusterTrigger from airflow.triggers.base import TriggerEvent -if sys.version_info < (3, 8): - from asynctest import CoroutineMock as AsyncMock, mock as async_mock -else: - from unittest import mock as async_mock - from unittest.mock import AsyncMock - - TEST_CLUSTER_IDENTIFIER = "test-cluster" TEST_POLL_INTERVAL = 10 TEST_MAX_ATTEMPT = 10 @@ -55,11 +49,11 @@ def test_redshift_create_cluster_trigger_serialize(self): assert args["aws_conn_id"] == TEST_AWS_CONN_ID @pytest.mark.asyncio - @async_mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftHook.async_conn") + @mock.patch("airflow.providers.amazon.aws.hooks.redshift_cluster.RedshiftHook.async_conn") async def test_redshift_create_cluster_trigger_run(self, mock_async_conn): - mock = async_mock.MagicMock() - mock_async_conn.__aenter__.return_value = mock - mock.get_waiter().wait = AsyncMock() + the_mock = mock.MagicMock() + mock_async_conn.__aenter__.return_value = the_mock + the_mock.get_waiter().wait = AsyncMock() redshift_create_cluster_trigger = RedshiftCreateClusterTrigger( cluster_identifier=TEST_CLUSTER_IDENTIFIER, diff --git a/tests/providers/amazon/aws/utils/compat.py b/tests/providers/amazon/aws/utils/compat.py deleted file mode 100644 index 9c03414adcac4..0000000000000 --- a/tests/providers/amazon/aws/utils/compat.py +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -__all__ = ["async_mock", "AsyncMock"] - -import sys - -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock - - # ``asynctest.mock.CoroutineMock`` which provide compatibility not working well with autospec=True - # as result "TypeError: object MagicMock can't be used in 'await' expression" could be raised. - # Best solution in this case provide as spec actual awaitable object - # >>> from tests.providers.amazon.cloud.utils.compat import AsyncMock - # >>> from foo.bar import SpamEgg - # >>> mock_something = AsyncMock(SpamEgg) - from asynctest.mock import CoroutineMock as AsyncMock -else: - from unittest import mock as async_mock - from unittest.mock import AsyncMock diff --git a/tests/providers/apache/livy/compat.py b/tests/providers/apache/livy/compat.py deleted file mode 100644 index af1f0d225713c..0000000000000 --- a/tests/providers/apache/livy/compat.py +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. 
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -__all__ = ["async_mock", "AsyncMock"] - -import sys - -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock - - # ``asynctest.mock.CoroutineMock`` which provide compatibility not working well with autospec=True - # as result "TypeError: object MagicMock can't be used in 'await' expression" could be raised. - # Best solution in this case provide as spec actual awaitable object - # >>> from tests.providers.apache.livy.compat import AsyncMock - # >>> from foo.bar import SpamEgg - # >>> mock_something = AsyncMock(SpamEgg) - from asynctest.mock import CoroutineMock as AsyncMock -else: - from unittest import mock as async_mock - from unittest.mock import AsyncMock diff --git a/tests/providers/apache/livy/hooks/test_livy.py b/tests/providers/apache/livy/hooks/test_livy.py index 63266dfe2329f..913c44c661a4b 100644 --- a/tests/providers/apache/livy/hooks/test_livy.py +++ b/tests/providers/apache/livy/hooks/test_livy.py @@ -17,7 +17,8 @@ from __future__ import annotations import json -from unittest.mock import MagicMock, patch +from unittest import mock +from unittest.mock import AsyncMock, MagicMock, patch import multidict import pytest @@ -28,7 +29,6 @@ from airflow.models import Connection from airflow.providers.apache.livy.hooks.livy import BatchState, LivyAsyncHook, LivyHook from airflow.utils import db -from tests.providers.apache.livy.compat import AsyncMock, async_mock from tests.test_utils.db import clear_db_connections LIVY_CONN_ID = LivyHook.default_conn_name @@ -408,7 +408,7 @@ def test_alternate_auth_type(self): class TestLivyAsyncHook: @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") async def test_get_batch_state_running(self, mock_run_method): """Asserts the batch state as running with success response.""" mock_run_method.return_value = {"status": "success", "response": {"state": BatchState.RUNNING}} @@ -421,7 +421,7 @@ async def test_get_batch_state_running(self, mock_run_method): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") async def test_get_batch_state_error(self, mock_run_method): """Asserts the batch state as error with error response.""" mock_run_method.return_value = {"status": "error", "response": {"state": "error"}} @@ -430,7 +430,7 @@ async def test_get_batch_state_error(self, mock_run_method): assert state["status"] == "error" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") + 
@mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") async def test_get_batch_state_error_without_state(self, mock_run_method): """Asserts the batch state as error without state returned as part of mock.""" mock_run_method.return_value = {"status": "success", "response": {}} @@ -439,7 +439,7 @@ async def test_get_batch_state_error_without_state(self, mock_run_method): assert state["status"] == "error" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") async def test_get_batch_logs_success(self, mock_run_method): """Asserts the batch log as success.""" mock_run_method.return_value = {"status": "success", "response": {}} @@ -448,7 +448,7 @@ async def test_get_batch_logs_success(self, mock_run_method): assert state["status"] == "success" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.run_method") async def test_get_batch_logs_error(self, mock_run_method): """Asserts the batch log for error.""" mock_run_method.return_value = {"status": "error", "response": {}} @@ -457,7 +457,7 @@ async def test_get_batch_logs_error(self, mock_run_method): assert state["status"] == "error" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_logs") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_logs") async def test_dump_batch_logs_success(self, mock_get_batch_logs): """Asserts the log dump log for success response.""" mock_get_batch_logs.return_value = { @@ -469,7 +469,7 @@ async def test_dump_batch_logs_success(self, mock_get_batch_logs): assert log_dump == ["mock_log_1", "mock_log_2", "mock_log_3"] @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_logs") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_logs") async def test_dump_batch_logs_error(self, mock_get_batch_logs): """Asserts the log dump log for error response.""" mock_get_batch_logs.return_value = { @@ -481,7 +481,7 @@ async def test_dump_batch_logs_error(self, mock_get_batch_logs): assert log_dump == {"id": 1, "log": ["mock_log_1", "mock_log_2"]} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook._do_api_call_async") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook._do_api_call_async") async def test_run_method_success(self, mock_do_api_call_async): """Asserts the run_method for success response.""" mock_do_api_call_async.return_value = {"status": "error", "response": {"id": 1}} @@ -490,7 +490,7 @@ async def test_run_method_success(self, mock_do_api_call_async): assert response["status"] == "success" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook._do_api_call_async") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook._do_api_call_async") async def test_run_method_error(self, mock_do_api_call_async): """Asserts the run_method for error response.""" mock_do_api_call_async.return_value = {"status": "error", "response": {"id": 1}} @@ -499,8 +499,8 @@ async def test_run_method_error(self, mock_do_api_call_async): assert response == {"status": "error", "response": "Invalid http method abc"} @pytest.mark.asyncio - 
@async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + @mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_post_method_with_success(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for success response for POST method.""" @@ -522,8 +522,8 @@ async def mock_fun(arg1, arg2, arg3, arg4): assert response == {"status": "success"} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + @mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_get_method_with_success(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for GET method.""" @@ -547,8 +547,8 @@ async def mock_fun(arg1, arg2, arg3, arg4): assert response == {"status": "success"} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + @mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_patch_method_with_success(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for PATCH method.""" @@ -572,8 +572,8 @@ async def mock_fun(arg1, arg2, arg3, arg4): assert response == {"status": "success"} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + @mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_unexpected_method_error(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for unexpected method error""" GET_RUN_ENDPOINT = "api/jobs/runs/get" @@ -588,8 +588,8 @@ async def test_do_api_call_async_unexpected_method_error(self, mock_get_connecti assert response == {"Response": "Unexpected HTTP Method: abc", "status": "error"} @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + @mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_with_type_error(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for TypeError.""" @@ -607,8 +607,8 @@ async def mock_fun(arg1, arg2, arg3, arg4): await hook._do_api_call_async(endpoint="", data="test", headers=mock_fun, extra_options=mock_fun) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") + 
@mock.patch("airflow.providers.apache.livy.hooks.livy.aiohttp.ClientSession") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_connection") async def test_do_api_call_async_with_client_response_error(self, mock_get_connection, mock_session): """Asserts the _do_api_call_async for Client Response Error.""" diff --git a/tests/providers/apache/livy/triggers/test_livy.py b/tests/providers/apache/livy/triggers/test_livy.py index 9e6dc02e75ed2..0bdf8fad49902 100644 --- a/tests/providers/apache/livy/triggers/test_livy.py +++ b/tests/providers/apache/livy/triggers/test_livy.py @@ -17,6 +17,7 @@ from __future__ import annotations import asyncio +from unittest import mock import pytest from aiohttp import ClientConnectionError @@ -24,7 +25,6 @@ from airflow.providers.apache.livy.hooks.livy import BatchState, LivyHook from airflow.providers.apache.livy.triggers.livy import LivyTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.apache.livy.compat import async_mock class TestLivyTrigger: @@ -49,7 +49,7 @@ def test_livy_trigger_serialization(self): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") + @mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") async def test_livy_trigger_run_with_no_poll_interval(self, mock_poll_for_termination): """ Test if the task ran in the triggerer successfully with poll interval=0. @@ -69,7 +69,7 @@ async def test_livy_trigger_run_with_no_poll_interval(self, mock_poll_for_termin ) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") + @mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") async def test_livy_trigger_run_with_poll_interval_success(self, mock_poll_for_termination): """ Test if the task ran in the triggerer successfully with poll interval>0. 
In the case when @@ -85,7 +85,7 @@ async def test_livy_trigger_run_with_poll_interval_success(self, mock_poll_for_t assert TriggerEvent({"status": "success"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") + @mock.patch("airflow.providers.apache.livy.triggers.livy.LivyTrigger.poll_for_termination") async def test_livy_trigger_run_with_poll_interval_error(self, mock_poll_for_termination): """Test if the task in the trigger returned an error when poll_for_termination returned error.""" mock_poll_for_termination.return_value = {"status": "error"} @@ -133,8 +133,8 @@ async def test_livy_trigger_poll_for_termination_with_client_error(self): await trigger.poll_for_termination(1) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") async def test_livy_trigger_poll_for_termination_success( self, mock_dump_batch_logs, mock_get_batch_state ): @@ -158,8 +158,8 @@ async def test_livy_trigger_poll_for_termination_success( } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") async def test_livy_trigger_poll_for_termination_error(self, mock_dump_batch_logs, mock_get_batch_state): """ Test if the poll_for_termination() in the trigger returned error response when get_batch_state() @@ -181,8 +181,8 @@ async def test_livy_trigger_poll_for_termination_error(self, mock_dump_batch_log } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") - @async_mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.get_batch_state") + @mock.patch("airflow.providers.apache.livy.hooks.livy.LivyAsyncHook.dump_batch_logs") async def test_livy_trigger_poll_for_termination_state(self, mock_dump_batch_logs, mock_get_batch_state): """ Test if the poll_for_termination() in the trigger is still polling when get_batch_state() returned diff --git a/tests/providers/databricks/hooks/test_databricks.py b/tests/providers/databricks/hooks/test_databricks.py index ec3d9e28c6ad8..f55c55dfc364c 100644 --- a/tests/providers/databricks/hooks/test_databricks.py +++ b/tests/providers/databricks/hooks/test_databricks.py @@ -19,8 +19,9 @@ import itertools import json -import sys import time +from unittest import mock +from unittest.mock import AsyncMock import aiohttp import pytest @@ -47,13 +48,6 @@ ) from airflow.utils.session import provide_session -if sys.version_info < (3, 8): - from asynctest import mock - from asynctest.mock import CoroutineMock as AsyncMock -else: - from unittest import mock - from unittest.mock import AsyncMock - TASK_ID = "databricks-operator" DEFAULT_CONN_ID = "databricks_default" NOTEBOOK_TASK = {"notebook_path": "/test"} diff --git a/tests/providers/dbt/cloud/triggers/test_dbt_cloud.py 
b/tests/providers/dbt/cloud/triggers/test_dbt_cloud.py index 1436112d224d4..2a1f26b49d838 100644 --- a/tests/providers/dbt/cloud/triggers/test_dbt_cloud.py +++ b/tests/providers/dbt/cloud/triggers/test_dbt_cloud.py @@ -17,8 +17,9 @@ from __future__ import annotations import asyncio -import sys import time +from unittest import mock +from unittest.mock import AsyncMock import pytest @@ -26,14 +27,6 @@ from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger from airflow.triggers.base import TriggerEvent -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock - from asynctest.mock import CoroutineMock as AsyncMock -else: - from unittest import mock as async_mock - from unittest.mock import AsyncMock - class TestDbtCloudRunJobTrigger: DAG_ID = "dbt_cloud_run" @@ -64,7 +57,7 @@ def test_serialization(self): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") async def test_dbt_run_job_trigger(self, mocked_is_still_running): """Test DbtCloudRunJobTrigger is triggered with mocked details and run successfully.""" mocked_is_still_running.return_value = True @@ -89,8 +82,8 @@ async def test_dbt_run_job_trigger(self, mocked_is_still_running): (DbtCloudJobRunStatus.SUCCESS.value, "success", "Job run 1234 has completed successfully."), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_for_terminal_status_success( self, mock_get_job_status, mocked_is_still_running, mock_value, mock_status, mock_message ): @@ -121,8 +114,8 @@ async def test_dbt_job_run_for_terminal_status_success( (DbtCloudJobRunStatus.CANCELLED.value, "cancelled", "Job run 1234 has been cancelled."), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_for_terminal_status_cancelled( self, mock_get_job_status, mocked_is_still_running, mock_value, mock_status, mock_message ): @@ -153,8 +146,8 @@ async def test_dbt_job_run_for_terminal_status_cancelled( (DbtCloudJobRunStatus.ERROR.value, "error", "Job run 1234 has failed."), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_for_terminal_status_error( self, mock_get_job_status, mocked_is_still_running, mock_value, mock_status, mock_message ): @@ -179,8 +172,8 @@ async def test_dbt_job_run_for_terminal_status_error( asyncio.get_event_loop().stop() @pytest.mark.asyncio - 
@async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_exception(self, mock_get_job_status, mocked_is_still_running): """Assert that run catch exception if dbt cloud job API throw exception""" mocked_is_still_running.return_value = False @@ -204,8 +197,8 @@ async def test_dbt_job_run_exception(self, mock_get_job_status, mocked_is_still_ assert response in task @pytest.mark.asyncio - @async_mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger.is_still_running") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_timeout(self, mock_get_job_status, mocked_is_still_running): """Assert that run timeout after end_time elapsed""" mocked_is_still_running.return_value = True @@ -237,7 +230,7 @@ async def test_dbt_job_run_timeout(self, mock_get_job_status, mocked_is_still_ru (DbtCloudJobRunStatus.SUCCESS.value, False), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_is_still_running_success( self, mock_get_job_status, mock_response, expected_status ): @@ -262,7 +255,7 @@ async def test_dbt_job_run_is_still_running_success( (DbtCloudJobRunStatus.RUNNING.value, True), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_is_still_running(self, mock_get_job_status, mock_response, expected_status): """Test is_still_running with mocked response job status and assert the return response with expected value""" @@ -285,7 +278,7 @@ async def test_dbt_job_run_is_still_running(self, mock_get_job_status, mock_resp (DbtCloudJobRunStatus.QUEUED.value, True), ], ) - @async_mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") + @mock.patch("airflow.providers.dbt.cloud.hooks.dbt.DbtCloudHook.get_job_status") async def test_dbt_job_run_is_still_running_queued( self, mock_get_job_status, mock_response, expected_status ): diff --git a/tests/providers/google/cloud/hooks/test_bigquery.py b/tests/providers/google/cloud/hooks/test_bigquery.py index 0e09080fa7d83..e6bd6e8a23a0e 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery.py +++ b/tests/providers/google/cloud/hooks/test_bigquery.py @@ -20,6 +20,7 @@ import re from datetime import datetime from unittest import mock +from unittest.mock import AsyncMock import pytest from gcloud.aio.bigquery import Job, Table as Table_async @@ -42,7 +43,6 @@ _validate_value, split_tablename, ) -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock PROJECT_ID = "bq-project" CREDENTIALS = "bq-credentials" @@ -2132,14 +2132,14 @@ def get_credentials_and_project_id(self): class TestBigQueryAsyncHookMethods(_BigQueryBaseAsyncTestClass): @pytest.mark.asyncio - 
@async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.ClientSession") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.ClientSession") async def test_get_job_instance(self, mock_session): hook = BigQueryAsyncHook() result = await hook.get_job_instance(project_id=PROJECT_ID, job_id=JOB_ID, session=mock_session) assert isinstance(result, Job) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_get_job_status_success(self, mock_job_instance): hook = BigQueryAsyncHook() mock_job_client = AsyncMock(Job) @@ -2150,7 +2150,7 @@ async def test_get_job_status_success(self, mock_job_instance): assert resp == response @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_get_job_status_oserror(self, mock_job_instance): """Assets that the BigQueryAsyncHook returns a pending response when OSError is raised""" mock_job_instance.return_value.result.side_effect = OSError() @@ -2159,7 +2159,7 @@ async def test_get_job_status_oserror(self, mock_job_instance): assert job_status == "pending" @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_get_job_status_exception(self, mock_job_instance, caplog): """Assets that the logging is done correctly when BigQueryAsyncHook raises Exception""" mock_job_instance.return_value.result.side_effect = Exception() @@ -2168,7 +2168,7 @@ async def test_get_job_status_exception(self, mock_job_instance, caplog): assert "Query execution finished with errors..." 
in caplog.text @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_get_job_output_assert_once_with(self, mock_job_instance): hook = BigQueryAsyncHook() mock_job_client = AsyncMock(Job) @@ -2231,7 +2231,7 @@ def test_interval_check_for_success(self): assert response is None @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_get_job_output(self, mock_job_instance): """ Tests to check if a particular object in Google Cloud Storage @@ -2311,7 +2311,7 @@ def test_convert_to_float_if_possible(self, test_input, expected): assert BigQueryAsyncHook._convert_to_float_if_possible(test_input) == expected @pytest.mark.asyncio - @async_mock.patch("aiohttp.client.ClientSession") + @mock.patch("aiohttp.client.ClientSession") async def test_get_table_client(self, mock_session): """Test get_table_client async function and check whether the return value is a Table instance object""" diff --git a/tests/providers/google/cloud/hooks/test_bigquery_dts.py b/tests/providers/google/cloud/hooks/test_bigquery_dts.py index d37125a5862ff..8beb54bc1b691 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery_dts.py +++ b/tests/providers/google/cloud/hooks/test_bigquery_dts.py @@ -19,6 +19,8 @@ from asyncio import Future from copy import deepcopy +from unittest import mock +from unittest.mock import AsyncMock import pytest from google.api_core.gapic_v1.method import DEFAULT @@ -29,7 +31,6 @@ BiqQueryDataTransferServiceHook, ) from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock CREDENTIALS = "test-creds" PROJECT_ID = "id" @@ -60,19 +61,19 @@ def test_delegate_to_runtime_error(self): BiqQueryDataTransferServiceHook(gcp_conn_id="GCP_CONN_ID", delegate_to="delegate_to") def setup_method(self): - with async_mock.patch( + with mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts.GoogleBaseHook.__init__", new=mock_base_gcp_hook_no_default_project_id, ): self.hook = BiqQueryDataTransferServiceHook() - self.hook.get_credentials = async_mock.MagicMock(return_value=CREDENTIALS) # type: ignore + self.hook.get_credentials = mock.MagicMock(return_value=CREDENTIALS) # type: ignore def test_disable_auto_scheduling(self): expected = deepcopy(TRANSFER_CONFIG) expected.schedule_options.disable_auto_scheduling = True assert expected == self.hook._disable_auto_scheduling(TRANSFER_CONFIG) - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts." "DataTransferServiceClient.create_transfer_config" ) @@ -88,7 +89,7 @@ def test_create_transfer_config(self, service_mock): timeout=None, ) - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts." "DataTransferServiceClient.delete_transfer_config" ) @@ -100,7 +101,7 @@ def test_delete_transfer_config(self, service_mock): request=dict(name=name), metadata=(), retry=DEFAULT, timeout=None ) - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts." 
"DataTransferServiceClient.start_manual_transfer_runs" ) @@ -115,7 +116,7 @@ def test_start_manual_transfer_runs(self, service_mock): timeout=None, ) - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts.DataTransferServiceClient.get_transfer_run" ) def test_get_transfer_run(self, service_mock): @@ -138,14 +139,14 @@ def test_delegate_to_runtime_error(self): @pytest.fixture() def mock_client(self): - with async_mock.patch( + with mock.patch( f"{self.HOOK_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook._get_conn", new_callable=AsyncMock, ) as mock_client: transfer_result = Future() - transfer_result.set_result(async_mock.MagicMock()) + transfer_result.set_result(mock.MagicMock()) - mock_client.return_value.get_transfer_run = async_mock.MagicMock(return_value=transfer_result) + mock_client.return_value.get_transfer_run = mock.MagicMock(return_value=transfer_result) yield mock_client @pytest.fixture diff --git a/tests/providers/google/cloud/hooks/test_cloud_build.py b/tests/providers/google/cloud/hooks/test_cloud_build.py index 2e21bf5ae9a86..fb59027cfeb41 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_build.py +++ b/tests/providers/google/cloud/hooks/test_cloud_build.py @@ -32,7 +32,6 @@ from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildAsyncHook, CloudBuildHook from airflow.providers.google.common.consts import CLIENT_INFO from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id -from tests.providers.google.cloud.utils.compat import async_mock PROJECT_ID = "cloud-build-project" LOCATION = "test-location" @@ -337,11 +336,11 @@ def hook(self): ) @pytest.mark.asyncio - @async_mock.patch.object( + @mock.patch.object( CloudBuildAsyncClient, "__init__", lambda self, credentials, client_info, client_options: None ) - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook.get_credentials")) - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncClient.get_build")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook.get_credentials")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncClient.get_build")) async def test_async_cloud_build_service_client_creation_should_execute_successfully( self, mocked_get_build, mock_get_creds, hook ): diff --git a/tests/providers/google/cloud/hooks/test_cloud_composer.py b/tests/providers/google/cloud/hooks/test_cloud_composer.py index c91e9969e9f47..1072b310b2192 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_composer.py +++ b/tests/providers/google/cloud/hooks/test_cloud_composer.py @@ -18,13 +18,13 @@ from __future__ import annotations from unittest import mock +from unittest.mock import AsyncMock import pytest from google.api_core.gapic_v1.method import DEFAULT from google.cloud.orchestration.airflow.service_v1 import EnvironmentsAsyncClient from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerAsyncHook, CloudComposerHook -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock TEST_GCP_REGION = "global" TEST_GCP_PROJECT = "test-project" @@ -204,11 +204,11 @@ def test_delegate_to_runtime_error(self): CloudComposerAsyncHook(gcp_conn_id="GCP_CONN_ID", delegate_to="delegate_to") def setup_method(self, method): - with async_mock.patch(BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_init): + with mock.patch(BASE_STRING.format("GoogleBaseHook.__init__"), new=mock_init): self.hook = CloudComposerAsyncHook(gcp_conn_id="test") @pytest.mark.asyncio - 
@async_mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) + @mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) async def test_create_environment(self, mock_client) -> None: mock_env_client = AsyncMock(EnvironmentsAsyncClient) mock_client.return_value = mock_env_client @@ -232,7 +232,7 @@ async def test_create_environment(self, mock_client) -> None: ) @pytest.mark.asyncio - @async_mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) + @mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) async def test_delete_environment(self, mock_client) -> None: mock_env_client = AsyncMock(EnvironmentsAsyncClient) mock_client.return_value = mock_env_client @@ -255,7 +255,7 @@ async def test_delete_environment(self, mock_client) -> None: ) @pytest.mark.asyncio - @async_mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) + @mock.patch(COMPOSER_STRING.format("CloudComposerAsyncHook.get_environment_client")) async def test_update_environment(self, mock_client) -> None: mock_env_client = AsyncMock(EnvironmentsAsyncClient) mock_client.return_value = mock_env_client diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py index 5482fa198e7e8..b5233dc37e8e0 100644 --- a/tests/providers/google/cloud/hooks/test_dataproc.py +++ b/tests/providers/google/cloud/hooks/test_dataproc.py @@ -18,7 +18,7 @@ from __future__ import annotations from unittest import mock -from unittest.mock import ANY +from unittest.mock import ANY, AsyncMock import pytest from google.api_core.gapic_v1.method import DEFAULT @@ -35,7 +35,6 @@ from airflow.providers.google.cloud.hooks.dataproc import DataprocAsyncHook, DataprocHook, DataProcJobBuilder from airflow.providers.google.common.consts import CLIENT_INFO from airflow.version import version -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock AIRFLOW_VERSION = "v" + version.replace(".", "-").replace("+", "-") @@ -587,7 +586,7 @@ def test_get_batch_client_region(self, mock_client, mock_get_credentials): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_create_cluster(self, mock_client): mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) mock_client.return_value = mock_cluster_client @@ -612,7 +611,7 @@ async def test_create_cluster(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_delete_cluster(self, mock_client): mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) mock_client.return_value = mock_cluster_client @@ -632,7 +631,7 @@ async def test_delete_cluster(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_diagnose_cluster(self, mock_client): mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) mock_client.return_value = mock_cluster_client @@ -653,7 +652,7 @@ async def test_diagnose_cluster(self, mock_client): mock_client.return_value.diagnose_cluster.return_value.result.assert_called_once_with() @pytest.mark.asyncio - 
@async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_get_cluster(self, mock_client): mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) mock_client.return_value = mock_cluster_client @@ -671,7 +670,7 @@ async def test_get_cluster(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_list_clusters(self, mock_client): filter_ = "filter" mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) @@ -691,7 +690,7 @@ async def test_list_clusters(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_cluster_client")) async def test_update_cluster(self, mock_client): update_mask = "update-mask" mock_cluster_client = AsyncMock(ClusterControllerAsyncClient) @@ -730,7 +729,7 @@ def test_update_cluster_missing_region(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) async def test_create_workflow_template(self, mock_client): template = {"test": "test"} parent = f"projects/{GCP_PROJECT}/regions/{GCP_LOCATION}" @@ -744,7 +743,7 @@ async def test_create_workflow_template(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) async def test_instantiate_workflow_template(self, mock_client): template_name = "template_name" name = f"projects/{GCP_PROJECT}/regions/{GCP_LOCATION}/workflowTemplates/{template_name}" @@ -761,7 +760,7 @@ async def test_instantiate_workflow_template(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_operation")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_operation")) async def test_get_operation(self, mock_client): mock_client.return_value = None hook = DataprocAsyncHook(gcp_conn_id="google_cloud_default", impersonation_chain=None) @@ -775,7 +774,7 @@ def test_instantiate_workflow_template_missing_region(self, mock_client): self.hook.instantiate_workflow_template(template_name="template_name", project_id=GCP_PROJECT) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_template_client")) async def test_instantiate_inline_workflow_template(self, mock_client): template = {"test": "test"} parent = f"projects/{GCP_PROJECT}/regions/{GCP_LOCATION}" @@ -797,7 +796,7 @@ def test_instantiate_inline_workflow_template_missing_region(self, mock_client): self.hook.instantiate_inline_workflow_template(template={"test": "test"}, project_id=GCP_PROJECT) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) async def test_get_job(self, mock_client): mock_job_client = AsyncMock(JobControllerAsyncClient) mock_client.return_value = mock_job_client @@ -820,7 +819,7 @@ def test_get_job_missing_region(self, mock_client): self.hook.get_job(job_id=JOB_ID, 
project_id=GCP_PROJECT) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) async def test_submit_job(self, mock_client): mock_job_client = AsyncMock(JobControllerAsyncClient) mock_client.return_value = mock_job_client @@ -844,7 +843,7 @@ def test_submit_job_missing_region(self, mock_client): self.hook.submit_job(job=JOB, project_id=GCP_PROJECT) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_job_client")) async def test_cancel_job(self, mock_client): mock_job_client = AsyncMock(JobControllerAsyncClient) mock_client.return_value = mock_job_client @@ -862,7 +861,7 @@ async def test_cancel_job(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) async def test_create_batch(self, mock_client): mock_batch_client = AsyncMock(BatchControllerAsyncClient) mock_client.return_value = mock_batch_client @@ -886,7 +885,7 @@ async def test_create_batch(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) async def test_delete_batch(self, mock_client): mock_batch_client = AsyncMock(BatchControllerAsyncClient) mock_client.return_value = mock_batch_client @@ -906,7 +905,7 @@ async def test_delete_batch(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) async def test_get_batch(self, mock_client): mock_batch_client = AsyncMock(BatchControllerAsyncClient) mock_client.return_value = mock_batch_client @@ -926,7 +925,7 @@ async def test_get_batch(self, mock_client): ) @pytest.mark.asyncio - @async_mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) + @mock.patch(DATAPROC_STRING.format("DataprocAsyncHook.get_batch_client")) async def test_list_batches(self, mock_client): mock_batch_client = AsyncMock(BatchControllerAsyncClient) mock_client.return_value = mock_batch_client diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/tests/providers/google/cloud/operators/test_mlengine.py index cb4faf4052a16..deaeab0b61a8e 100644 --- a/tests/providers/google/cloud/operators/test_mlengine.py +++ b/tests/providers/google/cloud/operators/test_mlengine.py @@ -936,7 +936,7 @@ def test_create_training_job_should_throw_exception_when_job_failed(self, mock_h TEST_GCP_PROJECT_ID = "test-project" TEST_REGION = "us-central1" TEST_RUNTIME_VERSION = "1.15" -TEST_PYTHON_VERSION = "3.7" +TEST_PYTHON_VERSION = "3.8" TEST_JOB_DIR = "gs://example_mlengine_bucket/job-dir" TEST_PACKAGE_URIS = ["gs://system-tests-resources/example_gcp_mlengine/trainer-0.1.tar.gz"] TEST_TRAINING_PYTHON_MODULE = "trainer.task" diff --git a/tests/providers/google/cloud/triggers/test_bigquery.py b/tests/providers/google/cloud/triggers/test_bigquery.py index 410aa14b1a7ff..79e427e603593 100644 --- a/tests/providers/google/cloud/triggers/test_bigquery.py +++ b/tests/providers/google/cloud/triggers/test_bigquery.py @@ -19,6 +19,8 @@ import asyncio import logging from typing import Any +from unittest import mock +from unittest.mock import AsyncMock import 
pytest from aiohttp import ClientResponseError, RequestInfo @@ -37,7 +39,6 @@ BigQueryValueCheckTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock TEST_CONN_ID = "bq_default" TEST_JOB_ID = "1234" @@ -159,7 +160,7 @@ def test_serialization(self, insert_job_trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_insert_job_op_trigger_success(self, mock_job_status, insert_job_trigger): """ Tests the BigQueryInsertJobTrigger only fires once the query execution reaches a successful state. @@ -173,7 +174,7 @@ async def test_bigquery_insert_job_op_trigger_success(self, mock_job_status, ins ) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_bigquery_insert_job_trigger_running(self, mock_job_instance, caplog, insert_job_trigger): """Test that BigQuery Triggers do not fire while a query is still running.""" @@ -195,7 +196,7 @@ async def test_bigquery_insert_job_trigger_running(self, mock_job_instance, capl asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_op_trigger_terminated(self, mock_job_status, caplog, insert_job_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" # Set the status to a value other than success or pending @@ -207,7 +208,7 @@ async def test_bigquery_op_trigger_terminated(self, mock_job_status, caplog, ins assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_op_trigger_exception(self, mock_job_status, caplog, insert_job_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" mock_job_status.side_effect = Exception("Test exception") @@ -233,7 +234,7 @@ def test_bigquery_get_data_trigger_serialization(self, get_data_trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_bigquery_get_data_trigger_running(self, mock_job_instance, caplog, get_data_trigger): """Test that BigQuery Triggers do not fire while a query is still running.""" @@ -255,7 +256,7 @@ async def test_bigquery_get_data_trigger_running(self, mock_job_instance, caplog asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_get_data_trigger_terminated(self, mock_job_status, caplog, get_data_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" # Set the status to a value other 
than success or pending @@ -267,7 +268,7 @@ async def test_bigquery_get_data_trigger_terminated(self, mock_job_status, caplo assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_get_data_trigger_exception(self, mock_job_status, caplog, get_data_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" mock_job_status.side_effect = Exception("Test exception") @@ -277,8 +278,8 @@ async def test_bigquery_get_data_trigger_exception(self, mock_job_status, caplog assert TriggerEvent({"status": "error", "message": "Test exception"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") async def test_bigquery_get_data_trigger_success_with_data( self, mock_job_output, mock_job_status, get_data_trigger ): @@ -328,7 +329,7 @@ async def test_bigquery_get_data_trigger_success_with_data( class TestBigQueryCheckTrigger: @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_instance") async def test_bigquery_check_trigger_running(self, mock_job_instance, caplog, check_trigger): """Test that BigQuery Triggers do not fire while a query is still running.""" @@ -350,7 +351,7 @@ async def test_bigquery_check_trigger_running(self, mock_job_instance, caplog, c asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_bigquery_check_trigger_terminated(self, mock_job_status, caplog, check_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" # Set the status to a value other than success or pending @@ -362,7 +363,7 @@ async def test_bigquery_check_trigger_terminated(self, mock_job_status, caplog, assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_check_trigger_exception(self, mock_job_status, caplog, check_trigger): """Test that BigQuery Triggers fire the correct event in case of an error.""" mock_job_status.side_effect = Exception("Test exception") @@ -386,8 +387,8 @@ def test_check_trigger_serialization(self, check_trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + 
@mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") async def test_check_trigger_success_with_data(self, mock_job_output, mock_job_status, check_trigger): """ Test the BigQueryCheckTrigger only fires once the query execution reaches a successful state. @@ -415,8 +416,8 @@ async def test_check_trigger_success_with_data(self, mock_job_output, mock_job_s assert TriggerEvent({"status": "success", "records": [22]}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") async def test_check_trigger_success_without_data(self, mock_job_output, mock_job_status, check_trigger): """ Tests that BigQueryCheckTrigger sends TriggerEvent as { "status": "success", "records": None} @@ -471,8 +472,8 @@ def test_interval_check_trigger_serialization(self, interval_check_trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") async def test_interval_check_trigger_success( self, mock_get_job_output, mock_job_status, interval_check_trigger ): @@ -487,7 +488,7 @@ async def test_interval_check_trigger_success( assert actual == TriggerEvent({"status": "error", "message": "The second SQL query returned None"}) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_interval_check_trigger_pending(self, mock_job_status, caplog, interval_check_trigger): """ Tests that the BigQueryIntervalCheckTrigger do not fire while a query is still running. 
@@ -508,7 +509,7 @@ async def test_interval_check_trigger_pending(self, mock_job_status, caplog, int asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_interval_check_trigger_terminated(self, mock_job_status, interval_check_trigger): """Tests the BigQueryIntervalCheckTrigger fires the correct event in case of an error.""" # Set the status to a value other than success or pending @@ -520,7 +521,7 @@ async def test_interval_check_trigger_terminated(self, mock_job_status, interval assert TriggerEvent({"status": "error", "message": "error", "data": None}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_interval_check_trigger_exception(self, mock_job_status, caplog, interval_check_trigger): """Tests that the BigQueryIntervalCheckTrigger fires the correct event in case of an error.""" mock_job_status.side_effect = Exception("Test exception") @@ -557,9 +558,9 @@ def test_bigquery_value_check_op_trigger_serialization(self, value_check_trigger } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_records") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_records") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_output") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_value_check_op_trigger_success( self, mock_job_status, get_job_output, get_records, value_check_trigger ): @@ -578,7 +579,7 @@ async def test_value_check_op_trigger_success( assert actual == TriggerEvent({"status": "success", "message": "Job completed", "records": [4]}) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_value_check_op_trigger_pending(self, mock_job_status, caplog, value_check_trigger): """ Tests BigQueryValueCheckTrigger only fires once the query execution reaches a successful state. @@ -600,7 +601,7 @@ async def test_value_check_op_trigger_pending(self, mock_job_status, caplog, val asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_value_check_op_trigger_fail(self, mock_job_status, value_check_trigger): """ Tests BigQueryValueCheckTrigger only fires once the query execution reaches a successful state. 
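A recurring shape in the "still running" tests above: start ``trigger.run()`` as a task, wait half a second, assert that no event has fired, then shut the loop down. A condensed, hypothetical rendition of that pattern (``DummyTrigger`` is illustrative; the real triggers are async generators yielding ``TriggerEvent``):

.. code-block:: python

    import asyncio


    class DummyTrigger:
        """Illustrative poll loop, shaped like the BigQuery triggers."""

        async def run(self):
            while True:
                status = await self.get_job_status()
                if status != "pending":
                    return {"status": status}
                await asyncio.sleep(10)

        async def get_job_status(self):
            return "pending"  # the job never finishes in this sketch


    async def main():
        task = asyncio.create_task(DummyTrigger().run())
        await asyncio.sleep(0.5)
        # The trigger must not have fired while the job is still pending.
        assert task.done() is False
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass


    asyncio.run(main())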
@@ -612,7 +613,7 @@ async def test_value_check_op_trigger_fail(self, mock_job_status, value_check_tr assert TriggerEvent({"status": "error", "message": "dummy", "records": None}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryAsyncHook.get_job_status") async def test_value_check_trigger_exception(self, mock_job_status): """Tests the BigQueryValueCheckTrigger does not fire if there is an exception.""" mock_job_status.side_effect = Exception("Test exception") @@ -655,7 +656,7 @@ def test_table_existence_trigger_serialization(self, table_existence_trigger): } @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.bigquery.BigQueryTableExistenceTrigger._table_exists" ) async def test_big_query_table_existence_trigger_success( @@ -669,7 +670,7 @@ async def test_big_query_table_existence_trigger_success( assert TriggerEvent({"status": "success", "message": "success"}) == actual @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.bigquery.BigQueryTableExistenceTrigger._table_exists" ) async def test_table_existence_trigger_pending(self, mock_table_exists, table_existence_trigger): @@ -684,7 +685,7 @@ async def test_table_existence_trigger_pending(self, mock_table_exists, table_ex asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.bigquery.BigQueryTableExistenceTrigger._table_exists" ) async def test_table_existence_trigger_exception(self, mock_table_exists, table_existence_trigger): @@ -696,7 +697,7 @@ async def test_table_existence_trigger_exception(self, mock_table_exists, table_ assert TriggerEvent({"status": "error", "message": "Test exception"}) in task @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") async def test_table_exists(self, mock_get_table_client, table_existence_trigger): """Test BigQueryTableExistenceTrigger._table_exists async function with mocked value and mocked return value""" @@ -709,7 +710,7 @@ async def test_table_exists(self, mock_get_table_client, table_existence_trigger assert res is True @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") async def test_table_exists_exception(self, mock_get_table_client, table_existence_trigger): """Test BigQueryTableExistenceTrigger._table_exists async function with exception and return False""" hook = BigQueryTableAsyncHook() @@ -732,7 +733,7 @@ async def test_table_exists_exception(self, mock_get_table_client, table_existen assert res == expected_response @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") + @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryTableAsyncHook.get_table_client") async def test_table_exists_raise_exception(self, mock_get_table_client, table_existence_trigger): """Test BigQueryTableExistenceTrigger._table_exists async function with raise exception""" hook = BigQueryTableAsyncHook() diff --git 
a/tests/providers/google/cloud/triggers/test_bigquery_dts.py b/tests/providers/google/cloud/triggers/test_bigquery_dts.py index 90821ea7c579e..8225a0ee56441 100644 --- a/tests/providers/google/cloud/triggers/test_bigquery_dts.py +++ b/tests/providers/google/cloud/triggers/test_bigquery_dts.py @@ -19,13 +19,13 @@ import asyncio import logging +from unittest import mock import pytest from google.cloud.bigquery_datatransfer_v1 import TransferState from airflow.providers.google.cloud.triggers.bigquery_dts import BigQueryDataTransferRunTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock PROJECT_ID = "test-project-id" CONFIG_ID = "test-config-id" @@ -82,9 +82,9 @@ def test_get_async_hook(self, attr, expected_value, trigger): assert actual_value == expected_value @pytest.mark.asyncio - @async_mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") + @mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") async def test_run_returns_success_event(self, mock_hook, trigger): - mock_hook.return_value = async_mock.MagicMock(state=TransferState.SUCCEEDED) + mock_hook.return_value = mock.MagicMock(state=TransferState.SUCCEEDED) expected_event = TriggerEvent( { "run_id": RUN_ID, @@ -98,9 +98,9 @@ async def test_run_returns_success_event(self, mock_hook, trigger): assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") + @mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") async def test_run_returns_failed_event(self, mock_hook, trigger): - mock_hook.return_value = async_mock.MagicMock(state=TransferState.FAILED) + mock_hook.return_value = mock.MagicMock(state=TransferState.FAILED) expected_event = TriggerEvent( { "status": "failed", @@ -113,7 +113,7 @@ async def test_run_returns_failed_event(self, mock_hook, trigger): assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") + @mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") async def test_run_returns_exception_event(self, mock_hook, trigger): error_msg = "test error msg" mock_hook.side_effect = Exception(error_msg) @@ -128,9 +128,9 @@ async def test_run_returns_exception_event(self, mock_hook, trigger): assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") + @mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") async def test_run_returns_cancelled_event(self, mock_hook, trigger): - mock_hook.return_value = async_mock.MagicMock(state=TransferState.CANCELLED) + mock_hook.return_value = mock.MagicMock(state=TransferState.CANCELLED) expected_event = TriggerEvent( { "status": "cancelled", @@ -143,9 +143,9 @@ async def test_run_returns_cancelled_event(self, mock_hook, trigger): assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") + @mock.patch(f"{TRIGGER_MODULE_PATH}.AsyncBiqQueryDataTransferServiceHook.get_transfer_run") async def test_run_loop_is_still_running(self, mock_hook, trigger, caplog): - mock_hook.return_value = async_mock.MagicMock(state=TransferState.RUNNING) + mock_hook.return_value = 
mock.MagicMock(state=TransferState.RUNNING) caplog.set_level(logging.INFO) diff --git a/tests/providers/google/cloud/triggers/test_cloud_build.py b/tests/providers/google/cloud/triggers/test_cloud_build.py index 8687263f8e65e..f3dd952c0573d 100644 --- a/tests/providers/google/cloud/triggers/test_cloud_build.py +++ b/tests/providers/google/cloud/triggers/test_cloud_build.py @@ -19,13 +19,13 @@ import asyncio import logging from asyncio import Future +from unittest import mock import pytest from google.cloud.devtools.cloudbuild_v1.types import Build, BuildStep from airflow.providers.google.cloud.triggers.cloud_build import CloudBuildCreateBuildTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock CLOUD_BUILD_PATH = "airflow.providers.google.cloud.triggers.cloud_build.{}" TEST_PROJECT_ID = "cloud-build-project" @@ -116,7 +116,7 @@ def test_serialization(self, trigger): } @pytest.mark.asyncio - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) async def test_trigger_on_success_yield_successfully(self, mock_hook, trigger): """ Tests the CloudBuildCreateBuildTrigger only fires once the job execution reaches a successful state. @@ -139,7 +139,7 @@ async def test_trigger_on_success_yield_successfully(self, mock_hook, trigger): ) @pytest.mark.asyncio - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) async def test_trigger_on_running_wait_successfully(self, mock_hook, caplog, trigger): """ Test that CloudBuildCreateBuildTrigger does not fire while a build is still running. @@ -162,7 +162,7 @@ async def test_trigger_on_running_wait_successfully(self, mock_hook, caplog, tri asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) async def test_trigger_on_error_yield_successfully(self, mock_hook, caplog, trigger): """ Test that CloudBuildCreateBuildTrigger fires the correct event in case of an error. @@ -182,7 +182,7 @@ async def test_trigger_on_error_yield_successfully(self, mock_hook, caplog, trig assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) + @mock.patch(CLOUD_BUILD_PATH.format("CloudBuildAsyncHook")) async def test_trigger_on_exec_yield_successfully(self, mock_hook, trigger): """ Test that CloudBuildCreateBuildTrigger fires the correct event in case of an error. 
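The Cloud Build trigger tests just above and the Dataflow ones below both keep ``from asyncio import Future``: several fixtures make a plain ``MagicMock`` awaitable by handing back an already-resolved ``Future``. A self-contained sketch of that idiom (the ``status`` attribute and ``get_build`` name are illustrative):

.. code-block:: python

    import asyncio
    from unittest import mock


    async def main():
        result = asyncio.Future()
        result.set_result(mock.MagicMock(status="SUCCESS"))

        hook = mock.MagicMock()
        # Awaiting the returned Future immediately yields the inner MagicMock.
        hook.get_build.return_value = result

        build = await hook.get_build()
        assert build.status == "SUCCESS"


    asyncio.run(main())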
diff --git a/tests/providers/google/cloud/triggers/test_dataflow.py b/tests/providers/google/cloud/triggers/test_dataflow.py index 6da1a1e79484a..74d68680bb839 100644 --- a/tests/providers/google/cloud/triggers/test_dataflow.py +++ b/tests/providers/google/cloud/triggers/test_dataflow.py @@ -21,13 +21,13 @@ import logging import sys from asyncio import Future +from unittest import mock import pytest from google.cloud.dataflow_v1beta3 import JobState from airflow.providers.google.cloud.triggers.dataflow import TemplateJobStartTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock PROJECT_ID = "test-project-id" JOB_ID = "test_job_id_2012-12-23-10:00" @@ -98,7 +98,7 @@ def test_get_async_hook(self, trigger, attr, expected): assert actual == expected @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") async def test_run_loop_return_success_event(self, mock_job_status, trigger, make_mock_awaitable): make_mock_awaitable(mock_job_status, JobState.JOB_STATE_DONE) @@ -114,7 +114,7 @@ async def test_run_loop_return_success_event(self, mock_job_status, trigger, mak assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") async def test_run_loop_return_failed_event(self, mock_job_status, trigger, make_mock_awaitable): make_mock_awaitable(mock_job_status, JobState.JOB_STATE_FAILED) @@ -129,7 +129,7 @@ async def test_run_loop_return_failed_event(self, mock_job_status, trigger, make assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") async def test_run_loop_return_stopped_event(self, mock_job_status, trigger, make_mock_awaitable): make_mock_awaitable(mock_job_status, JobState.JOB_STATE_STOPPED) expected_event = TriggerEvent( @@ -143,7 +143,7 @@ async def test_run_loop_return_stopped_event(self, mock_job_status, trigger, mak assert actual_event == expected_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.dataflow.AsyncDataflowHook.get_job_status") async def test_run_loop_is_still_running(self, mock_job_status, trigger, caplog, make_mock_awaitable): make_mock_awaitable(mock_job_status, JobState.JOB_STATE_RUNNING) caplog.set_level(logging.INFO) diff --git a/tests/providers/google/cloud/triggers/test_datafusion.py b/tests/providers/google/cloud/triggers/test_datafusion.py index 716ebc39a7750..8e85bd0504121 100644 --- a/tests/providers/google/cloud/triggers/test_datafusion.py +++ b/tests/providers/google/cloud/triggers/test_datafusion.py @@ -18,12 +18,12 @@ import asyncio import logging +from unittest import mock import pytest from airflow.providers.google.cloud.triggers.datafusion import DataFusionStartPipelineTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock HOOK_STATUS_STR = "airflow.providers.google.cloud.hooks.datafusion.DataFusionAsyncHook.get_pipeline_status" CLASSPATH = 
"airflow.providers.google.cloud.triggers.datafusion.DataFusionStartPipelineTrigger" @@ -73,7 +73,7 @@ def test_start_pipeline_trigger_serialization_should_execute_successfully(self, } @pytest.mark.asyncio - @async_mock.patch(HOOK_STATUS_STR) + @mock.patch(HOOK_STATUS_STR) async def test_start_pipeline_trigger_on_success_should_execute_successfully( self, mock_pipeline_status, trigger ): @@ -89,7 +89,7 @@ async def test_start_pipeline_trigger_on_success_should_execute_successfully( ) @pytest.mark.asyncio - @async_mock.patch(HOOK_STATUS_STR) + @mock.patch(HOOK_STATUS_STR) async def test_start_pipeline_trigger_running_should_execute_successfully( self, mock_pipeline_status, trigger, caplog ): @@ -112,7 +112,7 @@ async def test_start_pipeline_trigger_running_should_execute_successfully( asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch(HOOK_STATUS_STR) + @mock.patch(HOOK_STATUS_STR) async def test_start_pipeline_trigger_error_should_execute_successfully( self, mock_pipeline_status, trigger ): @@ -126,7 +126,7 @@ async def test_start_pipeline_trigger_error_should_execute_successfully( assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch(HOOK_STATUS_STR) + @mock.patch(HOOK_STATUS_STR) async def test_start_pipeline_trigger_exception_should_execute_successfully( self, mock_pipeline_status, trigger ): diff --git a/tests/providers/google/cloud/triggers/test_dataproc.py b/tests/providers/google/cloud/triggers/test_dataproc.py index ea75aae5ccc3d..1b674af70c677 100644 --- a/tests/providers/google/cloud/triggers/test_dataproc.py +++ b/tests/providers/google/cloud/triggers/test_dataproc.py @@ -19,6 +19,7 @@ import asyncio import logging from asyncio import Future +from unittest import mock import pytest from google.cloud.dataproc_v1 import Batch, ClusterStatus @@ -30,7 +31,6 @@ DataprocWorkflowTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock TEST_PROJECT_ID = "project-id" TEST_REGION = "region" @@ -87,7 +87,7 @@ def workflow_trigger(): @pytest.fixture() def async_get_cluster(): def func(**kwargs): - m = async_mock.MagicMock() + m = mock.MagicMock() m.configure_mock(**kwargs) f = asyncio.Future() f.set_result(m) @@ -99,7 +99,7 @@ def func(**kwargs): @pytest.fixture() def async_get_batch(): def func(**kwargs): - m = async_mock.MagicMock() + m = mock.MagicMock() m.configure_mock(**kwargs) f = Future() f.set_result(m) @@ -111,7 +111,7 @@ def func(**kwargs): @pytest.fixture() def async_get_operation(): def func(**kwargs): - m = async_mock.MagicMock() + m = mock.MagicMock() m.configure_mock(**kwargs) f = Future() f.set_result(m) @@ -134,7 +134,7 @@ def test_async_cluster_trigger_serialization_should_execute_successfully(self, c } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") async def test_async_cluster_triggers_on_success_should_execute_successfully( self, mock_hook, cluster_trigger, async_get_cluster ): @@ -158,7 +158,7 @@ async def test_async_cluster_triggers_on_success_should_execute_successfully( assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") async def 
test_async_cluster_trigger_run_returns_error_event( self, mock_hook, cluster_trigger, async_get_cluster ): @@ -182,7 +182,7 @@ async def test_async_cluster_trigger_run_returns_error_event( assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_cluster") async def test_cluster_run_loop_is_still_running( self, mock_hook, cluster_trigger, caplog, async_get_cluster ): @@ -222,7 +222,7 @@ def test_async_create_batch_trigger_serialization_should_execute_successfully(se } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") async def test_async_create_batch_trigger_triggers_on_success_should_execute_successfully( self, mock_hook, batch_trigger, async_get_batch ): @@ -244,7 +244,7 @@ async def test_async_create_batch_trigger_triggers_on_success_should_execute_suc assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") async def test_async_create_batch_trigger_run_returns_failed_event( self, mock_hook, batch_trigger, async_get_batch ): @@ -257,7 +257,7 @@ async def test_async_create_batch_trigger_run_returns_failed_event( assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") async def test_create_batch_run_returns_cancelled_event(self, mock_hook, batch_trigger, async_get_batch): mock_hook.return_value = async_get_batch(state=Batch.State.CANCELLED, batch_id=TEST_BATCH_ID) @@ -268,7 +268,7 @@ async def test_create_batch_run_returns_cancelled_event(self, mock_hook, batch_t assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") + @mock.patch("airflow.providers.google.cloud.hooks.dataproc.DataprocAsyncHook.get_batch") async def test_create_batch_run_loop_is_still_running( self, mock_hook, batch_trigger, caplog, async_get_batch ): @@ -298,7 +298,7 @@ def test_async_cluster_trigger_serialization_should_execute_successfully(self, w } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.triggers.dataproc.DataprocBaseTrigger.get_async_hook") + @mock.patch("airflow.providers.google.cloud.triggers.dataproc.DataprocBaseTrigger.get_async_hook") async def test_async_workflow_triggers_on_success_should_execute_successfully( self, mock_hook, workflow_trigger, async_get_operation ): @@ -318,7 +318,7 @@ async def test_async_workflow_triggers_on_success_should_execute_successfully( assert expected_event == actual_event @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.triggers.dataproc.DataprocBaseTrigger.get_async_hook") + @mock.patch("airflow.providers.google.cloud.triggers.dataproc.DataprocBaseTrigger.get_async_hook") async def test_async_workflow_triggers_on_error(self, mock_hook, workflow_trigger, async_get_operation): mock_hook.return_value.get_operation.return_value = async_get_operation( name=TEST_OPERATION_NAME, done=True, response={}, 
error=Status(message="test_error") diff --git a/tests/providers/google/cloud/triggers/test_gcs.py b/tests/providers/google/cloud/triggers/test_gcs.py index 9a83cf473a9b3..0771ba0d32e94 100644 --- a/tests/providers/google/cloud/triggers/test_gcs.py +++ b/tests/providers/google/cloud/triggers/test_gcs.py @@ -19,6 +19,8 @@ import asyncio from datetime import datetime +from unittest import mock +from unittest.mock import AsyncMock import pytest from gcloud.aio.storage import Bucket, Storage @@ -30,7 +32,6 @@ GCSPrefixBlobTrigger, ) from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import AsyncMock, async_mock TEST_BUCKET = "TEST_BUCKET" TEST_OBJECT = "TEST_OBJECT" @@ -70,7 +71,7 @@ def test_gcs_blob_trigger_serialization(self, trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") async def test_gcs_blob_trigger_success(self, mock_object_exists, trigger): """ Tests that the GCSBlobTrigger is success case @@ -82,7 +83,7 @@ async def test_gcs_blob_trigger_success(self, mock_object_exists, trigger): assert TriggerEvent({"status": "success", "message": "success"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") async def test_gcs_blob_trigger_pending(self, mock_object_exists, trigger): """ Test that GCSBlobTrigger is in loop if file isn't found. @@ -97,7 +98,7 @@ async def test_gcs_blob_trigger_pending(self, mock_object_exists, trigger): asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSBlobTrigger._object_exists") async def test_gcs_blob_trigger_exception(self, mock_object_exists, trigger): """ Tests the GCSBlobTrigger does fire if there is an exception. @@ -159,9 +160,7 @@ def test_gcs_prefix_blob_trigger_serialization(self): } @pytest.mark.asyncio - @async_mock.patch( - "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix" - ) + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix") async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefixs): """ Tests that the GCSPrefixBlobTrigger is success case @@ -176,9 +175,7 @@ async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefix ) @pytest.mark.asyncio - @async_mock.patch( - "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix" - ) + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix") async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_prefixs): """ Tests the GCSPrefixBlobTrigger does fire if there is an exception. 
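One detail worth noting in the ``test_gcs.py`` hunks above: the multi-line patch targets were collapsed onto one line but kept their two adjacent string literals. Python concatenates adjacent literals at compile time, so the decorator still receives a single dotted path:

.. code-block:: python

    target = (
        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger"
        "._list_blobs_with_prefix"
    )
    # Adjacent literals fuse into one string before mock.patch ever sees them.
    assert target == (
        "airflow.providers.google.cloud.triggers.gcs."
        "GCSPrefixBlobTrigger._list_blobs_with_prefix"
    )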
@@ -190,9 +187,7 @@ async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_pref assert TriggerEvent({"status": "error", "message": "Test exception"}) in task @pytest.mark.asyncio - @async_mock.patch( - "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix" - ) + @mock.patch("airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix") async def test_gcs_prefix_blob_trigger_pending(self, mock_list_blobs_with_prefixs): """ Test that GCSPrefixBlobTrigger is in loop if file isn't found. @@ -252,7 +247,7 @@ def test_gcs_blob_update_trigger_serialization(self): } @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.gcs.GCSCheckBlobUpdateTimeTrigger._is_blob_updated_after" ) async def test_gcs_blob_update_trigger_success(self, mock_blob_updated): @@ -266,7 +261,7 @@ async def test_gcs_blob_update_trigger_success(self, mock_blob_updated): assert TriggerEvent({"status": "success", "message": "success"}) == actual @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.gcs.GCSCheckBlobUpdateTimeTrigger._is_blob_updated_after" ) async def test_gcs_blob_update_trigger_pending(self, mock_blob_updated): @@ -283,7 +278,7 @@ async def test_gcs_blob_update_trigger_pending(self, mock_blob_updated): asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch( + @mock.patch( "airflow.providers.google.cloud.triggers.gcs.GCSCheckBlobUpdateTimeTrigger._is_blob_updated_after" ) async def test_gcs_blob_update_trigger_exception(self, mock_object_exists): diff --git a/tests/providers/google/cloud/triggers/test_mlengine.py b/tests/providers/google/cloud/triggers/test_mlengine.py index da220e6ea5a05..4a539c8c5defb 100644 --- a/tests/providers/google/cloud/triggers/test_mlengine.py +++ b/tests/providers/google/cloud/triggers/test_mlengine.py @@ -18,19 +18,19 @@ import asyncio import logging +from unittest import mock import pytest from airflow.providers.google.cloud.triggers.mlengine import MLEngineStartTrainingJobTrigger from airflow.triggers.base import TriggerEvent -from tests.providers.google.cloud.utils.compat import async_mock TEST_CONN_ID = "ml_default" TEST_JOB_ID = "1234" TEST_GCP_PROJECT_ID = "test-project" TEST_REGION = "us-central1" TEST_RUNTIME_VERSION = "1.15" -TEST_PYTHON_VERSION = "3.7" +TEST_PYTHON_VERSION = "3.8" TEST_JOB_DIR = "gs://example_mlengine_bucket/job-dir" TEST_PACKAGE_URIS = ["gs://system-tests-resources/example_gcp_mlengine/trainer-0.1.tar.gz"] TEST_TRAINING_PYTHON_MODULE = "trainer.task" @@ -81,7 +81,7 @@ def test_serialize(self, trigger): } @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") async def test_trigger_on_success_yield_successfully(self, mock_job_status, trigger): """ Tests the MLEngineStartTrainingJobTrigger only fires once the job execution reaches a successful state @@ -95,7 +95,7 @@ async def test_trigger_on_success_yield_successfully(self, mock_job_status, trig ) @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job") + @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job") async def test_trigger_on_running_wait_successfully(self, mocked_get, caplog, trigger): """ Test that MLEngineStartTrainingJobTrigger does not fire while a 
job is still running. @@ -117,7 +117,7 @@ async def test_trigger_on_running_wait_successfully(self, mocked_get, caplog, tr asyncio.get_event_loop().stop() @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") async def test_trigger_on_error_yield_successfully(self, mock_job_status, trigger): """ Test that MLEngineStartTrainingJobTrigger fires the correct event in case of an error. @@ -130,7 +130,7 @@ async def test_trigger_on_error_yield_successfully(self, mock_job_status, trigge assert TriggerEvent({"status": "error", "message": "error"}) == actual @pytest.mark.asyncio - @async_mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") + @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineAsyncHook.get_job_status") async def test_trigger_exec_yield_successfully(self, mock_job_status, trigger): """ Test that MLEngineStartTrainingJobTrigger fires the correct event in case of an error. diff --git a/tests/providers/google/cloud/utils/compat.py b/tests/providers/google/cloud/utils/compat.py deleted file mode 100644 index cb98f804b2d01..0000000000000 --- a/tests/providers/google/cloud/utils/compat.py +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import annotations - -__all__ = ["async_mock", "AsyncMock"] - -import sys - -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock - - # ``asynctest.mock.CoroutineMock`` which provide compatibility not working well with autospec=True - # as result "TypeError: object MagicMock can't be used in 'await' expression" could be raised. 
- # Best solution in this case provide as spec actual awaitable object - # >>> from tests.providers.google.cloud.utils.compat import AsyncMock - # >>> from foo.bar import SpamEgg - # >>> mock_something = AsyncMock(SpamEgg) - from asynctest.mock import CoroutineMock as AsyncMock -else: - from unittest import mock as async_mock - from unittest.mock import AsyncMock diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py index 6fd3b202f81db..95cceb4e3fa84 100644 --- a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py +++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py @@ -18,7 +18,7 @@ import json import os -import sys +from unittest import mock from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -39,12 +39,6 @@ ) from airflow.utils import db -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock -else: - from unittest import mock as async_mock - DEFAULT_RESOURCE_GROUP = "defaultResourceGroup" AZURE_DATA_FACTORY_CONN_ID = "azure_data_factory_default" RESOURCE_GROUP_NAME = "team_provider_resource_group_test" @@ -728,8 +722,8 @@ def test_backcompat_prefix_both_prefers_short(mock_connect): class TestAzureDataFactoryAsyncHook: @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_queued(self, mock_get_pipeline_run, mock_conn): """Test get_adf_pipeline_run_status function with mocked status""" mock_status = "Queued" @@ -739,8 +733,8 @@ async def test_get_adf_pipeline_run_status_queued(self, mock_get_pipeline_run, m assert response == mock_status @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_inprogress( self, mock_get_pipeline_run, @@ -754,8 +748,8 @@ async def test_get_adf_pipeline_run_status_inprogress( assert response == mock_status @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_success(self, mock_get_pipeline_run, mock_conn): """Test get_adf_pipeline_run_status function with mocked status""" mock_status = "Succeeded" @@ -765,8 +759,8 @@ async def test_get_adf_pipeline_run_status_success(self, mock_get_pipeline_run, assert response == mock_status @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + 
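The deleted ``compat.py`` shim carried a warning that ``asynctest.mock.CoroutineMock`` with ``autospec=True`` could raise ``TypeError: object MagicMock can't be used in 'await' expression``, and recommended passing the real awaitable object as spec instead. The same advice holds for ``unittest.mock``; a sketch of the pitfall and the fix, reusing the ``SpamEgg`` placeholder from the removed comment:

.. code-block:: python

    import asyncio
    from unittest.mock import AsyncMock, MagicMock


    class SpamEgg:
        async def cook(self):
            return "done"


    async def use(obj):
        return await obj.cook()


    # A bare MagicMock's method call returns another MagicMock, which is not
    # awaitable: this is the TypeError the removed comment warned about.
    try:
        asyncio.run(use(MagicMock()))
    except TypeError as err:
        assert "await" in str(err)

    # Passing the real class as spec gives AsyncMock children for async
    # methods, so awaiting works and signatures are checked against SpamEgg.
    mock_obj = AsyncMock(SpamEgg)
    mock_obj.cook.return_value = "mocked"
    assert asyncio.run(use(mock_obj)) == "mocked"
    mock_obj.cook.assert_awaited_once_with()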
@mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_failed(self, mock_get_pipeline_run, mock_conn): """Test get_adf_pipeline_run_status function with mocked status""" mock_status = "Failed" @@ -776,8 +770,8 @@ async def test_get_adf_pipeline_run_status_failed(self, mock_get_pipeline_run, m assert response == mock_status @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_cancelled(self, mock_get_pipeline_run, mock_conn): """Test get_adf_pipeline_run_status function with mocked status""" mock_status = "Cancelled" @@ -787,8 +781,8 @@ async def test_get_adf_pipeline_run_status_cancelled(self, mock_get_pipeline_run assert response == mock_status @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_pipeline_run") async def test_get_adf_pipeline_run_status_exception(self, mock_get_pipeline_run, mock_conn): """Test get_adf_pipeline_run_status function with exception""" mock_get_pipeline_run.side_effect = Exception("Test exception") @@ -797,9 +791,9 @@ async def test_get_adf_pipeline_run_status_exception(self, mock_get_pipeline_run await hook.get_adf_pipeline_run_status(RUN_ID, RESOURCE_GROUP_NAME, DATAFACTORY_NAME) @pytest.mark.asyncio - @async_mock.patch("azure.mgmt.datafactory.models._models_py3.PipelineRun") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch("azure.mgmt.datafactory.models._models_py3.PipelineRun") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") async def test_get_pipeline_run_exception_without_resource( self, mock_conn, mock_get_connection, mock_pipeline_run ): @@ -817,7 +811,7 @@ async def test_get_pipeline_run_exception_without_resource( await hook.get_pipeline_run(RUN_ID, None, DATAFACTORY_NAME) @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_async_conn") async def test_get_pipeline_run_exception(self, mock_conn): """Test get_pipeline_run function with exception""" mock_conn.return_value.pipeline_runs.get.side_effect = Exception("Test exception") @@ -826,7 +820,7 @@ async def test_get_pipeline_run_exception(self, mock_conn): await hook.get_pipeline_run(RUN_ID, RESOURCE_GROUP_NAME, DATAFACTORY_NAME) @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") async def 
test_get_async_conn(self, mock_connection): """""" mock_conn = Connection( @@ -849,7 +843,7 @@ async def test_get_async_conn(self, mock_connection): assert isinstance(response, DataFactoryManagementClient) @pytest.mark.asyncio - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") async def test_get_async_conn_without_login_id(self, mock_connection): """Test get_async_conn function without login id""" mock_conn = Connection( @@ -880,7 +874,7 @@ async def test_get_async_conn_without_login_id(self, mock_connection): } ], ) - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") async def test_get_async_conn_key_error_subscription_id(self, mock_connection, mock_connection_params): """Test get_async_conn function when subscription_id is missing in the connection""" mock_conn = Connection( @@ -906,7 +900,7 @@ async def test_get_async_conn_key_error_subscription_id(self, mock_connection, m }, ], ) - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_connection") async def test_get_async_conn_key_error_tenant_id(self, mock_connection, mock_connection_params): """Test get_async_conn function when tenant id is missing in the connection""" mock_conn = Connection( diff --git a/tests/providers/microsoft/azure/triggers/test_azure_data_factory.py b/tests/providers/microsoft/azure/triggers/test_azure_data_factory.py index 1d02d80709c86..c7c31d2818eeb 100644 --- a/tests/providers/microsoft/azure/triggers/test_azure_data_factory.py +++ b/tests/providers/microsoft/azure/triggers/test_azure_data_factory.py @@ -17,8 +17,8 @@ from __future__ import annotations import asyncio -import sys import time +from unittest import mock import pytest @@ -29,12 +29,6 @@ ) from airflow.triggers.base import TriggerEvent -if sys.version_info < (3, 8): - # For compatibility with Python 3.7 - from asynctest import mock as async_mock -else: - from unittest import mock as async_mock - RESOURCE_GROUP_NAME = "team_provider_resource_group_test" DATAFACTORY_NAME = "ADFProvidersTeamDataFactory" AZURE_DATA_FACTORY_CONN_ID = "azure_data_factory_default" @@ -80,7 +74,7 @@ def test_adf_pipeline_run_status_sensors_trigger_serialization(self): "Queued", ], ) - @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status") + @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status") async def test_adf_pipeline_run_status_sensors_trigger_run_queued(self, mock_data_factory, mock_status): """ Test if the task is run is in trigger successfully. 
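These Azure Data Factory trigger tests all share one recipe: patch the async ``get_adf_pipeline_run_status``, drive the trigger once, and compare ``TriggerEvent`` payloads. A condensed, hypothetical rendition (``DummyHook`` and ``DummyTrigger`` stand in for the real classes):

.. code-block:: python

    import asyncio
    from unittest import mock


    class DummyHook:
        async def get_adf_pipeline_run_status(self, run_id):
            raise NotImplementedError


    class DummyTrigger:
        def __init__(self, run_id):
            self.run_id = run_id

        async def run(self):
            status = await DummyHook().get_adf_pipeline_run_status(self.run_id)
            if status == "Succeeded":
                return {"status": "success", "message": f"Pipeline run {self.run_id} has Succeeded."}
            return {"status": "error", "message": f"Pipeline run {self.run_id} has {status}."}


    async def main():
        with mock.patch.object(DummyHook, "get_adf_pipeline_run_status", return_value="Failed"):
            event = await DummyTrigger("run-1").run()
        assert event == {"status": "error", "message": "Pipeline run run-1 has Failed."}


    asyncio.run(main())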
@@ -101,7 +95,7 @@ async def test_adf_pipeline_run_status_sensors_trigger_run_queued(self, mock_dat
             "InProgress",
         ],
     )
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_adf_pipeline_run_status_sensors_trigger_run_inprogress(
         self, mock_data_factory, mock_status
     ):
@@ -122,7 +116,7 @@ async def test_adf_pipeline_run_status_sensors_trigger_run_inprogress(
         "mock_status",
         ["Succeeded"],
     )
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_adf_pipeline_run_status_sensors_trigger_completed(self, mock_data_factory, mock_status):
         """Test if the task pipeline status is in succeeded status."""
         mock_data_factory.return_value = mock_status
@@ -139,7 +133,7 @@ async def test_adf_pipeline_run_status_sensors_trigger_completed(self, mock_data
             ("Failed", f"Pipeline run {RUN_ID} has Failed."),
         ],
     )
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_adf_pipeline_run_status_sensors_trigger_failed(
         self, mock_data_factory, mock_status, mock_message
     ):
@@ -157,7 +151,7 @@ async def test_adf_pipeline_run_status_sensors_trigger_failed(
             ("Cancelled", f"Pipeline run {RUN_ID} has been Cancelled."),
         ],
     )
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_adf_pipeline_run_status_sensors_trigger_cancelled(
         self, mock_data_factory, mock_status, mock_message
     ):
@@ -169,7 +163,7 @@ async def test_adf_pipeline_run_status_sensors_trigger_cancelled(
         assert TriggerEvent({"status": "error", "message": mock_message}) == actual
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_adf_pipeline_run_status_sensors_trigger_exception(self, mock_data_factory):
         """Test EMR container sensors with raise exception"""
         mock_data_factory.side_effect = Exception("Test exception")
@@ -204,7 +198,7 @@ def test_azure_data_factory_trigger_serialization(self):
         }
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_without_wait(self, mock_pipeline_run_status):
         """Assert that run trigger without waiting if wait_for_termination is set to false"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.SUCCEEDED
@@ -229,7 +223,7 @@ async def test_azure_data_factory_trigger_run_without_wait(self, mock_pipeline_r
         assert actual == expected
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_queued(self, mock_pipeline_run_status):
         """Assert that run wait if pipeline run is in queued state"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.QUEUED
@@ -242,7 +236,7 @@ async def test_azure_data_factory_trigger_run_queued(self, mock_pipeline_run_sta
         asyncio.get_event_loop().stop()
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_inprogress(self, mock_pipeline_run_status):
         """Assert that run wait if pipeline run is in progress state"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.IN_PROGRESS
@@ -255,7 +249,7 @@ async def test_azure_data_factory_trigger_run_inprogress(self, mock_pipeline_run
         asyncio.get_event_loop().stop()
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_canceling(self, mock_pipeline_run_status):
         """Assert that run wait if pipeline run is in canceling state"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.CANCELING
@@ -268,7 +262,7 @@ async def test_azure_data_factory_trigger_run_canceling(self, mock_pipeline_run_
         asyncio.get_event_loop().stop()
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_success(self, mock_pipeline_run_status):
         """Assert that the trigger generates success event in case of pipeline success"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.SUCCEEDED
@@ -286,7 +280,7 @@ async def test_azure_data_factory_trigger_run_success(self, mock_pipeline_run_st
         assert expected == actual
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_failed(self, mock_pipeline_run_status):
         """Assert that run trigger error message in case of pipeline fail"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.FAILED
@@ -304,7 +298,7 @@ async def test_azure_data_factory_trigger_run_failed(self, mock_pipeline_run_sta
         assert expected == actual
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_cancelled(self, mock_pipeline_run_status):
         """Assert that run trigger error message in case of pipeline fail"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.CANCELLED
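For the trigger hunks above and below, it may help to know the test pattern being exercised: a trigger's run() method is an async generator, so the tests pull a single TriggerEvent out of it with asend(None) rather than awaiting a coroutine. A hedged, self-contained sketch under that assumption (StubTrigger is invented; only BaseTrigger and TriggerEvent come from the airflow.triggers.base import shown earlier in this diff, and the diffed tests themselves rely on TriggerEvent equality):

    import asyncio

    from airflow.triggers.base import BaseTrigger, TriggerEvent


    class StubTrigger(BaseTrigger):
        """Invented trigger that emits one success event immediately."""

        def serialize(self):
            return ("StubTrigger", {})

        async def run(self):
            yield TriggerEvent({"status": "success", "message": "pipeline done"})


    async def pull_one_event():
        # run() is an async generator; asend(None) advances it to its first yield.
        generator = StubTrigger().run()
        return await generator.asend(None)


    actual = asyncio.run(pull_one_event())
    assert actual == TriggerEvent({"status": "success", "message": "pipeline done"})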
@@ -322,7 +316,7 @@ async def test_azure_data_factory_trigger_run_cancelled(self, mock_pipeline_run_
         assert expected == actual
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_exception(self, mock_pipeline_run_status):
         """Assert that run catch exception if Azure API throw exception"""
         mock_pipeline_run_status.side_effect = Exception("Test exception")
@@ -339,7 +333,7 @@ async def test_azure_data_factory_trigger_run_exception(self, mock_pipeline_run_
         assert response in task
 
     @pytest.mark.asyncio
-    @async_mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
+    @mock.patch(f"{MODULE}.hooks.data_factory.AzureDataFactoryAsyncHook.get_adf_pipeline_run_status")
     async def test_azure_data_factory_trigger_run_timeout(self, mock_pipeline_run_status):
         """Assert that pipeline run times out after end_time elapses"""
         mock_pipeline_run_status.return_value = AzureDataFactoryPipelineRunStatus.QUEUED
diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
index f61e35298443d..22be4ab47de52 100644
--- a/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
+++ b/tests/system/providers/google/cloud/ml_engine/example_mlengine.py
@@ -105,7 +105,7 @@ def write_predict_file(path_to_file: str):
         region="us-central1",
         job_id="training-job-{{ ts_nodash }}-{{ params.model_name }}",
         runtime_version="1.15",
-        python_version="3.7",
+        python_version="3.8",
         job_dir=JOB_DIR,
         package_uris=[TRAINER_URI],
         training_python_module=TRAINER_PY_MODULE,
@@ -151,7 +151,7 @@ def write_predict_file(path_to_file: str):
             "runtime_version": "1.15",
             "machineType": "mls1-c1-m2",
             "framework": "TENSORFLOW",
-            "pythonVersion": "3.7",
+            "pythonVersion": "3.8",
         },
     )
     # [END howto_operator_gcp_mlengine_create_version1]
@@ -168,7 +168,7 @@ def write_predict_file(path_to_file: str):
             "runtime_version": "1.15",
             "machineType": "mls1-c1-m2",
             "framework": "TENSORFLOW",
-            "pythonVersion": "3.7",
+            "pythonVersion": "3.8",
         },
     )
     # [END howto_operator_gcp_mlengine_create_version2]
diff --git a/tests/system/providers/google/cloud/ml_engine/example_mlengine_async.py b/tests/system/providers/google/cloud/ml_engine/example_mlengine_async.py
index b870754b0c16a..c7f6aac9f7928 100644
--- a/tests/system/providers/google/cloud/ml_engine/example_mlengine_async.py
+++ b/tests/system/providers/google/cloud/ml_engine/example_mlengine_async.py
@@ -105,7 +105,7 @@ def write_predict_file(path_to_file: str):
         region="us-central1",
         job_id="async_training-job-{{ ts_nodash }}-{{ params.model_name }}",
         runtime_version="1.15",
-        python_version="3.7",
+        python_version="3.8",
         job_dir=JOB_DIR,
         package_uris=[TRAINER_URI],
         training_python_module=TRAINER_PY_MODULE,
@@ -152,7 +152,7 @@ def write_predict_file(path_to_file: str):
             "runtime_version": "1.15",
             "machineType": "mls1-c1-m2",
             "framework": "TENSORFLOW",
-            "pythonVersion": "3.7",
+            "pythonVersion": "3.8",
         },
     )
     # [END howto_operator_gcp_mlengine_create_version1]
@@ -169,7 +169,7 @@ def write_predict_file(path_to_file: str):
             "runtime_version": "1.15",
             "machineType": "mls1-c1-m2",
             "framework": "TENSORFLOW",
-            "pythonVersion": "3.7",
+            "pythonVersion": "3.8",
         },
     )
     # [END howto_operator_gcp_mlengine_create_version2]
diff --git a/tests/system/providers/papermill/input_notebook.ipynb b/tests/system/providers/papermill/input_notebook.ipynb
index d450712435bec..6c1d53a5a780c 100644
--- a/tests/system/providers/papermill/input_notebook.ipynb
+++ b/tests/system/providers/papermill/input_notebook.ipynb
@@ -112,7 +112,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.7.5"
+   "version": "3.8.5"
   }
  },
  "nbformat": 4,