diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6ab4213153a1..922efd9854dd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1169,11 +1169,61 @@ jobs:
           breeze testing db-tests
           --parallel-test-types "${{needs.build-info.outputs.parallel-test-types-list-as-string}}"
       - name: >
-          Post Tests success: ${{needs.build-info.outputs.default-python-version}}:Boto"
+          Post Tests success: ${{needs.build-info.outputs.default-python-version}}:MinSQLAlchemy"
         uses: ./.github/actions/post_tests_success
         if: success()
       - name: >
-          Post Tests failure: ${{needs.build-info.outputs.default-python-version}}:Boto"
+          Post Tests failure: ${{needs.build-info.outputs.default-python-version}}:MinSQLAlchemy"
+        uses: ./.github/actions/post_tests_failure
+        if: failure()
+
+  tests-postgres-pendulum-2:
+    timeout-minutes: 130
+    name: >
+      DB:Postgres${{needs.build-info.outputs.default-postgres-version}},
+      Pendulum2,Py${{needs.build-info.outputs.default-python-version}}:
+      ${{needs.build-info.outputs.parallel-test-types-list-as-string}}
+    runs-on: ${{fromJSON(needs.build-info.outputs.runs-on)}}
+    needs: [build-info, wait-for-ci-images]
+    env:
+      RUNS_ON: "${{needs.build-info.outputs.runs-on}}"
+      PARALLEL_TEST_TYPES: "${{needs.build-info.outputs.parallel-test-types-list-as-string}}"
+      PR_LABELS: "${{needs.build-info.outputs.pull-request-labels}}"
+      FULL_TESTS_NEEDED: "${{needs.build-info.outputs.full-tests-needed}}"
+      DEBUG_RESOURCES: "${{needs.build-info.outputs.debug-resources}}"
+      BACKEND: "postgres"
+      ENABLE_COVERAGE: "${{needs.build-info.outputs.run-coverage}}"
+      PYTHON_MAJOR_MINOR_VERSION: "${{needs.build-info.outputs.default-python-version}}"
+      PYTHON_VERSION: "${{needs.build-info.outputs.default-python-version}}"
+      POSTGRES_VERSION: "${{needs.build-info.outputs.default-postgres-version}}"
+      BACKEND_VERSION: "${{needs.build-info.outputs.default-postgres-version}}"
+      DOWNGRADE_PENDULUM: "true"
+      JOB_ID: >
+        postgres-pendulum-2-${{needs.build-info.outputs.default-python-version}}-
+        ${{needs.build-info.outputs.default-postgres-version}}
+    if: needs.build-info.outputs.run-tests == 'true'
+    steps:
+      - name: Cleanup repo
+        shell: bash
+        run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+      - name: >
+          Prepare breeze & CI image: ${{needs.build-info.outputs.default-python-version}}:${{env.IMAGE_TAG}}
+        uses: ./.github/actions/prepare_breeze_and_image
+      - name: >
+          Tests: ${{matrix.python-version}}:${{needs.build-info.outputs.parallel-test-types-list-as-string}}
+        run: >
+          breeze testing db-tests
+          --parallel-test-types "${{needs.build-info.outputs.parallel-test-types-list-as-string}}"
+      - name: >
+          Post Tests success: ${{needs.build-info.outputs.default-python-version}}:Pendulum2"
+        uses: ./.github/actions/post_tests_success
+        if: success()
+      - name: >
+          Post Tests failure: ${{needs.build-info.outputs.default-python-version}}:Pendulum2"
         uses: ./.github/actions/post_tests_failure
         if: failure()
 
@@ -1616,6 +1666,44 @@ jobs:
         uses: ./.github/actions/post_tests_failure
         if: failure()
 
+  tests-no-db-pendulum-2:
+    timeout-minutes: 60
+    name: >
+      Non-DB: Pendulum2, Py${{needs.build-info.outputs.default-python-version}}:
+      ${{needs.build-info.outputs.parallel-test-types-list-as-string}}
+    runs-on: ${{fromJSON(needs.build-info.outputs.runs-on)}}
+    needs: [build-info, wait-for-ci-images]
+    env:
"${{needs.build-info.outputs.runs-on}}" + PR_LABELS: "${{needs.build-info.outputs.pull-request-labels}}" + PYTHON_MAJOR_MINOR_VERSION: "${{needs.build-info.outputs.default-python-version}}" + DEBUG_RESOURCES: "${{needs.build-info.outputs.debug-resources}}" + JOB_ID: "quarantined-${{needs.build-info.outputs.default-python-version}}" + ENABLE_COVERAGE: "${{needs.build-info.outputs.run-coverage}}" + DOWNGRADE_PENDULUM: "true" + if: needs.build-info.outputs.run-tests == 'true' + steps: + - name: Cleanup repo + shell: bash + run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*" + - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" + uses: actions/checkout@v4 + with: + persist-credentials: false + - name: > + Prepare breeze & CI image: ${{needs.build-info.outputs.default-python-version}}:${{env.IMAGE_TAG}} + uses: ./.github/actions/prepare_breeze_and_image + - name: "Tests: ${{matrix.python-version}}:Non-DB-Pendulum2" + run: > + breeze testing non-db-tests + --parallel-test-types "${{needs.build-info.outputs.parallel-test-types-list-as-string}}" + - name: "Post Tests success: Non-DB-Pendulum2" + uses: ./.github/actions/post_tests_success + if: success() + - name: "Post Tests failure: Non-DB-Pendulum2" + uses: ./.github/actions/post_tests_failure + if: failure() + summarize-warnings: timeout-minutes: 15 name: "Summarize warnings" diff --git a/Dockerfile.ci b/Dockerfile.ci index 0f10757de59e..5487e32af020 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -908,6 +908,18 @@ function check_download_sqlalchemy() { pip check } +function check_download_pendulum() { + if [[ ${DOWNGRADE_PENDULUM=} != "true" ]]; then + return + fi + min_pendulum_version=$(grep "\"pendulum>=" pyproject.toml | sed "s/.*>=\([0-9\.]*\).*/\1/" | xargs) + echo + echo "${COLOR_BLUE}Downgrading pendulum to minimum supported version: ${min_pendulum_version}${COLOR_RESET}" + echo + pip install --root-user-action ignore "pendulum==${min_pendulum_version}" + pip check +} + function check_run_tests() { if [[ ${RUN_TESTS=} != "true" ]]; then return @@ -937,6 +949,7 @@ determine_airflow_to_use environment_initialization check_boto_upgrade check_download_sqlalchemy +check_download_pendulum check_run_tests "${@}" exec /bin/bash "${@}" diff --git a/airflow/models/dag.py b/airflow/models/dag.py index d0f46feed2bf..c0abadf33961 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -138,7 +138,7 @@ if TYPE_CHECKING: from types import ModuleType - from pendulum.tz.timezone import Timezone + from pendulum.tz.timezone import FixedTimezone, Timezone from sqlalchemy.orm.query import Query from sqlalchemy.orm.session import Session @@ -213,7 +213,7 @@ def _get_model_data_interval( return DataInterval(start, end) -def create_timetable(interval: ScheduleIntervalArg, timezone: Timezone) -> Timetable: +def create_timetable(interval: ScheduleIntervalArg, timezone: Timezone | FixedTimezone) -> Timetable: """Create a Timetable instance from a ``schedule_interval`` argument.""" if interval is NOTSET: return DeltaDataIntervalTimetable(DEFAULT_SCHEDULE_INTERVAL) @@ -529,7 +529,7 @@ def __init__( tzinfo = None if date.tzinfo else settings.TIMEZONE tz = pendulum.instance(date, tz=tzinfo).timezone - self.timezone: Timezone = tz or settings.TIMEZONE + self.timezone: Timezone | FixedTimezone = tz or settings.TIMEZONE # Apply the timezone we settled on to end_date if it wasn't supplied if "end_date" in self.default_args and self.default_args["end_date"]: diff --git 
diff --git a/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py b/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
index 18799ed920e7..6c5f038b0abe 100644
--- a/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
+++ b/airflow/providers/cncf/kubernetes/pod_launcher_deprecated.py
@@ -21,7 +21,7 @@
 import math
 import time
 import warnings
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
 
 import pendulum
 import tenacity
@@ -148,13 +148,13 @@ def monitor_pod(self, pod: V1Pod, get_logs: bool) -> tuple[State, str | None]:
         """
         if get_logs:
             read_logs_since_sec = None
-            last_log_time = None
+            last_log_time: pendulum.DateTime | None = None
             while True:
                 logs = self.read_pod_logs(pod, timestamps=True, since_seconds=read_logs_since_sec)
                 for line in logs:
                     timestamp, message = self.parse_log_line(line.decode("utf-8"))
                     if timestamp:
-                        last_log_time = pendulum.parse(timestamp)
+                        last_log_time = cast(pendulum.DateTime, pendulum.parse(timestamp))
                     self.log.info(message)
                 time.sleep(1)
 
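The `cast` added in `pod_launcher_deprecated.py` works around the broad return annotation of `pendulum.parse`, which mypy cannot narrow on its own. A small sketch, assuming the log line carries an ISO-8601 timestamp as Kubernetes emits with `timestamps=True`:

```python
import pendulum

# pendulum.parse() is annotated to return a union of DateTime, Date, Time and
# Duration, so mypy cannot narrow the result without a cast; at runtime an
# ISO-8601 timestamp yields a DateTime.
value = pendulum.parse("2024-01-01T12:34:56.789Z")
print(type(value).__name__)  # typically "DateTime"
```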
diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py
index 48aa59593346..87ee4d4a73be 100644
--- a/airflow/serialization/serialized_objects.py
+++ b/airflow/serialization/serialized_objects.py
@@ -65,6 +65,7 @@
 from airflow.utils.module_loading import import_string, qualname
 from airflow.utils.operator_resources import Resources
 from airflow.utils.task_group import MappedTaskGroup, TaskGroup
+from airflow.utils.timezone import parse_timezone
 from airflow.utils.types import NOTSET, ArgNotSet
 
 if TYPE_CHECKING:
@@ -144,7 +145,7 @@ def decode_relativedelta(var: dict[str, Any]) -> relativedelta.relativedelta:
     return relativedelta.relativedelta(**var)
 
 
-def encode_timezone(var: Timezone) -> str | int:
+def encode_timezone(var: Timezone | FixedTimezone) -> str | int:
     """
     Encode a Pendulum Timezone for serialization.
 
@@ -167,9 +168,9 @@ def encode_timezone(var: Timezone) -> str | int:
     )
 
 
-def decode_timezone(var: str | int) -> Timezone:
+def decode_timezone(var: str | int) -> Timezone | FixedTimezone:
     """Decode a previously serialized Pendulum Timezone."""
-    return pendulum.tz.timezone(var)
+    return parse_timezone(var)
 
 
 def _get_registered_timetable(importable_string: str) -> type[Timetable] | None:
@@ -607,7 +608,7 @@ def deserialize(cls, encoded_var: Any, use_pydantic_models=False) -> Any:
             raise TypeError(f"Invalid type {type_!s} in deserialization.")
 
     _deserialize_datetime = pendulum.from_timestamp
-    _deserialize_timezone = pendulum.tz.timezone
+    _deserialize_timezone = parse_timezone
 
     @classmethod
     def _deserialize_timedelta(cls, seconds: int) -> datetime.timedelta:
diff --git a/airflow/serialization/serializers/datetime.py b/airflow/serialization/serializers/datetime.py
index d32dd8897bce..69058b8c02a8 100644
--- a/airflow/serialization/serializers/datetime.py
+++ b/airflow/serialization/serializers/datetime.py
@@ -24,6 +24,7 @@
     serialize as serialize_timezone,
 )
 from airflow.utils.module_loading import qualname
+from airflow.utils.timezone import parse_timezone
 
 if TYPE_CHECKING:
     import datetime
@@ -62,23 +63,22 @@ def deserialize(classname: str, version: int, data: dict | str) -> datetime.date
     import datetime
 
     from pendulum import DateTime
-    from pendulum.tz import fixed_timezone, timezone
 
     tz: datetime.tzinfo | None = None
     if isinstance(data, dict) and TIMEZONE in data:
         if version == 1:
             # try to deserialize unsupported timezones
             timezone_mapping = {
-                "EDT": fixed_timezone(-4 * 3600),
-                "CDT": fixed_timezone(-5 * 3600),
-                "MDT": fixed_timezone(-6 * 3600),
-                "PDT": fixed_timezone(-7 * 3600),
-                "CEST": timezone("CET"),
+                "EDT": parse_timezone(-4 * 3600),
+                "CDT": parse_timezone(-5 * 3600),
+                "MDT": parse_timezone(-6 * 3600),
+                "PDT": parse_timezone(-7 * 3600),
+                "CEST": parse_timezone("CET"),
             }
             if data[TIMEZONE] in timezone_mapping:
                 tz = timezone_mapping[data[TIMEZONE]]
             else:
-                tz = timezone(data[TIMEZONE])
+                tz = parse_timezone(data[TIMEZONE])
         else:
             tz = (
                 deserialize_timezone(data[TIMEZONE][1], data[TIMEZONE][2], data[TIMEZONE][0])
diff --git a/airflow/serialization/serializers/timezone.py b/airflow/serialization/serializers/timezone.py
index 23901b9d444e..0f580adef83f 100644
--- a/airflow/serialization/serializers/timezone.py
+++ b/airflow/serialization/serializers/timezone.py
@@ -74,7 +74,7 @@ def serialize(o: object) -> tuple[U, str, int, bool]:
 
 
 def deserialize(classname: str, version: int, data: object) -> Any:
-    from pendulum.tz import fixed_timezone, timezone
+    from airflow.utils.timezone import parse_timezone
 
     if not isinstance(data, (str, int)):
         raise TypeError(f"{data} is not of type int or str but of {type(data)}")
@@ -82,9 +82,6 @@ def deserialize(classname: str, version: int, data: object) -> Any:
     if version > __version__:
         raise TypeError(f"serialized {version} of {classname} > {__version__}")
 
-    if isinstance(data, int):
-        return fixed_timezone(data)
-
     if "zoneinfo.ZoneInfo" in classname:
         try:
             from zoneinfo import ZoneInfo
@@ -93,7 +90,7 @@ def deserialize(classname: str, version: int, data: object) -> Any:
 
         return ZoneInfo(data)
 
-    return timezone(data)
+    return parse_timezone(data)
 
 
 # ported from pendulum.tz.timezone._get_tzinfo_name
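Because `parse_timezone` accepts both IANA names and integer offsets, the explicit `fixed_timezone(...)` branch removed from the serializers above becomes redundant. A hedged sketch of the decoding behaviour implied by these hunks:

```python
from airflow.serialization.serialized_objects import decode_timezone

# A named IANA zone decodes to a pendulum Timezone...
print(decode_timezone("Asia/Seoul"))
# ...while an integer offset (seconds east of UTC) decodes to a FixedTimezone,
# which is what the removed ``fixed_timezone(data)`` branch used to produce.
print(decode_timezone(-4 * 3600))
```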
diff --git a/airflow/settings.py b/airflow/settings.py
index 1a38a59ed301..53c5cc6aa4b6 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -26,7 +26,6 @@
 import warnings
 from typing import TYPE_CHECKING, Any, Callable
 
-import pendulum
 import pluggy
 import sqlalchemy
 from sqlalchemy import create_engine, exc, text
@@ -40,6 +39,7 @@
 from airflow.logging_config import configure_logging
 from airflow.utils.orm_event_handlers import setup_event_handlers
 from airflow.utils.state import State
+from airflow.utils.timezone import local_timezone, parse_timezone, utc
 
 if TYPE_CHECKING:
     from sqlalchemy.engine import Engine
@@ -50,13 +50,12 @@
 log = logging.getLogger(__name__)
 
 try:
-    tz = conf.get_mandatory_value("core", "default_timezone")
-    if tz == "system":
-        TIMEZONE = pendulum.tz.local_timezone()
+    if (tz := conf.get_mandatory_value("core", "default_timezone")) != "system":
+        TIMEZONE = parse_timezone(tz)
     else:
-        TIMEZONE = pendulum.tz.timezone(tz)
+        TIMEZONE = local_timezone()
 except Exception:
-    TIMEZONE = pendulum.tz.timezone("UTC")
+    TIMEZONE = utc
 log.info("Configured default timezone %s", TIMEZONE)
 
 
diff --git a/airflow/timetables/_cron.py b/airflow/timetables/_cron.py
index b0e6e256ee0e..fa2fb1266fc2 100644
--- a/airflow/timetables/_cron.py
+++ b/airflow/timetables/_cron.py
@@ -19,17 +19,16 @@
 import datetime
 from typing import TYPE_CHECKING, Any
 
-import pendulum
 from cron_descriptor import CasingTypeEnum, ExpressionDescriptor, FormatException, MissingFieldException
 from croniter import CroniterBadCronError, CroniterBadDateError, croniter
 
 from airflow.exceptions import AirflowTimetableInvalid
 from airflow.utils.dates import cron_presets
-from airflow.utils.timezone import convert_to_utc, make_aware, make_naive
+from airflow.utils.timezone import convert_to_utc, make_aware, make_naive, parse_timezone
 
 if TYPE_CHECKING:
     from pendulum import DateTime
-    from pendulum.tz.timezone import Timezone
+    from pendulum.tz.timezone import FixedTimezone, Timezone
 
 
 def _covers_every_hour(cron: croniter) -> bool:
@@ -63,11 +62,11 @@
 class CronMixin:
     """Mixin to provide interface to work with croniter."""
 
-    def __init__(self, cron: str, timezone: str | Timezone) -> None:
+    def __init__(self, cron: str, timezone: str | Timezone | FixedTimezone) -> None:
         self._expression = cron_presets.get(cron, cron)
 
         if isinstance(timezone, str):
-            timezone = pendulum.tz.timezone(timezone)
+            timezone = parse_timezone(timezone)
         self._timezone = timezone
 
         try:
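With `CronMixin` now accepting a `FixedTimezone` as well, cron-based timetables can be constructed with offset-only zones. A short sketch under that assumption, using the existing `CronDataIntervalTimetable` (not something this diff adds):

```python
from airflow.timetables.interval import CronDataIntervalTimetable
from airflow.utils.timezone import parse_timezone

# A plain string is resolved through parse_timezone() inside CronMixin.__init__...
daily_utc = CronDataIntervalTimetable("@daily", "UTC")
# ...while a fixed UTC offset (here +05:30) exercises the new FixedTimezone branch.
daily_offset = CronDataIntervalTimetable("0 0 * * *", parse_timezone(5 * 3600 + 30 * 60))
```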
diff --git a/airflow/timetables/trigger.py b/airflow/timetables/trigger.py
index 95d29238037c..2a0df645daca 100644
--- a/airflow/timetables/trigger.py
+++ b/airflow/timetables/trigger.py
@@ -26,7 +26,7 @@
 
 if TYPE_CHECKING:
     from dateutil.relativedelta import relativedelta
-    from pendulum.tz.timezone import Timezone
+    from pendulum.tz.timezone import FixedTimezone, Timezone
 
     from airflow.timetables.base import TimeRestriction
 
@@ -48,7 +48,7 @@ def __init__(
         self,
         cron: str,
         *,
-        timezone: str | Timezone,
+        timezone: str | Timezone | FixedTimezone,
         interval: datetime.timedelta | relativedelta = datetime.timedelta(),
     ) -> None:
         super().__init__(cron, timezone)
@@ -77,7 +77,12 @@ def serialize(self) -> dict[str, Any]:
         return {"expression": self._expression, "timezone": timezone, "interval": interval}
 
     def infer_manual_data_interval(self, *, run_after: DateTime) -> DataInterval:
-        return DataInterval(run_after - self._interval, run_after)
+        return DataInterval(
+            # pendulum.DateTime ± timedelta should return pendulum.DateTime,
+            # but mypy decides that the result is datetime.datetime
+            run_after - self._interval,  # type: ignore[arg-type]
+            run_after,
+        )
 
     def next_dagrun_info(
         self,
@@ -101,4 +106,9 @@
             next_start_time = max(start_time_candidates)
         if restriction.latest is not None and restriction.latest < next_start_time:
             return None
-        return DagRunInfo.interval(next_start_time - self._interval, next_start_time)
+        return DagRunInfo.interval(
+            # pendulum.DateTime ± timedelta should return pendulum.DateTime,
+            # but mypy decides that the result is datetime.datetime
+            next_start_time - self._interval,  # type: ignore[arg-type]
+            next_start_time,
+        )
diff --git a/airflow/utils/sqlalchemy.py b/airflow/utils/sqlalchemy.py
index a042d4e9024d..fb241f482f51 100644
--- a/airflow/utils/sqlalchemy.py
+++ b/airflow/utils/sqlalchemy.py
@@ -24,7 +24,6 @@
 import logging
 from typing import TYPE_CHECKING, Any, Generator, Iterable, overload
 
-import pendulum
 from dateutil import relativedelta
 from sqlalchemy import TIMESTAMP, PickleType, and_, event, false, nullsfirst, or_, true, tuple_
 from sqlalchemy.dialects import mssql, mysql
@@ -34,7 +33,7 @@
 from airflow import settings
 from airflow.configuration import conf
 from airflow.serialization.enums import Encoding
-from airflow.utils.timezone import make_naive
+from airflow.utils.timezone import make_naive, utc
 
 if TYPE_CHECKING:
     from kubernetes.client.models.v1_pod import V1Pod
@@ -46,8 +45,6 @@
 
 log = logging.getLogger(__name__)
 
-utc = pendulum.tz.timezone("UTC")
-
 
 class UtcDateTime(TypeDecorator):
     """
diff --git a/airflow/utils/timezone.py b/airflow/utils/timezone.py
index 12c75bef5976..8ac9a49e0e8b 100644
--- a/airflow/utils/timezone.py
+++ b/airflow/utils/timezone.py
@@ -18,14 +18,20 @@
 from __future__ import annotations
 
 import datetime as dt
-from typing import overload
+from typing import TYPE_CHECKING, overload
 
 import pendulum
 from dateutil.relativedelta import relativedelta
 from pendulum.datetime import DateTime
 
-# UTC time zone as a tzinfo instance.
-utc = pendulum.tz.timezone("UTC")
+if TYPE_CHECKING:
+    from pendulum.tz.timezone import FixedTimezone, Timezone
+
+_PENDULUM3 = pendulum.__version__.startswith("3")
+# UTC Timezone as a tzinfo instance. The actual value depends on the pendulum version:
+#  - Timezone("UTC") in pendulum 3
+#  - FixedTimezone(0, "UTC") in pendulum 2
+utc = pendulum.UTC
 
 
 def is_localized(value):
@@ -135,12 +141,10 @@ def make_aware(value: dt.datetime | None, timezone: dt.tzinfo | None = None) ->
     # Check that we won't overwrite the timezone of an aware datetime.
     if is_localized(value):
         raise ValueError(f"make_aware expects a naive datetime, got {value}")
-    if hasattr(value, "fold"):
-        # In case of python 3.6 we want to do the same that pendulum does for python3.5
-        # i.e in case we move clock back we want to schedule the run at the time of the second
-        # instance of the same clock time rather than the first one.
-        # Fold parameter has no impact in other cases so we can safely set it to 1 here
-        value = value.replace(fold=1)
+    # In case we move the clock back, we want to schedule the run at the time of the second
+    # instance of the same clock time rather than the first one.
+    # The fold parameter has no impact in other cases, so we can safely set it to 1 here.
+    value = value.replace(fold=1)
     localized = getattr(timezone, "localize", None)
     if localized is not None:
         # This method is available for pytz time zones
@@ -273,3 +277,31 @@ def _format_part(key: str) -> str:
     if not joined:
         return "<1s"
     return joined
+
+
+def parse_timezone(name: str | int) -> FixedTimezone | Timezone:
+    """
+    Parse a timezone and return one of the pendulum Timezone types.
+
+    Provides the same interface as ``pendulum.timezone(name)``.
+
+    :param name: Either an IANA timezone name or an offset to UTC in seconds.
+
+    :meta private:
+    """
+    if _PENDULUM3:
+        # ``pendulum.timezone`` only exists in pendulum 3; this branch is never reached on pendulum 2
+        return pendulum.timezone(name)  # type: ignore[operator]
+    # In pendulum 2 ``pendulum.tz.timezone`` is a function; in pendulum 3 it refers to the module
+    return pendulum.tz.timezone(name)  # type: ignore[operator]
+
+
+def local_timezone() -> FixedTimezone | Timezone:
+    """
+    Return the local timezone.
+
+    Provides the same interface as ``pendulum.tz.local_timezone()``.
+
+    :meta private:
+    """
+    return pendulum.tz.local_timezone()
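The helpers above exist to paper over the pendulum 2/3 API split: `pendulum.tz.timezone` is a callable in pendulum 2 but a module in pendulum 3, and (per the comment in the hunk) `pendulum.UTC` is a different concrete type on each major version. A minimal usage sketch; the outputs in the comments are indicative, not asserted by the diff:

```python
from airflow.utils.timezone import local_timezone, parse_timezone, utc

print(parse_timezone("Asia/Tokyo"))  # same call shape on pendulum 2 and pendulum 3
print(parse_timezone(3600))          # integer offsets give a FixedTimezone (+01:00)
print(local_timezone())              # whatever pendulum detects for the host
print(utc)                           # FixedTimezone(0, "UTC") on pendulum 2, Timezone("UTC") on 3
```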
diff --git a/dev/breeze/src/airflow_breeze/commands/common_options.py b/dev/breeze/src/airflow_breeze/commands/common_options.py
index 12d0ee77b8d7..a280db4e2cb1 100644
--- a/dev/breeze/src/airflow_breeze/commands/common_options.py
+++ b/dev/breeze/src/airflow_breeze/commands/common_options.py
@@ -151,6 +151,12 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option,
     is_flag=True,
     envvar="DOWNGRADE_SQLALCHEMY",
 )
+option_downgrade_pendulum = click.option(
+    "--downgrade-pendulum",
+    help="Downgrade Pendulum to minimum supported version.",
+    is_flag=True,
+    envvar="DOWNGRADE_PENDULUM",
+)
 option_dry_run = click.option(
     "-D",
     "--dry-run",
diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands.py b/dev/breeze/src/airflow_breeze/commands/developer_commands.py
index ede6dfd933d4..27c43fc11bb5 100644
--- a/dev/breeze/src/airflow_breeze/commands/developer_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/developer_commands.py
@@ -39,6 +39,7 @@
     option_database_isolation,
     option_db_reset,
     option_docker_host,
+    option_downgrade_pendulum,
     option_downgrade_sqlalchemy,
     option_dry_run,
     option_forward_credentials,
@@ -248,6 +249,7 @@ def run(self):
 @option_db_reset
 @option_docker_host
 @option_downgrade_sqlalchemy
+@option_downgrade_pendulum
 @option_dry_run
 @option_executor_shell
 @option_force_build
@@ -294,6 +296,7 @@ def shell(
     database_isolation: bool,
     db_reset: bool,
     downgrade_sqlalchemy: bool,
+    downgrade_pendulum: bool,
     docker_host: str | None,
     executor: str,
     extra_args: tuple,
@@ -354,6 +357,7 @@
         database_isolation=database_isolation,
         db_reset=db_reset,
         downgrade_sqlalchemy=downgrade_sqlalchemy,
+        downgrade_pendulum=downgrade_pendulum,
         docker_host=docker_host,
         executor=executor,
         extra_args=extra_args if not max_time else ["exit"],
diff --git a/dev/breeze/src/airflow_breeze/commands/developer_commands_config.py b/dev/breeze/src/airflow_breeze/commands/developer_commands_config.py
index 88b734f513c9..911ed9ebb59e 100644
--- a/dev/breeze/src/airflow_breeze/commands/developer_commands_config.py
+++ b/dev/breeze/src/airflow_breeze/commands/developer_commands_config.py
@@ -159,6 +159,7 @@
             "options": [
                 "--upgrade-boto",
                 "--downgrade-sqlalchemy",
+                "--downgrade-pendulum",
             ],
         },
         {
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands.py b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
index c4aff8797e79..f826d9bdff1c 100644
--- a/dev/breeze/src/airflow_breeze/commands/testing_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/testing_commands.py
@@ -29,6 +29,7 @@
     option_backend,
     option_db_reset,
     option_debug_resources,
+    option_downgrade_pendulum,
     option_downgrade_sqlalchemy,
     option_dry_run,
     option_forward_credentials,
@@ -471,6 +472,7 @@ def _verify_parallelism_parameters(
 @option_excluded_parallel_test_types
 @option_upgrade_boto
 @option_downgrade_sqlalchemy
+@option_downgrade_pendulum
 @option_collect_only
 @option_remove_arm_packages
 @option_skip_docker_compose_down
@@ -513,6 +515,7 @@ def command_for_tests(**kwargs):
 @option_excluded_parallel_test_types
 @option_upgrade_boto
 @option_downgrade_sqlalchemy
+@option_downgrade_pendulum
 @option_collect_only
 @option_remove_arm_packages
 @option_skip_docker_compose_down
@@ -548,6 +551,7 @@ def command_for_db_tests(**kwargs):
 @option_collect_only
 @option_debug_resources
 @option_downgrade_sqlalchemy
+@option_downgrade_pendulum
 @option_dry_run
 @option_enable_coverage
 @option_excluded_parallel_test_types
@@ -589,6 +593,7 @@ def _run_test_command(
     db_reset: bool,
     debug_resources: bool,
     downgrade_sqlalchemy: bool,
+    downgrade_pendulum: bool,
     enable_coverage: bool,
     excluded_parallel_test_types: str,
     extra_pytest_args: tuple,
@@ -632,6 +637,7 @@ def _run_test_command(
         backend=backend,
         collect_only=collect_only,
         downgrade_sqlalchemy=downgrade_sqlalchemy,
+        downgrade_pendulum=downgrade_pendulum,
         enable_coverage=enable_coverage,
         forward_credentials=forward_credentials,
         forward_ports=False,
diff --git a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py
index 404e0cabe0eb..370cdad91f40 100644
--- a/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py
+++ b/dev/breeze/src/airflow_breeze/commands/testing_commands_config.py
@@ -79,6 +79,7 @@
                 "--mount-sources",
                 "--upgrade-boto",
                 "--downgrade-sqlalchemy",
+                "--downgrade-pendulum",
                 "--remove-arm-packages",
                 "--skip-docker-compose-down",
             ],
@@ -126,6 +127,7 @@
                 "--mount-sources",
                 "--upgrade-boto",
                 "--downgrade-sqlalchemy",
+                "--downgrade-pendulum",
                 "--remove-arm-packages",
                 "--skip-docker-compose-down",
             ],
@@ -177,6 +179,7 @@
                 "--mount-sources",
                 "--upgrade-boto",
                 "--downgrade-sqlalchemy",
+                "--downgrade-pendulum",
                 "--remove-arm-packages",
                 "--skip-docker-compose-down",
             ],
diff --git a/dev/breeze/src/airflow_breeze/params/shell_params.py b/dev/breeze/src/airflow_breeze/params/shell_params.py
index 85fbecd6ca8e..fbc02b792279 100644
--- a/dev/breeze/src/airflow_breeze/params/shell_params.py
+++ b/dev/breeze/src/airflow_breeze/params/shell_params.py
@@ -148,6 +148,7 @@ class ShellParams:
     dev_mode: bool = False
     docker_host: str | None = os.environ.get("DOCKER_HOST")
     downgrade_sqlalchemy: bool = False
+    downgrade_pendulum: bool = False
     dry_run: bool = False
     enable_coverage: bool = False
     executor: str = START_AIRFLOW_DEFAULT_ALLOWED_EXECUTOR
@@ -516,6 +517,7 @@ def env_variables_for_docker_commands(self) -> dict[str, str]:
         _set_var(_env, "DEV_MODE", self.dev_mode)
         _set_var(_env, "DOCKER_IS_ROOTLESS", self.rootless_docker)
         _set_var(_env, "DOWNGRADE_SQLALCHEMY", self.downgrade_sqlalchemy)
+        _set_var(_env, "DOWNGRADE_PENDULUM", self.downgrade_pendulum)
         _set_var(_env, "ENABLED_SYSTEMS", None, "")
         _set_var(_env, "FLOWER_HOST_PORT", None, FLOWER_HOST_PORT)
         _set_var(_env, "GITHUB_ACTIONS", self.github_actions)
diff --git a/images/breeze/output_shell.svg b/images/breeze/output_shell.svg
index 8d9d7350d5a4..84151382c729 100644
--- a/images/breeze/output_shell.svg
+++ b/images/breeze/output_shell.svg
@@ -1,4 +1,4 @@
-
+