From 07afd547c21bc29e7c04d2addeb14267be566494 Mon Sep 17 00:00:00 2001
From: Gopal Dirisala <39794726+dirrao@users.noreply.github.com>
Date: Tue, 4 Jun 2024 09:37:44 +0530
Subject: [PATCH] Resolve common providers deprecations in tests (#40036)

---
 .../cncf/kubernetes/cli/kubernetes_command.py  |  4 ++--
 tests/deprecations_ignore.yml                  | 11 -----------
 .../apache/spark/operators/test_spark_sql.py   |  2 +-
 tests/providers/common/sql/hooks/test_dbapi.py |  7 ++++++-
 .../providers/common/sql/operators/test_sql.py | 18 ++++++++++++++++--
 5 files changed, 25 insertions(+), 17 deletions(-)

diff --git a/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py b/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
index e05d5d2326321..085a877482dc3 100644
--- a/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
+++ b/airflow/providers/cncf/kubernetes/cli/kubernetes_command.py
@@ -30,7 +30,7 @@
 from airflow.providers.cncf.kubernetes import pod_generator
 from airflow.providers.cncf.kubernetes.executors.kubernetes_executor import KubeConfig
 from airflow.providers.cncf.kubernetes.kube_client import get_kube_client
-from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_pod_id
+from airflow.providers.cncf.kubernetes.kubernetes_helper_functions import create_unique_id
 from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
 from airflow.utils import cli as cli_utils, yaml
 from airflow.utils.cli import get_dag
@@ -52,7 +52,7 @@ def generate_pod_yaml(args):
         pod = PodGenerator.construct_pod(
             dag_id=args.dag_id,
             task_id=ti.task_id,
-            pod_id=create_pod_id(args.dag_id, ti.task_id),
+            pod_id=create_unique_id(args.dag_id, ti.task_id),
             try_number=ti.try_number,
             kube_image=kube_config.kube_image,
             date=ti.execution_date,
diff --git a/tests/deprecations_ignore.yml b/tests/deprecations_ignore.yml
index ae4f6776e0ff9..d164c9c068519 100644
--- a/tests/deprecations_ignore.yml
+++ b/tests/deprecations_ignore.yml
@@ -205,17 +205,6 @@
 - tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_exception
 - tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_resuming_status
 - tests/providers/amazon/aws/triggers/test_redshift_cluster.py::TestRedshiftClusterTrigger::test_redshift_cluster_sensor_trigger_success
-- tests/providers/apache/spark/operators/test_spark_sql.py::TestSparkSqlOperator::test_execute
-- tests/providers/common/sql/hooks/test_dbapi.py::TestDbApiHook::test_instance_check_works_for_legacy_db_api_hook
-- tests/providers/common/sql/operators/test_sql.py::TestSQLCheckOperatorDbHook::test_get_hook
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_branch_false_with_dag_run
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_branch_list_with_dag_run
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_branch_single_value_with_dag_run
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_branch_true_with_dag_run
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_invalid_query_result_with_dag_run
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_with_skip_in_branch_downstream_dependencies
-- tests/providers/common/sql/operators/test_sql.py::TestSqlBranch::test_with_skip_in_branch_downstream_dependencies2
-- tests/providers/cncf/kubernetes/cli/test_kubernetes_command.py::TestGenerateDagYamlCommand::test_generate_dag_yaml
 - tests/providers/databricks/hooks/test_databricks_sql.py::test_incorrect_column_names
 - tests/providers/databricks/hooks/test_databricks_sql.py::test_no_query
 - tests/providers/databricks/hooks/test_databricks_sql.py::test_query
diff --git a/tests/providers/apache/spark/operators/test_spark_sql.py b/tests/providers/apache/spark/operators/test_spark_sql.py
index 83f1b26825b9b..ebb1109ce4d09 100644
--- a/tests/providers/apache/spark/operators/test_spark_sql.py
+++ b/tests/providers/apache/spark/operators/test_spark_sql.py
@@ -51,7 +51,7 @@ def test_execute(self):
         # Given / When
         operator = SparkSqlOperator(task_id="spark_sql_job", dag=self.dag, **self._config)
 
-        assert self._config["sql"] == operator._sql
+        assert self._config["sql"] == operator.sql
         assert self._config["conn_id"] == operator._conn_id
         assert self._config["total_executor_cores"] == operator._total_executor_cores
         assert self._config["executor_cores"] == operator._executor_cores
diff --git a/tests/providers/common/sql/hooks/test_dbapi.py b/tests/providers/common/sql/hooks/test_dbapi.py
index a7c33b55491f6..090ec80e682b1 100644
--- a/tests/providers/common/sql/hooks/test_dbapi.py
+++ b/tests/providers/common/sql/hooks/test_dbapi.py
@@ -24,6 +24,7 @@
 import pytest
 from pyodbc import Cursor
 
+from airflow.exceptions import RemovedInAirflow3Warning
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, fetch_all_handler, fetch_one_handler
@@ -537,7 +538,11 @@ def test_instance_check_works_for_non_db_api_hook(self):
         assert not isinstance(NonDbApiHook(), DbApiHook)
 
     def test_instance_check_works_for_legacy_db_api_hook(self):
-        from airflow.hooks.dbapi import DbApiHook as LegacyDbApiHook
+        with pytest.warns(
+            RemovedInAirflow3Warning,
+            match="This module is deprecated. Please use `airflow.providers.common.sql.hooks.sql`.",
+        ):
+            from airflow.hooks.dbapi import DbApiHook as LegacyDbApiHook
 
         assert isinstance(DbApiHookInProvider(), LegacyDbApiHook)
 
diff --git a/tests/providers/common/sql/operators/test_sql.py b/tests/providers/common/sql/operators/test_sql.py
index 105099fda0531..e95a018f13610 100644
--- a/tests/providers/common/sql/operators/test_sql.py
+++ b/tests/providers/common/sql/operators/test_sql.py
@@ -24,7 +24,7 @@
 import pytest
 
 from airflow import DAG
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.models import Connection, DagRun, TaskInstance as TI, XCom
 from airflow.operators.empty import EmptyOperator
 from airflow.providers.common.sql.hooks.sql import fetch_all_handler
@@ -608,7 +608,14 @@ def test_get_hook(self, database):
         ) as mock_get_conn:
             if database:
                 self._operator.database = database
-            assert isinstance(self._operator._hook, PostgresHook)
+            if database:
+                with pytest.warns(
+                    AirflowProviderDeprecationWarning,
+                    match='The "schema" variable has been renamed to "database" as it contained the database name.Please use "database" to set the database name.',
+                ):
+                    assert isinstance(self._operator._hook, PostgresHook)
+            else:
+                assert isinstance(self._operator._hook, PostgresHook)
             mock_get_conn.assert_called_once_with(self.conn_id)
 
     def test_not_allowed_conn_type(self):
@@ -1120,6 +1127,7 @@ def test_branch_single_value_with_dag_run(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1160,6 +1168,7 @@ def test_branch_true_with_dag_run(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1201,6 +1210,7 @@ def test_branch_false_with_dag_run(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1243,6 +1253,7 @@ def test_branch_list_with_dag_run(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1282,6 +1293,7 @@ def test_invalid_query_result_with_dag_run(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1312,6 +1324,7 @@ def test_with_skip_in_branch_downstream_dependencies(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
@@ -1351,6 +1364,7 @@ def test_with_skip_in_branch_downstream_dependencies2(self, mock_get_db_hook):
             start_date=timezone.utcnow(),
             execution_date=DEFAULT_DATE,
             state=State.RUNNING,
+            data_interval=(DEFAULT_DATE, DEFAULT_DATE),
         )
 
         mock_get_records = mock_get_db_hook.return_value.get_first
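
Note (reviewer sketch, not part of the patch): the recurring change above is to stop listing tests in tests/deprecations_ignore.yml and instead assert the deprecation explicitly with pytest.warns(..., match=...), where match is a regular expression searched against the warning message. A minimal, self-contained sketch of that pattern, using a hypothetical warning class and helper rather than Airflow's own, could look like this:

    import warnings

    import pytest


    class ExampleDeprecationWarning(DeprecationWarning):
        """Hypothetical stand-in for a warning class such as RemovedInAirflow3Warning."""


    def legacy_helper():
        # Hypothetical deprecated function that still returns a usable result.
        warnings.warn(
            "This module is deprecated. Please use `new.module.path`.",
            ExampleDeprecationWarning,
            stacklevel=2,
        )
        return 42


    def test_legacy_helper_warns():
        # The deprecated call runs inside the context manager, so the test asserts
        # both the warning type and, via the `match` regex, its message.
        with pytest.warns(ExampleDeprecationWarning, match="This module is deprecated"):
            assert legacy_helper() == 42

The data_interval=(DEFAULT_DATE, DEFAULT_DATE) arguments added to the create_dagrun(...) calls serve the same goal: passing the interval explicitly avoids the deprecated code path that infers it, so those TestSqlBranch tests no longer need entries in the ignore list.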