-
Notifications
You must be signed in to change notification settings - Fork 186
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
bed60ca
commit 8b4518f
Showing
6 changed files
with
111 additions
and
22 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,3 @@ | ||
# pragma: no cover | ||
# TODO: Implement it | ||
|
||
from typing import Any | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
import json | ||
from unittest.mock import patch | ||
|
||
import pytest | ||
from airflow.models.connection import Connection | ||
|
||
|
||
@pytest.fixture()
def mock_bigquery_conn():  # type: ignore
    """
    Patch Airflow's connection lookup to return a fake BigQuery connection.

    While the fixture is active, ``BaseHook.get_connection`` always returns a
    ``google_cloud_platform`` connection whose extras carry a project and a
    key-file path; the ``Connection`` object itself is yielded so tests can
    read attributes such as ``conn_id``.
    """
    conn = Connection(
        conn_id="my_bigquery_connection",
        conn_type="google_cloud_platform",
        extra=json.dumps({"project": "my_project", "key_path": "my_key_path.json"}),
    )
    with patch("airflow.hooks.base.BaseHook.get_connection", return_value=conn):
        yield conn
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
from unittest.mock import MagicMock, patch | ||
|
||
import pytest | ||
|
||
from cosmos import ProfileConfig | ||
from cosmos.exceptions import CosmosValueError | ||
from cosmos.operators._asynchronous.bigquery import DbtRunAirflowAsyncBigqueryOperator | ||
from cosmos.profiles import get_automatic_profile_mapping | ||
from cosmos.settings import AIRFLOW_IO_AVAILABLE | ||
|
||
|
||
@pytest.mark.skipif(
    AIRFLOW_IO_AVAILABLE,
    reason="Only asserts the error raised when Airflow IO (Airflow >= 2.8) is unavailable",
)
def test_get_remote_sql_airflow_io_unavailable(mock_bigquery_conn):
    """
    ``get_remote_sql()`` must raise ``CosmosValueError`` when Airflow IO is unavailable.

    The previous version wrapped the assertion in ``if not AIRFLOW_IO_AVAILABLE``,
    so on newer Airflow the test silently passed without asserting anything.
    Using ``skipif`` makes that outcome visible as a skip in the test report.
    """
    profile_mapping = get_automatic_profile_mapping(
        mock_bigquery_conn.conn_id,
        profile_args={
            "dataset": "my_dataset",
        },
    )
    bigquery_profile_config = ProfileConfig(
        profile_name="my_profile", target_name="dev", profile_mapping=profile_mapping
    )
    operator = DbtRunAirflowAsyncBigqueryOperator(
        task_id="test_task", project_dir="/tmp", profile_config=bigquery_profile_config
    )

    # get_remote_sql() reads the node's compiled SQL path from extra_context.
    operator.extra_context = {
        "dbt_node_config": {"file_path": "/some/path/to/file.sql"},
        "dbt_dag_task_group_identifier": "task_group_1",
    }

    with pytest.raises(
        CosmosValueError, match="Cosmos async support is only available starting in Airflow 2.8 or later."
    ):
        operator.get_remote_sql()
|
||
|
||
def test_get_remote_sql_success(mock_bigquery_conn):
    """
    ``get_remote_sql()`` reads the model's SQL through ``ObjectStoragePath``
    and returns the file contents.
    """
    profile_mapping = get_automatic_profile_mapping(
        mock_bigquery_conn.conn_id,
        profile_args={
            "dataset": "my_dataset",
        },
    )
    bigquery_profile_config = ProfileConfig(
        profile_name="my_profile", target_name="dev", profile_mapping=profile_mapping
    )
    operator = DbtRunAirflowAsyncBigqueryOperator(
        task_id="test_task", project_dir="/tmp", profile_config=bigquery_profile_config
    )

    # get_remote_sql() reads the node's compiled SQL path from extra_context.
    operator.extra_context = {
        "dbt_node_config": {"file_path": "/some/path/to/file.sql"},
        "dbt_dag_task_group_identifier": "task_group_1",
    }
    # NOTE: the original also re-assigned operator.project_dir = "/tmp" here,
    # which was redundant — the constructor above already receives project_dir="/tmp".

    # Stub out the remote file: opening the storage path yields a file-like
    # object whose read() returns a fixed SQL string.
    mock_object_storage_path = MagicMock()
    mock_file = MagicMock()
    mock_file.read.return_value = "SELECT * FROM table"
    mock_object_storage_path.open.return_value.__enter__.return_value = mock_file

    # NOTE(review): this patches ObjectStoragePath where it is defined; if the
    # operator module imports the name directly, patching the usage site would
    # be more robust — confirm against the operator's imports.
    with patch("airflow.io.path.ObjectStoragePath", return_value=mock_object_storage_path):
        remote_sql = operator.get_remote_sql()

    assert remote_sql == "SELECT * FROM table"
    mock_object_storage_path.open.assert_called_once()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
import pytest | ||
|
||
from cosmos.operators._asynchronous.databricks import DbtRunAirflowAsyncDatabricksOperator | ||
|
||
|
||
def test_execute_should_raise_not_implemented_error():
    """The Databricks async operator has no execute() implementation yet; it must raise."""
    databricks_operator = DbtRunAirflowAsyncDatabricksOperator(task_id="test_task")
    with pytest.raises(NotImplementedError):
        databricks_operator.execute(context={})