From d1f80a1f3f90bcddbca58aa6de2aede743e106e8 Mon Sep 17 00:00:00 2001
From: Serhii
Date: Tue, 22 Mar 2022 17:35:13 +0200
Subject: [PATCH] Increased unit test coverage to 90

---
 .../source-hubspot/unit_tests/conftest.py     |  57 +++++++
 .../source-hubspot/unit_tests/test_source.py  | 106 ++++++++-----
 .../source-hubspot/unit_tests/test_streams.py | 140 ++++++++++++++++++
 .../source-hubspot/unit_tests/utils.py        |  27 ++++
 4 files changed, 294 insertions(+), 36 deletions(-)
 create mode 100644 airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py
 create mode 100644 airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py
 create mode 100644 airbyte-integrations/connectors/source-hubspot/unit_tests/utils.py

diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py
new file mode 100644
index 000000000000..311d2be22557
--- /dev/null
+++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/conftest.py
@@ -0,0 +1,57 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+import pytest
+from source_hubspot.source import SourceHubspot
+from source_hubspot.streams import API
+
+NUMBER_OF_PROPERTIES = 2000
+
+
+@pytest.fixture(name="oauth_config")
+def oauth_config_fixture():
+    return {
+        "start_date": "2021-10-10T00:00:00Z",
+        "credentials": {
+            "credentials_title": "OAuth Credentials",
+            "redirect_uri": "https://airbyte.io",
+            "client_id": "test_client_id",
+            "client_secret": "test_client_secret",
+            "refresh_token": "test_refresh_token",
+            "access_token": "test_access_token",
+            "token_expires": "2021-05-30T06:00:00Z",
+        },
+    }
+
+
+@pytest.fixture(name="common_params")
+def common_params_fixture(config):
+    source = SourceHubspot()
+    common_params = source.get_common_params(config=config)
+    return common_params
+
+
+@pytest.fixture(name="config")
+def config_fixture():
+    return {"start_date": "2021-01-10T00:00:00Z", "credentials": {"credentials_title": "API Key Credentials", "api_key": "test_api_key"}}
+
+
+@pytest.fixture(name="some_credentials")
+def some_credentials_fixture():
+    return {"credentials_title": "API Key Credentials", "api_key": "wrong_key"}
+
+
+@pytest.fixture(name="creds_with_wrong_permissions")
+def creds_with_wrong_permissions():
+    return {"credentials_title": "API Key Credentials", "api_key": "THIS-IS-THE-API_KEY"}
+
+
+@pytest.fixture(name="fake_properties_list")
+def fake_properties_list():
+    return [f"property_number_{i}" for i in range(NUMBER_OF_PROPERTIES)]
+
+
+@pytest.fixture(name="api")
+def api(some_credentials):
+    return API(some_credentials)
diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py
index fbcdbeb3ca33..b79f0fbdd45a 100644
--- a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py
+++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_source.py
@@ -4,58 +4,96 @@
 
 import logging
+from http import HTTPStatus
+from unittest.mock import MagicMock
 
+import pendulum
 import pytest
 from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode, Type
+from source_hubspot.errors import HubspotRateLimited
 from source_hubspot.source import SourceHubspot
-from source_hubspot.streams import API, PROPERTIES_PARAM_MAX_LENGTH, Companies, Deals, Products, Workflows, split_properties
+from source_hubspot.streams import API, PROPERTIES_PARAM_MAX_LENGTH, Companies, Deals, Products, Stream, Workflows, split_properties
 
 NUMBER_OF_PROPERTIES = 2000
 
 logger = logging.getLogger("test_client")
 
 
-@pytest.fixture(name="oauth_config")
-def oauth_config_fixture():
-    return {
-        "start_date": "2021-10-10T00:00:00Z",
-        "credentials": {
-            "credentials_title": "OAuth Credentials",
-            "redirect_uri": "https://airbyte.io",
-            "client_id": "test_client_id",
-            "client_secret": "test_client_secret",
-            "refresh_token": "test_refresh_token",
-            "access_token": "test_access_token",
-            "token_expires": "2021-05-30T06:00:00Z",
-        },
-    }
+def test_check_connection_ok(requests_mock, config):
+    responses = [
+        {"json": [], "status_code": 200},
+    ]
+
+    requests_mock.register_uri("GET", "/properties/v2/contact/properties", responses)
+    ok, error_msg = SourceHubspot().check_connection(logger, config=config)
+    assert ok
+    assert not error_msg
 
 
-@pytest.fixture(name="common_params")
-def common_params_fixture(config):
-    source = SourceHubspot()
-    common_params = source.get_common_params(config=config)
-    return common_params
+
+def test_check_connection_empty_config(config):
+    config = {}
+
+    with pytest.raises(KeyError):
+        SourceHubspot().check_connection(logger, config=config)
+
+
+def test_check_connection_invalid_config(config):
+    config.pop("start_date")
+
+    with pytest.raises(TypeError):
+        SourceHubspot().check_connection(logger, config=config)
+
+
+def test_check_connection_exception(config):
+    ok, error_msg = SourceHubspot().check_connection(logger, config=config)
+
+    assert not ok
+    assert error_msg
 
 
-@pytest.fixture(name="config")
-def config_fixture():
-    return {"start_date": "2021-01-10T00:00:00Z", "credentials": {"credentials_title": "API Key Credentials", "api_key": "test_api_key"}}
+def test_streams(config):
+    streams = SourceHubspot().streams(config)
 
+    assert len(streams) == 27
 
-@pytest.fixture(name="some_credentials")
-def some_credentials_fixture():
-    return {"credentials_title": "API Key Credentials", "api_key": "wrong_key"}
 
+def test_check_credential_title_exception(config):
+    config["credentials"].pop("credentials_title")
 
-@pytest.fixture(name="creds_with_wrong_permissions")
-def creds_with_wrong_permissions():
-    return {"credentials_title": "API Key Credentials", "api_key": "THIS-IS-THE-API_KEY"}
 
+    with pytest.raises(Exception):
+        SourceHubspot().check_connection(logger, config=config)
 
 
-@pytest.fixture(name="fake_properties_list")
-def fake_properties_list():
-    return [f"property_number_{i}" for i in range(NUMBER_OF_PROPERTIES)]
+def test_parse_and_handle_errors(some_credentials):
+    response = MagicMock()
+    response.status_code = HTTPStatus.TOO_MANY_REQUESTS
+
+    with pytest.raises(HubspotRateLimited):
+        API(some_credentials)._parse_and_handle_errors(response)
+
+
+def test_convert_datetime_to_string():
+    pendulum_time = pendulum.now()
+
+    assert Stream._convert_datetime_to_string(pendulum_time, declared_format="date")
+    assert Stream._convert_datetime_to_string(pendulum_time, declared_format="date-time")
+
+
+def test_cast_datetime(common_params, caplog):
+    field_value = pendulum.now()
+    field_name = "current_time"
+
+    Companies(**common_params)._cast_datetime(field_name, field_value)
+
+    expected_warning_message = {
+        "type": "LOG",
+        "log": {
+            "level": "WARN",
+            "message": f"Couldn't parse date/datetime string in {field_name}, trying to parse timestamp... Field value: {field_value}. Ex: argument of type 'DateTime' is not iterable",
+        },
+    }
+    assert expected_warning_message["log"]["message"] in caplog.text
 
 
 def test_check_connection_backoff_on_limit_reached(requests_mock, config):
@@ -132,10 +170,6 @@ class TestSplittingPropertiesFunctionality:
             "archived": False,
         }
 
-    @pytest.fixture
-    def api(self, some_credentials):
-        return API(some_credentials)
-
     @staticmethod
     def set_mock_properties(requests_mock, url, fake_properties_list):
         properties_response = [
diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py
new file mode 100644
index 000000000000..7c2088ee49e0
--- /dev/null
+++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/test_streams.py
@@ -0,0 +1,140 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+import pytest
+from source_hubspot.streams import (
+    Campaigns,
+    Companies,
+    ContactLists,
+    Contacts,
+    DealPipelines,
+    Deals,
+    EmailEvents,
+    EngagementsCalls,
+    EngagementsEmails,
+    EngagementsMeetings,
+    EngagementsNotes,
+    EngagementsTasks,
+    FeedbackSubmissions,
+    Forms,
+    FormSubmissions,
+    LineItems,
+    MarketingEmails,
+    Owners,
+    Products,
+    Quotes,
+    TicketPipelines,
+    Tickets,
+    Workflows,
+)
+
+from .utils import read_full_refresh, read_incremental
+
+
+@pytest.mark.parametrize(
+    "stream, endpoint",
+    [
+        (Campaigns, "campaigns"),
+        (Companies, "company"),
+        (ContactLists, "contact"),
+        (Contacts, "contact"),
+        (Deals, "deal"),
+        (DealPipelines, "deal"),
+        (Quotes, "quote"),
+        (EmailEvents, ""),
+        (EngagementsCalls, "calls"),
+        (EngagementsEmails, "emails"),
+        (EngagementsMeetings, "meetings"),
+        (EngagementsNotes, "notes"),
+        (EngagementsTasks, "tasks"),
+        (FeedbackSubmissions, "feedback_submissions"),
+        (Forms, "form"),
+        (FormSubmissions, "form"),
+        (LineItems, "line_item"),
+        (MarketingEmails, ""),
+        (Owners, ""),
+        (Products, "product"),
+        (Quotes, "quote"),
+        (TicketPipelines, ""),
+        (Tickets, "ticket"),
+        (Workflows, ""),
+    ],
+)
+def test_streams_read(stream, endpoint, requests_mock, common_params, fake_properties_list):
+    stream = stream(**common_params)
+    responses = [
+        {
+            "json": {
+                stream.data_field: [
+                    {
+                        "id": "test_id",
+                        "created": "2022-02-25T16:43:11Z",
+                        "updatedAt": "2022-02-25T16:43:11Z",
+                        "lastUpdatedTime": "2022-02-25T16:43:11Z",
+                    }
+                ],
+            }
+        }
+    ]
+    properties_response = [
+        {
+            "json": [
+                {"name": property_name, "type": "string", "updatedAt": 1571085954360, "createdAt": 1565059306048}
+                for property_name in fake_properties_list
+            ],
+            "status_code": 200,
+        }
+    ]
+    is_form_submission = isinstance(stream, FormSubmissions)
+    stream_url = stream.url + "/test_id" if is_form_submission else stream.url
+
+    requests_mock.register_uri("GET", stream_url, responses)
+    requests_mock.register_uri("GET", "/marketing/v3/forms", responses)
+    requests_mock.register_uri("GET", "/email/public/v1/campaigns/test_id", responses)
+    requests_mock.register_uri("GET", f"/properties/v2/{endpoint}/properties", properties_response)
+
+    records = read_incremental(stream, {})
+
+    assert records
+
+
+@pytest.mark.parametrize(
+    "error_response",
+    [
+        {"json": {}, "status_code": 429},
+        {"json": {}, "status_code": 502},
+        {"json": {}, "status_code": 504},
+    ],
+)
+def test_common_error_retry(error_response, requests_mock, common_params, fake_properties_list):
+    """Error once, check that we retry and do not fail"""
+    properties_response = [
+        {"name": property_name, "type": "string", "updatedAt": 1571085954360, "createdAt": 1565059306048}
+        for property_name in fake_properties_list
+    ]
+    responses = [
+        error_response,
+        {
+            "json": properties_response,
+            "status_code": 200,
+        },
+    ]
+
+    stream = Companies(**common_params)
+
+    response = {
+        stream.data_field: [
+            {
+                "id": "test_id",
+                "created": "2022-02-25T16:43:11Z",
+                "updatedAt": "2022-02-25T16:43:11Z",
+                "lastUpdatedTime": "2022-02-25T16:43:11Z",
+            }
+        ],
+    }
+    requests_mock.register_uri("GET", "/properties/v2/company/properties", responses)
+    requests_mock.register_uri("GET", stream.url, [{"json": response}])
+    records = read_full_refresh(stream)
+
+    assert [response[stream.data_field][0]] == records
diff --git a/airbyte-integrations/connectors/source-hubspot/unit_tests/utils.py b/airbyte-integrations/connectors/source-hubspot/unit_tests/utils.py
new file mode 100644
index 000000000000..c92ac97b0373
--- /dev/null
+++ b/airbyte-integrations/connectors/source-hubspot/unit_tests/utils.py
@@ -0,0 +1,27 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from typing import Any, MutableMapping
+
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.sources.streams import Stream
+
+
+def read_incremental(stream_instance: Stream, stream_state: MutableMapping[str, Any]):
+    res = []
+    slices = stream_instance.stream_slices(sync_mode=SyncMode.incremental, stream_state=stream_state)
+    for slice in slices:
+        records = stream_instance.read_records(sync_mode=SyncMode.incremental, stream_slice=slice, stream_state=stream_state)
+        for record in records:
+            stream_state = stream_instance.get_updated_state(stream_state, record)
+            res.append(record)
+    return res
+
+
+def read_full_refresh(stream_instance: Stream):
+    records = []
+    slices = stream_instance.stream_slices(sync_mode=SyncMode.full_refresh)
+    for slice in slices:
+        records.extend(list(stream_instance.read_records(stream_slice=slice, sync_mode=SyncMode.full_refresh)))
+    return records