Skip to content

Commit

Permalink
update comments, pin requirements, bypass setup, and use unittest.moc…
Browse files Browse the repository at this point in the history
…k for patch
  • Loading branch information
raman325 committed Jan 13, 2021
1 parent 3496573 commit 5fbf1ab
Show file tree
Hide file tree
Showing 6 changed files with 52 additions and 13 deletions.
3 changes: 1 addition & 2 deletions requirements_test.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
-r requirements_dev.txt
pytest
pytest-homeassistant-custom-component
pytest-homeassistant-custom-component==0.1.0
13 changes: 8 additions & 5 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,16 @@
#
# See here for more info: https://docs.pytest.org/en/latest/fixture.html (note that
# pytest includes fixtures OOB which you can use as defined on this page)
from unittest.mock import patch

import pytest
from pytest_homeassistant_custom_component.async_mock import patch

pytest_plugins = "pytest_homeassistant_custom_component"


# Bypass persistent notification service which isn't useful for running tests
# This fixture is used to prevent HomeAssistant from attempting to create and dismiss persistent
# notifications. These calls would fail without this fixture since the persistent_notification
# integration is never loaded during a test.
@pytest.fixture(name="skip_notifications", autouse=True)
def skip_notifications_fixture():
"""Skip notification calls."""
Expand All @@ -31,7 +33,8 @@ def skip_notifications_fixture():
yield


# Effectively skips calls to `async_get_data` which is called on every polling update.
# This fixture, when used, will result in calls to async_get_data to return None. To have the call
# return a value, we would add the `return_value=<VALUE_TO_RETURN>` parameter to the patch call.
@pytest.fixture(name="bypass_get_data")
def bypass_get_data_fixture():
"""Skip calls to get data from API."""
Expand All @@ -41,8 +44,8 @@ def bypass_get_data_fixture():
yield


# Force a call to `async_get_data` to raise an Exception. Useful for validating
# exception handling code.
# In this fixture, we are forcing calls to async_get_data to raise an Exception. This is useful
# for exception handling.
@pytest.fixture(name="error_on_get_data")
def error_get_data_fixture():
"""Simulate error when retrieving data from API."""
Expand Down
14 changes: 12 additions & 2 deletions tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,30 @@
async def test_api(hass, aioclient_mock, caplog):
"""Test API calls."""

# Create new API instance
# To test the api submodule, we first create an instance of our API client
api = IntegrationBlueprintApiClient("test", "test", async_get_clientsession(hass))

    # Use aioclient_mock which is provided by `pytest_homeassistant_custom_component`
# to mock responses to aiohttp requests. In this case we are telling the mock to
# return {"test": "test"} when a `GET` call is made to the specified URL
# return {"test": "test"} when a `GET` call is made to the specified URL. We then
# call `async_get_data` which will make that `GET` request.
aioclient_mock.get(
"https://jsonplaceholder.typicode.com/posts/1", json={"test": "test"}
)
assert await api.async_get_data() == {"test": "test"}

# We do the same for `async_set_title`. Note the difference in the mock call
# between the previous step and this one. We use `patch` here instead of `get`
# because we know that `async_set_title` calls `api_wrapper` with `patch` as the
# first parameter
aioclient_mock.patch("https://jsonplaceholder.typicode.com/posts/1")
assert await api.async_set_title("test") is None

# In order to get 100% coverage, we need to test `api_wrapper` to test the code
# that isn't already called by `async_get_data` and `async_set_title`. Because the
# only logic that lives inside `api_wrapper` that is not being handled by a third
# party library (aiohttp) is the exception handling, we also want to simulate
# raising the exceptions to ensure that the function handles them as expected.
# The caplog fixture allows access to log messages in tests. This is particularly
# useful during exception handling testing since often the only action as part of
# exception handling is a logging statement
Expand Down
27 changes: 24 additions & 3 deletions tests/test_config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,25 @@
from .const import MOCK_CONFIG


# Bypass the integration's real setup entry points so these tests exercise
# only the config flow itself; full setup behaviour is verified by the
# other test modules.
@pytest.fixture(autouse=True)
def bypass_setup_fixture():
    """Prevent setup."""
    # Patch both entry points for the duration of each test; yielding inside
    # the context managers keeps the patches active until the test finishes.
    with patch(
        "custom_components.{{cookiecutter.domain_name}}.async_setup",
        return_value=True,
    ):
        with patch(
            "custom_components.{{cookiecutter.domain_name}}.async_setup_entry",
            return_value=True,
        ):
            yield


# Here we simulate a successful config flow from the backend.
# Note that we use the `bypass_get_data` fixture here because
# we want the config flow validation to succeed during the test.
async def test_successful_config_flow(hass, bypass_get_data):
"""Test a successful config flow."""
# Initialize a config flow
Expand All @@ -38,11 +57,12 @@ async def test_successful_config_flow(hass, bypass_get_data):
assert result["result"]


# In this case, we want to simulate a failure during the config flow.
# We use the `error_on_get_data` mock instead of `bypass_get_data`
# (note the function parameters) to raise an Exception during
# validation of the input config.
async def test_failed_config_flow(hass, error_on_get_data):
"""Test a failed config flow due to credential validation failure."""
# The logic of this test is the same as `test_successful_config_flow`
# but note the different parameter names for each function, which
# refer to two different fixtures from `conftest.py`
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
Expand All @@ -58,6 +78,7 @@ async def test_failed_config_flow(hass, error_on_get_data):
assert result["errors"] == {"base": "auth"}


# Our config flow also has an options flow, so we must test it as well.
async def test_options_flow(hass):
"""Test an options flow."""
# Create a new MockConfigEntry and add to HASS (we're bypassing config
Expand Down
5 changes: 5 additions & 0 deletions tests/test_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,11 @@
from .const import MOCK_CONFIG


# We can pass fixtures as defined in conftest.py to tell pytest to use the fixture
# for a given test. We can also leverage fixtures and mocks that are available in
# Home Assistant using the pytest_homeassistant_custom_component plugin.
# Assertions allow you to verify that the return value of whatever is on the left
# side of the assertion matches with the right side.
async def test_setup_unload_and_reload_entry(hass, bypass_get_data):
"""Test entry setup and unload."""
# Create a mock entry so we don't have to go through config flow
Expand Down
3 changes: 2 additions & 1 deletion tests/test_switch.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
"""Test integration_blueprint switch."""
from unittest.mock import call, patch

from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON
from homeassistant.const import ATTR_ENTITY_ID
from pytest_homeassistant_custom_component.async_mock import call, patch
from pytest_homeassistant_custom_component.common import MockConfigEntry

from custom_components.integration_blueprint import async_setup_entry
Expand Down

0 comments on commit 5fbf1ab

Please sign in to comment.