[DPE-3944] Add tests for upgrades (#399)
## Issue
Tests for upgrades are missing.

## Solution
Add tests for upgrades

The integration tests were written to exercise upgrading from the latest stable release, but since stable does not yet support upgrades, these tests cannot run (hence the `pytest.skip`).
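
The skip guard itself is not visible in the hunks below. A minimal sketch of the kind of guard the message describes, with an illustrative channel check (the channel name and helper are assumptions, not code from this commit):

```python
import pytest

# Assumption: upgrades only work from channels that ship the upgrade machinery.
UPGRADE_CAPABLE_CHANNELS = {"6/edge"}  # illustrative, not from this commit


def skip_unless_channel_can_upgrade(channel: str) -> None:
    """Skip the calling test when the deployed channel cannot upgrade."""
    if channel not in UPGRADE_CAPABLE_CHANNELS:
        pytest.skip(f"channel {channel!r} does not support upgrades yet")
```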
MiaAltieri committed Apr 17, 2024
1 parent 1d4469c commit d563269
Showing 3 changed files with 125 additions and 11 deletions.
18 changes: 18 additions & 0 deletions tests/integration/test_charm.py
@@ -17,6 +17,8 @@
from pytest_operator.plugin import OpsTest
from tenacity import RetryError

from config import Config

from .ha_tests.helpers import kill_unit_process
from .helpers import (
PORT,
@@ -57,6 +59,22 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None:
await ops_test.model.wait_for_idle()


@pytest.mark.group(1)
async def test_consistency_between_workload_and_metadata(ops_test: OpsTest):
"""Verifies that the dependencies in the charm version are accurate."""
# retrieve current version
app_name = await get_app_name(ops_test)
leader_unit = await find_unit(ops_test, leader=True, app_name=app_name)
password = await get_password(ops_test, app_name=app_name)
client = MongoClient(unit_uri(leader_unit.public_address, password, app_name))
# version has format x.y.z-a
mongod_version = client.server_info()["version"].split("-")[0]

assert (
mongod_version == Config.DEPENDENCIES["mongod_service"]["version"]
), f"Version of mongod running does not match dependency matrix, update DEPENDENCIES in src/config.py to {mongod_version}"


@pytest.mark.group(1)
@pytest.mark.abort_on_fail
async def test_status(ops_test: OpsTest) -> None:
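For context, the test above reads the expected workload version from `Config.DEPENDENCIES`. A sketch of the assumed shape of that matrix in `src/config.py` (the exact fields and version value are illustrative, not from this commit):

```python
# Assumed shape of the dependency matrix read by the test above; the fields
# and the version value here are illustrative, not taken from this commit.
DEPENDENCIES = {
    "mongod_service": {
        "dependencies": {},
        "name": "mongod",
        "upgrade_supported": "^6.0",
        "version": "6.0.6",  # must match mongod's reported x.y.z version
    },
}
```
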
51 changes: 42 additions & 9 deletions tests/integration/upgrade/test_upgrade.py
@@ -3,7 +3,6 @@
# See LICENSE file for licensing details.

import logging
import os

import pytest
from pytest_operator.plugin import OpsTest
@@ -17,12 +16,15 @@
MEDIAN_REELECTION_TIME = 12


@pytest.fixture()
async def continuous_writes(ops_test: OpsTest):
"""Starts continuous write operations to MongoDB for test and clears writes at end of test."""
await ha_helpers.start_continous_writes(ops_test, 1)
yield
await ha_helpers.clear_db_writes(ops_test)


@pytest.mark.group(1)
@pytest.mark.skipif(
os.environ.get("PYTEST_SKIP_DEPLOY", False),
reason="skipping deploy, model expected to be provided.",
)
@pytest.mark.abort_on_fail
async def test_build_and_deploy(ops_test: OpsTest) -> None:
"""Build and deploy one unit of MongoDB."""
# it is possible for users to provide their own cluster for testing. Hence check if there
@@ -31,9 +33,40 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None:
if app_name:
return await check_or_scale_app(ops_test, app_name)

my_charm = await ops_test.build_charm(".")
await ops_test.model.deploy(my_charm, num_units=3)
await ops_test.model.wait_for_idle()
# TODO: When the `6/stable` track supports upgrades, deploy and test that revision instead.
await ops_test.model.deploy("mongodb", channel="edge", num_units=3)

await ops_test.model.wait_for_idle(
apps=["mongodb"], status="active", timeout=1000, idle_period=120
)
app_name = await get_app_name(ops_test)


@pytest.mark.group(1)
async def test_upgrade(ops_test: OpsTest, continuous_writes) -> None:
"""Verifies that the upgrade can run successfully."""
app_name = await get_app_name(ops_test)
leader_unit = await find_unit(ops_test, leader=True, app_name=app_name)
logger.info("Calling pre-upgrade-check")
action = await leader_unit.run_action("pre-upgrade-check")
await action.wait()

await ops_test.model.wait_for_idle(
apps=[app_name], status="active", timeout=1000, idle_period=120
)

new_charm = await ops_test.build_charm(".")
app_name = await get_app_name(ops_test)
await ops_test.model.applications[app_name].refresh(path=new_charm)
await ops_test.model.wait_for_idle(
apps=[app_name], status="active", timeout=1000, idle_period=120
)
# verify that the cluster is actually correctly configured after upgrade

# verify that no writes were skipped
total_expected_writes = await ha_helpers.stop_continous_writes(ops_test, app_name=app_name)
actual_writes = await ha_helpers.count_writes(ops_test, app_name=app_name)
assert total_expected_writes["number"] == actual_writes


@pytest.mark.group(1)
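The write-continuity check above relies on helpers from `ha_tests.helpers` that are outside this diff. A self-contained sketch of the underlying pattern (hypothetical function names; the real helpers run asynchronously and differ in detail):

```python
# Hypothetical sketch of the continuous-writes pattern; the real helpers in
# tests/integration/ha_tests/helpers.py are not shown in this commit.
from pymongo import MongoClient


def write_n_documents(uri: str, n: int) -> int:
    """Insert n monotonically numbered documents and return the expected count."""
    client = MongoClient(uri)
    collection = client["test_db"]["test_collection"]
    for number in range(n):
        collection.insert_one({"number": number})
    client.close()
    return n


def count_writes(uri: str) -> int:
    """Count the documents that actually landed, for comparison with the total."""
    client = MongoClient(uri)
    count = client["test_db"]["test_collection"].count_documents({})
    client.close()
    return count
```
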
67 changes: 65 additions & 2 deletions tests/unit/test_upgrade.py
@@ -2,9 +2,11 @@
# See LICENSE file for licensing details.
import unittest
from unittest import mock
from unittest.mock import patch
from unittest.mock import MagicMock, patch

from ops.model import ActiveStatus, BlockedStatus
from charms.data_platform_libs.v0.upgrade import ClusterNotReadyError
from charms.operator_libs_linux.v1 import snap
from ops.model import ActiveStatus, BlockedStatus, MaintenanceStatus
from ops.testing import Harness

from charm import MongodbOperatorCharm
@@ -18,6 +20,7 @@ def setUp(self, *unused):
self.addCleanup(self.harness.cleanup)
self.harness.begin()
self.peer_rel_id = self.harness.add_relation("database-peers", "database-peers")
self.peer_rel_id = self.harness.add_relation("upgrade", "upgrade")

@patch_network_get(private_address="1.1.1.1")
@patch("charms.mongodb.v0.upgrade.MongoDBConnection")
@@ -67,3 +70,63 @@ def test_is_replica_set_able_read_write(self, is_excepted_write_on_replica, conn
# case 2: writes are present on secondaries
is_excepted_write_on_replica.return_value = True
assert self.harness.charm.upgrade.is_replica_set_able_read_write()

@patch_network_get(private_address="1.1.1.1")
@patch("charm.MongoDBConnection")
def test_build_upgrade_stack(self, connection):
"""Tests that build upgrade stack puts the primary unit at the bottom of the stack."""
rel_id = self.harness.charm.model.get_relation("database-peers").id
self.harness.add_relation_unit(rel_id, "mongodb/1")
connection.return_value.__enter__.return_value.primary.return_value = "1.1.1.1"
assert self.harness.charm.upgrade.build_upgrade_stack() == [0, 1]

@patch_network_get(private_address="1.1.1.1")
@patch("charms.mongodb.v0.upgrade.Retrying")
@patch("charm.MongoDBUpgrade.is_excepted_write_on_replica")
@patch("charm.MongodbOperatorCharm.restart_charm_services")
@patch("charm.MongoDBConnection")
@patch("charms.mongodb.v0.upgrade.MongoDBConnection")
@patch("charm.MongodbOperatorCharm.install_snap_packages")
@patch("charm.MongodbOperatorCharm.stop_charm_services")
@patch("charm.MongoDBUpgrade.post_upgrade_check")
def test_on_upgrade_granted(
self,
post_upgrade_check,
stop_charm_services,
install_snap_packages,
connection_1,
connection_2,
restart,
is_excepted_write_on_replica,
retrying,
):
# upgrades need a peer relation to proceed
rel_id = self.harness.charm.model.get_relation("database-peers").id
self.harness.add_relation_unit(rel_id, "mongodb/1")

# case 1: fails to install snap_packages
install_snap_packages.side_effect = snap.SnapError
mock_event = MagicMock()
self.harness.charm.upgrade._on_upgrade_granted(mock_event)
restart.assert_not_called()

# case 2: post_upgrade_check fails
install_snap_packages.side_effect = None
# Retrying is patched above, so the failed check is not retried
post_upgrade_check.side_effect = ClusterNotReadyError(
"post-upgrade check failed and cannot safely upgrade",
cause="Cluster cannot read/write",
)
mock_event = MagicMock()
self.harness.charm.upgrade._on_upgrade_granted(mock_event)
restart.assert_called()
self.assertTrue(isinstance(self.harness.charm.unit.status, BlockedStatus))

# case 3: everything works
install_snap_packages.side_effect = None
is_excepted_write_on_replica.return_value = True
post_upgrade_check.side_effect = None
mock_event = MagicMock()
self.harness.charm.upgrade._on_upgrade_granted(mock_event)
restart.assert_called()
self.assertTrue(isinstance(self.harness.charm.unit.status, MaintenanceStatus))
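
The three cases above imply a handler with roughly the following control flow. This is inferred from the assertions and the patched names, not the charm's actual implementation:

```python
# Inferred sketch of the handler under test; the names mirror the patches
# above, but the real code in charms.mongodb.v0.upgrade differs in detail.
from charms.data_platform_libs.v0.upgrade import ClusterNotReadyError
from charms.operator_libs_linux.v1 import snap
from ops.model import BlockedStatus, MaintenanceStatus


def _on_upgrade_granted(self, event) -> None:
    self.charm.unit.status = MaintenanceStatus("upgrading snap")
    self.charm.stop_charm_services()
    try:
        self.charm.install_snap_packages()  # case 1: raises snap.SnapError
    except snap.SnapError:
        return  # restart_charm_services is never reached

    self.charm.restart_charm_services()
    try:
        self.post_upgrade_check()  # case 2: raises ClusterNotReadyError
    except ClusterNotReadyError:
        self.charm.unit.status = BlockedStatus("post-upgrade check failed")
        return
    # case 3: the unit is left in MaintenanceStatus until the cluster settles
```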
