From f5782b7499ff6ffc00fbf694b10fcbe3dddb7ff8 Mon Sep 17 00:00:00 2001 From: Neha Oudin <17551419+Gu1nness@users.noreply.github.com> Date: Wed, 28 Aug 2024 09:18:25 +0200 Subject: [PATCH] chore: re-enable tests (#463) --- lib/charms/mongodb/v0/set_status.py | 4 +- scripts/gen_charm_internal_version.sh | 23 ----- .../upgrade/test_sharding_rollback.py | 53 ++++++---- .../upgrade/test_sharding_upgrade.py | 3 - tests/unit/test_set_status.py | 97 +++++++++++++++++++ tox.ini | 4 +- 6 files changed, 136 insertions(+), 48 deletions(-) delete mode 100644 scripts/gen_charm_internal_version.sh diff --git a/lib/charms/mongodb/v0/set_status.py b/lib/charms/mongodb/v0/set_status.py index 98d1dabac..de6734ded 100644 --- a/lib/charms/mongodb/v0/set_status.py +++ b/lib/charms/mongodb/v0/set_status.py @@ -22,7 +22,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 3 +LIBPATCH = 4 AUTH_FAILED_CODE = 18 UNAUTHORISED_CODE = 13 @@ -153,7 +153,7 @@ def is_unit_status_ready_for_upgrade(self) -> bool: if isinstance(current_status, ActiveStatus): return True - if not isinstance(current_status, WaitingStatus): + if not isinstance(current_status, BlockedStatus): return False if status_message and "is not up-to date with config-server" in status_message: diff --git a/scripts/gen_charm_internal_version.sh b/scripts/gen_charm_internal_version.sh deleted file mode 100644 index 50ccd5a48..000000000 --- a/scripts/gen_charm_internal_version.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh - -# Copyright 2024 Canonical Ltd. -# See LICENSE file for licensing details. - -# This file consistently generates a hash, whether there's uncommited code or not -# and writes it into the `charm_internal_version` file - -if git --version > /dev/null; then - # Compute base files - BASE=$(git ls-files -s) - # Compute diff files - DIFF=$(git diff --raw) - # Compute staged files - STAGED=$(git diff --raw --staged) - - HASH=$(echo $BASE $DIFF $STAGED | git hash-object --stdin | cut -c 1-8) - echo $HASH > charm_internal_version - echo "Hash for this build is ${HASH}" -else - echo "Git is not installed" - exit 1 -fi diff --git a/tests/integration/upgrade/test_sharding_rollback.py b/tests/integration/upgrade/test_sharding_rollback.py index 93802f56b..929948119 100644 --- a/tests/integration/upgrade/test_sharding_rollback.py +++ b/tests/integration/upgrade/test_sharding_rollback.py @@ -2,12 +2,13 @@ # Copyright 2024 Canonical Ltd. # See LICENSE file for licensing details. 
+import asyncio from pathlib import Path import pytest from pytest_operator.plugin import OpsTest -from ..helpers import find_unit +from ..helpers import find_unit, wait_for_mongodb_units_blocked from ..sharding_tests.helpers import deploy_cluster_components, integrate_cluster from ..sharding_tests.writes_helpers import ( SHARD_ONE_DB_NAME, @@ -32,7 +33,6 @@ @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with upgrade bug fixed") async def test_build_and_deploy(ops_test: OpsTest) -> None: """Build deploy, and integrate, a sharded cluster.""" num_units_cluster_config = { @@ -54,7 +54,6 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None: @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with upgrade bug fixed") async def test_rollback_on_config_server( ops_test: OpsTest, continuous_writes_to_shard_one, continuous_writes_to_shard_two ) -> None: @@ -75,7 +74,7 @@ async def test_rollback_on_config_server( # await ops_test.model.applications[CONFIG_SERVER_APP_NAME].refresh( # channel="6/edge", switch="ch:mongodb" # ) - await refresh_with_juju(ops_test, CONFIG_SERVER_APP_NAME, "6/stable") + await refresh_with_juju(ops_test, CONFIG_SERVER_APP_NAME, "6/edge") # verify no writes were skipped during upgrade/rollback process shard_one_expected_writes = await stop_continous_writes( @@ -102,6 +101,10 @@ async def test_rollback_on_config_server( shard_two_actual_writes == shard_two_expected_writes["number"] ), "continuous writes to shard two failed during upgrade" + await ops_test.model.wait_for_idle( + apps=CLUSTER_COMPONENTS, status="active", timeout=1000, idle_period=20 + ) + # after all shards have upgraded, verify that the balancer has been turned back on # TODO implement this check once we have implemented the post-cluster-upgrade code DPE-4143 @@ -109,7 +112,6 @@ async def test_rollback_on_config_server( @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with upgrade bug fixed") async def test_rollback_on_shard_and_config_server( ops_test: OpsTest, continuous_writes_to_shard_one, continuous_writes_to_shard_two ) -> None: @@ -117,21 +119,36 @@ async def test_rollback_on_shard_and_config_server( new_charm = await ops_test.build_charm(".") await run_upgrade_sequence(ops_test, CONFIG_SERVER_APP_NAME, new_charm=new_charm) - shard_unit = await find_unit(ops_test, leader=True, app_name=SHARD_ONE_APP_NAME) - action = await shard_unit.run_action("pre-upgrade-check") - await action.wait() - assert action.status == "completed", "pre-upgrade-check failed, expected to succeed." 
+ with open("charm_internal_version", mode="r") as fd: + revision = fd.read().strip() + + # Wait for statuses to settle down + asyncio.gather( + wait_for_mongodb_units_blocked(ops_test, SHARD_ONE_APP_NAME), + wait_for_mongodb_units_blocked(ops_test, SHARD_TWO_APP_NAME), + ops_test.model.wait_for_idle( + apps=[CONFIG_SERVER_APP_NAME], + timeout=1000, + idle_period=20, + status=f"Waiting for shards to upgrade/downgrade to revision {revision}-locally built.", + ), + ) - # TODO: Use this when https://github.com/juju/python-libjuju/issues/1086 is fixed - # await ops_test.model.applications[SHARD_ONE_APP_NAME].refresh( - # channel="6/edge", switch="ch:mongodb" - # ) - await refresh_with_juju(ops_test, SHARD_ONE_APP_NAME, "6/stable") - await ops_test.model.wait_for_idle( - apps=[CONFIG_SERVER_APP_NAME], timeout=1000, idle_period=120 + await run_upgrade_sequence(ops_test, SHARD_ONE_APP_NAME, new_charm=new_charm) + + # Wait for statuses to settle down + asyncio.gather( + wait_for_mongodb_units_blocked(ops_test, SHARD_TWO_APP_NAME), + ops_test.model.wait_for_idle(apps=[SHARD_ONE_APP_NAME], timeout=1000, idle_period=20), + ops_test.model.wait_for_idle( + apps=[CONFIG_SERVER_APP_NAME], + timeout=1000, + idle_period=20, + status=f"Waiting for shards to upgrade/downgrade to revision {revision}-locally built.", + ), ) - await run_upgrade_sequence(ops_test, CONFIG_SERVER_APP_NAME, channel="6/edge") + await refresh_with_juju(ops_test, CONFIG_SERVER_APP_NAME, channel="6/edge") # verify no writes were skipped during upgrade process shard_one_expected_writes = await stop_continous_writes( @@ -163,7 +180,7 @@ async def test_rollback_on_shard_and_config_server( async def refresh_with_juju(ops_test: OpsTest, app_name: str, channel: str) -> None: - refresh_cmd = f"refresh {app_name} --channel {channel} --switch ch:mongodb" + refresh_cmd = f"refresh {app_name} --model {ops_test.model.info.name} --channel {channel} --switch ch:mongodb" await ops_test.juju(*refresh_cmd.split()) diff --git a/tests/integration/upgrade/test_sharding_upgrade.py b/tests/integration/upgrade/test_sharding_upgrade.py index 10ca2b655..1356519e5 100644 --- a/tests/integration/upgrade/test_sharding_upgrade.py +++ b/tests/integration/upgrade/test_sharding_upgrade.py @@ -37,7 +37,6 @@ @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with new charm internal version") async def test_build_and_deploy(ops_test: OpsTest) -> None: """Build deploy, and integrate, a sharded cluster.""" num_units_cluster_config = { @@ -59,7 +58,6 @@ async def test_build_and_deploy(ops_test: OpsTest) -> None: @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with new charm internal version") async def test_upgrade( ops_test: OpsTest, continuous_writes_to_shard_one, continuous_writes_to_shard_two ) -> None: @@ -115,7 +113,6 @@ async def test_upgrade( @pytest.mark.runner(["self-hosted", "linux", "X64", "jammy", "large"]) @pytest.mark.group(1) @pytest.mark.abort_on_fail -@pytest.mark.skip("Need a new version published with new charm internal version") async def test_pre_upgrade_check_failure(ops_test: OpsTest) -> None: """Verify that the pre-upgrade check fails if there is a problem with one of the shards.""" # Disable network on a replicas prior to integration. 
diff --git a/tests/unit/test_set_status.py b/tests/unit/test_set_status.py
index f89cb658f..5887e6226 100644
--- a/tests/unit/test_set_status.py
+++ b/tests/unit/test_set_status.py
@@ -4,7 +4,9 @@
 from unittest import mock
 from unittest.mock import patch
 
+from ops.model import ActiveStatus, BlockedStatus, StatusBase, WaitingStatus
 from ops.testing import Harness
+from parameterized import parameterized
 
 from charm import MongodbOperatorCharm
 
@@ -42,3 +44,98 @@ def test_are_all_units_ready_for_upgrade(self) -> None:
         self.harness.charm.model._backend = run_mock
 
         assert not self.harness.charm.status.are_all_units_ready_for_upgrade()
+
+    @parameterized.expand(
+        [
+            [ActiveStatus(), True],
+            [BlockedStatus("is not up-to date with config-server"), True],
+            [BlockedStatus("Wrong status"), False],
+            [WaitingStatus("tests status"), False],
+        ],
+    )
+    def test_is_unit_status_ready_for_upgrade(self, status: StatusBase, expected: bool) -> None:
+        """Tests different cases of statuses for is_unit_status_ready_for_upgrade."""
+        self.harness.charm.unit.status = status
+
+        assert self.harness.charm.status.is_unit_status_ready_for_upgrade() == expected
+
+    @parameterized.expand(
+        [
+            [BlockedStatus("Invalid"), ActiveStatus(), ActiveStatus(), ActiveStatus(), 0],
+            [WaitingStatus("Waiting"), ActiveStatus(), ActiveStatus(), ActiveStatus(), 0],
+            [ActiveStatus(), BlockedStatus("Invalid"), ActiveStatus(), ActiveStatus(), 1],
+            [ActiveStatus(), WaitingStatus("Waiting"), ActiveStatus(), ActiveStatus(), 1],
+            [ActiveStatus(), None, BlockedStatus("Invalid"), ActiveStatus(), 2],
+            [ActiveStatus(), None, WaitingStatus("Waiting"), ActiveStatus(), 2],
+            [ActiveStatus(), None, None, BlockedStatus("Invalid"), 3],
+            [ActiveStatus(), None, None, WaitingStatus("Waiting"), 3],
+            [ActiveStatus(), None, None, None, 0],
+            [ActiveStatus(), ActiveStatus(), ActiveStatus(), ActiveStatus(), 0],
+        ]
+    )
+    def test_prioritize_status(
+        self,
+        mongodb_status: StatusBase,
+        shard_status: StatusBase | None,
+        config_server_status: StatusBase | None,
+        pbm_status: StatusBase | None,
+        expected_index: int,
+    ):
+        """Tests different cases of statuses for prioritize_statuses."""
+        statuses = (mongodb_status, shard_status, config_server_status, pbm_status)
+        assert self.harness.charm.status.prioritize_statuses(statuses) == statuses[expected_index]
+
+    @parameterized.expand(
+        [
+            [
+                False,
+                True,
+                ActiveStatus(),
+                BlockedStatus(
+                    "Relation to mongos not supported, config role must be config-server"
+                ),
+            ],
+            [
+                False,
+                False,
+                ActiveStatus(),
+                BlockedStatus(
+                    "Relation to mongos not supported, config role must be config-server"
+                ),
+            ],
+            [
+                True,
+                False,
+                ActiveStatus(),
+                BlockedStatus(
+                    "Relation to s3-integrator is not supported, config role must be config-server"
+                ),
+            ],
+            [True, True, None, None],
+            [True, True, ActiveStatus(), ActiveStatus()],
+            [True, True, BlockedStatus(""), BlockedStatus("")],
+            [True, True, WaitingStatus(""), WaitingStatus("")],
+        ]
+    )
+    def test_get_invalid_integration_status(
+        self,
+        mongos_integration: bool,
+        valid_s3_integration: bool,
+        mismatched_revision_status: StatusBase | None,
+        expected_status: StatusBase | None,
+    ):
+        """Tests different cases of statuses for get_invalid_integration_status."""
+        get_mismatched_revision_mock = mock.Mock()
+        get_mismatched_revision_mock.return_value = mismatched_revision_status
+
+        mongos_integration_mock = mock.Mock()
+        mongos_integration_mock.return_value = mongos_integration
+
+        valid_s3_integration_mock = mock.Mock()
+        valid_s3_integration_mock.return_value = valid_s3_integration
+
+        self.harness.charm.get_cluster_mismatched_revision_status = get_mismatched_revision_mock
+        self.harness.charm.cluster.is_valid_mongos_integration = mongos_integration_mock
+        self.harness.charm.backups.is_valid_s3_integration = valid_s3_integration_mock
+
+        assert self.harness.charm.status.get_invalid_integration_status() == expected_status
diff --git a/tox.ini b/tox.ini
index e3f515994..d23137996 100644
--- a/tox.ini
+++ b/tox.ini
@@ -31,8 +31,8 @@ allowlist_externals =
     mv
     rm
 commands_pre =
+    python -c 'import pathlib; import subprocess; git_hash=subprocess.run(["git", "describe", "--always", "--dirty"], capture_output=True, check=True, encoding="utf-8").stdout; pathlib.Path("charm_internal_version").write_text(git_hash)'
     poetry export --only main,charm-libs --output requirements.txt
-    sh scripts/gen_charm_internal_version.sh
 commands =
     build-production: charmcraft pack {posargs}
     build-dev: charmcraftcache pack {posargs}
@@ -73,8 +73,8 @@ allowlist_externals =
     {[testenv]allowlist_externals}
     rm
 commands_pre =
+    python -c 'import pathlib; import subprocess; git_hash=subprocess.run(["git", "describe", "--always", "--dirty"], capture_output=True, check=True, encoding="utf-8").stdout; pathlib.Path("charm_internal_version").write_text(git_hash)'
     poetry install --only main,charm-libs,unit
-    sh scripts/gen_charm_internal_version.sh
 commands =
     poetry run coverage run --source={[vars]src_path},{[vars]lib_path} \
         -m pytest -v --tb native -s {posargs} {[vars]tests_path}/unit