From 0826933f78e5626762fbcb95d1ecce35463fa35f Mon Sep 17 00:00:00 2001
From: Daniel Alley
Date: Thu, 26 May 2022 16:31:43 -0400
Subject: [PATCH] Use retain_package_versions to filter out older pkgs at sync
 time

closes #2479
---
 CHANGES/2479.feature                            |   1 +
 pulp_rpm/app/metadata_parsing.py                |  15 +-
 pulp_rpm/app/rpm_version.py                     | 243 ++++++++++++++++++
 pulp_rpm/app/tasks/synchronizing.py             | 156 +++++++----
 .../functional/api/test_retention_policy.py     |  82 +++++-
 pulp_rpm/tests/functional/api/test_sync.py      |  13 +-
 6 files changed, 439 insertions(+), 71 deletions(-)
 create mode 100644 CHANGES/2479.feature
 create mode 100644 pulp_rpm/app/rpm_version.py

diff --git a/CHANGES/2479.feature b/CHANGES/2479.feature
new file mode 100644
index 0000000000..cde1826559
--- /dev/null
+++ b/CHANGES/2479.feature
@@ -0,0 +1 @@
+Using `retain_package_versions` (which requires the "additive" `sync_policy`) now avoids downloading the older packages when syncing with download_policy "on_demand", resulting in much faster and more efficient syncs.
diff --git a/pulp_rpm/app/metadata_parsing.py b/pulp_rpm/app/metadata_parsing.py
index c049f72f18..c0b4197a1f 100644
--- a/pulp_rpm/app/metadata_parsing.py
+++ b/pulp_rpm/app/metadata_parsing.py
@@ -34,19 +34,12 @@ def from_metadata_files(primary_xml_path, filelists_xml_path, other_xml_path):
         parser.other_xml_path = other_xml_path
         return parser
 
-    def count_packages(self):
-        """Count the total number of packages."""
-        # It would be much faster to just read the number in the header of the metadata.
-        # But there's no way to do that, and also we can't necessarily rely on that number because
-        # of duplicates.
-        packages = 0
-
-        def pkgcb(pkg):
-            nonlocal packages
-            packages += 1
+    def for_each_pkg_primary(self, pkgcb):
+        """Execute a callback for each package parsed from primary metadata only.
+
+        "Primary metadata only" means that file lists and changelogs are not parsed.
+        """
         cr.xml_parse_primary(self.primary_xml_path, pkgcb=pkgcb, do_files=False)
-        return packages
 
     def as_iterator(self):
         """Return a package iterator."""
diff --git a/pulp_rpm/app/rpm_version.py b/pulp_rpm/app/rpm_version.py
new file mode 100644
index 0000000000..b7a30a2244
--- /dev/null
+++ b/pulp_rpm/app/rpm_version.py
@@ -0,0 +1,243 @@
+# Sourced from https://github.com/nexB/univers
+#
+# Copyright (c) SAS Institute Inc.
+# Copyright (c) Facebook, Inc. and its affiliates.
+#
+# SPDX-License-Identifier: MIT AND Apache-2.0
+# Version comparison utility extracted from python-rpm-vercmp, further
+# stripped down and significantly modified from the original.
+# Also includes updates merged in from Facebook antlir.
+#
+# Visit https://aboutcode.org and https://github.com/nexB/univers for support and download.
+
+# flake8: noqa
+
+import re
+from typing import NamedTuple
+from typing import Union
+
+
+class RpmVersion(NamedTuple):
+    """
+    Represent an RPM version. It is ordered.
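+
+    Illustrative examples (the epoch dominates the version and release fields):
+
+    >>> assert RpmVersion.from_string("1:1.0-1") > RpmVersion.from_string("2.0-1")
+    >>> assert RpmVersion.from_string("1.0-1") == RpmVersion(0, "1.0", "1")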
+    """
+
+    epoch: int
+    version: str
+    release: str
+
+    def __str__(self, *args, **kwargs):
+        return self.to_string()
+
+    def to_string(self):
+        if self.release:
+            vr = f"{self.version}-{self.release}"
+        else:
+            vr = self.version
+
+        if self.epoch:
+            vr = f"{self.epoch}:{vr}"
+        return vr
+
+    @classmethod
+    def from_string(cls, s):
+        s = s.strip()
+        e, v, r = from_evr(s)
+        return cls(e, v, r)
+
+    def __lt__(self, other):
+        return compare_rpm_versions(self, other) < 0
+
+    def __gt__(self, other):
+        return compare_rpm_versions(self, other) > 0
+
+    def __eq__(self, other):
+        return compare_rpm_versions(self, other) == 0
+
+    def __le__(self, other):
+        return compare_rpm_versions(self, other) <= 0
+
+    def __ge__(self, other):
+        return compare_rpm_versions(self, other) >= 0
+
+
+def from_evr(s):
+    """
+    Return an (E, V, R) tuple given a string, by splitting
+    [e:]version-release into its three possible subcomponents.
+    The epoch defaults to 0 and the release to an empty string if not specified.
+
+    >>> assert from_evr("1:11.13.2.0-1") == (1, "11.13.2.0", "1")
+    >>> assert from_evr("11.13.2.0-1") == (0, "11.13.2.0", "1")
+    """
+    if ":" in s:
+        e, _, vr = s.partition(":")
+    else:
+        e = "0"
+        vr = s
+
+    e = int(e)
+
+    if "-" in vr:
+        v, _, r = vr.partition("-")
+    else:
+        v = vr
+        r = ""
+    return e, v, r
+
+
+def compare_rpm_versions(a: Union[RpmVersion, str], b: Union[RpmVersion, str]) -> int:
+    """
+    Compare two RPM versions ``a`` and ``b`` and return:
+      - 1 if the version of a is newer than b
+      - 0 if the versions match
+      - -1 if the version of a is older than b
+
+    These are the legacy "cmp()" function semantics.
+
+    This implementation is adapted from both this blog post:
+    https://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/
+    and this Apache 2 licensed implementation:
+    https://github.com/sassoftware/python-rpm-vercmp/blob/master/rpm_vercmp/vercmp.py
+
+    For example::
+
+        >>> assert compare_rpm_versions("1.0", "1.1") == -1
+        >>> assert compare_rpm_versions("1.1", "1.0") == 1
+        >>> assert compare_rpm_versions("11.13.2-1", "11.13.2.0-1") == -1
+        >>> assert compare_rpm_versions("11.13.2.0-1", "11.13.2-1") == 1
+    """
+    if isinstance(a, str):
+        a = RpmVersion.from_string(a)
+    if isinstance(b, str):
+        b = RpmVersion.from_string(b)
+    if not isinstance(a, RpmVersion) or not isinstance(b, RpmVersion):
+        raise TypeError(f"{a!r} and {b!r} must be RpmVersion or strings")
+
+    # First compare the epoch, if set. If the epochs are not the same, then
+    # the higher one wins no matter what the rest of the EVR is.
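+    # For instance, "1:1.0" sorts higher than "9.9", because epoch 1 beats the
+    # implicit epoch 0.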
+    if a.epoch != b.epoch:
+        if a.epoch > b.epoch:
+            return 1  # a > b
+        else:
+            return -1  # a < b
+
+    # Epoch is the same; if version + release are the same, we have a match
+    if (a.version == b.version) and (a.release == b.release):
+        return 0  # a == b
+
+    # Compare the version first; if the versions are equal, then compare the release
+    compare_res = vercmp(a.version, b.version)
+    if compare_res != 0:  # a > b || a < b
+        return compare_res
+    else:
+        return vercmp(a.release, b.release)
+
+
+class Vercmp:
+    R_NONALNUMTILDE_CARET = re.compile(rb"^([^a-zA-Z0-9~\^]*)(.*)$")
+    R_NUM = re.compile(rb"^([\d]+)(.*)$")
+    R_ALPHA = re.compile(rb"^([a-zA-Z]+)(.*)$")
+
+    @classmethod
+    def compare(cls, first, second):
+        # RPM versions can only be ASCII; anything else is just ignored
+        first = first.encode("ascii", "ignore")
+        second = second.encode("ascii", "ignore")
+
+        if first == second:
+            return 0
+
+        while first or second:
+            m1 = cls.R_NONALNUMTILDE_CARET.match(first)
+            m2 = cls.R_NONALNUMTILDE_CARET.match(second)
+            m1_head, first = m1.group(1), m1.group(2)
+            m2_head, second = m2.group(1), m2.group(2)
+            if m1_head or m2_head:
+                # Ignore junk at the beginning
+                continue
+
+            # Handle the tilde separator; it sorts before everything else
+            if first.startswith(b"~"):
+                if not second.startswith(b"~"):
+                    return -1
+                first, second = first[1:], second[1:]
+                continue
+            if second.startswith(b"~"):
+                return 1
+
+            # Now look at the caret, which is like the tilde but pointier.
+            if first.startswith(b"^"):
+                # first has a caret but second has ended
+                if not second:
+                    return 1  # first > second
+
+                # first has a caret but second continues on
+                elif not second.startswith(b"^"):
+                    return -1  # first < second
+
+                # strip the ^ and start again
+                first, second = first[1:], second[1:]
+                continue
+
+            # Caret means the version is less... Unless the other version
+            # has ended, then do the exact opposite.
+            if second.startswith(b"^"):
+                return -1 if not first else 1
+
+            # We've run out of characters to compare.
+            # Note: we have to do this after we compare the ~ and ^ madness,
+            # because ~'s and ^'s take precedence.
+            # If we ran to the end of either, we are finished with the loop
+            if not first or not second:
+                break
+
+            # Grab the first completely alpha or completely numeric segment
+            m1 = cls.R_NUM.match(first)
+            if m1:
+                m2 = cls.R_NUM.match(second)
+                if not m2:
+                    # numeric segments are always newer than alpha segments
+                    return 1
+                isnum = True
+            else:
+                m1 = cls.R_ALPHA.match(first)
+                m2 = cls.R_ALPHA.match(second)
+                if not m2:
+                    return -1
+                isnum = False
+
+            m1_head, first = m1.group(1), m1.group(2)
+            m2_head, second = m2.group(1), m2.group(2)
+
+            if isnum:
+                # Throw away any leading zeros - it's a number, right?
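+                # e.g. b"007" and b"7" both become b"7" here and then compare equal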
+                m1_head = m1_head.lstrip(b"0")
+                m2_head = m2_head.lstrip(b"0")
+
+                # Whichever number has more digits wins
+                m1hlen = len(m1_head)
+                m2hlen = len(m2_head)
+                if m1hlen < m2hlen:
+                    return -1
+                if m1hlen > m2hlen:
+                    return 1
+
+            # Same number of chars
+            if m1_head < m2_head:
+                return -1
+            if m1_head > m2_head:
+                return 1
+
+            # Both segments equal
+            continue
+
+        m1len = len(first)
+        m2len = len(second)
+        if m1len == m2len == 0:
+            return 0
+        if m1len != 0:
+            return 1
+        return -1
+
+
+def vercmp(first, second):
+    return Vercmp.compare(first, second)
diff --git a/pulp_rpm/app/tasks/synchronizing.py b/pulp_rpm/app/tasks/synchronizing.py
index 5548791784..1830001167 100644
--- a/pulp_rpm/app/tasks/synchronizing.py
+++ b/pulp_rpm/app/tasks/synchronizing.py
@@ -84,11 +84,12 @@
     parse_defaults,
     parse_modulemd,
 )
-from pulp_rpm.app.kickstart.treeinfo import PulpTreeInfo, TreeinfoData
 from pulp_rpm.app.comps import strdict_to_dict, dict_digest
-from pulp_rpm.app.shared_utils import is_previous_version, get_sha256, urlpath_sanitize
+from pulp_rpm.app.kickstart.treeinfo import PulpTreeInfo, TreeinfoData
 from pulp_rpm.app.metadata_parsing import MetadataParser
+from pulp_rpm.app.shared_utils import is_previous_version, get_sha256, urlpath_sanitize
+from pulp_rpm.app.rpm_version import RpmVersion
 
 import gi
 
@@ -866,16 +867,18 @@ async def parse_repository_metadata(self, repomd, metadata_results):
         # modularity-parsing MUST COME BEFORE package-parsing!
         # The only way to know if a package is 'modular' in a repo is to
         # know that it is referenced in modulemd.
-        modulemd_list = []
+        modulemd_dcs = []
         modulemd_result = metadata_results.get("modules", None)
+        modulemd_list = []
         if modulemd_result:
-            modulemd_list = await self.parse_modules_metadata(modulemd_result)
+            (modulemd_dcs, modulemd_list) = await self.parse_modules_metadata(modulemd_result)
 
         # **Now** we can successfully parse package-metadata
         await self.parse_packages(
             metadata_results["primary"],
             metadata_results["filelists"],
             metadata_results["other"],
+            modulemd_list=modulemd_list,
         )
 
         groups_list = []
@@ -888,7 +891,7 @@ async def parse_repository_metadata(self, repomd, metadata_results):
         await self.parse_advisories(updateinfo_result)
 
         # now send modules and groups down the pipeline since all relations have been set up
-        for modulemd_dc in modulemd_list:
+        for modulemd_dc in modulemd_dcs:
             await self.put(modulemd_dc)
 
         for group_dc in groups_list:
@@ -939,7 +942,7 @@ async def parse_modules_metadata(self, modulemd_result):
         modulemd_names = modulemd_index.get_module_names() or []
         modulemd_all = parse_modulemd(modulemd_names, modulemd_index)
 
-        modulemd_list = []
+        modulemd_dcs = []
 
         # Parsing modules happens all at one time, and from here on no useful work happens.
         # So just report that it finished this stage.
@@ -968,7 +971,7 @@ async def parse_modules_metadata(self, modulemd_result):
                 # dc.content.artifacts are Modulemd artifacts
                 for artifact in dc.content.artifacts:
                     self.nevra_to_module.setdefault(artifact, set()).add(dc)
-                modulemd_list.append(dc)
+                modulemd_dcs.append(dc)
 
         # Parse modulemd default names
         modulemd_default_names = parse_defaults(modulemd_index)
@@ -1002,7 +1005,7 @@ async def parse_modules_metadata(self, modulemd_result):
         for default_content_dc in default_content_dcs:
             await self.put(default_content_dc)
 
-        return modulemd_list
+        return (modulemd_dcs, modulemd_all)
 
     async def parse_packages_components(self, comps_result):
         """Parse packages' components that define how the packages are bundled."""
@@ -1106,26 +1109,38 @@
         return dc_groups
 
-    async def parse_packages(self, primary_xml, filelists_xml, other_xml):
+    async def parse_packages(self, primary_xml, filelists_xml, other_xml, modulemd_list=None):
         """Parse packages from the remote repository."""
         parser = MetadataParser.from_metadata_files(
             primary_xml.path, filelists_xml.path, other_xml.path
         )
 
-        progress_data = {
-            "message": "Parsed Packages",
-            "code": "sync.parsing.packages",
-            "total": parser.count_packages(),
-        }
-
-        async with ProgressReport(**progress_data) as packages_pb:
-            # skip SRPM if defined
-            skip_srpms = "srpm" in self.skip_types and not self.mirror_metadata
-
-            nevras = set()
-            checksums = set()
-            pkgid_warning_triggered = False
-            nevra_warning_triggered = False
+        # skip SRPMs if requested via skip_types
+        skip_srpms = "srpm" in self.skip_types
+        nevras = set()
+        checksums = set()
+        modular_artifact_nevras = set()
+        pkgid_warning_triggered = False
+        nevra_warning_triggered = False
+        num_packages = 0
+
+        for modulemd in modulemd_list or []:
+            modular_artifact_nevras |= set(modulemd[PULP_MODULE_ATTR.ARTIFACTS])
+
+        package_skip_nevras = set()
+        # The repository can contain packages of arbitrary arches, which are not comparable
+        # with each other, e.g.:
+        # {"x86_64": {"glibc": [...]}, "i686": {"glibc": [...]}, "src": {"glibc": [...]}}
+        latest_packages_by_arch_and_name = defaultdict(lambda: defaultdict(list))
+
+        # Perform various checks and potentially filter out unwanted packages.
+        # We parse all of primary.xml first and fail fast if something is wrong.
+        # Collect the set of NEVRAs of any packages we don't want to include.
+        def verification_and_skip_callback(pkg):
+            nonlocal pkgid_warning_triggered
+            nonlocal nevra_warning_triggered
+            nonlocal package_skip_nevras
+            nonlocal latest_packages_by_arch_and_name
+            nonlocal num_packages
 
             ERR_MSG = _(
                 "The repository metadata being synced into Pulp is erroneous in a way that "
             WARN_MSG = _(
                 "The repository metadata being synced into Pulp is erroneous in a way that "
                 "Please read https://github.com/pulp/pulp_rpm/issues/2402 for more details."
            )
 
-            pkg_iterator = parser.as_iterator()
-
-            for pkg in pkg_iterator:
-                if not pkgid_warning_triggered and pkg.pkgId in checksums:
-                    pkgid_warning_triggered = True
-                    if self.mirror_metadata:
-                        raise Exception(ERR_MSG.format("PKGIDs"))
-                    else:
-                        log.warn(WARN_MSG.format("PKGIDs"))
-                if not nevra_warning_triggered and pkg.nevra() in nevras:
-                    nevra_warning_triggered = True
-                    if self.mirror_metadata:
-                        raise Exception(ERR_MSG.format("NEVRAs"))
-                    else:
-                        log.warn(WARN_MSG.format("NEVRAs"))
-                nevras.add(pkg.nevra())
-                checksums.add(pkg.pkgId)
-
-                if skip_srpms and pkg.arch == "src":
-                    continue
+            num_packages += 1
 
+            # Check for packages with duplicate pkgIds
+            if not pkgid_warning_triggered and pkg.pkgId in checksums:
+                pkgid_warning_triggered = True
                 if self.mirror_metadata:
-                    uses_base_url = pkg.location_base
-                    illegal_relative_path = self.is_illegal_relative_path(pkg.location_href)
-
-                    if uses_base_url or illegal_relative_path:
-                        raise ValueError(MIRROR_INCOMPATIBLE_REPO_ERR_MSG)
+                    raise Exception(ERR_MSG.format("PKGIDs"))
+                else:
+                    log.warn(WARN_MSG.format("PKGIDs"))
+            # Check for packages with duplicate NEVRAs
+            if not nevra_warning_triggered and pkg.nevra() in nevras:
+                nevra_warning_triggered = True
+                if self.mirror_metadata:
+                    raise Exception(ERR_MSG.format("NEVRAs"))
+                else:
+                    log.warn(WARN_MSG.format("NEVRAs"))
+
+            pkg_nevra = pkg.nevra()
+
+            nevras.add(pkg_nevra)
+            checksums.add(pkg.pkgId)
+
+            # Check that all packages are within the root of the repo (if in mirror_complete mode)
+            if self.mirror_metadata:
+                uses_base_url = pkg.location_base
+                illegal_relative_path = self.is_illegal_relative_path(pkg.location_href)
+
+                if uses_base_url or illegal_relative_path:
+                    raise ValueError(MIRROR_INCOMPATIBLE_REPO_ERR_MSG)
+
+            # Add any SRPMs to the skip set
+            if skip_srpms and pkg.arch == "src":
+                package_skip_nevras.add(pkg_nevra)
+
+            # Collect the versions of each package so that all but the N highest can be
+            # added to the skip list afterwards. Don't include modular packages in the
+            # EVR comparisons: they may be older (and we don't want to skip them) or
+            # newer (and we don't want them to push out nonmodular packages merely by
+            # existing).
+            if self.repository.retain_package_versions and pkg_nevra not in modular_artifact_nevras:
+                pkg_evr = RpmVersion(pkg.epoch, pkg.version, pkg.release)
+                latest_packages_by_arch_and_name[pkg.arch][pkg.name].append((pkg_evr, pkg_nevra))
+
+        # Ew, callback-based API, gross. The streaming API doesn't support optionally
+        # specifying particular files yet, so we have to use the old way.
+        parser.for_each_pkg_primary(verification_and_skip_callback)
+
+        # Go through the package lists, sort them descending by EVR, ignore the first N,
+        # and then add the remaining ones to the skip list.
+        for arch, packages in latest_packages_by_arch_and_name.items():
+            for name, versions in packages.items():
+                versions.sort(key=lambda p: p[0], reverse=True)
+                for pkg in versions[self.repository.retain_package_versions :]:
+                    (evr, nevra) = pkg
+                    package_skip_nevras.add(nevra)
+
+        del latest_packages_by_arch_and_name
+
+        log.debug(
+            "Skipping {} packages due to retain_package_versions".format(len(package_skip_nevras))
+        )
+        # The progress bar message is slightly misleading here, because we potentially parse
+        # more packages than this.
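+        # Every package is still parsed once by the callback above; the skipped
+        # NEVRAs are just never emitted into the pipeline below.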
+        num_pkgs_to_sync = num_packages - len(package_skip_nevras)
+        progress_data = {
+            "message": "Parsed Packages",
+            "code": "sync.parsing.packages",
+            "total": num_pkgs_to_sync,
+        }
 
+        async with ProgressReport(**progress_data) as packages_pb:
+            for pkg in parser.as_iterator():
+                if package_skip_nevras and pkg.nevra() in package_skip_nevras:
+                    continue
 
                 package = Package(**Package.createrepo_to_dict(pkg))
                 base_url = pkg.location_base or self.remote_url
                 url = urlpath_sanitize(base_url, package.location_href)
diff --git a/pulp_rpm/tests/functional/api/test_retention_policy.py b/pulp_rpm/tests/functional/api/test_retention_policy.py
index 8a7b66792f..5ecef94458 100644
--- a/pulp_rpm/tests/functional/api/test_retention_policy.py
+++ b/pulp_rpm/tests/functional/api/test_retention_policy.py
@@ -19,6 +19,9 @@
 from pulp_rpm.tests.functional.constants import (
     PULP_TYPE_PACKAGE,
     RPM_FIXTURE_SUMMARY,
+    RPM_PACKAGE_COUNT,
+    RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL,
+    RPM_MODULAR_STATIC_FIXTURE_SUMMARY,
 )
 from pulp_rpm.tests.functional.utils import (
     gen_rpm_client,
@@ -43,7 +46,6 @@ def setUpClass(cls):
         cls.client = gen_rpm_client()
         cls.repo_api = RepositoriesRpmApi(cls.client)
         cls.remote_api = RemotesRpmApi(cls.client)
-        delete_orphans()
 
     def test_sync_with_retention(self):
         """Verify functionality with sync.
@@ -60,18 +62,22 @@ def test_sync_with_retention(self):
         6. Assert that repository version is different from the previous one.
         7. Assert the repository version we end with has only one version of each package.
         """
+        delete_orphans()
+
         repo = self.repo_api.create(gen_repo())
         self.addCleanup(self.repo_api.delete, repo.pulp_href)
 
-        remote = self.remote_api.create(gen_rpm_remote())
+        remote = self.remote_api.create(gen_rpm_remote(policy="on_demand"))
         self.addCleanup(self.remote_api.delete, remote.pulp_href)
 
-        self.sync(repository=repo, remote=remote, optimize=False)
+        task = self.sync(repository=repo, remote=remote, optimize=False)
         repo = self.repo_api.read(repo.pulp_href)
 
         # Test that, by default, everything is retained / nothing is tossed out.
         self.assertDictEqual(get_content_summary(repo.to_dict()), RPM_FIXTURE_SUMMARY)
         self.assertDictEqual(get_added_content_summary(repo.to_dict()), RPM_FIXTURE_SUMMARY)
+        # Test that the number of packages processed is correct
+        self.assertEqual(self.get_num_parsed_packages(task), RPM_PACKAGE_COUNT)
 
         # Set the retention policy to retain only 1 version of each package
         repo_data = repo.to_dict()
@@ -79,7 +85,7 @@ def test_sync_with_retention(self):
         self.repo_api.update(repo.pulp_href, repo_data)
         repo = self.repo_api.read(repo.pulp_href)
 
-        self.sync(repository=repo, remote=remote, optimize=False)
+        task = self.sync(repository=repo, remote=remote, optimize=False)
         repo = self.repo_api.read(repo.pulp_href)
 
         # Test that only one version of each package is present
@@ -97,7 +103,65 @@ def test_sync_with_retention(self):
             {"duck": ["0.6", "0.7"], "kangaroo": ["0.2"], "walrus": ["0.71"]},
             versions_for_packages,
         )
-        # TODO: Test that modular RPMs unaffected?
+        # Test that the number of packages processed is correct (not including older versions)
+        self.assertEqual(self.get_num_parsed_packages(task), RPM_PACKAGE_COUNT - 4)
+
+    def test_sync_with_retention_and_modules(self):
+        """Verify functionality with sync.
+
+        Do the following:
+
+        1. Create a repository, and a remote.
+        2. Sync the remote.
+        3. Assert that the correct number of units were added and are present in the repo.
+        4. Change the "retain_package_versions" on the repository to 1 (retain the latest
+           version only).
+        5. Sync the remote one more time.
+        6. Assert that the repository version is the same as the previous one, because
+           the older versions are part of modules, and they should be ignored by the
+           retention policy.
+        """
+        delete_orphans()
+
+        repo = self.repo_api.create(gen_repo())
+        self.addCleanup(self.repo_api.delete, repo.pulp_href)
+
+        remote = self.remote_api.create(
+            gen_rpm_remote(
+                url=RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL,
+                policy="on_demand",
+            )
+        )
+        self.addCleanup(self.remote_api.delete, remote.pulp_href)
+
+        task = self.sync(repository=repo, remote=remote, optimize=False)
+        repo = self.repo_api.read(repo.pulp_href)
+
+        self.addCleanup(delete_orphans)  # TODO: #2587
+
+        # Test that, by default, everything is retained / nothing is tossed out.
+        self.assertDictEqual(
+            get_content_summary(repo.to_dict()), RPM_MODULAR_STATIC_FIXTURE_SUMMARY
+        )
+        self.assertDictEqual(
+            get_added_content_summary(repo.to_dict()), RPM_MODULAR_STATIC_FIXTURE_SUMMARY
+        )
+        # Test that the number of packages processed is correct
+        self.assertEqual(self.get_num_parsed_packages(task), RPM_PACKAGE_COUNT)
+
+        # Set the retention policy to retain only 1 version of each package
+        repo_data = repo.to_dict()
+        repo_data.update({"retain_package_versions": 1})
+        self.repo_api.update(repo.pulp_href, repo_data)
+        repo = self.repo_api.read(repo.pulp_href)
+
+        task = self.sync(repository=repo, remote=remote, optimize=False)
+        repo = self.repo_api.read(repo.pulp_href)
+
+        # Test that no RPMs were removed (and no advisories etc. touched);
+        # it should be the same because the older versions are covered by modules.
+        self.assertDictEqual(get_removed_content_summary(repo.to_dict()), {})
+        # Test that the number of packages processed is correct
+        self.assertEqual(self.get_num_parsed_packages(task), RPM_PACKAGE_COUNT)
 
     def test_mirror_sync_with_retention_fails(self):
         """Verify functionality with sync.
@@ -120,6 +184,12 @@
             self.sync(repository=repo, remote=remote, optimize=False, mirror=True)
         self.assertEqual(exc.code, 400)
 
+    def get_num_parsed_packages(self, task):
+        """Get the number of packages parsed, from the task's progress reports."""
+        for report in task.progress_reports:
+            if report.code == "sync.parsing.packages":
+                return report.total
+
     def versions_for_packages(self, packages):
         """Get a list of versions for each package present in a list of Package dicts.
@@ -169,4 +239,4 @@
             remote=remote.pulp_href, optimize=optimize, mirror=mirror
         )
         sync_response = self.repo_api.sync(repository.pulp_href, repository_sync_data)
-        return monitor_task(sync_response.task).progress_reports
+        return monitor_task(sync_response.task)
diff --git a/pulp_rpm/tests/functional/api/test_sync.py b/pulp_rpm/tests/functional/api/test_sync.py
index 805d35d72e..c1af9dbfc9 100644
--- a/pulp_rpm/tests/functional/api/test_sync.py
+++ b/pulp_rpm/tests/functional/api/test_sync.py
@@ -194,12 +194,7 @@
             self.fail("A task was completed without a failure.")
 
     def test_sync_modular(self):
-        """Sync RPM modular content.
- - This test targets the following issue: - - * `Pulp #5408 `_ - """ + """Sync RPM modular content.""" body = gen_rpm_remote(RPM_MODULAR_FIXTURE_URL) remote = self.remote_api.create(body) @@ -207,6 +202,7 @@ def test_sync_modular(self): self.addCleanup(self.repo_api.delete, repo.pulp_href) self.addCleanup(self.remote_api.delete, remote.pulp_href) + self.addCleanup(delete_orphans) # TODO: #2587 self.assertDictEqual(get_content_summary(repo.to_dict()), RPM_MODULAR_FIXTURE_SUMMARY) self.assertDictEqual(get_added_content_summary(repo.to_dict()), RPM_MODULAR_FIXTURE_SUMMARY) @@ -354,6 +350,8 @@ def test_mutated_packages(self): body = gen_rpm_remote(RPM_SIGNED_FIXTURE_URL) remote = self.remote_api.create(body) + self.addCleanup(self.remote_api.delete, remote.pulp_href) + # sync again repo, remote = self.do_test(repo, remote) @@ -411,6 +409,8 @@ def test_sync_diff_checksum_packages(self): body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL, policy="on_demand") remote = self.remote_api.create(body) + self.addCleanup(self.remote_api.delete, remote.pulp_href) + # sync with SHA256 repo, remote = self.do_test(remote=remote) @@ -981,6 +981,7 @@ def test_sync_modular_static_context(self): self.addCleanup(self.repo_api.delete, repo.pulp_href) self.addCleanup(self.remote_api.delete, remote.pulp_href) + self.addCleanup(delete_orphans) # TODO: #2587 summary = get_content_summary(repo.to_dict()) added = get_added_content_summary(repo.to_dict())