From 7d24e7510d3e7e74e86dc9fdedc44ceec20b329c Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:18:59 +0100 Subject: [PATCH 01/92] added patch command for subworkflows --- nf_core/subworkflows/patch.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 nf_core/subworkflows/patch.py diff --git a/nf_core/subworkflows/patch.py b/nf_core/subworkflows/patch.py new file mode 100644 index 0000000000..3c8b3d5e4d --- /dev/null +++ b/nf_core/subworkflows/patch.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.patch import ComponentPatch + +log = logging.getLogger(__name__) + + +class SubworkflowPatch(ComponentPatch): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(pipeline_dir, "subworkflows", remote_url, branch, no_pull, installed_by) From 86e3e2f32dc235879340730fbb0f9bc17b6a4c72 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:29:37 +0100 Subject: [PATCH 02/92] forgot to import patch in init --- nf_core/subworkflows/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py index 88e8a09388..8e3c85a271 100644 --- a/nf_core/subworkflows/__init__.py +++ b/nf_core/subworkflows/__init__.py @@ -3,5 +3,6 @@ from .install import SubworkflowInstall from .lint import SubworkflowLint from .list import SubworkflowList +from .patch import SubworkflowPatch from .remove import SubworkflowRemove from .update import SubworkflowUpdate From 26b39c2f336db7d31ccfa421c34f1a2137dd6640 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 12:58:13 +0100 Subject: [PATCH 03/92] added files for the tests --- nf_core/__main__.py | 35 +++++++++++++++++++++++++++++++++++ tests/test_subworkflows.py | 9 +++++++++ 2 files changed, 44 insertions(+) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index d6f6077be9..c372e88973 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1684,6 
+1684,41 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): log.error(e) sys.exit(1) +# nf-core subworkflows patch +@subworkflows.command("patch") +@click.pass_context +@click.argument("tool", type=str, required=False, metavar=" or ") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-r", "--remove", is_flag=True, default=False) +def subworkflows_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a subworkflow + + Checks if a subworkflow has been modified locally and creates a patch file + describing how the module has changed from the remote version + """ + from nf_core.subworkflows import SubworkflowPatch + + try: + subworkflow_patch = SubworkflowPatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + subworkflow_patch.remove(tool) + else: + subworkflow_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) # nf-core subworkflows remove @subworkflows.command("remove") diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 6163faa7a9..5a781cd871 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -138,6 +138,15 @@ def tearDown(self): test_subworkflows_list_remote, test_subworkflows_list_remote_gitlab, ) + from .subworkflows.patch import( # type: ignore[misc] + test_create_patch_change, + test_create_patch_no_change, + test_create_patch_try_apply_failed, + test_create_patch_try_apply_successful, + test_create_patch_update_fail, + test_create_patch_update_success, + test_remove_patch, + ) from .subworkflows.remove import ( # type: ignore[misc] test_subworkflows_remove_included_subworkflow, test_subworkflows_remove_one_of_two_subworkflow, From 015a61be7ae0d0a59c98615a251bd982c14e777a Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 
2024 13:37:46 +0100 Subject: [PATCH 04/92] created draft for test file --- tests/subworkflows/patch.py | 77 +++++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 tests/subworkflows/patch.py diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py new file mode 100644 index 0000000000..c2d7cd97f2 --- /dev/null +++ b/tests/subworkflows/patch.py @@ -0,0 +1,77 @@ +import os + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + +""" +Test the 'nf-core subworkflows patch' command +""" + +def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_stats_samtools") + samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") + samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") + samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") + samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") + samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") + + + if modify_subworkflow: + # Modify the subworkflow + subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + modify_subworkflow(subworkflow_path / "main.nf") + + +def modify_subworkflow(path): + """Modify a file to test patch creation""" + with 
open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ val(meta), path(fasta) ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ val(meta), path(fasta) ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + +def test_create_patch_change(self): + """Test creating a patch when there is a change to the module""" + +def test_create_patch_no_change(self): + """Test creating a patch when there is no change to the subworkflow""" + # Try creating a patch file + # Check that no patch file has been added to the directory + +def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + +def test_create_patch_try_apply_successful(self): + """Test creating a patch file and applying it to a new version of the the files""" + +def test_create_patch_update_fail(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + +def test_create_patch_update_success (self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + +def test_remove_patch(self): + """Test creating a patch when there is no change to the subworkflow""" From 563e2326fb063603508b67de6205998004fac9e5 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 13:41:28 +0100 Subject: [PATCH 05/92] ruff format --- nf_core/__main__.py | 2 ++ tests/subworkflows/patch.py | 11 +++++++++-- tests/test_subworkflows.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index c372e88973..3468f74e52 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1684,6 +1684,7 @@ def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha): log.error(e) sys.exit(1) + # 
nf-core subworkflows patch @subworkflows.command("patch") @click.pass_context @@ -1720,6 +1721,7 @@ def subworkflows_patch(ctx, tool, dir, remove): log.error(e) sys.exit(1) + # nf-core subworkflows remove @subworkflows.command("remove") @click.pass_context diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index c2d7cd97f2..b340d27fed 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -18,6 +18,7 @@ Test the 'nf-core subworkflows patch' command """ + def setup_patch(self, pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") @@ -28,7 +29,6 @@ def setup_patch(self, pipeline_dir, modify_subworkflow): samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") - if modify_subworkflow: # Modify the subworkflow subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") @@ -48,24 +48,30 @@ def modify_subworkflow(path): with open(path, "w") as fh: fh.writelines(lines) + def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" + def test_create_patch_no_change(self): """Test creating a patch when there is no change to the subworkflow""" # Try creating a patch file # Check that no patch file has been added to the directory + def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" + def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" + def test_create_patch_update_fail(self): """Test creating a patch file and updating a subworkflow when there is a diff conflict""" -def 
test_create_patch_update_success (self): + +def test_create_patch_update_success(self): """ Test creating a patch file and the updating the subworkflow @@ -73,5 +79,6 @@ def test_create_patch_update_success (self): but uses higher level api """ + def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 5a781cd871..6a58473e0b 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -138,7 +138,7 @@ def tearDown(self): test_subworkflows_list_remote, test_subworkflows_list_remote_gitlab, ) - from .subworkflows.patch import( # type: ignore[misc] + from .subworkflows.patch import ( # type: ignore[misc] test_create_patch_change, test_create_patch_no_change, test_create_patch_try_apply_failed, From 31505ab1ae67254a81537950a00bf12b846049ad Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 14:16:11 +0100 Subject: [PATCH 06/92] cleaning up --- tests/subworkflows/patch.py | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index b340d27fed..c34711ba07 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -1,18 +1,5 @@ import os - -import pytest - -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall - -from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, - GITLAB_URL, - with_temporary_folder, -) +from pathlib import Path """ Test the 'nf-core subworkflows patch' command @@ -22,12 +9,6 @@ def setup_patch(self, pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - sub_subworkflow_path = os.path.join(self.subworkflow_install.dir, 
"subworkflows", "nf-core", "bam_stats_samtools") - samtools_index_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "index") - samtools_sort_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "sort") - samtools_stats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "stats") - samtools_idxstats_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "idxstats") - samtools_flagstat_path = os.path.join(self.subworkflow_install.dir, "modules", "nf-core", "samtools", "flagstat") if modify_subworkflow: # Modify the subworkflow From 4d0dc8532d6a5975531a9b9a47f06105762eeddc Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 14:59:23 +0100 Subject: [PATCH 07/92] added tests --- tests/subworkflows/patch.py | 149 ++++++++++++++++++++++++++++++++++-- 1 file changed, 142 insertions(+), 7 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index c34711ba07..cbb248e7e2 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -1,5 +1,28 @@ import os +import tempfile from pathlib import Path +from unittest import mock + + +import pytest + +from nf_core.modules.modules_json import ModulesJson +from nf_core.subworkflows.install import SubworkflowInstall +import nf_core.subworkflows +import nf_core.components.components_command + + +from ..utils import ( + GITLAB_BRANCH_TEST_BRANCH, + GITLAB_REPO, + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + with_temporary_folder, +) + +# TODO: #Change this for the correct SUCCEED_SHA +SUCCEED_SHA = "????" 
""" Test the 'nf-core subworkflows patch' command @@ -30,29 +53,123 @@ def modify_subworkflow(path): fh.writelines(lines) -def test_create_patch_change(self): +def test_create_patch_no_change(self): """Test creating a patch when there is a change to the module""" + setup_patch(self.pipeline_dir, False) + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch("bam_sort_stats_samtools") -def test_create_patch_no_change(self): + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + + # Check that no patch file has been added to the directory + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + +def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" + setup_patch(self.pipeline_dir, True) + # Try creating a patch file - # Check that no patch file has been added to the directory + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") -def test_create_patch_try_apply_failed(self): - """Test creating a patch file and applying it to a new version of the the files""" + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / patch_fn) as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" 
in patch_lines + assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" + setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / patch_fn + assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is True + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / patch_fn) as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 
'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in main_nf_lines -def test_create_patch_update_fail(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" +def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + ) + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / patch_fn + assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is False + +# TODO: create those two missing tests def 
test_create_patch_update_success(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + + +def test_create_patch_update_fail(self): """ Test creating a patch file and the updating the subworkflow @@ -63,3 +180,21 @@ def test_create_patch_update_success(self): def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + + patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} + + with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove("bam_sort_stats_samtools") + # Check that the diff file has been removed + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + From 3030a76c5adfc867a876a34c0be405b3935c5f61 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:01:22 +0100 Subject: [PATCH 08/92] ruff --- tests/subworkflows/patch.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index cbb248e7e2..44c07d7d62 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -3,22 +3,14 @@ from pathlib import Path from unittest import mock - import pytest -from nf_core.modules.modules_json import ModulesJson -from nf_core.subworkflows.install import SubworkflowInstall -import nf_core.subworkflows import nf_core.components.components_command - +import nf_core.subworkflows from ..utils import ( 
GITLAB_BRANCH_TEST_BRANCH, - GITLAB_REPO, - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, - with_temporary_folder, ) # TODO: #Change this for the correct SUCCEED_SHA From 6b885ff43b3a031fd9f8da3af58b4eb72f5a6d3e Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:02:58 +0100 Subject: [PATCH 09/92] ruff format --- tests/subworkflows/patch.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 44c07d7d62..6e26d4844f 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -59,6 +59,7 @@ def test_create_patch_no_change(self): # Check that no patch file has been added to the directory assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" setup_patch(self.pipeline_dir, True) @@ -107,7 +108,12 @@ def test_create_patch_try_apply_successful(self): # Try applying the patch subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" patch_relpath = subworkflow_relpath / patch_fn - assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is True + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is True + ) # Move the files from the temporary directory update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) @@ -129,6 +135,7 @@ def test_create_patch_try_apply_successful(self): # These lines should have been removed by the patch assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in main_nf_lines + def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) @@ -154,7 
+161,13 @@ def test_create_patch_try_apply_failed(self): # Try applying the patch subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" patch_relpath = subworkflow_relpath / patch_fn - assert update_obj.try_apply_patch("bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir) is False + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + # TODO: create those two missing tests def test_create_patch_update_success(self): @@ -189,4 +202,3 @@ def test_remove_patch(self): patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} - From dfcfb5b5111aa1d4d64ebcbd75a989d6b36ba0db Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:07:51 +0100 Subject: [PATCH 10/92] removed split --- tests/subworkflows/patch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 6e26d4844f..ebec1171fa 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -70,7 +70,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -93,7 +93,7 @@ def test_create_patch_try_apply_successful(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a 
patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,7 +146,7 @@ def test_create_patch_try_apply_failed(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -193,7 +193,7 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools".split('/'))}.diff" + patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} From 299193020c8c1e37b72633036d9e9a1b375cb899 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:09:59 +0100 Subject: [PATCH 11/92] mypy --- tests/subworkflows/patch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index ebec1171fa..440c633e7e 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -70,7 +70,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -93,7 +93,7 @@ def test_create_patch_try_apply_successful(self): patch_obj = 
nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,7 +146,7 @@ def test_create_patch_try_apply_failed(self): patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -193,7 +193,7 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - patch_fn = f"{'-'.join("bam_sort_stats_samtools")}.diff" + patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} From e909d3c19330f014c59dc3101d06e0fad24e70c4 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:22:22 +0100 Subject: [PATCH 12/92] setup_patch --- tests/subworkflows/patch.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 440c633e7e..06bb5bcadd 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -15,15 +15,22 @@ # TODO: #Change this for the correct SUCCEED_SHA SUCCEED_SHA = "????" 
+ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" + """ Test the 'nf-core subworkflows patch' command """ -def setup_patch(self, pipeline_dir, modify_subworkflow): +def setup_patch(pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools - subworkflow_path = os.path.join(self.subworkflow_install.dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=ORG_SHA + ) + + # Install the module + install_obj.install("bam_sort_stats_samtools") if modify_subworkflow: # Modify the subworkflow From fd5b0d14193c16cf1fc716db851dd38df16277e2 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 15:33:05 +0100 Subject: [PATCH 13/92] called function correctly --- tests/subworkflows/patch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 06bb5bcadd..19afc757f3 100644 --- a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -35,10 +35,10 @@ def setup_patch(pipeline_dir, modify_subworkflow): if modify_subworkflow: # Modify the subworkflow subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") - modify_subworkflow(subworkflow_path / "main.nf") + modify_main_nf(subworkflow_path / "main.nf") -def modify_subworkflow(path): +def modify_main_nf(path): """Modify a file to test patch creation""" with open(path) as fh: lines = fh.readlines() From cdd9cfb67ac2e173440bcdc3eb112ddc41c795b7 Mon Sep 17 00:00:00 2001 From: ctuni Date: Mon, 18 Mar 2024 16:09:20 +0100 Subject: [PATCH 14/92] wraping up for the day --- tests/subworkflows/patch.py | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py index 19afc757f3..66065784cf 100644 --- 
a/tests/subworkflows/patch.py +++ b/tests/subworkflows/patch.py @@ -9,8 +9,9 @@ import nf_core.subworkflows from ..utils import ( - GITLAB_BRANCH_TEST_BRANCH, + GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL, + GITLAB_REPO ) # TODO: #Change this for the correct SUCCEED_SHA @@ -26,7 +27,7 @@ def setup_patch(pipeline_dir, modify_subworkflow): # Install the subworkflow bam_sort_stats_samtools install_obj = nf_core.subworkflows.SubworkflowInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=ORG_SHA + pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH, sha=ORG_SHA ) # Install the module @@ -34,7 +35,7 @@ def setup_patch(pipeline_dir, modify_subworkflow): if modify_subworkflow: # Modify the subworkflow - subworkflow_path = Path(pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") modify_main_nf(subworkflow_path / "main.nf") @@ -57,11 +58,11 @@ def test_create_patch_no_change(self): setup_patch(self.pipeline_dir, False) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) with pytest.raises(UserWarning): patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} @@ -72,10 +73,10 @@ def test_create_patch_change(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, 
GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created @@ -93,11 +94,11 @@ def test_create_patch_change(self): def test_create_patch_try_apply_successful(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" @@ -105,7 +106,7 @@ def test_create_patch_try_apply_successful(self): assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) # Install the new files @@ -117,13 +118,13 @@ def test_create_patch_try_apply_successful(self): patch_relpath = subworkflow_relpath / patch_fn assert ( update_obj.try_apply_patch( - "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + 
"bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir ) is True ) # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, "nf-core", SUCCEED_SHA) + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, SUCCEED_SHA) # Check that a patch file with the correct name has been created assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} @@ -146,11 +147,11 @@ def test_create_patch_try_apply_successful(self): def test_create_patch_try_apply_failed(self): """Test creating a patch file and applying it to a new version of the the files""" setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" @@ -158,7 +159,7 @@ def test_create_patch_try_apply_failed(self): assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH + self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) # Install the new files @@ -170,7 +171,7 @@ def test_create_patch_try_apply_failed(self): patch_relpath = subworkflow_relpath / patch_fn assert ( update_obj.try_apply_patch( - "bam_sort_stats_samtools", "nf-core", patch_relpath, subworkflow_path, subworkflow_install_dir + 
"bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir ) is False ) @@ -195,10 +196,10 @@ def test_remove_patch(self): setup_patch(self.pipeline_dir, True) # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_BRANCH_TEST_BRANCH) + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) patch_obj.patch("bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, "subworkflows", "nf-core", "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" # Check that a patch file with the correct name has been created From 5bc36e41ad319d4f869693294831dd45c18f77ff Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 17 Oct 2024 13:54:29 +0200 Subject: [PATCH 15/92] allow mixed list and dict in lint config --- nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 87dd307e70..ff8da1eeab 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,7 +1088,7 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]] +LintConfigType = Optional[Dict[str, Union[List[str], List[Union[List[str], Dict[str, List[str]]]], bool]]] class NFCoreYamlConfig(BaseModel): @@ -1153,7 +1153,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] except ValidationError as e: error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): - error_message += f"\n{error['loc'][0]}: {error['msg']}" + error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" raise AssertionError(error_message) wf_config = fetch_wf_config(Path(directory)) From 
81bdb3b3587a0fe52339abc31720eaffe5898fcc Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 17 Oct 2024 14:11:18 +0200 Subject: [PATCH 16/92] nested too deeply --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ff8da1eeab..4b6e2ddc73 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,7 +1088,7 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Union[List[str], Dict[str, List[str]]]], bool]]] +LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] class NFCoreYamlConfig(BaseModel): From 3d8d4b7de9cb1356511d61c346f02f43ffad5936 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Thu, 17 Oct 2024 12:14:43 +0000 Subject: [PATCH 17/92] [automated] Update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 364a079a70..7a4fd35834 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,8 @@ ### Linting +- allow mixed str and dict in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) + ### Modules ### Subworkflows From 2b4029b699471e73114a4bd4c9da930c8259d55d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:51:55 +0200 Subject: [PATCH 18/92] handle new nf-core.yml structure --- nf_core/pipelines/lint/files_exist.py | 2 ++ nf_core/pipelines/lint/files_unchanged.py | 2 ++ nf_core/pipelines/lint/template_strings.py | 4 +++- 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 9dd307d8b5..62af34845e 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -200,6 +200,8 @@ def files_exist(self) -> Dict[str, List[str]]: # Remove files that should be ignored according to the linting config ignore_files = 
self.lint_config.get("files_exist", []) if self.lint_config is not None else [] + if ignore_files is None: + ignore_files = [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index 300b3674b2..2a0f8ffd3c 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -144,6 +144,8 @@ def _tf(file_path: Union[str, Path]) -> Path: return Path(test_pipeline_dir, file_path) ignore_files = self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] + if ignore_files is None: + ignore_files = [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 11c5e82516..0bf2ccbeca 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,8 +39,10 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - files = self.list_files() + if ignore_files is None: + ignore_files = [] + files = self.list_files() # Loop through files, searching for string num_matches = 0 for fn in files: From 3a55f3682dde79066b3148e51388d80249a19de0 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:52:38 +0200 Subject: [PATCH 19/92] update documentation for `multiqc_config` linting --- nf_core/pipelines/lint/multiqc_config.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py index 2b0fc7902e..fec5b518e3 100644 --- a/nf_core/pipelines/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -31,6 +31,15 @@ def multiqc_config(self) -> Dict[str, List[str]]: 
lint: multiqc_config: False + To disable this test only for specific sections, you can specify a list of section names. + For example: + + .. code-block:: yaml + lint: + multiqc_config: + - report_section_order + - report_comment + """ passed: List[str] = [] From fcc442aae3019facd0a2a5b84397d08cbe503c0e Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 13:53:17 +0200 Subject: [PATCH 20/92] parse yaml correctly --- nf_core/pipelines/lint/nfcore_yml.py | 31 +++++++------- nf_core/utils.py | 62 +++++++++++++++++++++++++++- 2 files changed, 77 insertions(+), 16 deletions(-) diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index e0d5fb2005..3395696d1d 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -1,7 +1,8 @@ -import re from pathlib import Path from typing import Dict, List +from ruamel.yaml import YAML + from nf_core import __version__ REPOSITORY_TYPES = ["pipeline", "modules"] @@ -26,21 +27,23 @@ def nfcore_yml(self) -> Dict[str, List[str]]: failed: List[str] = [] ignored: List[str] = [] + yaml = YAML() + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] - try: - with open(Path(self.wf_path, ".nf-core.yml")) as fh: - content = fh.read() - except FileNotFoundError: - with open(Path(self.wf_path, ".nf-core.yaml")) as fh: - content = fh.read() + for ext in (".yml", ".yaml"): + try: + nf_core_yml = yaml.load(Path(self.wf_path) / f".nf-core{ext}") + break + except FileNotFoundError: + continue + else: + raise FileNotFoundError("No `.nf-core.yml` file found.") if "repository_type" not in ignore_configs: # Check that the repository type is set in the .nf-core.yml - repo_type_re = r"repository_type: (.+)" - match = re.search(repo_type_re, content) - if match: - repo_type = match.group(1) + if "repository_type" in nf_core_yml: + repo_type = 
nf_core_yml["repository_type"] if repo_type not in REPOSITORY_TYPES: failed.append( f"Repository type in `.nf-core.yml` is not valid. " @@ -55,10 +58,8 @@ def nfcore_yml(self) -> Dict[str, List[str]]: if "nf_core_version" not in ignore_configs: # Check that the nf-core version is set in the .nf-core.yml - nf_core_version_re = r"nf_core_version: (.+)" - match = re.search(nf_core_version_re, content) - if match: - nf_core_version = match.group(1).strip('"') + if "nf_core_version" in nf_core_yml: + nf_core_version = nf_core_yml["nf_core_version"] if nf_core_version != __version__ and "dev" not in nf_core_version: warned.append( f"nf-core version in `.nf-core.yml` is not set to the latest version. " diff --git a/nf_core/utils.py b/nf_core/utils.py index 4b6e2ddc73..5cce2494cd 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1091,6 +1091,63 @@ def get(self, item: str, default: Any = None) -> Any: LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] +class NFCoreYamlLintConfig(BaseModel): + """ + schema for linting config in `.nf-core.yml` should cover: + + .. 
code-block:: yaml + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + # multiqc_config: False + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + + files_unchanged: Optional[List[str]] = None + """ List of files that should not be changed """ + modules_config: Optional[Union[bool, List[str]]] = None + """ List of modules that should not be changed """ + merge_markers: Optional[Union[bool, List[str]]] = None + """ List of files that should not contain merge markers """ + nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None + """ List of Nextflow config files that should not be changed """ + multiqc_config: Optional[List[str]] = None + """ List of MultiQC config options that be changed """ + files_exist: Optional[List[str]] = None + """ List of files that can not exist """ + template_strings: Optional[Union[bool, List[str]]] = None + """ List of files that can contain template strings """ + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + return getattr(self, item, default) + + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + + class NFCoreYamlConfig(BaseModel): """.nf-core.yml configuration file schema""" @@ -1100,7 +1157,7 @@ class NFCoreYamlConfig(BaseModel): """ Version of nf-core/tools used to create/update the pipeline""" org_path: Optional[str] = None """ Path to the organisation's modules repository (used for modules repo_type only) """ - lint: Optional[LintConfigType] = None + 
lint: Optional[NFCoreYamlLintConfig] = None """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None """ Pipeline template configuration """ @@ -1115,6 +1172,9 @@ def __getitem__(self, item: str) -> Any: def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path], Optional[NFCoreYamlConfig]]: """ From 482f9f9e3f7b4ad1864af20a03078335f77ab12a Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 14:12:28 +0200 Subject: [PATCH 21/92] found a better way to handle the ignore file being None --- nf_core/pipelines/lint/files_exist.py | 2 -- nf_core/pipelines/lint/files_unchanged.py | 2 -- nf_core/pipelines/lint/template_strings.py | 2 -- nf_core/utils.py | 8 +++++--- 4 files changed, 5 insertions(+), 9 deletions(-) diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 62af34845e..9dd307d8b5 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -200,8 +200,6 @@ def files_exist(self) -> Dict[str, List[str]]: # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] def pf(file_path: Union[str, Path]) -> Path: return Path(self.wf_path, file_path) diff --git a/nf_core/pipelines/lint/files_unchanged.py b/nf_core/pipelines/lint/files_unchanged.py index 2a0f8ffd3c..300b3674b2 100644 --- a/nf_core/pipelines/lint/files_unchanged.py +++ b/nf_core/pipelines/lint/files_unchanged.py @@ -144,8 +144,6 @@ def _tf(file_path: Union[str, Path]) -> Path: return Path(test_pipeline_dir, file_path) ignore_files = 
self.lint_config.get("files_unchanged", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] # Files that must be completely unchanged from template for files in files_exact: diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 0bf2ccbeca..0cb669e553 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,8 +39,6 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - if ignore_files is None: - ignore_files = [] files = self.list_files() # Loop through files, searching for string diff --git a/nf_core/utils.py b/nf_core/utils.py index 5cce2494cd..283e2e5c75 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1123,7 +1123,7 @@ class NFCoreYamlLintConfig(BaseModel): - docs/my_pdf.pdf """ - files_unchanged: Optional[List[str]] = None + files_unchanged: List[str] = [] """ List of files that should not be changed """ modules_config: Optional[Union[bool, List[str]]] = None """ List of modules that should not be changed """ @@ -1131,12 +1131,14 @@ class NFCoreYamlLintConfig(BaseModel): """ List of files that should not contain merge markers """ nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None """ List of Nextflow config files that should not be changed """ - multiqc_config: Optional[List[str]] = None + multiqc_config: List[str] = [] """ List of MultiQC config options that be changed """ - files_exist: Optional[List[str]] = None + files_exist: List[str] = [] """ List of files that can not exist """ template_strings: Optional[Union[bool, List[str]]] = None """ List of files that can contain template strings """ + nfcore_components: Optional[bool] = None + """ Include all required files to use nf-core modules and subworkflows """ def 
__getitem__(self, item: str) -> Any: return getattr(self, item) From 663a9329bed700a484f7b86d1b501ebce9df7b9c Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 14:15:37 +0200 Subject: [PATCH 22/92] handle new lint config structure --- nf_core/pipelines/lint/__init__.py | 12 +++++------- nf_core/utils.py | 1 + 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 8cc7c37cb2..82361565f4 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -27,8 +27,8 @@ from nf_core import __version__ from nf_core.components.lint import ComponentLint from nf_core.pipelines.lint_utils import console +from nf_core.utils import NFCoreYamlConfig, NFCoreYamlLintConfig, strip_ansi_codes from nf_core.utils import plural_s as _s -from nf_core.utils import strip_ansi_codes from .actions_awsfulltest import actions_awsfulltest from .actions_awstest import actions_awstest @@ -112,7 +112,7 @@ def __init__( # Initialise the parent object super().__init__(wf_path) - self.lint_config = {} + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.release_mode = release_mode self.fail_ignored = fail_ignored self.fail_warned = fail_warned @@ -173,12 +173,11 @@ def _load_lint_config(self) -> bool: Add parsed config to the `self.lint_config` class attribute. 
""" _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = getattr(tools_config, "lint", {}) or {} + self.lint_config = getattr(tools_config, "lint", None) or None is_correct = True - # Check if we have any keys that don't match lint test names if self.lint_config is not None: - for k in self.lint_config: + for k, v in self.lint_config: if k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") @@ -594,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + if lint_obj.lint_config and lint_obj.lint_config["nfcore_components"] is False: module_lint_obj = None subworkflow_lint_obj = None else: @@ -679,5 +678,4 @@ def run_linting( if len(lint_obj.failed) > 0: if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/utils.py b/nf_core/utils.py index 283e2e5c75..c3eb919875 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,6 +1121,7 @@ class NFCoreYamlLintConfig(BaseModel): template_strings: False template_strings: - docs/my_pdf.pdf + nfcore_components: False """ files_unchanged: List[str] = [] From 53ae873e615478516e30c999c05338b8a5244823 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:05:41 +0200 Subject: [PATCH 23/92] add tests with different valid yaml structures --- nf_core/utils.py | 8 +- tests/pipelines/lint/test_files_exist.py | 82 ++++++++++--- tests/pipelines/lint/test_nextflow_config.py | 20 ++-- tests/pipelines/lint/test_nfcore_yml.py | 112 ++++++++++++++---- tests/pipelines/lint/test_template_strings.py | 28 ++++- 5 files changed, 196 insertions(+), 54 deletions(-) diff --git a/nf_core/utils.py 
b/nf_core/utils.py index c3eb919875..03112dd1da 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1126,17 +1126,17 @@ class NFCoreYamlLintConfig(BaseModel): files_unchanged: List[str] = [] """ List of files that should not be changed """ - modules_config: Optional[Union[bool, List[str]]] = None + modules_config: Optional[Union[bool, List[str]]] = [] """ List of modules that should not be changed """ - merge_markers: Optional[Union[bool, List[str]]] = None + merge_markers: Optional[Union[bool, List[str]]] = [] """ List of files that should not contain merge markers """ - nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None + nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] """ List of Nextflow config files that should not be changed """ multiqc_config: List[str] = [] """ List of MultiQC config options that be changed """ files_exist: List[str] = [] """ List of files that can not exist """ - template_strings: Optional[Union[bool, List[str]]] = None + template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ nfcore_components: Optional[bool] = None """ Include all required files to use nf-core modules and subworkflows """ diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index 97dd346cdf..eb1ba9a17e 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -1,5 +1,7 @@ from pathlib import Path +from ruamel.yaml import YAML + import nf_core.pipelines.lint from ..test_lint import TestLint @@ -9,17 +11,17 @@ class TestLintFilesExist(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_files_exist_missing_config(self): """Lint test: critical files missing FAIL""" Path(self.new_pipeline, "CHANGELOG.md").unlink() - lint_obj = 
nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline" - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `CHANGELOG.md`" in results["failed"] def test_files_exist_missing_main(self): @@ -27,10 +29,9 @@ def test_files_exist_missing_main(self): Path(self.new_pipeline, "main.nf").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `main.nf`" in results["warned"] def test_files_exist_deprecated_file(self): @@ -39,19 +40,17 @@ def test_files_exist_deprecated_file(self): nf = Path(self.new_pipeline, "parameters.settings.json") nf.touch() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == ["File must be removed: `parameters.settings.json`"] def test_files_exist_pass(self): """Lint check should pass if all files are there""" - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == [] def test_files_exist_pass_conditional_nfschema(self): @@ -62,9 +61,58 @@ def test_files_exist_pass_conditional_nfschema(self): with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.schema"] = "nf-core" + 
results = self.lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] + + def test_files_exists_pass_nf_core_yml_config(self): + """Check if linting passes with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + """ + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `.github/CONTRIBUTING.md`" in results["ignored"] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] + + def test_files_exists_fail_nf_core_yml_config(self): + """Check if linting fails with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - CITATIONS.md + """ + + # remove CITATIONS.md + Path(self.new_pipeline, "CITATIONS.md").unlink() + assert self.lint_obj._load() + # test first if linting fails correctly + results = self.lint_obj.files_exist() + assert "File not found: `CITATIONS.md`" in results["failed"] + + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index a655fb8ace..f8c3c1f31f 100644 --- a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -6,7 +6,6 @@ import 
nf_core.pipelines.create.create import nf_core.pipelines.lint -from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -125,23 +124,30 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" + valid_yaml = """ + nextflow_config: + - manifest.name + - config_defaults: + - params.custom_config_version + """ # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" - nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", - lint={"nextflow_config": [{"config_defaults": ["params.custom_config_version"]}]}, - ) + + with open(nf_core_yml_path) as f: + nf_core_yml = yaml.safe_load(f) + nf_core_yml["lint"] = yaml.safe_load(valid_yaml) with open(nf_core_yml_path, "w") as f: - yaml.dump(nf_core_yml.model_dump(), f) + yaml.dump(nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 + assert len(result["ignored"]) == 2 assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) + assert "Config variable ignored: `manifest.name`" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py index 955c00da81..780e212419 100644 --- a/tests/pipelines/lint/test_nfcore_yml.py +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -1,8 +1,9 @@ -import re from pathlib import Path -import nf_core.pipelines.create +from ruamel.yaml import YAML + import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -11,11 +12,14 @@ class 
TestLintNfCoreYml(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() - self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + self.yaml = YAML() + self.nf_core_yml: NFCoreYamlConfig = self.yaml.load(self.nf_core_yml_path) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_nfcore_yml_pass(self): """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() + assert self.lint_obj._load() results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) @@ -27,14 +31,10 @@ def test_nfcore_yml_pass(self): def test_nfcore_yml_fail_repo_type(self): """Lint test: nfcore_yml - FAIL - repository type not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["repository_type"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is not valid." 
in str(results["failed"]) assert len(results.get("warned", [])) == 0 assert len(results.get("passed", [])) >= 0 @@ -43,15 +43,87 @@ def test_nfcore_yml_fail_repo_type(self): def test_nfcore_yml_fail_nfcore_version(self): """Lint test: nfcore_yml - FAIL - nf-core version not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["nf_core_version"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"]) assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) >= 0 assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + # modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + # template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert 
len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config_bool(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + # modules_config: + # - fastqc + merge_markers: False + # merge_markers: + # - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + # template_strings: + # - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py index 406ba63e0c..37b7604806 100644 --- a/tests/pipelines/lint/test_template_strings.py +++ b/tests/pipelines/lint/test_template_strings.py @@ -1,6 +1,8 @@ import subprocess from pathlib import Path +import yaml + import nf_core.pipelines.create import nf_core.pipelines.lint @@ -11,6 +13,9 @@ class TestLintTemplateStrings(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + with open(self.nf_core_yml_path) as f: + self.nf_core_yml = yaml.safe_load(f) def test_template_strings(self): """Tests finding a template string in a file fails linting.""" @@ -28,9 +33,12 @@ def test_template_strings(self): def test_template_strings_ignored(self): """Tests ignoring template_strings""" # Ignore template_strings test - 
nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") + valid_yaml = """ + template_strings: false + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() lint_obj._lint_pipeline() @@ -43,13 +51,21 @@ def test_template_strings_ignore_file(self): txt_file = Path(self.new_pipeline) / "docs" / "test.txt" with open(txt_file, "w") as f: f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + valid_yaml = """ + template_strings: + - docs/test.txt + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() result = lint_obj.template_strings() + assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 From 57f7ca8680e5da788f04cb81cded4de4cbd0ad42 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:33:31 +0200 Subject: [PATCH 24/92] remove last traces of LintConfigType --- nf_core/components/lint/__init__.py | 4 ++-- nf_core/pipelines/create/create.py | 6 +++--- nf_core/utils.py | 3 --- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index fcc3b414d8..69740135a8 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,7 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from 
nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console -from nf_core.utils import LintConfigType +from nf_core.utils import NFCoreYamlLintConfig from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -80,7 +80,7 @@ def __init__( self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config: Optional[LintConfigType] = None + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8ab547c1cc..98b2b704be 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Tuple, Union import git import git.config @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType, NFCoreTemplateConfig +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig log = logging.getLogger(__name__) @@ -395,7 +395,7 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = cast(LintConfigType, lint_config) + nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) diff --git a/nf_core/utils.py b/nf_core/utils.py index 03112dd1da..1b0d491e2a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1088,9 
+1088,6 @@ def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) -LintConfigType = Optional[Dict[str, Union[List[str], List[Union[str, Dict[str, List[str]]]], bool]]] - - class NFCoreYamlLintConfig(BaseModel): """ schema for linting config in `.nf-core.yml` should cover: From e743185cf8d388bb18032aa9ebac5aca363d0da9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:48:58 +0200 Subject: [PATCH 25/92] fix incorrect type --- nf_core/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 1b0d491e2a..ac886755f9 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1129,7 +1129,7 @@ class NFCoreYamlLintConfig(BaseModel): """ List of files that should not contain merge markers """ nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] """ List of Nextflow config files that should not be changed """ - multiqc_config: List[str] = [] + multiqc_config: Union[bool, List[str]] = [] """ List of MultiQC config options that be changed """ files_exist: List[str] = [] """ List of files that can not exist """ From 58869a18facf3571c84b6f51a4c4a877f25c251d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 21 Oct 2024 17:49:25 +0200 Subject: [PATCH 26/92] more type fixes --- nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ac886755f9..4c4d9f73d2 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,7 +1121,7 @@ class NFCoreYamlLintConfig(BaseModel): nfcore_components: False """ - files_unchanged: List[str] = [] + files_unchanged: Union[bool, List[str]] = [] """ List of files that should not be changed """ modules_config: Optional[Union[bool, List[str]]] = [] """ List of modules that should not be changed """ @@ -1131,7 +1131,7 @@ class NFCoreYamlLintConfig(BaseModel): """ List of Nextflow config files that should not be changed """ multiqc_config: 
Union[bool, List[str]] = [] """ List of MultiQC config options that be changed """ - files_exist: List[str] = [] + files_exist: Union[bool, List[str]] = [] """ List of files that can not exist """ template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ From afbd51b8c30b785cd49797434540b1fe2279ac1d Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 13:33:29 +0200 Subject: [PATCH 27/92] add all lint tests to config --- nf_core/utils.py | 38 +++++++++++++++++++++++++++++++++++++- 1 file changed, 37 insertions(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 4c4d9f73d2..f7472ec944 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1136,7 +1136,43 @@ class NFCoreYamlLintConfig(BaseModel): template_strings: Optional[Union[bool, List[str]]] = [] """ List of files that can contain template strings """ nfcore_components: Optional[bool] = None - """ Include all required files to use nf-core modules and subworkflows """ + """ Lint all required files to use nf-core modules and subworkflows """ + actions_ci: Optional[bool] = None + """ Lint all required files to use GitHub Actions CI """ + actions_awstest: Optional[bool] = None + """ Lint all required files to run tests on AWS """ + actions_awsfulltest: Optional[bool] = None + """ Lint all required files to run full tests on AWS """ + readme: Optional[bool] = None + """ Lint the README.md file """ + pipeline_todos: Optional[bool] = None + """ Lint for TODOs statements""" + plugin_includes: Optional[bool] = None + """ Lint for nextflow plugin """ + pipeline_name_conventions: Optional[bool] = None + """ Lint for pipeline name conventions """ + schema_lint: Optional[bool] = None + """ Lint nextflow_schema.json file""" + schema_params: Optional[bool] = None + """ Lint schema for all params """ + system_exit: Optional[bool] = None + """ Lint for System.exit calls in groovy/nextflow code """ + schema_description: Optional[bool] = None + 
""" Check that every parameter in the schema has a description. """ + actions_schema_validation: Optional[bool] = None + """ Lint GitHub Action workflow files with schema""" + modules_json: Optional[bool] = None + """ Lint modules.json file """ + modules_structure: Optional[bool] = None + """ Lint modules structure """ + base_config: Optional[bool] = None + """ Lint base.config file """ + nfcore_yml: Optional[bool] = None + """ Lint nf-core.yml """ + version_consistency: Optional[bool] = None + """ Lint for version consistency """ + included_configs: Optional[bool] = None + """ Lint for included configs """ def __getitem__(self, item: str) -> Any: return getattr(self, item) From 9bf91f51fe36ecbc9baa62122016f2fbda32d788 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 14:14:07 +0200 Subject: [PATCH 28/92] switch all defaults to None and drop them on dump --- nf_core/pipelines/create/create.py | 10 +++++----- nf_core/pipelines/lint/readme.py | 15 +++++++++++++++ nf_core/pipelines/sync.py | 6 +++--- nf_core/utils.py | 20 +++++++++++--------- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 98b2b704be..776fc89439 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -67,7 +67,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"].model_dump()) + self.config = CreateConfig(**config_yml["template"].model_dump(exclude_none=True)) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") # Update the output directory @@ -205,7 +205,7 @@ def obtain_jinja_params_dict( config_yml = None # Set the parameters for the jinja template - jinja_params = 
self.config.model_dump() + jinja_params = self.config.model_dump(exclude_none=True) # Add template areas to jinja params and create list of areas with paths to skip skip_areas = [] @@ -363,8 +363,8 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) + yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") # Run prettier on files @@ -397,7 +397,7 @@ def fix_linting(self): if config_fn is not None and nf_core_yml is not None: nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) + yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index bdfad5200f..5a10fbfce5 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -23,6 +23,21 @@ def readme(self): * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails + To disable this test, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + lint: + readme: False + + To disable subsets of these tests, add the following to the pipeline's ``.nf-core.yml`` file: + + .. 
code-block:: yaml + + lint: + readme: + nextflow_badge + zenodo_release + """ passed = [] warned = [] diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 12b29f15ec..896adda94f 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -105,7 +105,7 @@ def __init__( with open(template_yaml_path) as f: self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml.model_dump(), fh) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." @@ -271,7 +271,7 @@ def make_template_pipeline(self): self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), config_path) try: pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( @@ -291,7 +291,7 @@ def make_template_pipeline(self): self.config_yml.template.outdir = "." # Update nf-core version self.config_yml.nf_core_version = nf_core.__version__ - dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump()) + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump(exclude_none=True)) except Exception as err: # Reset to where you were to prevent git getting messed up. 
diff --git a/nf_core/utils.py b/nf_core/utils.py index f7472ec944..b318634352 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -1121,20 +1121,22 @@ class NFCoreYamlLintConfig(BaseModel): nfcore_components: False """ - files_unchanged: Union[bool, List[str]] = [] + files_unchanged: Optional[Union[bool, List[str]]] = None """ List of files that should not be changed """ - modules_config: Optional[Union[bool, List[str]]] = [] + modules_config: Optional[Optional[Union[bool, List[str]]]] = None """ List of modules that should not be changed """ - merge_markers: Optional[Union[bool, List[str]]] = [] + merge_markers: Optional[Optional[Union[bool, List[str]]]] = None """ List of files that should not contain merge markers """ - nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = [] + nextflow_config: Optional[Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]]] = None """ List of Nextflow config files that should not be changed """ - multiqc_config: Union[bool, List[str]] = [] + multiqc_config: Optional[Union[bool, List[str]]] = None """ List of MultiQC config options that be changed """ - files_exist: Union[bool, List[str]] = [] + files_exist: Optional[Union[bool, List[str]]] = None """ List of files that can not exist """ - template_strings: Optional[Union[bool, List[str]]] = [] + template_strings: Optional[Optional[Union[bool, List[str]]]] = None """ List of files that can contain template strings """ + readme: Optional[Union[bool, List[str]]] = None + """ Lint the README.md file """ nfcore_components: Optional[bool] = None """ Lint all required files to use nf-core modules and subworkflows """ actions_ci: Optional[bool] = None @@ -1143,8 +1145,6 @@ class NFCoreYamlLintConfig(BaseModel): """ Lint all required files to run tests on AWS """ actions_awsfulltest: Optional[bool] = None """ Lint all required files to run full tests on AWS """ - readme: Optional[bool] = None - """ Lint the README.md file """ pipeline_todos: 
Optional[bool] = None """ Lint for TODOs statements""" plugin_includes: Optional[bool] = None @@ -1178,6 +1178,8 @@ def __getitem__(self, item: str) -> Any: return getattr(self, item) def get(self, item: str, default: Any = None) -> Any: + if getattr(self, item, default) is None: + return default return getattr(self, item, default) def __setitem__(self, item: str, value: Any) -> None: From 61bb733943e5c1216574366da0e6dd89e83dc2c9 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 15:46:09 +0200 Subject: [PATCH 29/92] drop None values when checking for test names --- nf_core/pipelines/lint/__init__.py | 2 +- tests/pipelines/lint/test_files_exist.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 82361565f4..f243743846 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -178,7 +178,7 @@ def _load_lint_config(self) -> bool: # Check if we have any keys that don't match lint test names if self.lint_config is not None: for k, v in self.lint_config: - if k != "nfcore_components" and k not in self.lint_tests: + if v is not None and k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py index eb1ba9a17e..ebc529247e 100644 --- a/tests/pipelines/lint/test_files_exist.py +++ b/tests/pipelines/lint/test_files_exist.py @@ -37,8 +37,7 @@ def test_files_exist_missing_main(self): def test_files_exist_deprecated_file(self): """Check whether deprecated file issues warning""" - nf = Path(self.new_pipeline, "parameters.settings.json") - nf.touch() + Path(self.new_pipeline, "parameters.settings.json").touch() assert self.lint_obj._load() From 
e2ac2b57dc152f46a364dbdac9dce405064c1320 Mon Sep 17 00:00:00 2001 From: mashehu Date: Tue, 22 Oct 2024 16:33:19 +0200 Subject: [PATCH 30/92] fix test_lint tests --- tests/pipelines/test_lint.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249f..ca7353d50d 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -48,7 +48,8 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" assert self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} + assert self.lint_obj.lint_config is not None + assert self.lint_obj.lint_config.model_dump(exclude_none=True) == {} def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all tests""" @@ -64,7 +65,8 @@ def test_load_lint_config_ignore_all_tests(self): # Load the new lint config file and check lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + assert lint_obj.lint_config is not None + assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests) # Try running linting and make sure that all tests are ignored lint_obj._lint_pipeline() From 78e82640a0206dcda873f543181b804a59b95a5c Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 23 Oct 2024 15:30:57 +0200 Subject: [PATCH 31/92] move test in correct directory --- tests/{ => pipelines}/test_rocrate.py | 31 ++++++--------------------- 1 file changed, 7 insertions(+), 24 deletions(-) rename tests/{ => pipelines}/test_rocrate.py (73%) diff --git a/tests/test_rocrate.py b/tests/pipelines/test_rocrate.py similarity index 73% rename from tests/test_rocrate.py rename to tests/pipelines/test_rocrate.py index 6defd5d5e8..54561af425 100644 --- a/tests/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,8 +1,6 @@ 
"""Test the nf-core pipelines rocrate command""" import shutil -import tempfile -import unittest from pathlib import Path import rocrate.rocrate @@ -13,33 +11,18 @@ import nf_core.pipelines.rocrate import nf_core.utils +from ..test_pipelines import TestPipelines -class TestROCrate(unittest.TestCase): - """Class for lint tests""" - - def setUp(self): - """Function that runs at start of tests for common resources - Use nf_core.create() to make a pipeline that we can use for testing - """ - - self.tmp_dir = Path(tempfile.mkdtemp()) - self.test_pipeline_dir = Path(self.tmp_dir, "nf-core-testpipeline") - self.create_obj = nf_core.pipelines.create.create.PipelineCreate( - name="testpipeline", - description="This is a test pipeline", - author="Test McTestFace", - outdir=str(self.test_pipeline_dir), - version="1.0.0", - no_git=False, - force=True, - ) - self.create_obj.init_pipeline() +class TestROCrate(TestPipelines): + """Class for lint tests""" + def setUp(self) -> None: + super().setUp() # add fake metro map - Path(self.test_pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() + Path(self.pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() # commit the changes - repo = Repo(self.test_pipeline_dir) + repo = Repo(self.pipeline_dir) repo.git.add(A=True) repo.index.commit("Initial commit") From b74319de5c2251200b4e0a6f244f2e7f7fa453a7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 23 Oct 2024 16:49:59 +0200 Subject: [PATCH 32/92] fix tests --- nf_core/pipelines/rocrate.py | 35 +++++++++------------------------ tests/pipelines/test_rocrate.py | 8 ++++---- 2 files changed, 13 insertions(+), 30 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index de00189a2c..04f91480c5 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -7,7 +7,7 @@ import sys from datetime import datetime from pathlib import Path -from typing import Dict, List, Optional, Set, Union +from 
typing import Optional, Set, Union import requests import rocrate.rocrate @@ -90,7 +90,7 @@ def __init__(self, pipeline_dir: Path, version="") -> None: def create_rocrate( self, outdir: Path, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None - ) -> None: + ) -> bool: """ Create an RO Crate for a pipeline @@ -107,8 +107,6 @@ def create_rocrate( log.error(e) sys.exit(1) - # Change to the pipeline directory, because the RO Crate doesn't handle relative paths well - # Check that the checkout pipeline version is the same as the requested version if self.version != "": if self.version != self.pipeline_obj.nf_config.get("manifest.version"): @@ -132,11 +130,12 @@ def create_rocrate( # Save just the JSON metadata file if json_path is not None: - if json_path.name != "ro-crate-metadata.json": - json_path = json_path / "ro-crate-metadata.json" + if json_path.name == "ro-crate-metadata.json": + json_path = json_path.parent log.info(f"Saving metadata file to '{json_path}'") self.crate.metadata.write(json_path) + return True # Save the whole crate zip file if zip_path is not None: @@ -144,6 +143,10 @@ def create_rocrate( zip_path = zip_path / "ro-crate.crate.zip" log.info(f"Saving zip file '{zip_path}") self.crate.write_zip(zip_path) + return True + if json_path is None and zip_path is None: + log.error("Please provide a path to save the ro-crate file or the zip file.") + return False def make_workflow_rocrate(self) -> None: """ @@ -224,26 +227,6 @@ def set_main_entity(self, main_entity_filename: str): "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True ) self.crate.mainEntity.append_to("version", self.version, compact=True) - if self.pipeline_obj.schema_obj is not None: - log.debug("input value") - - schema_input = self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["properties"][ - "input" - ] - input_value: Dict[str, Union[str, List[str], bool]] = { - "@id": "#input", - "@type": 
["FormalParameter"], - "default": schema_input.get("default", ""), - "encodingFormat": schema_input.get("mimetype", ""), - "valueRequired": "input" - in self.pipeline_obj.schema_obj.schema["definitions"]["input_output_options"]["required"], - "dct:conformsTo": "https://bioschemas.org/types/FormalParameter/1.0-RELEASE", - } - self.crate.add_jsonld(input_value) - self.crate.mainEntity.append_to( - "input", - {"@id": "#input"}, - ) # get keywords from nf-core website remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 54561af425..2e14878da1 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -36,14 +36,14 @@ def test_rocrate_creation(self): """Run the nf-core rocrate command""" # Run the command - self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.test_pipeline_dir) - self.rocrate_obj.create_rocrate(self.test_pipeline_dir, metadata_path=Path(self.test_pipeline_dir)) + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) # Check that the crate was created - self.assertTrue(Path(self.test_pipeline_dir, "ro-crate-metadata.json").exists()) + self.assertTrue(Path(self.pipeline_dir, "ro-crate-metadata.json").exists()) # Check that the entries in the crate are correct - crate = rocrate.rocrate.ROCrate(self.test_pipeline_dir) + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) entities = crate.get_entities() # Check if the correct entities are set: From 406bdf8bcba91bbd81f72fbab0c47786770193ba Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:08:35 +0200 Subject: [PATCH 33/92] run tests without commiting ro-crate --- .github/workflows/create-test-lint-wf-template.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml 
b/.github/workflows/create-test-lint-wf-template.yml index 5871919ca3..6092360964 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -112,9 +112,9 @@ jobs: run: | cd create-test-lint-wf nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml + # fake ro-crate touch my-prefix-testpipeline/ro-crate-metadata.json - git commit -am "add ro-crate" - name: run the pipeline run: | From 94bddc22745dd94ce9d7500e035942b448e83e6b Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:16:11 +0200 Subject: [PATCH 34/92] add ro-crate creation to pipelines create command --- .github/workflows/create-test-lint-wf-template.yml | 3 --- nf_core/pipelines/create/create.py | 4 ++++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 6092360964..f6ae34c90d 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -113,9 +113,6 @@ jobs: cd create-test-lint-wf nf-core --log-file log.txt pipelines create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml template_skip_${{ matrix.TEMPLATE }}.yml - # fake ro-crate - touch my-prefix-testpipeline/ro-crate-metadata.json - - name: run the pipeline run: | cd create-test-lint-wf diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8ab547c1cc..13f059c7bd 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,6 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file +from nf_core.rocrate 
import ROCrate from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) @@ -255,6 +256,9 @@ def init_pipeline(self): """Creates the nf-core pipeline.""" # Make the new pipeline self.render_template() + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") # Init the git repository and make the first commit if not self.no_git: From 5c4a5e613b381e2896e1c3266df85683c7ee823d Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:20:25 +0200 Subject: [PATCH 35/92] fix command import --- nf_core/pipelines/create/create.py | 2 +- nf_core/pipelines/rocrate.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 13f059c7bd..c9be4e7be3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -21,7 +21,7 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.rocrate import ROCrate +from nf_core.pipelines.rocrate import ROCrate from nf_core.utils import LintConfigType, NFCoreTemplateConfig log = logging.getLogger(__name__) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 04f91480c5..1fe0e4cca6 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -177,14 +177,14 @@ def make_workflow_rocrate(self) -> None: ) # add readme as description - readme = Path("README.md") + readme = self.pipeline_dir / "README.md" try: self.crate.description = readme.read_text() except FileNotFoundError: log.error(f"Could not find README.md in {self.pipeline_dir}") # get license from LICENSE file - license_file = Path("LICENSE") + license_file = self.pipeline_dir / "LICENSE" try: license = 
license_file.read_text() if license.startswith("MIT"): From a679a14c8e0eab470939d0fe0b3148057739830b Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 09:55:31 +0200 Subject: [PATCH 36/92] add rocrate to skip features --- nf_core/pipelines/create/create.py | 8 +++++--- nf_core/pipelines/create/template_features.yml | 10 ++++++++++ nf_core/pipelines/rocrate.py | 9 +++++++-- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index c9be4e7be3..4f6fa12383 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -256,9 +256,6 @@ def init_pipeline(self): """Creates the nf-core pipeline.""" # Make the new pipeline self.render_template() - # Create the RO-Crate metadata file - rocrate_obj = ROCrate(self.outdir) - rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") # Init the git repository and make the first commit if not self.no_git: @@ -360,6 +357,11 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() + if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") + # Update the .nf-core.yml with linting configurations self.fix_linting() diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 0a3180286f..cf1867bff2 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -432,3 +432,13 @@ seqera_platform: You can extend this file adding any other desired configuration. 
nfcore_pipelines: False custom_pipelines: True +rocrate: + skippable_paths: + - "ro-crate-metadata.json" + short_description: "Add RO-Crate metadata" + description: "Add a RO-Crate metadata file to describe the pipeline" + help_text: | + RO-Crate is a metadata specification to describe research data and software. + This will add a `ro-crate-metadata.json` file to describe the pipeline. + nfcore_pipelines: False + custom_pipelines: True diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 1fe0e4cca6..388d681eb0 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -284,7 +284,9 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: # look at git contributors for author names try: git_contributors: Set[str] = set() - assert self.pipeline_obj.repo is not None # mypy + if self.pipeline_obj.repo is None: + log.info("No git repository found. No git contributors will be added as authors.") + return commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf")) for commit in commits_touching_path: @@ -324,7 +326,10 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: for author in named_contributors: log.debug(f"Adding author: {author}") - assert self.pipeline_obj.repo is not None # mypy + + if self.pipeline_obj.repo is None: + log.info("No git repository found. 
No git contributors will be added as authors.") + return # get email from git log email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1") orcid = get_orcid(author) From d79ba1009b7659734481c0b6e6600056e4596f74 Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 10:11:28 +0200 Subject: [PATCH 37/92] remove schema loading, because it is not needed anymore --- nf_core/pipelines/rocrate.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index 388d681eb0..d4e6056620 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -17,7 +17,6 @@ from rocrate.model.person import Person from rocrate.rocrate import ROCrate as BaseROCrate -from nf_core.pipelines.schema import PipelineSchema from nf_core.utils import Pipeline log = logging.getLogger(__name__) @@ -81,10 +80,6 @@ def __init__(self, pipeline_dir: Path, version="") -> None: self.crate: rocrate.rocrate.ROCrate self.pipeline_obj = Pipeline(self.pipeline_dir) self.pipeline_obj._load() - self.pipeline_obj.schema_obj = PipelineSchema() - # Assume we're in a pipeline dir root if schema path not set - self.pipeline_obj.schema_obj.get_schema_path(self.pipeline_dir) - self.pipeline_obj.schema_obj.load_schema() setup_requests_cachedir() From dfb9283c238b1e83b00c5c35d69f2896c615577d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Thu, 24 Oct 2024 09:06:26 +0000 Subject: [PATCH 38/92] update snapshots --- .../test_customisation_help.svg | 256 +++++++++--------- .../test_create_app/test_type_custom.svg | 254 ++++++++--------- 2 files changed, 255 insertions(+), 255 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg index 07ab592d27..450f1d303c 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg +++ 
b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -19,257 +19,257 @@ font-weight: 700; } - .terminal-3477423502-matrix { + .terminal-333203530-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3477423502-title { + .terminal-333203530-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3477423502-r1 { fill: #c5c8c6 } -.terminal-3477423502-r2 { fill: #e3e3e3 } -.terminal-3477423502-r3 { fill: #989898 } -.terminal-3477423502-r4 { fill: #e1e1e1 } -.terminal-3477423502-r5 { fill: #4ebf71;font-weight: bold } -.terminal-3477423502-r6 { fill: #1e1e1e } -.terminal-3477423502-r7 { fill: #e2e2e2 } -.terminal-3477423502-r8 { fill: #507bb3 } -.terminal-3477423502-r9 { fill: #808080 } -.terminal-3477423502-r10 { fill: #dde6ed;font-weight: bold } -.terminal-3477423502-r11 { fill: #001541 } -.terminal-3477423502-r12 { fill: #0178d4 } -.terminal-3477423502-r13 { fill: #454a50 } -.terminal-3477423502-r14 { fill: #e2e3e3;font-weight: bold } -.terminal-3477423502-r15 { fill: #000000 } -.terminal-3477423502-r16 { fill: #e4e4e4 } -.terminal-3477423502-r17 { fill: #14191f } -.terminal-3477423502-r18 { fill: #7ae998 } -.terminal-3477423502-r19 { fill: #0a180e;font-weight: bold } -.terminal-3477423502-r20 { fill: #008139 } -.terminal-3477423502-r21 { fill: #fea62b;font-weight: bold } -.terminal-3477423502-r22 { fill: #a7a9ab } -.terminal-3477423502-r23 { fill: #e2e3e3 } + .terminal-333203530-r1 { fill: #c5c8c6 } +.terminal-333203530-r2 { fill: #e3e3e3 } +.terminal-333203530-r3 { fill: #989898 } +.terminal-333203530-r4 { fill: #e1e1e1 } +.terminal-333203530-r5 { fill: #4ebf71;font-weight: bold } +.terminal-333203530-r6 { fill: #1e1e1e } +.terminal-333203530-r7 { fill: #e2e2e2 } +.terminal-333203530-r8 { fill: #507bb3 } +.terminal-333203530-r9 { fill: #808080 } +.terminal-333203530-r10 { fill: #dde6ed;font-weight: bold } +.terminal-333203530-r11 { fill: 
#001541 } +.terminal-333203530-r12 { fill: #14191f } +.terminal-333203530-r13 { fill: #0178d4 } +.terminal-333203530-r14 { fill: #454a50 } +.terminal-333203530-r15 { fill: #e2e3e3;font-weight: bold } +.terminal-333203530-r16 { fill: #000000 } +.terminal-333203530-r17 { fill: #e4e4e4 } +.terminal-333203530-r18 { fill: #7ae998 } +.terminal-333203530-r19 { fill: #0a180e;font-weight: bold } +.terminal-333203530-r20 { fill: #008139 } +.terminal-333203530-r21 { fill: #fea62b;font-weight: bold } +.terminal-333203530-r22 { fill: #a7a9ab } +.terminal-333203530-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… - - -Template features - - -▔▔▔▔▔▔▔▔ -        Toggle all features -▁▁▁▁▁▁▁▁ -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use a GitHub Create a GitHub  Show help  -▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -pipeline. - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github CI testsThe pipeline will  Show help  -▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI)  -testing - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use reference genomesThe pipeline will be  Hide help  -▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -copy of the most  -common reference  -genome files from  -iGenomes - - -Nf-core pipelines are configured to use a copy of the most common reference  -genome files. - -By selecting this option, your pipeline will include a configuration file  -specifying the paths to these files. - -The required code to use these files will also be included in the template.  -When the pipeline user provides an appropriate genome key, the pipeline will -automatically download the required reference files. 
-▅▅ -For more information about reference genomes in nf-core pipelines, see the  - - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github badgesThe README.md file of  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous▃▃ +Integration (CI)  +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Hide help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. + +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. + +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. 
+▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg index cc34c92537..6e178ba840 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -19,256 +19,256 @@ font-weight: 700; } - .terminal-829252251-matrix { + .terminal-3425198753-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-829252251-title { + .terminal-3425198753-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-829252251-r1 { fill: #c5c8c6 } -.terminal-829252251-r2 { fill: #e3e3e3 } -.terminal-829252251-r3 { fill: #989898 } -.terminal-829252251-r4 { fill: #e1e1e1 } -.terminal-829252251-r5 { fill: #4ebf71;font-weight: bold } -.terminal-829252251-r6 { fill: #1e1e1e } -.terminal-829252251-r7 { fill: #0178d4 } -.terminal-829252251-r8 { fill: #e2e2e2 } -.terminal-829252251-r9 { fill: #507bb3 } -.terminal-829252251-r10 { fill: #808080 } -.terminal-829252251-r11 { fill: #dde6ed;font-weight: bold } -.terminal-829252251-r12 { fill: #001541 } -.terminal-829252251-r13 { fill: #14191f } -.terminal-829252251-r14 { fill: #454a50 } -.terminal-829252251-r15 { fill: #7ae998 } -.terminal-829252251-r16 { fill: #e2e3e3;font-weight: bold } -.terminal-829252251-r17 { fill: #0a180e;font-weight: bold } -.terminal-829252251-r18 { fill: #000000 } -.terminal-829252251-r19 { fill: #008139 } -.terminal-829252251-r20 { fill: #fea62b;font-weight: bold } -.terminal-829252251-r21 { fill: #a7a9ab } -.terminal-829252251-r22 { fill: #e2e3e3 } + 
.terminal-3425198753-r1 { fill: #c5c8c6 } +.terminal-3425198753-r2 { fill: #e3e3e3 } +.terminal-3425198753-r3 { fill: #989898 } +.terminal-3425198753-r4 { fill: #e1e1e1 } +.terminal-3425198753-r5 { fill: #4ebf71;font-weight: bold } +.terminal-3425198753-r6 { fill: #1e1e1e } +.terminal-3425198753-r7 { fill: #0178d4 } +.terminal-3425198753-r8 { fill: #e2e2e2 } +.terminal-3425198753-r9 { fill: #507bb3 } +.terminal-3425198753-r10 { fill: #808080 } +.terminal-3425198753-r11 { fill: #dde6ed;font-weight: bold } +.terminal-3425198753-r12 { fill: #001541 } +.terminal-3425198753-r13 { fill: #14191f } +.terminal-3425198753-r14 { fill: #454a50 } +.terminal-3425198753-r15 { fill: #7ae998 } +.terminal-3425198753-r16 { fill: #e2e3e3;font-weight: bold } +.terminal-3425198753-r17 { fill: #0a180e;font-weight: bold } +.terminal-3425198753-r18 { fill: #000000 } +.terminal-3425198753-r19 { fill: #008139 } +.terminal-3425198753-r20 { fill: #fea62b;font-weight: bold } +.terminal-3425198753-r21 { fill: #a7a9ab } +.terminal-3425198753-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… - - -Template features - - -▔▔▔▔▔▔▔▔ -        Toggle all features -▁▁▁▁▁▁▁▁ -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use a GitHub Create a GitHub  Show help  -▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -pipeline. 
- -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github CI testsThe pipeline will  Show help  -▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI) ▁▁ -testing - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use reference genomesThe pipeline will be  Show help  -▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -copy of the most  -common reference  -genome files from  -iGenomes - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github badgesThe README.md file of  Show help  -▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -include GitHub badges - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add configuration The pipeline will  Show help  -▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -profiles containing  -custom parameters  -required to run  -nf-core pipelines at  -different institutions - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use code lintersThe pipeline will  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI) ▄▄ +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +include GitHub badges + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add configuration The pipeline will  Show help  +▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +profiles containing  +custom parameters  +required to run  +nf-core pipelines at  +different institutions + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use code lintersThe pipeline will  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  From 2696940ee2a86a79c6aa6d210a2a0a0e129766fd Mon Sep 17 00:00:00 2001 From: mashehu Date: Thu, 24 Oct 2024 12:22:48 +0200 Subject: [PATCH 39/92] try to fix coverage report generation --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index a29a6970ed..7d8d3ea6fd 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --cov-config=.coveragerc --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. 
for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" From 7cd3dea18fed9af950c3a2249ab54797134e58e8 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 15 Nov 2024 16:42:14 +0100 Subject: [PATCH 40/92] update and fix swf patch tests --- nf_core/components/patch.py | 14 +- nf_core/components/remove.py | 2 +- nf_core/components/update.py | 11 +- nf_core/modules/lint/main_nf.py | 1 + nf_core/modules/lint/meta_yml.py | 2 + nf_core/modules/lint/module_changes.py | 1 + nf_core/modules/lint/module_patch.py | 1 + nf_core/modules/modules_differ.py | 20 ++- nf_core/modules/modules_json.py | 50 +++--- tests/modules/test_modules_json.py | 15 +- tests/subworkflows/patch.py | 212 ------------------------- tests/subworkflows/test_patch.py | 204 ++++++++++++++++++++++++ 12 files changed, 281 insertions(+), 252 deletions(-) delete mode 100644 tests/subworkflows/patch.py create mode 100644 tests/subworkflows/test_patch.py diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 41fccd8be2..77717877fc 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -65,7 +65,9 @@ def patch(self, component=None): component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -127,7 +129,9 @@ def patch(self, component=None): raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. 
No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + self.modules_json.add_patch_entry( + self.component_type, component, self.modules_repo.remote_url, component_dir, patch_relpath + ) log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module @@ -166,7 +170,9 @@ def remove(self, component): component_fullname = str(Path(self.component_type, component_dir, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -202,7 +208,7 @@ def remove(self, component): # Try to apply the patch in reverse and move resulting files to module dir temp_component_dir = self.modules_json.try_apply_patch_reverse( - component, self.modules_repo.repo_path, patch_relpath, component_path + self.component_type, component, self.modules_repo.repo_path, patch_relpath, component_path ) try: for file in Path(temp_component_dir).glob("*"): diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index c2c5843918..37208629c0 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -68,7 +68,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals if not component_dir.exists(): log.error(f"Installation directory '{component_dir}' does not exist.") - if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + if modules_json.component_present(component, self.modules_repo.remote_url, repo_path, self.component_type): log.error(f"Found 
entry for '{component}' in 'modules.json'. Removing...") modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) return False diff --git a/nf_core/components/update.py b/nf_core/components/update.py index bf176fb6d9..7edb0ffd06 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -810,7 +810,9 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + new_files = ModulesDiffer.try_apply_patch( + self.component_type, component, repo_path, patch_path, temp_component_dir + ) except LookupError: # Patch failed. Save the patch file by moving to the install dir shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) @@ -848,7 +850,12 @@ def try_apply_patch( # Add the patch file to the modules.json file self.modules_json.add_patch_entry( - component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + patch_relpath, + write_file=write_file, ) return True diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index df5a48d5bf..2b7878ca0f 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -51,6 +51,7 @@ def main_nf( lines: List[str] = [] if module.is_patched: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4ad728d10b..59f0f01252 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -47,6 +47,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and 
module_lint_object.modules_repo.repo_path is not None: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -208,6 +209,7 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> # Check if we have a patch file, get original file in that case if module.is_patched: lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index eb76f4b88b..708a2bad68 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -31,6 +31,7 @@ def module_changes(module_lint_object, module): shutil.copytree(module.component_dir, tempdir) try: new_lines = ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module.org, module.patch_path, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 29bf78a66b..19c6e76fec 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -162,6 +162,7 @@ def patch_reversible(module_lint_object, module, patch_path): """ try: ModulesDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, patch_path, diff --git a/nf_core/modules/modules_differ.py b/nf_core/modules/modules_differ.py index 6b0781bb89..c151fcce7c 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/modules/modules_differ.py @@ -466,16 +466,22 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( - module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + component_type: str, + component: str, + repo_path: Union[str, Path], + patch_path: Union[str, Path], + component_dir: Path, + 
reverse: bool = False, ) -> Dict[str, List[str]]: """ - Try applying a full patch file to a module + Try applying a full patch file to a module or subworkflow Args: - module (str): Name of the module + component_type (str): The type of component (modules or subworkflows) + component (str): Name of the module or subworkflow repo_path (str): Name of the repository where the module resides patch_path (str): The absolute path to the patch file to be applied - module_dir (Path): The directory containing the module + component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse Returns: @@ -485,13 +491,13 @@ def try_apply_patch( Raises: LookupError: If the patch application fails in a file """ - module_relpath = Path("modules", repo_path, module) + component_relpath = Path(component_type, repo_path, component) patches = ModulesDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") - fn = Path(file).relative_to(module_relpath) - file_path = module_dir / fn + fn = Path(file).relative_to(component_relpath) + file_path = component_dir / fn try: with open(file_path) as fh: file_lines = fh.readlines() diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 05c64b6dee..5628c75742 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -308,7 +308,9 @@ def determine_branches_and_shas( # If the module/subworkflow is patched patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + temp_module_dir = self.try_apply_patch_reverse( + component_type, component, install_dir, patch_file, component_path + ) correct_commit_sha = self.find_correct_commit_sha( component_type, component, temp_module_dir, modules_repo ) @@ -805,7 +807,7 @@ def remove_entry(self, component_type, name, repo_url, install_dir, 
removed_by=N return False - def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): + def add_patch_entry(self, component_type, component_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ @@ -815,9 +817,11 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: - raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") - self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) + if component_name not in self.modules_json["repos"][repo_url][component_type][install_dir]: + raise LookupError( + f"{component_type[:-1].title()} '{install_dir}/{component_name}' not present in 'modules.json'" + ) + self.modules_json["repos"][repo_url][component_type][install_dir][component_name]["patch"] = str(patch_filename) if write_file: self.dump() @@ -858,41 +862,46 @@ def get_patch_fn(self, module_name, repo_url, install_dir): ) return Path(path) if path is not None else None - def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): + def try_apply_patch_reverse(self, component_type, component, repo_name, patch_relpath, component_dir): """ - Try reverse applying a patch file to the modified module files + Try reverse applying a patch file to the modified module or subworkflow files Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides + component_type (str): The type of component [modules, subworkflows] + component (str): The name of the module or subworkflow + repo_name (str): The name of the repository where the component resides patch_relpath (Path | str): The path to patch file in 
the pipeline - module_dir (Path | str): The module directory in the pipeline + component_dir (Path | str): The component directory in the pipeline Returns: - (Path | str): The path of the folder where the module patched files are + (Path | str): The path of the folder where the component patched files are Raises: LookupError: If patch was not applied """ - module_fullname = str(Path(repo_name, module)) + component_fullname = str(Path(repo_name, component)) patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) + new_files = ModulesDiffer.try_apply_patch( + component_type, component, repo_name, patch_path, component_dir, reverse=True + ) except LookupError as e: - raise LookupError(f"Failed to apply patch in reverse for module '{module_fullname}' due to: {e}") + raise LookupError( + f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" + ) # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - temp_module_dir.mkdir(parents=True, exist_ok=True) + temp_component_dir = temp_dir / component + temp_component_dir.mkdir(parents=True, exist_ok=True) for file, new_content in new_files.items(): - fn = temp_module_dir / file + fn = temp_component_dir / file with open(fn, "w") as fh: fh.writelines(new_content) - return temp_module_dir + return temp_component_dir def repo_present(self, repo_name): """ @@ -908,20 +917,21 @@ def repo_present(self, repo_name): return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_url, install_dir): + def component_present(self, module_name, repo_url, install_dir, component_type): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module repo_url (str): URL of the repository install_dir (str): Name of 
the directory where modules are installed + component_type (str): Type of component [modules, subworkflows] Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get(component_type, {}).get( install_dir, {} ) diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py index 0368c146c4..325a8073b7 100644 --- a/tests/modules/test_modules_json.py +++ b/tests/modules/test_modules_json.py @@ -175,14 +175,17 @@ def test_mod_json_repo_present(self): assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True assert mod_json_obj.repo_present("INVALID_REPO") is False - def test_mod_json_module_present(self): - """Tests the module_present function""" + def test_mod_json_component_present(self): + """Tests the component_present function""" mod_json_obj = ModulesJson(self.pipeline_dir) - assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True - assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False - assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False - assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False + assert mod_json_obj.component_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") is True + assert ( + mod_json_obj.component_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") + is False + ) + assert mod_json_obj.component_present("fastqc", "INVALID_REPO", "INVALID_DIR", "modules") is False + assert mod_json_obj.component_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR", "modules") is False def test_mod_json_get_module_version(self): 
"""Test the get_module_version function""" diff --git a/tests/subworkflows/patch.py b/tests/subworkflows/patch.py deleted file mode 100644 index 66065784cf..0000000000 --- a/tests/subworkflows/patch.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -import tempfile -from pathlib import Path -from unittest import mock - -import pytest - -import nf_core.components.components_command -import nf_core.subworkflows - -from ..utils import ( - GITLAB_SUBWORKFLOWS_BRANCH, - GITLAB_URL, - GITLAB_REPO -) - -# TODO: #Change this for the correct SUCCEED_SHA -SUCCEED_SHA = "????" -ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" - - -""" -Test the 'nf-core subworkflows patch' command -""" - - -def setup_patch(pipeline_dir, modify_subworkflow): - # Install the subworkflow bam_sort_stats_samtools - install_obj = nf_core.subworkflows.SubworkflowInstall( - pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH, sha=ORG_SHA - ) - - # Install the module - install_obj.install("bam_sort_stats_samtools") - - if modify_subworkflow: - # Modify the subworkflow - subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - modify_main_nf(subworkflow_path / "main.nf") - - -def modify_main_nf(path): - """Modify a file to test patch creation""" - with open(path) as fh: - lines = fh.readlines() - # We want a patch file that looks something like: - # - ch_fasta // channel: [ val(meta), path(fasta) ] - for line_index in range(len(lines)): - if lines[line_index] == " ch_fasta // channel: [ val(meta), path(fasta) ]\n": - to_pop = line_index - lines.pop(to_pop) - with open(path, "w") as fh: - fh.writelines(lines) - - -def test_create_patch_no_change(self): - """Test creating a patch when there is a change to the module""" - setup_patch(self.pipeline_dir, False) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - with 
pytest.raises(UserWarning): - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - - # Check that no patch file has been added to the directory - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} - - -def test_create_patch_change(self): - """Test creating a patch when there is no change to the subworkflow""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check that the correct lines are in the patch file - with open(subworkflow_path / patch_fn) as fh: - patch_lines = fh.readlines() - subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) - assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" - assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines - assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines - - -def test_create_patch_try_apply_successful(self): - """Test creating a patch file and applying it to a new version of the the files""" - setup_patch(self.pipeline_dir, True) - subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # 
Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" - patch_relpath = subworkflow_relpath / patch_fn - assert ( - update_obj.try_apply_patch( - "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir - ) - is True - ) - - # Move the files from the temporary directory - update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, SUCCEED_SHA) - - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - # Check that the correct lines are in the patch file - with open(subworkflow_path / patch_fn) as fh: - patch_lines = fh.readlines() - subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) - assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" - assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines - assert "- ch_fasta // channel: [ val(meta), path(fasta) ]" in patch_lines - - # Check that 'main.nf' is updated correctly - with open(subworkflow_path / "main.nf") as fh: - main_nf_lines = fh.readlines() - # These lines should have been removed by the patch - assert " ch_fasta // channel: [ val(meta), path(fasta) ]\n" not in main_nf_lines - - -def test_create_patch_try_apply_failed(self): - """Test creating a patch file and applying it to a new version of the the files""" - setup_patch(self.pipeline_dir, True) - 
subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") - subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - update_obj = nf_core.subworkflows.SubworkflowUpdate( - self.pipeline_dir, sha=SUCCEED_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH - ) - - # Install the new files - install_dir = Path(tempfile.mkdtemp()) - update_obj.install_component_files("bam_sort_stats_samtools", SUCCEED_SHA, update_obj.modules_repo, install_dir) - - # Try applying the patch - subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" - patch_relpath = subworkflow_relpath / patch_fn - assert ( - update_obj.try_apply_patch( - "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir - ) - is False - ) - - -# TODO: create those two missing tests -def test_create_patch_update_success(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" - - -def test_create_patch_update_fail(self): - """ - Test creating a patch file and the updating the subworkflow - - Should have the same effect as 'test_create_patch_try_apply_successful' - but uses higher level api - """ - - -def test_remove_patch(self): - """Test creating a patch when there is no change to the subworkflow""" - setup_patch(self.pipeline_dir, True) - - # Try creating a patch file - patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) - patch_obj.patch("bam_sort_stats_samtools") - - subworkflow_path = Path(self.pipeline_dir, "subworkflows", 
GITLAB_REPO, "bam_sort_stats_samtools") - - patch_fn = f"{'-'.join('bam_sort_stats_samtools')}.diff" - # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", patch_fn} - - with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: - mock_questionary.unsafe_ask.return_value = True - patch_obj.remove("bam_sort_stats_samtools") - # Check that the diff file has been removed - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py new file mode 100644 index 0000000000..ba452083fa --- /dev/null +++ b/tests/subworkflows/test_patch.py @@ -0,0 +1,204 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.components.patch +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + +OLD_SHA = "dbb12457e32d3da8eea7dc4ae096201fff4747c5" +SUCCEED_SHA = "0a33e6a0d730ad22a0ec9f7f9a7540af6e943221" +FAIL_SHA = "b6e5e8739de9a1a0c4f85267144e43dbaf8f1461" + + +class TestSubworkflowsPatch(TestSubworkflows): + """ + Test the 'nf-core subworkflows patch' command + """ + + def modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ fasta ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ fasta ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, + prompt=False, + force=False, + 
remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + sha=OLD_SHA, + ) + + # Install the module + install_obj.install("bam_sort_stats_samtools") + + if modify_subworkflow: + # Modify the subworkflow + subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + self.modify_main_nf(subworkflow_path / "main.nf") + + def test_create_patch_no_change(self): + """Test creating a patch when there is a change to the module""" + self.setup_patch(self.pipeline_dir, False) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + with pytest.raises(UserWarning): + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that no patch file has been added to the directory + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + + def test_create_patch_change(self): + """Test creating a patch when there is no change to the subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + print(patch_lines) + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in 
patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + def test_create_patch_try_apply_successful(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", OLD_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is True + ) + + # Move the files from the temporary directory + update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + # Check that the correct lines are in the patch file + with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- 
{subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert "- ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + + # TODO: create those two missing tests + def test_create_patch_update_success(self): + """Test creating a patch file and updating a subworkflow when there is a diff conflict""" + + def 
test_create_patch_update_fail(self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + + def test_remove_patch(self): + """Test creating a patch when there is no change to the subworkflow""" + self.setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + + with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove("bam_sort_stats_samtools") + # Check that the diff file has been removed + assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} From 25940f7e90fd7f94fa9eae596e7b7d0a9dc54a3c Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:05:43 +0100 Subject: [PATCH 41/92] apply patch reverse when linting a patched subworkflow --- .../subworkflows/lint/subworkflow_changes.py | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index a9c9616a21..e6af892125 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -2,9 +2,12 @@ Check whether the content of a subworkflow has changed compared to the original repository """ +import shutil +import tempfile from pathlib import Path import nf_core.modules.modules_repo +from nf_core.modules.modules_differ import ModulesDiffer def 
subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -20,7 +23,29 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): Only runs when linting a pipeline, not the modules repository """ - tempdir = subworkflow.component_dir + if subworkflow.is_patched: + # If the subworkflow is patched, we need to apply + # the patch in reverse before comparing with the remote + tempdir_parent = Path(tempfile.mkdtemp()) + tempdir = tempdir_parent / "tmp_subworkflow_dir" + shutil.copytree(subworkflow.component_dir, tempdir) + try: + new_lines = ModulesDiffer.try_apply_patch( + subworkflow.component_type, + subworkflow.component_name, + subworkflow.org, + subworkflow.patch_path, + tempdir, + reverse=True, + ) + for file, lines in new_lines.items(): + with open(tempdir / file, "w") as fh: + fh.writelines(lines) + except LookupError: + # This error is already reported by subworkflow_patch, so just return + return + else: + tempdir = subworkflow.component_dir subworkflow.branch = subworkflow_lint_object.modules_json.get_component_branch( "subworkflows", subworkflow.component_name, subworkflow.repo_url, subworkflow.org ) From b2cfd0125ba3e0658a17d301291fc952ca1f8afc Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:31:47 +0100 Subject: [PATCH 42/92] update get_patch_fn to work with subworkflows --- nf_core/components/update.py | 11 +++++++++-- nf_core/modules/modules_json.py | 16 ++++++++-------- tests/modules/test_patch.py | 22 +++++++++++----------- 3 files changed, 28 insertions(+), 21 deletions(-) diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 7edb0ffd06..76c6b2b075 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -449,7 +449,9 @@ def get_single_component_info(self, component): self.modules_repo.setup_branch(current_branch) # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + patch_fn = 
self.modules_json.get_patch_fn( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) return (self.modules_repo, component, sha, patch_fn) @@ -695,7 +697,12 @@ def get_all_components_info(self, branch=None): # Add patch filenames to the components that have them components_info = [ - (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + ( + repo, + comp, + sha, + self.modules_json.get_patch_fn(self.component_type, comp, repo.remote_url, repo.repo_path), + ) for repo, comp, sha in components_info ] diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 5628c75742..64aab54bff 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -837,17 +837,17 @@ def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True if write_file: self.dump() - def get_patch_fn(self, module_name, repo_url, install_dir): + def get_patch_fn(self, component_type, component_name, repo_url, install_dir): """ - Get the patch filename of a module + Get the patch filename of a component Args: - module_name (str): The name of the module - repo_url (str): The URL of the repository containing the module - install_dir (str): The name of the directory where modules are installed + component_name (str): The name of the component + repo_url (str): The URL of the repository containing the component + install_dir (str): The name of the directory where components are installed Returns: - (str): The patch filename for the module, None if not present + (str): The patch filename for the component, None if not present """ if self.modules_json is None: self.load() @@ -855,9 +855,9 @@ def get_patch_fn(self, module_name, repo_url, install_dir): path = ( self.modules_json["repos"] .get(repo_url, {}) - .get("modules") + .get(component_type) .get(install_dir) - .get(module_name, {}) + .get(component_name, {}) .get("patch") ) return Path(path) if path is not None else None diff 
--git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 2f60cd4a20..27c1e342e4 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -80,7 +80,7 @@ def test_create_patch_no_change(self): # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" @@ -98,7 +98,7 @@ def test_create_patch_change(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -131,7 +131,7 @@ def test_create_patch_try_apply_successful(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -157,7 +157,7 @@ def test_create_patch_try_apply_successful(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -199,7 +199,7 @@ def 
test_create_patch_try_apply_failed(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -238,7 +238,7 @@ def test_create_patch_update_success(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -258,9 +258,9 @@ def test_create_patch_update_success(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) + ), modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file with open(module_path / patch_fn) as fh: @@ -299,7 +299,7 @@ def test_create_patch_update_fail(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -353,7 +353,7 @@ def 
test_remove_patch(self): # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -365,4 +365,4 @@ def test_remove_patch(self): # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None From 6ec2dcd763865dcfd3c3672e244b7e3a401ac037 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Fri, 22 Nov 2024 16:49:39 +0100 Subject: [PATCH 43/92] move modules_differ.py to components_differ.py --- .../components_differ.py} | 112 +++++++++--------- nf_core/components/patch.py | 6 +- nf_core/components/update.py | 12 +- nf_core/modules/lint/main_nf.py | 4 +- nf_core/modules/lint/meta_yml.py | 6 +- nf_core/modules/lint/module_changes.py | 4 +- nf_core/modules/lint/module_patch.py | 16 +-- nf_core/modules/modules_json.py | 4 +- .../subworkflows/lint/subworkflow_changes.py | 4 +- 9 files changed, 84 insertions(+), 84 deletions(-) rename nf_core/{modules/modules_differ.py => components/components_differ.py} (83%) diff --git a/nf_core/modules/modules_differ.py b/nf_core/components/components_differ.py similarity index 83% rename from nf_core/modules/modules_differ.py rename to nf_core/components/components_differ.py index c151fcce7c..db51c1910d 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/components/components_differ.py @@ -16,10 +16,10 @@ log = logging.getLogger(__name__) -class ModulesDiffer: +class ComponentsDiffer: """ Static class that provides functionality for computing diffs between - different instances 
of a module + different instances of a module or subworkflow """ class DiffEnum(enum.Enum): @@ -34,15 +34,15 @@ class DiffEnum(enum.Enum): REMOVED = enum.auto() @staticmethod - def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): + def get_component_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): """ - Compute the diff between the current module version + Compute the diff between the current component version and the new version. Args: - from_dir (strOrPath): The folder containing the old module files - to_dir (strOrPath): The folder containing the new module files - path_in_diff (strOrPath): The directory displayed containing the module + from_dir (strOrPath): The folder containing the old component files + to_dir (strOrPath): The folder containing the new component files + path_in_diff (strOrPath): The directory displayed containing the component file in the diff. Added so that temporary dirs are not shown for_git (bool): indicates whether the diff file is to be @@ -52,7 +52,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d dsp_to_dir (str | Path): The to directory to display in the diff Returns: - dict[str, (ModulesDiffer.DiffEnum, str)]: A dictionary containing + dict[str, (ComponentsDiffer.DiffEnum, str)]: A dictionary containing the diff type and the diff string (empty if no diff) """ if for_git: @@ -72,7 +72,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d ) files = list(files) - # Loop through all the module files and compute their diffs if needed + # Loop through all the component files and compute their diffs if needed for file in files: temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) @@ -84,7 +84,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d if new_lines == old_lines: # The files are identical - diffs[file] = (ModulesDiffer.DiffEnum.UNCHANGED, ()) + diffs[file] = 
(ComponentsDiffer.DiffEnum.UNCHANGED, ()) else: # Compute the diff diff = difflib.unified_diff( @@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): with open(temp_path) as fh: @@ -106,7 +106,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path("/dev", "null")), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CREATED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CREATED, diff) elif curr_path.exists(): # The file was removed @@ -119,14 +119,14 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path("/dev", "null")), ) - diffs[file] = (ModulesDiffer.DiffEnum.REMOVED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.REMOVED, diff) return diffs @staticmethod def write_diff_file( diff_path, - module, + component, repo_path, from_dir, to_dir, @@ -139,20 +139,19 @@ def write_diff_file( limit_output=False, ): """ - Writes the diffs of a module to the diff file. + Writes the diffs of a component to the diff file. 
Args: diff_path (str | Path): The path to the file that should be appended - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + diffs (dict[str, (ComponentsDiffer.DiffEnum, str)]): A dictionary containing the type of change and the diff (if any) - module_dir (str | Path): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against for_git (bool): indicates whether the diff file is to be compatible with `git apply`. 
If true it adds a/ and b/ prefixes to the file paths @@ -165,36 +164,36 @@ def write_diff_file( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) - if all(diff_status == ModulesDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): - raise UserWarning("Module is unchanged") - log.debug(f"Writing diff of '{module}' to '{diff_path}'") + diffs = ComponentsDiffer.get_component_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) + if all(diff_status == ComponentsDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): + raise UserWarning("Component is unchanged") + log.debug(f"Writing diff of '{component}' to '{diff_path}'") with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_path, module)}' between" + f"Changes in component '{Path(repo_path, component)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_path, module)}'\n") + fh.write(f"Changes in component '{Path(repo_path, component)}'\n") for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' but not 
shown\n") + fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: # The file has changed write the diff lines to the file - fh.write(f"Changes in '{Path(module, file)}':\n") + fh.write(f"Changes in '{Path(component, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -237,7 +236,7 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, + component, repo_path, from_dir, to_dir, @@ -248,16 +247,15 @@ def print_diff( limit_output=False, ): """ - Prints the diffs between two module versions to the terminal + Prints the diffs between two component versions to the terminal Args: - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - module_dir (str): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff limit_output (bool): If true, don't print the diff for files other than main.nf @@ -267,41 +265,43 @@ def print_diff( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs( + diffs = ComponentsDiffer.get_component_diffs( from_dir, to_dir, for_git=False, dsp_from_dir=dsp_from_dir, dsp_to_dir=dsp_to_dir ) console = 
Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in component '{Path(repo_path, component)}' between" + f" ({current_version}) and" + f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_path, module)}'") + log.info(f"Changes in component '{Path(repo_path, component)}'") panel_group: list[RenderableType] = [] for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical log.info(f"'{Path(dsp_from_dir, file)}' is unchanged") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits log.info(f"'{Path(dsp_from_dir, file)}' was created") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' but not shown") + log.info(f"Changes in '{Path(component, file)}' but not shown") else: # The file has changed - log.info(f"Changes in '{Path(module, file)}':") + log.info(f"Changes in '{Path(component, file)}':") # Pretty print the diff using the pygments diff lexer syntax = Syntax("".join(diff), "diff", theme="ansi_dark", line_numbers=True) panel_group.append(Panel(syntax, title=str(file), title_align="left", padding=0)) console.print( Panel( Group(*panel_group), - title=f"[white]{str(module)}[/white]", + title=f"[white]{str(component)}[/white]", title_align="left", padding=0, border_style="blue", @@ -422,7 +422,7 @@ def try_apply_single_patch(file_lines, 
patch, reverse=False): LookupError: If it fails to find the old lines from the patch in the file. """ - org_lines, patch_lines = ModulesDiffer.get_new_and_old_lines(patch) + org_lines, patch_lines = ComponentsDiffer.get_new_and_old_lines(patch) if reverse: patch_lines, org_lines = org_lines, patch_lines @@ -479,7 +479,7 @@ def try_apply_patch( Args: component_type (str): The type of component (modules or subworkflows) component (str): Name of the module or subworkflow - repo_path (str): Name of the repository where the module resides + repo_path (str): Name of the repository where the component resides patch_path (str): The absolute path to the patch file to be applied component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse @@ -492,7 +492,7 @@ def try_apply_patch( LookupError: If the patch application fails in a file """ component_relpath = Path(component_type, repo_path, component) - patches = ModulesDiffer.per_file_patch(patch_path) + patches = ComponentsDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") @@ -504,6 +504,6 @@ def try_apply_patch( except FileNotFoundError: # The file was added with the patch file_lines = [""] - patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) + patched_new_lines = ComponentsDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 77717877fc..59ec7a381b 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -8,7 +8,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer from nf_core.modules.modules_json import ModulesJson log = 
logging.getLogger(__name__) @@ -114,7 +114,7 @@ def patch(self, component=None): # Write the patch to a temporary location (otherwise it is printed to the screen later) patch_temp_path = tempfile.mktemp() try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( patch_temp_path, component, self.modules_repo.repo_path, @@ -135,7 +135,7 @@ def patch(self, component=None): log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, self.modules_repo.repo_path, component_install_dir, diff --git a/nf_core/components/update.py b/nf_core/components/update.py index 76c6b2b075..901a7f02fe 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -9,13 +9,13 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.components_utils import ( get_components_to_install, prompt_component_version_sha, ) from nf_core.components.install import ComponentInstall from nf_core.components.remove import ComponentRemove -from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo from nf_core.utils import plural_es, plural_s, plural_y @@ -223,7 +223,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" ) try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( self.save_diff_fn, component, modules_repo.repo_path, @@ -265,7 +265,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif 
self.show_diff: - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, modules_repo.repo_path, component_dir, @@ -313,7 +313,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if self.save_diff_fn: # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( + ComponentsDiffer.append_modules_json_diff( self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), @@ -817,7 +817,7 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch( + new_files = ComponentsDiffer.try_apply_patch( self.component_type, component, repo_path, patch_path, temp_component_dir ) except LookupError: @@ -837,7 +837,7 @@ def try_apply_patch( # Create the new patch file log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( Path(temp_component_dir, patch_path.relative_to(component_dir)), component, repo_path, diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 2b7878ca0f..848e17130e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -15,8 +15,8 @@ import nf_core import nf_core.modules.modules_utils +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -50,7 +50,7 @@ def main_nf( # otherwise read the lines directly from the module lines: List[str] = [] if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 59f0f01252..d0268a40cc 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -6,9 
+6,9 @@ import ruamel.yaml from jsonschema import exceptions, validators +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -46,7 +46,7 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None # Check if we have a patch file, get original file in that case meta_yaml = read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, @@ -208,7 +208,7 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> yaml.preserve_quotes = True # Check if we have a patch file, get original file in that case if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index 708a2bad68..121de00c0a 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def module_changes(module_lint_object, module): @@ -30,7 +30,7 @@ def module_changes(module_lint_object, module): tempdir = tempdir_parent / "tmp_module_dir" shutil.copytree(module.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, 
module.org, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 19c6e76fec..6347c5c553 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -1,7 +1,7 @@ from pathlib import Path +from ...components.components_differ import ComponentsDiffer from ...components.nfcore_component import NFCoreComponent -from ..modules_differ import ModulesDiffer def module_patch(module_lint_obj, module: NFCoreComponent): @@ -66,11 +66,11 @@ def check_patch_valid(module, patch_path): continue topath = Path(line.split(" ")[1].strip("\n")) if frompath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CREATED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CREATED)) elif topath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.REMOVED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.REMOVED)) elif frompath == topath: - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CHANGED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CHANGED)) else: module.failed.append( ( @@ -105,7 +105,7 @@ def check_patch_valid(module, patch_path): # Warn about any created or removed files passed = True for path, diff_status in paths_in_patch: - if diff_status == ModulesDiffer.DiffEnum.CHANGED: + if diff_status == ComponentsDiffer.DiffEnum.CHANGED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -116,7 +116,7 @@ def check_patch_valid(module, patch_path): ) passed = False continue - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -130,7 +130,7 @@ def check_patch_valid(module, patch_path): module.warned.append( ("patch", f"Patch file performs file creation of {path}. 
This is discouraged."), patch_path ) - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: if Path(module.base_dir, path).exists(): module.failed.append( ( @@ -161,7 +161,7 @@ def patch_reversible(module_lint_object, module, patch_path): (bool): False if any test failed, True otherwise """ try: - ModulesDiffer.try_apply_patch( + ComponentsDiffer.try_apply_patch( module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 64aab54bff..9ae735b1c0 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -19,7 +19,7 @@ from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier -from .modules_differ import ModulesDiffer +from ..components.components_differ import ComponentsDiffer log = logging.getLogger(__name__) @@ -883,7 +883,7 @@ def try_apply_patch_reverse(self, component_type, component, repo_name, patch_re patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch( + new_files = ComponentsDiffer.try_apply_patch( component_type, component, repo_name, patch_path, component_dir, reverse=True ) except LookupError as e: diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index e6af892125..cf0fd7211c 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -30,7 +30,7 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): tempdir = tempdir_parent / "tmp_subworkflow_dir" 
shutil.copytree(subworkflow.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( subworkflow.component_type, subworkflow.component_name, subworkflow.org, From 27582f94a1b9b8e2fe18b118e7138c367f46ea8b Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 11:06:07 +0100 Subject: [PATCH 44/92] add subworkflows patch missing tests --- tests/subworkflows/test_patch.py | 111 +++++++++++++++++++++++++++++-- 1 file changed, 107 insertions(+), 4 deletions(-) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index ba452083fa..388c5adde5 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -172,17 +172,120 @@ def test_create_patch_try_apply_failed(self): is False ) - # TODO: create those two missing tests def test_create_patch_update_success(self): - """Test creating a patch file and updating a subworkflow when there is a diff conflict""" - - def test_create_patch_update_fail(self): """ Test creating a patch file and the updating the subworkflow Should have the same effect as 'test_create_patch_try_apply_successful' but uses higher level api """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", 
GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Update the subworkflow + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=OLD_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + assert update_obj.update("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn), modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) + + # Check that the correct lines are in the patch file + with open(swf_path / patch_fn) as fh: + patch_lines = fh.readlines() + swf_relpath = swf_path.relative_to(self.pipeline_dir) + assert f"--- {swf_relpath / 'main.nf'}\n" in patch_lines + assert f"+++ {swf_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(swf_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # this line should have been removed by the patch + assert " ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_update_fail(self): + """ + Test creating a patch file and updating a subworkflow when there is a diff conflict + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + 
patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Save the file contents for downstream comparison + with open(swf_path / patch_fn) as fh: + patch_contents = fh.read() + + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=FAIL_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + update_obj.update("bam_sort_stats_samtools") + + # Check that the installed files have not been affected by the attempted patch + temp_dir = Path(tempfile.mkdtemp()) + nf_core.components.components_command.ComponentCommand( + "subworkflows", self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH + ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir) + + temp_module_dir = temp_dir / "bam_sort_stats_samtools" + for file in os.listdir(temp_module_dir): + assert file in os.listdir(swf_path) + with open(swf_path / file) as fh: + installed = fh.read() + with open(temp_module_dir / file) as fh: + shouldbe = fh.read() + assert installed == shouldbe + + # Check that the patch file is unaffected + with open(swf_path / patch_fn) as fh: + new_patch_contents = fh.read() + assert patch_contents == new_patch_contents def test_remove_patch(self): """Test creating a patch when there is no change to the subworkflow""" From 4f93d5759db5b23383a2bf95665de4c8172c9e03 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 11:56:51 +0100 Subject: [PATCH 45/92] fix subworkflows update test --- 
tests/subworkflows/test_update.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py index 153038cd1d..9f5d1939f3 100644 --- a/tests/subworkflows/test_update.py +++ b/tests/subworkflows/test_update.py @@ -98,7 +98,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self): with open(patch_path) as fh: line = fh.readline() assert line.startswith( - "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" + "Changes in component 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and" ) def test_install_at_hash_and_update_and_save_diff_limit_output(self): From 5ad45701e7d89736efef768ce712eddc5215f32e Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:04 +0100 Subject: [PATCH 46/92] update changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a16435057..fed991f850 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,8 @@ ### Subworkflows +- Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) + ### General - Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) From 37ca244d7a0ebaa48b24b28d02c900114e780a01 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:17 +0100 Subject: [PATCH 47/92] add help text for --remove flag --- nf_core/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 6780ad80c6..81d088e13d 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -1024,7 +1024,7 @@ def command_modules_update( default=".", help=r"Pipeline directory. 
[dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -1578,7 +1578,7 @@ def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existent patch file and regenerate it.") def subworkflows_patch(ctx, tool, dir, remove): """ Create a patch file for minor changes in a subworkflow From 805ba91df58633eaf68df64677e714b4b7ca04f0 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:24:33 +0100 Subject: [PATCH 48/92] apply code review suggestions to patch tests --- tests/modules/test_patch.py | 20 ++++++++++---------- tests/subworkflows/test_patch.py | 20 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 27c1e342e4..1c23871ccc 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + assert "bismark-align.diff" in set(os.listdir(module_path)) # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -94,7 +94,7 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name 
has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -127,7 +127,7 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -153,7 +153,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -195,7 +195,7 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -234,7 +234,7 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == 
{"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -254,7 +254,7 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -295,7 +295,7 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -349,7 +349,7 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert patch_fn in set(os.listdir(module_path)) # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -361,7 +361,7 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + assert patch_fn not in set(os.listdir(module_path)) # Check that the 'modules.json' entry has been removed 
modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index 388c5adde5..3df575c3de 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -66,7 +66,7 @@ def test_create_patch_no_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) def test_create_patch_change(self): """Test creating a patch when there is a change to the subworkflow""" subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -101,7 +101,7 @@ def test_create_patch_try_apply_successful(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + 
assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -152,7 +152,7 @@ def test_create_patch_try_apply_failed(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -188,7 +188,7 @@ def test_create_patch_update_success(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -208,7 +208,7 @@ def test_create_patch_update_success(self): assert update_obj.update("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -245,7 +245,7 @@ def test_create_patch_update_fail(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(swf_path)) == {"main.nf", "meta.yml", patch_fn} + assert patch_fn in set(os.listdir(swf_path)) # Check the 'modules.json' contains a patch 
file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -298,10 +298,10 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml", "bam_sort_stats_samtools.diff"} + assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed - assert set(os.listdir(subworkflow_path)) == {"main.nf", "meta.yml"} + assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) From 84cec26f4f8b350949f02d6a010b340a59d5cca0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Mon, 25 Nov 2024 15:28:28 +0100 Subject: [PATCH 49/92] Update tests/modules/test_patch.py --- tests/modules/test_patch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index 1c23871ccc..df24ce819b 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert "bismark-align.diff" in set(os.listdir(module_path)) + assert "bismark-align.diff" not in set(os.listdir(module_path)) # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) From 8b2bec4933fe41645a3fe9a5bd7db32d030a5237 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 15:39:55 +0100 Subject: [PATCH 50/92] apply suggestions by @mashehu --- 
tests/modules/test_patch.py | 20 ++++++++++---------- tests/subworkflows/test_patch.py | 20 ++++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py index df24ce819b..f608278618 100644 --- a/tests/modules/test_patch.py +++ b/tests/modules/test_patch.py @@ -76,7 +76,7 @@ def test_create_patch_no_change(self): module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) # Check that no patch file has been added to the directory - assert "bismark-align.diff" not in set(os.listdir(module_path)) + assert not (module_path / "bismark-align.diff").exists() # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -94,7 +94,7 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -127,7 +127,7 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -153,7 +153,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch 
file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -195,7 +195,7 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -234,7 +234,7 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -254,7 +254,7 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -295,7 +295,7 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(module_path)) + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -349,7 +349,7 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert patch_fn in set(os.listdir(module_path)) + assert 
(module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -361,7 +361,7 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert patch_fn not in set(os.listdir(module_path)) + assert not (module_path / patch_fn).exists() # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py index 3df575c3de..5bb6a6798e 100644 --- a/tests/subworkflows/test_patch.py +++ b/tests/subworkflows/test_patch.py @@ -66,7 +66,7 @@ def test_create_patch_no_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that no patch file has been added to the directory - assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() def test_create_patch_change(self): """Test creating a patch when there is no change to the subworkflow""" @@ -79,7 +79,7 @@ def test_create_patch_change(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -101,7 +101,7 @@ def test_create_patch_try_apply_successful(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + 
assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -125,7 +125,7 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA) # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() # Check that the correct lines are in the patch file with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh: @@ -152,7 +152,7 @@ def test_create_patch_try_apply_failed(self): patch_obj.patch("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() update_obj = nf_core.subworkflows.SubworkflowUpdate( self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH @@ -188,7 +188,7 @@ def test_create_patch_update_success(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -208,7 +208,7 @@ def test_create_patch_update_success(self): assert update_obj.update("bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -245,7 +245,7 
@@ def test_create_patch_update_fail(self): patch_fn = "bam_sort_stats_samtools.diff" # Check that a patch file with the correct name has been created - assert patch_fn in set(os.listdir(swf_path)) + assert (swf_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the subworkflow modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) @@ -298,10 +298,10 @@ def test_remove_patch(self): subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") # Check that a patch file with the correct name has been created - assert "bam_sort_stats_samtools.diff" in set(os.listdir(subworkflow_path)) + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary: mock_questionary.unsafe_ask.return_value = True patch_obj.remove("bam_sort_stats_samtools") # Check that the diff file has been removed - assert "bam_sort_stats_samtools.diff" not in set(os.listdir(subworkflow_path)) + assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists() From 9e9d930ebbb16f717e8d3e51849cb9e7f41c0488 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 25 Nov 2024 16:18:45 +0100 Subject: [PATCH 51/92] remove def from nextflow.config and add trace_report_suffix param --- nf_core/pipeline-template/nextflow.config | 10 +++++----- nf_core/pipeline-template/nextflow_schema.json | 8 +++++++- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d05183..596c0e0b0c 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -52,6 +52,7 @@ params { version = false {%- if test_config %} pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') {%- if 
nf_core_configs -%} // Config options @@ -249,22 +250,21 @@ set -C # No clobber - prevent output redirection from overwriting files. // Disable process selector warnings by default. Use debug profile to enable warnings. nextflow.enable.configProcessNamesValidation = false -def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html" } manifest { diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4136a0b490..389f9d104d 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -229,7 +229,13 @@ "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - }{% endif %} + }{% endif %}, + "trace_report_suffix": { + "type": "string", + "fa_icon": "far fa-calendar", + "description": "Suffix to add to the trace report filename. 
Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.", + "hidden": true + } } } }, From 225699c07eb043b327e11301c9d19719d8282b04 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 25 Nov 2024 15:20:34 +0000 Subject: [PATCH 52/92] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f850..213306cf1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) +- Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) ### Download From 9f5a95c5866b2db9ab70cafb8ad6a11b77df0930 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 07:31:11 +0000 Subject: [PATCH 53/92] Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28ce84befe..a7c7d38ce3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.8.0 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 5965621076746c79d491e23d272d702cd0982d84 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Tue, 26 Nov 2024 07:32:01 +0000 Subject: [PATCH 54/92] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f850..47981fe051 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -46,6 +46,7 @@ - Update pre-commit hook 
astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] From f903ce1808b788805ba802809f288b49616913d2 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 26 Nov 2024 09:59:12 +0100 Subject: [PATCH 55/92] add validation.monochromeLogs to config --- nf_core/pipeline-template/nextflow.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d05183..7e4e7b3e72 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -286,6 +286,7 @@ plugins { validation { defaultIgnoreParams = ["genomes"] + monochromeLogs = params.monochrome_logs help { enabled = true command = "nextflow run {{ name }} -profile --input samplesheet.csv --outdir " From d56c87cbebececdc306ad6acd6dff790d3b47280 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Tue, 26 Nov 2024 10:41:35 +0100 Subject: [PATCH 56/92] ignore trace_report_suffix default check --- nf_core/pipelines/schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 96ee3ffbb3..2a8a9a1929 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -96,6 +96,7 @@ def _update_validation_plugin_from_config(self) -> None: conf.get("validation.help.shortParameter", "help"), conf.get("validation.help.fullParameter", "helpFull"), conf.get("validation.help.showHiddenParameter", "showHidden"), + "trace_report_suffix", # report suffix should be ignored by default as it is a Java 
Date object ] # Help parameter should be ignored by default ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "") ignored_params_config = [ From af1a1be117dd04258bc184f75cefa7c4ca48dcba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Wed, 27 Nov 2024 15:42:15 +0000 Subject: [PATCH 57/92] update snapshots --- .../test_customisation_help.svg | 256 +++++++++--------- .../test_create_app/test_type_custom.svg | 254 ++++++++--------- 2 files changed, 255 insertions(+), 255 deletions(-) diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg index 450f1d303c..c34bd85230 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg @@ -19,257 +19,257 @@ font-weight: 700; } - .terminal-333203530-matrix { + .terminal-4061415502-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-333203530-title { + .terminal-4061415502-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-333203530-r1 { fill: #c5c8c6 } -.terminal-333203530-r2 { fill: #e3e3e3 } -.terminal-333203530-r3 { fill: #989898 } -.terminal-333203530-r4 { fill: #e1e1e1 } -.terminal-333203530-r5 { fill: #4ebf71;font-weight: bold } -.terminal-333203530-r6 { fill: #1e1e1e } -.terminal-333203530-r7 { fill: #e2e2e2 } -.terminal-333203530-r8 { fill: #507bb3 } -.terminal-333203530-r9 { fill: #808080 } -.terminal-333203530-r10 { fill: #dde6ed;font-weight: bold } -.terminal-333203530-r11 { fill: #001541 } -.terminal-333203530-r12 { fill: #14191f } -.terminal-333203530-r13 { fill: #0178d4 } -.terminal-333203530-r14 { fill: #454a50 } -.terminal-333203530-r15 { fill: #e2e3e3;font-weight: bold } -.terminal-333203530-r16 { fill: #000000 } -.terminal-333203530-r17 { fill: 
#e4e4e4 } -.terminal-333203530-r18 { fill: #7ae998 } -.terminal-333203530-r19 { fill: #0a180e;font-weight: bold } -.terminal-333203530-r20 { fill: #008139 } -.terminal-333203530-r21 { fill: #fea62b;font-weight: bold } -.terminal-333203530-r22 { fill: #a7a9ab } -.terminal-333203530-r23 { fill: #e2e3e3 } + .terminal-4061415502-r1 { fill: #c5c8c6 } +.terminal-4061415502-r2 { fill: #e3e3e3 } +.terminal-4061415502-r3 { fill: #989898 } +.terminal-4061415502-r4 { fill: #e1e1e1 } +.terminal-4061415502-r5 { fill: #4ebf71;font-weight: bold } +.terminal-4061415502-r6 { fill: #1e1e1e } +.terminal-4061415502-r7 { fill: #e2e2e2 } +.terminal-4061415502-r8 { fill: #507bb3 } +.terminal-4061415502-r9 { fill: #808080 } +.terminal-4061415502-r10 { fill: #dde6ed;font-weight: bold } +.terminal-4061415502-r11 { fill: #001541 } +.terminal-4061415502-r12 { fill: #14191f } +.terminal-4061415502-r13 { fill: #0178d4 } +.terminal-4061415502-r14 { fill: #454a50 } +.terminal-4061415502-r15 { fill: #e2e3e3;font-weight: bold } +.terminal-4061415502-r16 { fill: #000000 } +.terminal-4061415502-r17 { fill: #e4e4e4 } +.terminal-4061415502-r18 { fill: #7ae998 } +.terminal-4061415502-r19 { fill: #0a180e;font-weight: bold } +.terminal-4061415502-r20 { fill: #008139 } +.terminal-4061415502-r21 { fill: #fea62b;font-weight: bold } +.terminal-4061415502-r22 { fill: #a7a9ab } +.terminal-4061415502-r23 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… - - -Template features - - -▔▔▔▔▔▔▔▔ -        Toggle all features -▁▁▁▁▁▁▁▁ -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use a GitHub Create a GitHub  Show help  -▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -pipeline. 
- -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github CI testsThe pipeline will  Show help  -▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous▃▃ -Integration (CI)  -testing - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use reference genomesThe pipeline will be  Hide help  -▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -copy of the most  -common reference  -genome files from  -iGenomes - - -Nf-core pipelines are configured to use a copy of the most common reference  -genome files. - -By selecting this option, your pipeline will include a configuration file  -specifying the paths to these files. - -The required code to use these files will also be included in the template.  -When the pipeline user provides an appropriate genome key, the pipeline will -automatically download the required reference files. -▅▅ -For more information about reference genomes in nf-core pipelines, see the  - - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github badgesThe README.md file of  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous▅▅ +Integration (CI)  +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Hide help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + + +Nf-core pipelines are configured to use a copy of the most common reference  +genome files. 
+ +By selecting this option, your pipeline will include a configuration file  +specifying the paths to these files. + +The required code to use these files will also be included in the template.  +When the pipeline user provides an appropriate genome key, the pipeline will +automatically download the required reference files. +▅▅ +For more information about reference genomes in nf-core pipelines, see the  + + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg index 6e178ba840..b8dea05604 100644 --- a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg +++ b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg @@ -19,256 +19,256 @@ font-weight: 700; } - .terminal-3425198753-matrix { + .terminal-1727160999-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3425198753-title { + .terminal-1727160999-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3425198753-r1 { fill: #c5c8c6 } -.terminal-3425198753-r2 { fill: #e3e3e3 } -.terminal-3425198753-r3 { fill: #989898 } -.terminal-3425198753-r4 { fill: #e1e1e1 } -.terminal-3425198753-r5 { fill: #4ebf71;font-weight: bold } -.terminal-3425198753-r6 { fill: #1e1e1e } -.terminal-3425198753-r7 { fill: #0178d4 } -.terminal-3425198753-r8 { fill: #e2e2e2 } -.terminal-3425198753-r9 { fill: #507bb3 } -.terminal-3425198753-r10 { fill: #808080 } -.terminal-3425198753-r11 { fill: #dde6ed;font-weight: bold } -.terminal-3425198753-r12 { fill: #001541 } -.terminal-3425198753-r13 { fill: #14191f } -.terminal-3425198753-r14 { fill: #454a50 } -.terminal-3425198753-r15 { fill: #7ae998 } 
-.terminal-3425198753-r16 { fill: #e2e3e3;font-weight: bold } -.terminal-3425198753-r17 { fill: #0a180e;font-weight: bold } -.terminal-3425198753-r18 { fill: #000000 } -.terminal-3425198753-r19 { fill: #008139 } -.terminal-3425198753-r20 { fill: #fea62b;font-weight: bold } -.terminal-3425198753-r21 { fill: #a7a9ab } -.terminal-3425198753-r22 { fill: #e2e3e3 } + .terminal-1727160999-r1 { fill: #c5c8c6 } +.terminal-1727160999-r2 { fill: #e3e3e3 } +.terminal-1727160999-r3 { fill: #989898 } +.terminal-1727160999-r4 { fill: #e1e1e1 } +.terminal-1727160999-r5 { fill: #4ebf71;font-weight: bold } +.terminal-1727160999-r6 { fill: #1e1e1e } +.terminal-1727160999-r7 { fill: #0178d4 } +.terminal-1727160999-r8 { fill: #e2e2e2 } +.terminal-1727160999-r9 { fill: #507bb3 } +.terminal-1727160999-r10 { fill: #808080 } +.terminal-1727160999-r11 { fill: #dde6ed;font-weight: bold } +.terminal-1727160999-r12 { fill: #001541 } +.terminal-1727160999-r13 { fill: #14191f } +.terminal-1727160999-r14 { fill: #454a50 } +.terminal-1727160999-r15 { fill: #7ae998 } +.terminal-1727160999-r16 { fill: #e2e3e3;font-weight: bold } +.terminal-1727160999-r17 { fill: #0a180e;font-weight: bold } +.terminal-1727160999-r18 { fill: #000000 } +.terminal-1727160999-r19 { fill: #008139 } +.terminal-1727160999-r20 { fill: #fea62b;font-weight: bold } +.terminal-1727160999-r21 { fill: #a7a9ab } +.terminal-1727160999-r22 { fill: #e2e3e3 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - nf-core pipelines create + nf-core pipelines create - + - - nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… - - -Template features - - -▔▔▔▔▔▔▔▔ -        Toggle all features -▁▁▁▁▁▁▁▁ -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use a GitHub Create a GitHub  Show help  -▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -pipeline. 
- -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github CI testsThe pipeline will  Show help  -▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -actions for Continuous -Integration (CI) ▄▄ -testing - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use reference genomesThe pipeline will be  Show help  -▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -copy of the most  -common reference  -genome files from  -iGenomes - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add Github badgesThe README.md file of  Show help  -▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -include GitHub badges - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Add configuration The pipeline will  Show help  -▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ -profiles containing  -custom parameters  -required to run  -nf-core pipelines at  -different institutions - -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ -        Use code lintersThe pipeline will  Show help  -▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ - Back  Continue  -▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ - - d Toggle dark mode  q Quit  a Toggle all  + + nf-core pipelines create — Create a new pipeline with the nf-core pipeline templa… + + +Template features + + +▔▔▔▔▔▔▔▔ +        Toggle all features +▁▁▁▁▁▁▁▁ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use a GitHub Create a GitHub  Show help  +▁▁▁▁▁▁▁▁        repository.repository for the ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +pipeline. 
+ +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github CI testsThe pipeline will  Show help  +▁▁▁▁▁▁▁▁include several GitHub▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +actions for Continuous +Integration (CI) ▇▇ +testing + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use reference genomesThe pipeline will be  Show help  +▁▁▁▁▁▁▁▁configured to use a ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +copy of the most  +common reference  +genome files from  +iGenomes + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add Github badgesThe README.md file of  Show help  +▁▁▁▁▁▁▁▁the pipeline will ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +include GitHub badges + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Add configuration The pipeline will  Show help  +▁▁▁▁▁▁▁▁        filesinclude configuration ▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ +profiles containing  +custom parameters  +required to run  +nf-core pipelines at  +different institutions + +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ +        Use code lintersThe pipeline will  Show help  +▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔▔ + Back  Continue  +▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁ + + d Toggle dark mode  q Quit  a Toggle all  From d48b3004b04ba001af022d47485e1f11bc5429c7 Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 27 Nov 2024 18:42:54 +0100 Subject: [PATCH 58/92] add more tests --- nf_core/commands_pipelines.py | 2 +- nf_core/pipelines/create/create.py | 2 +- nf_core/pipelines/rocrate.py | 24 +----------- tests/pipelines/test_rocrate.py | 63 +++++++++++++++++++++++++++++- 4 files changed, 65 insertions(+), 26 deletions(-) diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 9699dc53a3..3b28f4979c 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -299,7 +299,7 @@ def pipelines_rocrate( zip_path = Path(zip_path) try: rocrate_obj = ROCrate(pipeline_dir, pipeline_version) - rocrate_obj.create_rocrate(pipeline_dir, json_path=json_path, zip_path=zip_path) + rocrate_obj.create_rocrate(json_path=json_path, zip_path=zip_path) except (UserWarning, LookupError, FileNotFoundError) as e: log.error(e) sys.exit(1) diff --git 
a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 4f6fa12383..dba0a40caf 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -360,7 +360,7 @@ def render_template(self) -> None: if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: # Create the RO-Crate metadata file rocrate_obj = ROCrate(self.outdir) - rocrate_obj.create_rocrate(self.outdir, json_path=self.outdir / "ro-crate-metadata.json") + rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") # Update the .nf-core.yml with linting configurations self.fix_linting() diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py index d4e6056620..d5a51eddf6 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -83,9 +83,7 @@ def __init__(self, pipeline_dir: Path, version="") -> None: setup_requests_cachedir() - def create_rocrate( - self, outdir: Path, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None - ) -> bool: + def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None) -> bool: """ Create an RO Crate for a pipeline @@ -95,12 +93,6 @@ def create_rocrate( zip_path (Path): Path to the zip file """ - # Set input paths - try: - self.set_crate_paths(outdir) - except OSError as e: - log.error(e) - sys.exit(1) # Check that the checkout pipeline version is the same as the requested version if self.version != "": @@ -337,20 +329,6 @@ def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: if author in authors: wf_file.append_to("maintainer", author_entitity) - def set_crate_paths(self, path: Path) -> None: - """Given a pipeline name, directory, or path, set wf_crate_filename""" - - if path.is_dir(): - self.pipeline_dir = path - # wf_crate_filename = path / "ro-crate-metadata.json" - elif path.is_file(): - self.pipeline_dir = path.parent - # wf_crate_filename = path - - # Check that the 
schema file exists - if self.pipeline_dir is None: - raise OSError(f"Could not find pipeline '{path}'") - def get_orcid(name: str) -> Optional[str]: """ diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index 2e14878da1..01a77ecd76 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -1,8 +1,10 @@ """Test the nf-core pipelines rocrate command""" import shutil +import tempfile from pathlib import Path +import git import rocrate.rocrate from git import Repo @@ -25,6 +27,7 @@ def setUp(self) -> None: repo = Repo(self.pipeline_dir) repo.git.add(A=True) repo.index.commit("Initial commit") + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) def tearDown(self): """Clean up temporary files and folders""" @@ -36,7 +39,7 @@ def test_rocrate_creation(self): """Run the nf-core rocrate command""" # Run the command - self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + self.rocrate_obj assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) # Check that the crate was created @@ -64,3 +67,61 @@ def test_rocrate_creation(self): # check that it is set as author of the main entity if crate.mainEntity is not None: self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"]) + + def test_rocrate_creation_wrong_pipeline_dir(self): + """Run the nf-core rocrate command with a wrong pipeline directory""" + # Run the command + + # Check that it raises a UserWarning + with self.assertRaises(UserWarning): + nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir / "bad_dir") + + # assert that the crate was not created + self.assertFalse(Path(self.pipeline_dir / "bad_dir", "ro-crate-metadata.json").exists()) + + def test_rocrate_creation_with_wrong_version(self): + """Run the nf-core rocrate command with a pipeline version""" + # Run the command + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + + # Check that the crate 
was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_without_git(self): + """Run the nf-core rocrate command with a pipeline version""" + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + # remove git repo + shutil.rmtree(self.pipeline_dir / ".git") + # Check that the crate was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_to_zip(self): + """Run the nf-core rocrate command with a zip output""" + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, zip_path=self.pipeline_dir) + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + def test_rocrate_creation_for_fetchngs(self): + """Run the nf-core rocrate command with nf-core/fetchngs""" + tmp_dir = Path(tempfile.mkdtemp()) + # git clone nf-core/fetchngs + git.Repo.clone_from("https://github.com/nf-core/fetchngs", tmp_dir / "fetchngs") + # Run the command + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(tmp_dir / "fetchngs", version="1.12.0") + assert self.rocrate_obj.create_rocrate(tmp_dir / "fetchngs", self.pipeline_dir) + + # Check that Sateesh Peri is mentioned in creator field + + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "#main.nf": + assert "https://orcid.org/0000-0002-9879-9070" in entity_json["creator"] + + # Clean up + shutil.rmtree(tmp_dir) From 65d74d58210f6f509207967ee5e888a3c14597ab Mon Sep 17 00:00:00 2001 From: mashehu Date: Wed, 27 Nov 2024 23:15:32 +0100 Subject: [PATCH 59/92] enable zip output --- nf_core/pipelines/rocrate.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/rocrate.py 
b/nf_core/pipelines/rocrate.py index d5a51eddf6..915f203f00 100644 --- a/nf_core/pipelines/rocrate.py +++ b/nf_core/pipelines/rocrate.py @@ -122,7 +122,6 @@ def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[No log.info(f"Saving metadata file to '{json_path}'") self.crate.metadata.write(json_path) - return True # Save the whole crate zip file if zip_path is not None: @@ -130,11 +129,13 @@ def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[No zip_path = zip_path / "ro-crate.crate.zip" log.info(f"Saving zip file '{zip_path}") self.crate.write_zip(zip_path) - return True + if json_path is None and zip_path is None: log.error("Please provide a path to save the ro-crate file or the zip file.") return False + return True + def make_workflow_rocrate(self) -> None: """ Create an RO Crate for a pipeline From 0d337393f455c8b5cc8256cf7e66aac477988e11 Mon Sep 17 00:00:00 2001 From: Louis LE NEZET <58640615+LouisLeNezet@users.noreply.github.com> Date: Thu, 28 Nov 2024 10:08:28 +0100 Subject: [PATCH 60/92] Move modules config import after base.config --- nf_core/pipeline-template/nextflow.config | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d05183..a9aa9c553e 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -76,6 +76,11 @@ params { includeConfig 'conf/base.config' {%- else %} +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} + process { // TODO nf-core: Check the defaults for all processes cpus = { 1 * task.attempt } @@ -316,8 +321,3 @@ validation { }{% endif %} } {%- endif %} - -{% if modules -%} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{%- endif %} From 43287c654f41e8e7dcae44be6f21215174ae61b2 Mon Sep 17 00:00:00 
2001 From: =?UTF-8?q?J=C3=BAlia=20Mir=20Pedrol?= Date: Thu, 28 Nov 2024 14:32:30 +0100 Subject: [PATCH 61/92] Update nf_core/pipelines/create/template_features.yml --- nf_core/pipelines/create/template_features.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index e2293567ea..9841879e83 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -442,6 +442,9 @@ rocrate: This will add a `ro-crate-metadata.json` file to describe the pipeline. nfcore_pipelines: False custom_pipelines: True + linting: + files_warn: + - "ro-crate-metadata.json" vscode: skippable_paths: - ".vscode" From 2eec420b1db2f568a2607350f400c7cb5cf66a3d Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 15:46:29 +0100 Subject: [PATCH 62/92] Update documentation --- .../pipeline-template/.github/CONTRIBUTING.md | 10 +++---- nf_core/pipeline-template/docs/usage.md | 29 +++++++++---------- 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 0200ea26ce..3e6b960088 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ name }}: Contributing Guidelines +# `{{ name }}`: Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ name }}. @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- A PR should be made on `master` from patch to directly adress this particular bug. 
{% if is_nfcore -%} @@ -78,13 +78,13 @@ For further information/help, please consult the [{{ name }} documentation](http ## Pipeline contribution conventions -To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. +To make the `{{ name }}` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. ### Adding a new step If you wish to contribute a new step, please use the following coding standards: -1. Define the corresponding input channel into your new process from the expected previous process channel +1. Define the corresponding input channel into your new process from the expected previous process channel. 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). @@ -99,7 +99,7 @@ If you wish to contribute a new step, please use the following coding standards: ### Default values -Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. +Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`. Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 67fda78658..f1d0f2bbb6 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -79,9 +79,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. -:::warning -Do not use `-c ` to specify parameters as this will result in errors. 
Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). -::: +> [!WARNING] +> Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). The above pipeline run specified with a params file in yaml format: @@ -110,7 +109,7 @@ nextflow pull {{ name }} ### Reproducibility -It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. +It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. 
@@ -118,15 +117,13 @@ This version number will be logged in reports when you run the pipeline, so that To further assist in reproducibility, you can use share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. -:::tip -If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. -::: +> [!TIP] +> If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. ## Core Nextflow arguments -:::note -These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). -::: +> [!NOTE] +> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen) ### `-profile` @@ -134,13 +131,12 @@ Use this parameter to choose a configuration profile. Profiles can give configur Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. -:::info -We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. -::: +> [!IMPORTANT] +> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. {%- if nf_core_configs %} -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. 
For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is suported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). {% else %} {% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! @@ -185,13 +181,14 @@ Specify the path to a specific config file (this is a core Nextflow command). Se ### Resource requests -Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. +Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. 
For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher resources request (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website. ### Custom Containers -In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date. +In some cases, you may wish to change the container or conda environment used by a pipeline steps for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version maybe out of date. + To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. 
From b12459a20b1742b8246961490e000149d7a4cfd4 Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 16:21:08 +0100 Subject: [PATCH 63/92] Update CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36ea940844..8cde2a4d80 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Fix some typo and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) ### Download From c8f22cfe7f7cd97718741949474304bb99cb8e6f Mon Sep 17 00:00:00 2001 From: Louis Le Nezet Date: Thu, 28 Nov 2024 16:24:26 +0100 Subject: [PATCH 64/92] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36ea940844..f418dc0af0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) ### Download From 07984beab52051ac1bd7d1728a49e368aee480cd Mon Sep 17 00:00:00 2001 From: LouisLeNezet Date: Thu, 28 Nov 2024 18:42:13 +0100 Subject: [PATCH 65/92] Fix linting --- nf_core/pipeline-template/docs/usage.md | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 
f1d0f2bbb6..16e6220aaf 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -189,7 +189,6 @@ To change the resource requests, please see the [max resources](https://nf-co.re In some cases, you may wish to change the container or conda environment used by a pipeline steps for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version maybe out of date. - To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. ### Custom Tool Arguments From 82d6797edc306ad39ab6488d450f66b7ba92182b Mon Sep 17 00:00:00 2001 From: LouisLeNezet Date: Thu, 28 Nov 2024 18:45:27 +0100 Subject: [PATCH 66/92] Update CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06f584d6e6..d778b1a8dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) - Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) -- Fix some typo and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) +- Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) ### Download From ac3fbc6497417d449463016159e0fc3e94f23781 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 17:40:07 
+0100 Subject: [PATCH 67/92] Download: Need to deduplicate Seqera Container matches as well, otherwise a race condition emerges. --- nf_core/pipelines/download.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 9a329aeaff..24a6d4ab26 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1016,8 +1016,8 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: log.debug(f"{c} matches and will be saved as {k}") d[k] = c - # combine deduplicated others and Seqera containers - return sorted(list(d.values()) + seqera_containers) + # combine deduplicated others and deduplicated Seqera containers + return sorted(list(d.values()) + list(set(seqera_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. From a5b0e866030f4b1a5c5316adcb4c9484d3be364d Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 19:55:28 +0100 Subject: [PATCH 68/92] Add new function to handle Seqera Container Oras URIs. --- nf_core/pipelines/download.py | 49 ++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 4 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 24a6d4ab26..68e5e3a98a 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1000,14 +1000,18 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data' 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' + Lastly, we want to remove at least a few Docker URIs for those modules, that have an oras:// download link. 
""" d: Dict[str, str] = {} - seqera_containers: List[str] = [] + seqera_containers_http: List[str] = [] + seqera_containers_oras: List[str] = [] all_others: List[str] = [] for c in container_list: if bool(re.search(r"/data$", c)): - seqera_containers.append(c) + seqera_containers_http.append(c) + elif bool(re.search(r"^oras$", c)): + seqera_containers_oras.append(c) else: all_others.append(c) @@ -1016,8 +1020,45 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: log.debug(f"{c} matches and will be saved as {k}") d[k] = c - # combine deduplicated others and deduplicated Seqera containers - return sorted(list(d.values()) + list(set(seqera_containers))) + combined_with_oras = self.reconcile_seqera_container_uris(seqera_containers_oras, list(d.values())) + + # combine deduplicated others (Seqera containers oras, http others and Docker URI others) and Seqera containers http + return sorted(list(set(combined_with_oras + seqera_containers_http))) + + @staticmethod + def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_list: List[str]) -> List[str]: + """ + Helper function that takes a list of Seqera container URIs, + extracts the software string and builds a regex from them to filter out + similar containers from the second container list. + + prioritzed_container_list = [ + ... "oras://community.wave.seqera.io/library/multiqc:1.25.1--f0e743d16869c0bf", + ... "oras://community.wave.seqera.io/library/multiqc_pip_multiqc-plugins:e1f4877f1515d03c" + ... 
] + + will be cleaned to + + ['library/multiqc:1.25.1', 'library/multiqc_pip_multiqc-plugins'] + + Subsequently, build a regex from those and filter out matching duplicates in other_list: + """ + + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritzed_container_list) + if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] + + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + + # combine priorized and regular container lists + return sorted(list(set(prioritzed_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. From 838286bbf92e481c2eecb3bec49b419867c61bd8 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 20:05:42 +0100 Subject: [PATCH 69/92] Ensure, that oras:// containers are correctly handled. --- nf_core/pipelines/download.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 68e5e3a98a..1710787383 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1460,9 +1460,10 @@ def singularity_pull_image( # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 # Thus, if an explicit registry is specified, the provided -l value is ignored. + # Additionally, check if the container to be pulled is native Singularity: oras:// protocol. 
container_parts = container.split("/") if len(container_parts) > 2: - address = f"docker://{container}" + address = container if container.startswith("oras://") else f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" From c2f9056c1c1237ca460b60539a5e6048e132af61 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Tue, 19 Nov 2024 20:28:54 +0100 Subject: [PATCH 70/92] Download: Add test data for oras:// modules. --- .../modules/mock_seqera_container_oras.nf | 11 +++++++++++ .../modules/mock_seqera_container_oras_mulled.nf | 11 +++++++++++ 2 files changed, 22 insertions(+) create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf create mode 100644 tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf new file mode 100644 index 0000000000..8278ac7917 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6' : + 'community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1' }" + + // truncated + +} diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf new file mode 100644 index 0000000000..234ca04a45 --- /dev/null +++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf @@ -0,0 +1,11 @@ +process UMI_TRANSFER_MULLED { + label 'process_single' + + conda "${moduleDir}/environment.yml" + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3' : + 'community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd' }" + + // truncated + +} From 50896ead8e3862db4314caf820f383d59c922cf5 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 21 Nov 2024 19:39:25 +0100 Subject: [PATCH 71/92] Test the new container elimination routine. 
--- ...ainer.nf => mock_seqera_container_http.nf} | 0 tests/pipelines/test_download.py | 24 ++++++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) rename tests/data/mock_module_containers/modules/{mock_seqera_container.nf => mock_seqera_container_http.nf} (100%) diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf similarity index 100% rename from tests/data/mock_module_containers/modules/mock_seqera_container.nf rename to tests/data/mock_module_containers/modules/mock_seqera_container_http.nf diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 86b07ef7f8..265936106f 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -257,7 +257,20 @@ def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): not in download_obj.containers ) - # mock_seqera_container.nf + # mock_seqera_container_oras.nf + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in download_obj.containers + assert "community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1" not in download_obj.containers + + # mock_seqera_container_oras_mulled.nf + assert ( + "oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3" + in download_obj.containers + ) + assert ( + "community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd" not in download_obj.containers + ) + + # mock_seqera_container_http.nf assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in download_obj.containers @@ -356,6 +369,15 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute oras:// URI + 
download_obj.singularity_pull_image( + "oras://ghcr.io/scilifelab/umi-transfer:latest", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( From 7ef1cfb459d40da0c63056f0ea0c7fea6b4f9b7a Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Thu, 21 Nov 2024 20:06:56 +0100 Subject: [PATCH 72/92] Download: Update the tests. --- nf_core/pipelines/download.py | 30 ++++++++++--------- tests/pipelines/test_download.py | 51 +++++++++++++++++++++++++++++++- 2 files changed, 66 insertions(+), 15 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 1710787383..b45395a939 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1026,7 +1026,7 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: return sorted(list(set(combined_with_oras + seqera_containers_http))) @staticmethod - def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_list: List[str]) -> List[str]: + def reconcile_seqera_container_uris(prioritized_container_list: List[str], other_list: List[str]) -> List[str]: """ Helper function that takes a list of Seqera container URIs, extracts the software string and builds a regex from them to filter out @@ -1043,22 +1043,24 @@ def reconcile_seqera_container_uris(prioritzed_container_list: List[str], other_ Subsequently, build a regex from those and filter out matching duplicates in other_list: """ + if not prioritized_container_list: + return other_list + else: + # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritized_container_list) + if 
(match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] - # trim the URIs to the stem that contains the tool string, assign with Walrus operator to account for non-matching patterns - trimmed_priority_list = [ - match.group() - for c in set(prioritzed_container_list) - if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) - ] - - # build regex - prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) - # filter out matches in other list - filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] - # combine priorized and regular container lists - return sorted(list(set(prioritzed_container_list + filtered_containers))) + # combine prioritized and regular container lists + return sorted(list(set(prioritized_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. 
diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 265936106f..8c68aa5651 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -307,6 +307,7 @@ def test_prioritize_direct_download(self, tmp_path): "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", ] result = download_obj.prioritize_direct_download(test_container) @@ -329,7 +330,7 @@ def test_prioritize_direct_download(self, tmp_path): assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0" in result assert "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1" in result - # Verify that Seqera containers are not deduplicated + # Verify that Seqera containers are not deduplicated... assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data" in result @@ -338,6 +339,54 @@ def test_prioritize_direct_download(self, tmp_path): "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in result ) + # ...but identical ones are. 
+ assert ( + result.count( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + == 1 + ) + + # + # Test for 'reconcile_seqera_container_uris' + # + @with_temporary_folder + def test_reconcile_seqera_container_uris(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + prioritized_container = [ + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649", + ] + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81", + "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373", + "biocontainers/sylph:0.6.1--b97274cdc1caa649", + ] + + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) + + # Verify that unrelated images are retained + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" in result + + # Verify that the priority works for regular Seqera container (Native Singularity over Docker, but only for Seqera registry) + assert "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649" in result + assert "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373" not in result + assert "biocontainers/sylph:0.6.1--b97274cdc1caa649" in result + + # Verify that version strings are respected: Version 1.0.0 does not replace version 1.5.0 + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in result + assert "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81" in result + + # assert that the deduplication works + assert test_container.count("nf-core/ubuntu:22.04") == 3 + assert 
result.count("nf-core/ubuntu:22.04") == 1 # # Tests for 'singularity_pull_image' From 335c48de10c45f77fd73b5ca325496cee5fccedc Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 13:03:03 +0100 Subject: [PATCH 73/92] Add dedicated ORAS image format error. --- nf_core/pipelines/download.py | 14 ++++++++++++++ tests/pipelines/test_download.py | 16 +++++++++++++++- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index b45395a939..e30815bb58 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1887,6 +1887,9 @@ def __init__( elif re.search(r"manifest\sunknown", line): self.error_type = self.InvalidTagError(self) break + elif re.search(r"ORAS\sSIF\simage\sshould\shave\sa\ssingle\slayer", line): + self.error_type = self.NoSingularityContainerError(self) + break elif re.search(r"Image\sfile\salready\sexists", line): self.error_type = self.ImageExistsError(self) break @@ -1951,6 +1954,17 @@ def __init__(self, error_log): self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' super().__init__(self.message) + class NoSingularityContainerError(RuntimeError): + """The container image is no native Singularity Image Format.""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"{self.error_log.container}" is no valid Singularity Image Format container.[/]\n' + ) + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. 
To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + super().__init__(self.message) + class OtherError(RuntimeError): """Undefined error with the container""" diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 8c68aa5651..01be5a5d42 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -369,6 +369,10 @@ def test_reconcile_seqera_container_uris(self, tmp_path): "biocontainers/sylph:0.6.1--b97274cdc1caa649", ] + # test that the test_container list is returned as it is, if no prioritized_containers are specified + result_empty = download_obj.reconcile_seqera_container_uris([], test_container) + assert result_empty == test_container + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) # Verify that unrelated images are retained @@ -420,13 +424,23 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p # Test successful pull with absolute oras:// URI download_obj.singularity_pull_image( - "oras://ghcr.io/scilifelab/umi-transfer:latest", + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", f"{tmp_dir}/umi-transfer-oras.sif", None, "docker.io", mock_rich_progress, ) + # try pulling Docker container image with oras:// + with pytest.raises(ContainerError.NoSingularityContainerError): + download_obj.singularity_pull_image( + "oras://ghcr.io/matthiaszepper/umi-transfer:dev", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.)
with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( From 0d0fe6b85db3c90840c65d920dc60d4404700835 Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 14:38:45 +0100 Subject: [PATCH 74/92] Include oras:// regex in download to recognize the paths. --- nf_core/pipelines/download.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index e30815bb58..4fe38dd280 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -839,11 +839,12 @@ def rectify_raw_container_matches(self, raw_findings): url_regex = ( r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" ) + oras_regex = r"oras:\/\/[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? List[str]: for c in container_list: if bool(re.search(r"/data$", c)): seqera_containers_http.append(c) - elif bool(re.search(r"^oras$", c)): + elif bool(re.search(r"^oras://", c)): seqera_containers_oras.append(c) else: all_others.append(c) From 66ffaf1804a90acfc6a2373611a42cd4061645bb Mon Sep 17 00:00:00 2001 From: Matthias Zepper Date: Fri, 22 Nov 2024 16:09:18 +0100 Subject: [PATCH 75/92] Changelog. --- CHANGELOG.md | 1 + tests/pipelines/test_download.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f418dc0af0..29c6db0cd6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ ### Download - First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). 
+- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)). ### Linting diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 01be5a5d42..d1e2c41a68 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -435,7 +435,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.NoSingularityContainerError): download_obj.singularity_pull_image( "oras://ghcr.io/matthiaszepper/umi-transfer:dev", - f"{tmp_dir}/umi-transfer-oras.sif", + f"{tmp_dir}/umi-transfer-oras_impostor.sif", None, "docker.io", mock_rich_progress, @@ -445,7 +445,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", - f"{tmp_dir}/hello-world_new.sif", + f"{tmp_dir}/break_the_registry_test.sif", None, "register-this-domain-to-break-the-test.io", mock_rich_progress, @@ -481,7 +481,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", - f"{tmp_dir}/umi-transfer.sif", + f"{tmp_dir}/multiqc-go.sif", None, "ghcr.io", mock_rich_progress, From 8c285304c72e69bf9ce0a93b4b8be7f62b51354b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 30 Nov 2024 07:24:39 +0000 Subject: [PATCH 76/92] Update dawidd6/action-download-artifact action to v7 (#3306) * Update dawidd6/action-download-artifact action to v7 * [automated] Update CHANGELOG.md --------- 
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot --- CHANGELOG.md | 1 + nf_core/pipeline-template/.github/workflows/linting_comment.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f418dc0af0..244dcb8adb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ ### Download - First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) ### Linting diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 908dcea159..63b20bb311 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: linting.yml workflow_conclusion: completed From d7f1df2617406afc9b5bf035621d6e31285d2835 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sat, 30 Nov 2024 11:22:59 +0000 Subject: [PATCH 77/92] Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.1 --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a7c7d38ce3..1494f58182 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: 
https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.0 + rev: v0.8.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix From 800fd8da95d41888ec5bacb71c58b5d5705fa2ff Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 06:44:55 +0100 Subject: [PATCH 78/92] Update gitpod/workspace-base Docker digest to 12853f7 (#3309) * Update gitpod/workspace-base Docker digest to 12853f7 * [automated] Update CHANGELOG.md --------- Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: nf-core-bot --- CHANGELOG.md | 1 + nf_core/gitpod/gitpod.Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 244dcb8adb..37c73d7788 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -50,6 +50,7 @@ - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) - Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 78a528c19d..a0002ed424 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. 
-FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 +FROM gitpod/workspace-base@sha256:12853f7c901eb2b677a549cb112c85f9679d18feb30093bcc63aa252540ecad9 USER root From acb91031a8081148ab6afd70b31135c8740264fd Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 10:51:27 +0100 Subject: [PATCH 79/92] update modules and subworkflows --- nf_core/pipeline-template/modules.json | 8 +-- .../modules/nf-core/fastqc/main.nf | 2 +- .../modules/nf-core/fastqc/meta.yml | 1 + .../nf-core/utils_nextflow_pipeline/main.nf | 2 + .../tests/main.workflow.nf.test | 10 ++-- .../nf-core/utils_nfcore_pipeline/main.nf | 51 +++--------------- .../tests/main.function.nf.test | 52 ------------------- .../tests/main.function.nf.test.snap | 30 ----------- .../utils_nfschema_plugin/tests/main.nf.test | 4 +- 9 files changed, 22 insertions(+), 138 deletions(-) diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index f714eb1d93..90c5728d9a 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,7 +8,7 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba", + "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": 
"2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d8989f4812..752c3a10c6 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,7 +24,7 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ? [[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index 4827da7af2..2b2e62b8ae 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,6 +11,7 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. 
homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index 0fcbf7b3f2..d6e593e852 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -92,10 +92,12 @@ def checkCondaChannels() { channels = config.channels } catch (NullPointerException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } catch (IOException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e1..02dbf094cd 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 5cb7bafef3..228dbff897 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -56,21 +56,6 @@ def checkProfileProvided(nextflow_cli_args) { } } -// -// Citation string for pipeline -// -def workflowCitation() { - def 
temp_doi_ref = "" - def manifest_doi = workflow.manifest.doi.tokenize(",") - // Handling multiple DOIs - // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers - // Removing ` ` since the manifest.doi is a string and not a proper list - manifest_doi.each { doi_ref -> - temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - } - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" -} - // // Generate workflow version string // @@ -150,33 +135,6 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } -// -// nf-core logo -// -def nfCoreLogo(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) -} - -// -// Return dashed line -// -def dashedLine(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - return "-${colors.dim}----------------------------------------------------${colors.reset}-" -} - // // ANSII colours used for terminal logging // @@ -261,7 +219,8 @@ def attachMultiqcReport(multiqc_report) { } } } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) if (multiqc_report) { log.warn("[${workflow.manifest.name}] Could not 
attach MultiQC report to summary email") } @@ -340,7 +299,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as MemoryUnit def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) @@ -358,7 +317,9 @@ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') ['sendmail', '-t'].execute() << sendmail_html log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f7..e43d208b1b 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert 
function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9e..02c6701413 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n 
https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432af..8fb3016487 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { From 8800b71da74c82f7953f127f97ded756537ad413 Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 2 Dec 2024 09:55:15 +0000 Subject: [PATCH 80/92] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed991f850..16d3025e0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ - fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo 
([#3268](https://github.com/nf-core/tools/pull/3268)) +- Use params.monochrome_logs in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) ### Download From 22ff44b17ab5de2a9f78fca8b8dfc056f2bad14f Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 11:14:47 +0100 Subject: [PATCH 81/92] also add monochrome_logs if nf-schema is used --- nf_core/pipeline-template/nextflow.config | 2 +- nf_core/pipeline-template/nextflow_schema.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index abd186c604..bbd1ad7fc0 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -41,7 +41,7 @@ params { email_on_fail = null plaintext_email = false {%- endif %} - {%- if modules %} + {%- if modules or nf_schema %} monochrome_logs = false{% endif %} {%- if slackreport or adaptivecard %} hook_url = null{% endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 389f9d104d..3e59a8ba54 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -182,7 +182,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true },{% endif %} - {%- if modules %} + {%- if modules or nf_schema %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", From b91bd77b9ffa248ce0d0bec89c1acabbc961c360 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 12:12:25 +0100 Subject: [PATCH 82/92] add manifest.contributors to nextflow.config --- nf_core/pipeline-template/nextflow.config | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index bbd1ad7fc0..475fdf6789 100644 --- a/nf_core/pipeline-template/nextflow.config +++ 
b/nf_core/pipeline-template/nextflow.config @@ -274,7 +274,16 @@ dag { manifest { name = '{{ name }}' - author = """{{ author }}""" + author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead + contributors = [ + // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 + [name: """{{ author }}""" + affiliation: "" + email: "" + github: "" + contribution: [] // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: ""] + ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' From da021fecd08245122175219f875dca415f694e5b Mon Sep 17 00:00:00 2001 From: nf-core-bot Date: Mon, 2 Dec 2024 11:14:38 +0000 Subject: [PATCH 83/92] [automated] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3703d41ce6..ac291ea734 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) - Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite tests profiles configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) - Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) +- Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) ### Download From 9c8d50fc7034e7eca8dee04cba3ae168760833aa Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 12:25:48 +0100 Subject: [PATCH 84/92] fix contributors map --- nf_core/pipeline-template/nextflow.config | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config 
index 475fdf6789..cc5a71ffe4 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -277,12 +277,14 @@ manifest { author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead contributors = [ // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 - [name: """{{ author }}""" - affiliation: "" - email: "" - github: "" - contribution: [] // List of contribution types ('author', 'maintainer' or 'contributor') - orcid: ""] + [ + name: '{{ author }}', + affiliation: '', + email: '', + github: '', + contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: '' + ] ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" From df3d25d944eb79b986b531174833eb18638cce88 Mon Sep 17 00:00:00 2001 From: Louis LE NEZET <58640615+LouisLeNezet@users.noreply.github.com> Date: Mon, 2 Dec 2024 13:01:42 +0100 Subject: [PATCH 85/92] Update nf_core/pipeline-template/.github/CONTRIBUTING.md Co-authored-by: Phil Ewels --- nf_core/pipeline-template/.github/CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 3e6b960088..37970c09e8 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly adress this particular bug. +- Open a pull-request from `patch` to `master` with the changes. 
{% if is_nfcore -%} From d72667c65e1ffdb6ee9274d2229bb091f20821ad Mon Sep 17 00:00:00 2001 From: Matthias Zepper <6963520+MatthiasZepper@users.noreply.github.com> Date: Mon, 2 Dec 2024 14:56:00 +0100 Subject: [PATCH 86/92] Typo in error message. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Matthias Hörtenhuber --- nf_core/pipelines/download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 4fe38dd280..d37dce86d1 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -1963,7 +1963,7 @@ def __init__(self, error_log): self.message = ( f'[bold red]"{self.error_log.container}" is no valid Singularity Image Format container.[/]\n' ) - self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. To convert form Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed, because it appears invalid. 
To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" super().__init__(self.message) class OtherError(RuntimeError): From 86b926b5bfcaf556daf5fadd4f033e7c6faba52d Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 15:51:42 +0100 Subject: [PATCH 87/92] test also the main sync function itsel --- tests/pipelines/test_sync.py | 52 ++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index ffbe75510b..5bd4e55aac 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -56,6 +56,8 @@ def mocked_requests_get(url) -> MockResponse: for branch_no in range(3, 7) ] return MockResponse(response_data, 200, url) + if url == "https://nf-co.re/pipelines.json": + return MockResponse({"remote_workflows": [{"name": "testpipeline", "topics": ["test", "pipeline"]}]}, 200, url) return MockResponse([{"html_url": url}], 404, url) @@ -398,3 +400,53 @@ def test_reset_target_dir_fake_branch(self): with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") + + def test_sync_success(self): + """Test successful pipeline sync with PR creation""" + # Set up GitHub auth token for PR creation + os.environ["GITHUB_AUTH_TOKEN"] = "dummy_token" + + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync( + self.pipeline_dir, make_pr=True, gh_username="no_existing_pr", gh_repo="response" + ) + + # Run sync + psync.sync() + + # Verify that changes were made and PR was created + self.assertTrue(psync.made_changes) + mock_post.assert_called_once() + self.assertEqual(mock_post.call_args[0][0], "https://api.github.com/repos/no_existing_pr/response/pulls") + + def 
test_sync_no_changes(self): + """Test pipeline sync when no changes are needed""" + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + # Mock that no changes were made + psync.made_changes = False + + # Run sync + psync.sync() + + # Verify no PR was created + mock_post.assert_not_called() + + def test_sync_no_github_token(self): + """Test sync fails appropriately when GitHub token is missing""" + # Ensure GitHub token is not set + if "GITHUB_AUTH_TOKEN" in os.environ: + del os.environ["GITHUB_AUTH_TOKEN"] + + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, make_pr=True) + psync.made_changes = True # Force changes to trigger PR attempt + + # Run sync and check for appropriate error + with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: + psync.sync() + self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception)) From ee86c151a8ff9b7e0d398184badd65621074d088 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 15:54:07 +0100 Subject: [PATCH 88/92] combine json parsing code --- nf_core/pipelines/sync.py | 34 +++++++++++++++++++++------------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 896adda94f..6f617295e1 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Union import git import questionary @@ -416,12 +416,8 @@ def close_open_template_merge_prs(self): list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" with self.gh_api.cache_disabled(): list_prs_request = self.gh_api.get(list_prs_url) - try: - list_prs_json = json.loads(list_prs_request.content) - list_prs_pp = 
json.dumps(list_prs_json, indent=4) - except Exception: - list_prs_json = list_prs_request.content - list_prs_pp = list_prs_request.content + + list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") if list_prs_request.status_code != 200: @@ -462,12 +458,8 @@ def close_open_pr(self, pr) -> bool: # Update the PR status to be closed with self.gh_api.cache_disabled(): pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - try: - pr_request_json = json.loads(pr_request.content) - pr_request_pp = json.dumps(pr_request_json, indent=4) - except Exception: - pr_request_json = pr_request.content - pr_request_pp = pr_request.content + + pr_request_json, pr_request_pp = self._parse_json_response(pr_request) # PR update worked if pr_request.status_code == 200: @@ -481,6 +473,22 @@ def close_open_pr(self, pr) -> bool: log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") return False + @staticmethod + def _parse_json_response(response) -> tuple[Any, str]: + """Helper method to parse JSON response and create pretty-printed string. + + Args: + response: requests.Response object + + Returns: + Tuple of (parsed_json, pretty_printed_str) + """ + try: + json_data = json.loads(response.content) + return json_data, json.dumps(json_data, indent=4) + except Exception: + return response.content, str(response.content) + def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch. 
From 538893ca9fdd74de62077c3f3d0b71ccc17562c7 Mon Sep 17 00:00:00 2001 From: mirpedrol Date: Mon, 2 Dec 2024 16:40:24 +0100 Subject: [PATCH 89/92] loop over list of authors to supply contributors --- nf_core/pipeline-template/nextflow.config | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index cc5a71ffe4..21174bbdc5 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -277,14 +277,16 @@ manifest { author = """{{ author }}""" // The author field is deprecated from Nextflow version 24.10.0, use contributors instead contributors = [ // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 + {%- for author_name in author.split(",") %} [ - name: '{{ author }}', + name: '{{ author_name }}', affiliation: '', email: '', github: '', contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') orcid: '' - ] + ], + {%- endfor %} ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" From 10e691bd4dd0c49b07e094d172905ee48e1b7a79 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 17:45:30 +0100 Subject: [PATCH 90/92] remove broken test --- nf_core/pipelines/sync.py | 2 +- tests/pipelines/test_sync.py | 20 -------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 6f617295e1..8ea561bd34 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -120,7 +120,7 @@ def __init__( requests.auth.HTTPBasicAuth(self.gh_username, os.environ["GITHUB_AUTH_TOKEN"]) ) - def sync(self): + def sync(self) -> None: """Find workflow attributes, create a new template pipeline on TEMPLATE""" # Clear requests_cache so that we don't get stale API responses diff --git a/tests/pipelines/test_sync.py 
b/tests/pipelines/test_sync.py index 5bd4e55aac..8bf8a3c4ec 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -401,26 +401,6 @@ def test_reset_target_dir_fake_branch(self): psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") - def test_sync_success(self): - """Test successful pipeline sync with PR creation""" - # Set up GitHub auth token for PR creation - os.environ["GITHUB_AUTH_TOKEN"] = "dummy_token" - - with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( - "requests.post", side_effect=mocked_requests_post - ) as mock_post: - psync = nf_core.pipelines.sync.PipelineSync( - self.pipeline_dir, make_pr=True, gh_username="no_existing_pr", gh_repo="response" - ) - - # Run sync - psync.sync() - - # Verify that changes were made and PR was created - self.assertTrue(psync.made_changes) - mock_post.assert_called_once() - self.assertEqual(mock_post.call_args[0][0], "https://api.github.com/repos/no_existing_pr/response/pulls") - def test_sync_no_changes(self): """Test pipeline sync when no changes are needed""" with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( From 11f7f426ce937c53f17ff210a952413d8ad7b408 Mon Sep 17 00:00:00 2001 From: mashehu Date: Mon, 2 Dec 2024 21:16:57 +0100 Subject: [PATCH 91/92] fix type error --- nf_core/pipelines/sync.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 8ea561bd34..781b4f5f00 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Any, Dict, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import git import questionary @@ -474,7 +474,7 @@ def close_open_pr(self, pr) -> bool: return False @staticmethod - def _parse_json_response(response) -> tuple[Any, str]: + def 
_parse_json_response(response) -> Tuple[Any, str]: """Helper method to parse JSON response and create pretty-printed string. Args: From eed0598c7baf5f20e87254cd8a95bc29b0836fc8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 3 Dec 2024 12:02:50 +0100 Subject: [PATCH 92/92] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Júlia Mir Pedrol --- nf_core/pipelines/lint/__init__.py | 2 +- nf_core/pipelines/lint/readme.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index f243743846..154e38aea6 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -593,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if lint_obj.lint_config and lint_obj.lint_config["nfcore_components"] is False: + if lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: module_lint_obj = None subworkflow_lint_obj = None else: diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index 5a10fbfce5..75b05f16ed 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -35,8 +35,8 @@ def readme(self): lint: readme: - nextflow_badge - zenodo_release + - nextflow_badge + - zenodo_release """ passed = []