diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 6ad6b7b157..3ef0760513 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -65,6 +65,12 @@ runs: run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + shell: bash + run: touch nf-core-testpipeline/ro-crate-metadata.json + working-directory: create-lint-wf + # Run nf-core pipelines linting - name: nf-core pipelines lint shell: bash diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index b38c1ea5a1..fffa9ffe7a 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -147,6 +147,11 @@ jobs: run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; working-directory: create-test-lint-wf + # Add empty ro-crate file + - name: add empty ro-crate file + run: touch my-prefix-testpipeline/ro-crate-metadata.json + working-directory: create-test-lint-wf + # Run nf-core linting - name: nf-core pipelines lint run: nf-core --log-file log.txt --hide-progress pipelines lint --dir my-prefix-testpipeline --fail-warned diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 2d67b1ce98..76d5d710c0 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -132,7 +132,7 @@ jobs: - name: Test with pytest run: | - python3 -m pytest tests/${{matrix.test}} --color=yes --cov --durations=0 && exit_code=0|| exit_code=$? + python3 -m pytest tests/${{matrix.test}} --color=yes --cov --cov-config=.coveragerc --durations=0 && exit_code=0|| exit_code=$? # don't fail if no tests were collected, e.g. 
for test_licence.py if [ "${exit_code}" -eq 5 ]; then echo "No tests were collected" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 28ce84befe..1494f58182 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.8.1 hooks: - id: ruff # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix diff --git a/CHANGELOG.md b/CHANGELOG.md index d4a96ca744..d9794ed619 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,14 +10,21 @@ - fix workflow_dispatch trigger and parse more review comments in awsfulltest ([#3235](https://github.com/nf-core/tools/pull/3235)) - Add resource limits to Gitpod profile([#3255](https://github.com/nf-core/tools/pull/3255)) - Fix a typo ([#3268](https://github.com/nf-core/tools/pull/3268)) +- Remove `def` from `nextflow.config` and add `trace_report_suffix` param ([#3296](https://github.com/nf-core/tools/pull/3296)) +- Move `includeConfig 'conf/modules.config'` next to `includeConfig 'conf/base.config'` to not overwrite test profile configurations ([#3301](https://github.com/nf-core/tools/pull/3301)) +- Use `params.monochrome_logs` in the template and update nf-core components ([#3310](https://github.com/nf-core/tools/pull/3310)) +- Fix some typos and improve writing in `usage.md` and `CONTRIBUTING.md` ([#3302](https://github.com/nf-core/tools/pull/3302)) +- Add `manifest.contributors` to `nextflow.config` ([#3311](https://github.com/nf-core/tools/pull/3311)) ### Download - First steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Modify `prioritize_direct_download()` to retain Seqera Singularity https:// Container URIs and hardcode Seqera Containers into `gather_registries()` ([#3244](https://github.com/nf-core/tools/pull/3244)). +- Further steps towards fixing [#3179](https://github.com/nf-core/tools/issues/3179): Enable limited support for `oras://` container paths (_only absolute URIs, no flexible registries like with Docker_) and prevent unnecessary image downloads for Seqera Container modules with `reconcile_seqera_container_uris()` ([#3293](https://github.com/nf-core/tools/pull/3293)).
+- Update dawidd6/action-download-artifact action to v7 ([#3306](https://github.com/nf-core/tools/pull/3306)) ### Linting -- General: Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) +- Allow mixed `str` and `dict` entries in lint config ([#3228](https://github.com/nf-core/tools/pull/3228)) ### Modules @@ -25,6 +32,8 @@ ### Subworkflows +- Add `nf-core subworkflows patch` command ([#2861](https://github.com/nf-core/tools/pull/2861)) + ### General - Include .nf-core.yml in `nf-core pipelines bump-version` ([#3220](https://github.com/nf-core/tools/pull/3220)) @@ -44,6 +53,9 @@ - Update pre-commit hook astral-sh/ruff-pre-commit to v0.7.4 ([#3282](https://github.com/nf-core/tools/pull/3282)) - Update codecov/codecov-action action to v5 ([#3283](https://github.com/nf-core/tools/pull/3283)) - Update python:3.12-slim Docker digest to 2a6386a ([#3284](https://github.com/nf-core/tools/pull/3284)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.8.0 ([#3299](https://github.com/nf-core/tools/pull/3299)) +- Update gitpod/workspace-base Docker digest to 12853f7 ([#3309](https://github.com/nf-core/tools/pull/3309)) +- Run pre-commit when testing linting the template pipeline ([#3280](https://github.com/nf-core/tools/pull/3280)) - build: Setup VS Code tests ([#3292](https://github.com/nf-core/tools/pull/3292)) ## [v3.0.2 - Titanium Tapir Patch](https://github.com/nf-core/tools/releases/tag/3.0.2) - [2024-10-11] diff --git a/nf_core/__main__.py b/nf_core/__main__.py index 4612b8b39a..9f16188e95 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -36,6 +36,7 @@ pipelines_launch, pipelines_lint, pipelines_list, + pipelines_rocrate, pipelines_schema_build, pipelines_schema_docs, pipelines_schema_lint, @@ -86,7 +87,7 @@ }, { "name": "For developers", - "commands": ["create", "lint", "bump-version", "sync", "schema", "create-logo"], + "commands": ["create", "lint", "bump-version", "sync", "schema", "rocrate", "create-logo"], }, ], "nf-core modules": [ @@ -570,6 +571,44 @@ def command_pipelines_list(ctx, keywords, sort, json, show_archived): pipelines_list(ctx, keywords, sort, json, show_archived) +# nf-core pipelines rocrate +@pipelines.command("rocrate") +@click.argument( + "pipeline_dir", + type=click.Path(exists=True), + default=Path.cwd(), + required=True, + metavar="<pipeline directory>", +) +@click.option( + "-j", + "--json_path", + default=Path.cwd(), + type=str, + help="Path to save RO Crate metadata json file to", +) +@click.option("-z", "--zip_path", type=str, help="Path to save RO Crate zip file to") +@click.option( + "-pv", + "--pipeline_version", + type=str, + help="Version of pipeline to use for RO Crate", + default="", +) +@click.pass_context +def rocrate( + ctx, + pipeline_dir: str, + json_path: str, + zip_path: str, + pipeline_version: str, +): + """ + Make a Research Object Crate + """ + pipelines_rocrate(ctx, pipeline_dir, json_path, zip_path, pipeline_version) + + # nf-core pipelines sync @pipelines.command("sync") @click.pass_context @@ -1024,7 +1063,7 @@ def command_modules_update( default=".", help=r"Pipeline directory.
[dim]\[default: current working directory][/]", ) -@click.option("-r", "--remove", is_flag=True, default=False) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existing patch file and regenerate it.") def command_modules_patch(ctx, tool, directory, remove): """ Create a patch file for minor changes in a module @@ -1567,6 +1606,43 @@ def command_subworkflows_install(ctx, subworkflow, directory, prompt, force, sha subworkflows_install(ctx, subworkflow, directory, prompt, force, sha) +# nf-core subworkflows patch +@subworkflows.command("patch") +@click.pass_context +@click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>") +@click.option( + "-d", + "--dir", + type=click.Path(exists=True), + default=".", + help=r"Pipeline directory. [dim]\[default: current working directory][/]", +) +@click.option("-r", "--remove", is_flag=True, default=False, help="Remove an existing patch file and regenerate it.") +def subworkflows_patch(ctx, tool, dir, remove): + """ + Create a patch file for minor changes in a subworkflow + + Checks if a subworkflow has been modified locally and creates a patch file + describing how the subworkflow has changed from the remote version + """ + from nf_core.subworkflows import SubworkflowPatch + + try: + subworkflow_patch = SubworkflowPatch( + dir, + ctx.obj["modules_repo_url"], + ctx.obj["modules_repo_branch"], + ctx.obj["modules_repo_no_pull"], + ) + if remove: + subworkflow_patch.remove(tool) + else: + subworkflow_patch.patch(tool) + except (UserWarning, LookupError) as e: + log.error(e) + sys.exit(1) + + # nf-core subworkflows remove @subworkflows.command("remove") @click.pass_context @@ -1721,7 +1797,7 @@ def command_schema_validate(pipeline, params): @click.option( "--url", type=str, - default="https://nf-co.re/pipeline_schema_builder", + default="https://oldsite.nf-co.re/pipeline_schema_builder", help="Customise the builder URL (for development work)", ) def command_schema_build(directory, no_prompts, web_only, url): diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index 1186935e52..3b28f4979c 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -2,6 +2,7 @@ import os import sys from pathlib import Path +from typing import Optional, Union import rich @@ -277,6 +278,33 @@ def pipelines_list(ctx, keywords, sort, json, show_archived): stdout.print(list_workflows(keywords, sort, json, show_archived)) +# nf-core pipelines rocrate +def pipelines_rocrate( + ctx, + pipeline_dir: Union[str, Path], + json_path: Optional[Union[str, Path]], + zip_path: Optional[Union[str, Path]], + pipeline_version: str, +) -> None: + from nf_core.pipelines.rocrate import ROCrate + + if json_path is None and zip_path is None: + log.error("Either `--json_path` or `--zip_path` must be specified.") + sys.exit(1) + else: + pipeline_dir = Path(pipeline_dir) + if json_path is not None: + json_path = Path(json_path) + if zip_path is not None: + zip_path = Path(zip_path) + try: + rocrate_obj = ROCrate(pipeline_dir, pipeline_version) + rocrate_obj.create_rocrate(json_path=json_path, zip_path=zip_path) + except (UserWarning, LookupError, FileNotFoundError) as e: + log.error(e) + sys.exit(1) + + # nf-core pipelines sync def pipelines_sync(ctx, directory, from_branch, pull_request, github_repository, username, template_yaml, force_pr): """ diff --git a/nf_core/modules/modules_differ.py b/nf_core/components/components_differ.py similarity index 80% rename from nf_core/modules/modules_differ.py rename to
nf_core/components/components_differ.py index 6b0781bb89..db51c1910d 100644 --- a/nf_core/modules/modules_differ.py +++ b/nf_core/components/components_differ.py @@ -16,10 +16,10 @@ log = logging.getLogger(__name__) -class ModulesDiffer: +class ComponentsDiffer: """ Static class that provides functionality for computing diffs between - different instances of a module + different instances of a module or subworkflow """ class DiffEnum(enum.Enum): @@ -34,15 +34,15 @@ class DiffEnum(enum.Enum): REMOVED = enum.auto() @staticmethod - def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): + def get_component_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_dir=None): """ - Compute the diff between the current module version + Compute the diff between the current component version and the new version. Args: - from_dir (strOrPath): The folder containing the old module files - to_dir (strOrPath): The folder containing the new module files - path_in_diff (strOrPath): The directory displayed containing the module + from_dir (strOrPath): The folder containing the old component files + to_dir (strOrPath): The folder containing the new component files + path_in_diff (strOrPath): The directory displayed containing the component file in the diff. Added so that temporary dirs are not shown for_git (bool): indicates whether the diff file is to be @@ -52,7 +52,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d dsp_to_dir (str | Path): The to directory to display in the diff Returns: - dict[str, (ModulesDiffer.DiffEnum, str)]: A dictionary containing + dict[str, (ComponentsDiffer.DiffEnum, str)]: A dictionary containing the diff type and the diff string (empty if no diff) """ if for_git: @@ -72,7 +72,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d ) files = list(files) - # Loop through all the module files and compute their diffs if needed + # Loop through all the component files and compute their diffs if needed for file in files: temp_path = Path(to_dir, file) curr_path = Path(from_dir, file) @@ -84,7 +84,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d if new_lines == old_lines: # The files are identical - diffs[file] = (ModulesDiffer.DiffEnum.UNCHANGED, ()) + diffs[file] = (ComponentsDiffer.DiffEnum.UNCHANGED, ()) else: # Compute the diff diff = difflib.unified_diff( @@ -93,7 +93,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CHANGED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CHANGED, diff) elif temp_path.exists(): with open(temp_path) as fh: @@ -106,7 +106,7 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path("/dev", "null")), tofile=str(Path(dsp_to_dir, file)), ) - diffs[file] = (ModulesDiffer.DiffEnum.CREATED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.CREATED, diff) elif curr_path.exists(): # The file was removed @@ -119,14 +119,14 @@ def get_module_diffs(from_dir, to_dir, for_git=True, dsp_from_dir=None, dsp_to_d fromfile=str(Path(dsp_from_dir, file)), tofile=str(Path("/dev", "null")), ) - diffs[file] = (ModulesDiffer.DiffEnum.REMOVED, diff) + diffs[file] = (ComponentsDiffer.DiffEnum.REMOVED, diff) return diffs @staticmethod def write_diff_file( diff_path, - module, + component, repo_path, from_dir, to_dir, @@ -139,20 +139,19 
@@ def write_diff_file( limit_output=False, ): """ - Writes the diffs of a module to the diff file. + Writes the diffs of a component to the diff file. Args: diff_path (str | Path): The path to the file that should be appended - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - diffs (dict[str, (ModulesDiffer.DiffEnum, str)]): A dictionary containing + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + diffs (dict[str, (ComponentsDiffer.DiffEnum, str)]): A dictionary containing the type of change and the diff (if any) - module_dir (str | Path): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against for_git (bool): indicates whether the diff file is to be compatible with `git apply`. If true it adds a/ and b/ prefixes to the file paths @@ -165,36 +164,36 @@ def write_diff_file( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) - if all(diff_status == ModulesDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): - raise UserWarning("Module is unchanged") - log.debug(f"Writing diff of '{module}' to '{diff_path}'") + diffs = ComponentsDiffer.get_component_diffs(from_dir, to_dir, for_git, dsp_from_dir, dsp_to_dir) + if all(diff_status == ComponentsDiffer.DiffEnum.UNCHANGED for _, (diff_status, _) in diffs.items()): + raise UserWarning("Component is unchanged") + log.debug(f"Writing diff of '{component}' to '{diff_path}'") with open(diff_path, file_action) as fh: if current_version is not None and new_version is not None: fh.write( - f"Changes in module '{Path(repo_path, module)}' between" + f"Changes in component '{Path(repo_path, component)}' between" f" ({current_version}) and" f" ({new_version})\n" ) else: - fh.write(f"Changes in module '{Path(repo_path, module)}'\n") + fh.write(f"Changes in component '{Path(repo_path, component)}'\n") for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical fh.write(f"'{Path(dsp_from_dir, file)}' is unchanged\n") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was created\n") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits fh.write(f"'{Path(dsp_from_dir, file)}' was removed\n") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - fh.write(f"Changes in '{Path(module, file)}' but not shown\n") + fh.write(f"Changes in '{Path(component, file)}' but not shown\n") else: # The file has changed write the diff lines to the file - fh.write(f"Changes in '{Path(module, file)}':\n") + 
fh.write(f"Changes in '{Path(component, file)}':\n") for line in diff: fh.write(line) fh.write("\n") @@ -237,7 +236,7 @@ def append_modules_json_diff(diff_path, old_modules_json, new_modules_json, modu @staticmethod def print_diff( - module, + component, repo_path, from_dir, to_dir, @@ -248,16 +247,15 @@ def print_diff( limit_output=False, ): """ - Prints the diffs between two module versions to the terminal + Prints the diffs between two component versions to the terminal Args: - module (str): The module name - repo_path (str): The name of the repo where the module resides - from_dir (str | Path): The directory containing the old module files - to_dir (str | Path): The directory containing the new module files - module_dir (str): The path to the current installation of the module - current_version (str): The installed version of the module - new_version (str): The version of the module the diff is computed against + component (str): The component name + repo_path (str): The name of the repo where the component resides + from_dir (str | Path): The directory containing the old component files + to_dir (str | Path): The directory containing the new component files + current_version (str): The installed version of the component + new_version (str): The version of the component the diff is computed against dsp_from_dir (str | Path): The 'from' directory displayed in the diff dsp_to_dir (str | Path): The 'to' directory displayed in the diff limit_output (bool): If true, don't print the diff for files other than main.nf @@ -267,41 +265,43 @@ def print_diff( if dsp_to_dir is None: dsp_to_dir = to_dir - diffs = ModulesDiffer.get_module_diffs( + diffs = ComponentsDiffer.get_component_diffs( from_dir, to_dir, for_git=False, dsp_from_dir=dsp_from_dir, dsp_to_dir=dsp_to_dir ) console = Console(force_terminal=nf_core.utils.rich_force_colors()) if current_version is not None and new_version is not None: log.info( - f"Changes in module '{Path(repo_path, module)}' between" f" ({current_version}) and" f" ({new_version})" + f"Changes in component '{Path(repo_path, component)}' between" + f" ({current_version}) and" + f" ({new_version})" ) else: - log.info(f"Changes in module '{Path(repo_path, module)}'") + log.info(f"Changes in component '{Path(repo_path, component)}'") panel_group: list[RenderableType] = [] for file, (diff_status, diff) in diffs.items(): - if diff_status == ModulesDiffer.DiffEnum.UNCHANGED: + if diff_status == ComponentsDiffer.DiffEnum.UNCHANGED: # The files are identical log.info(f"'{Path(dsp_from_dir, file)}' is unchanged") - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: # The file was created between the commits log.info(f"'{Path(dsp_from_dir, file)}' was created") - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: # The file was removed between the commits log.info(f"'{Path(dsp_from_dir, file)}' was removed") elif limit_output and not file.suffix == ".nf": # Skip printing the diff for files other than main.nf - log.info(f"Changes in '{Path(module, file)}' but not shown") + log.info(f"Changes in '{Path(component, file)}' but not shown") else: # The file has changed - log.info(f"Changes in '{Path(module, file)}':") + log.info(f"Changes in '{Path(component, file)}':") # Pretty print the diff using the pygments diff lexer syntax = Syntax("".join(diff), "diff", theme="ansi_dark", line_numbers=True) panel_group.append(Panel(syntax, title=str(file), title_align="left", 
padding=0)) console.print( Panel( Group(*panel_group), - title=f"[white]{str(module)}[/white]", + title=f"[white]{str(component)}[/white]", title_align="left", padding=0, border_style="blue", @@ -422,7 +422,7 @@ def try_apply_single_patch(file_lines, patch, reverse=False): LookupError: If it fails to find the old lines from the patch in the file. """ - org_lines, patch_lines = ModulesDiffer.get_new_and_old_lines(patch) + org_lines, patch_lines = ComponentsDiffer.get_new_and_old_lines(patch) if reverse: patch_lines, org_lines = org_lines, patch_lines @@ -466,16 +466,22 @@ def try_apply_single_patch(file_lines, patch, reverse=False): @staticmethod def try_apply_patch( - module: str, repo_path: Union[str, Path], patch_path: Union[str, Path], module_dir: Path, reverse: bool = False + component_type: str, + component: str, + repo_path: Union[str, Path], + patch_path: Union[str, Path], + component_dir: Path, + reverse: bool = False, ) -> Dict[str, List[str]]: """ - Try applying a full patch file to a module + Try applying a full patch file to a module or subworkflow Args: - module (str): Name of the module - repo_path (str): Name of the repository where the module resides + component_type (str): The type of component (modules or subworkflows) + component (str): Name of the module or subworkflow + repo_path (str): Name of the repository where the component resides patch_path (str): The absolute path to the patch file to be applied - module_dir (Path): The directory containing the module + component_dir (Path): The directory containing the component reverse (bool): Apply the patch in reverse Returns: @@ -485,19 +491,19 @@ def try_apply_patch( Raises: LookupError: If the patch application fails in a file """ - module_relpath = Path("modules", repo_path, module) - patches = ModulesDiffer.per_file_patch(patch_path) + component_relpath = Path(component_type, repo_path, component) + patches = ComponentsDiffer.per_file_patch(patch_path) new_files = {} for file, patch in patches.items(): log.debug(f"Applying patch to {file}") - fn = Path(file).relative_to(module_relpath) - file_path = module_dir / fn + fn = Path(file).relative_to(component_relpath) + file_path = component_dir / fn try: with open(file_path) as fh: file_lines = fh.readlines() except FileNotFoundError: # The file was added with the patch file_lines = [""] - patched_new_lines = ModulesDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) + patched_new_lines = ComponentsDiffer.try_apply_single_patch(file_lines, patch, reverse=reverse) new_files[str(fn)] = patched_new_lines return new_files diff --git a/nf_core/components/info.py b/nf_core/components/info.py index f3e5bf617c..31769785a1 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -211,9 +211,9 @@ def get_local_yaml(self) -> Optional[Dict]: return yaml.safe_load(fh) log.debug(f"{self.component_type[:-1].title()} '{self.component}' meta.yml not found locally") - return None + return {} - def get_remote_yaml(self) -> Optional[dict]: + def get_remote_yaml(self) -> Optional[Dict]: """Attempt to get the meta.yml file from a remote repo. 
Returns: diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index fcc3b414d8..69740135a8 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -22,7 +22,7 @@ from nf_core.components.nfcore_component import NFCoreComponent from nf_core.modules.modules_json import ModulesJson from nf_core.pipelines.lint_utils import console -from nf_core.utils import LintConfigType +from nf_core.utils import NFCoreYamlLintConfig from nf_core.utils import plural_s as _s log = logging.getLogger(__name__) @@ -80,7 +80,7 @@ def __init__( self.failed: List[LintResult] = [] self.all_local_components: List[NFCoreComponent] = [] - self.lint_config: Optional[LintConfigType] = None + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.modules_json: Optional[ModulesJson] = None if self.component_type == "modules": diff --git a/nf_core/components/patch.py b/nf_core/components/patch.py index 41fccd8be2..59ec7a381b 100644 --- a/nf_core/components/patch.py +++ b/nf_core/components/patch.py @@ -8,7 +8,7 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer from nf_core.modules.modules_json import ModulesJson log = logging.getLogger(__name__) @@ -65,7 +65,9 @@ def patch(self, component=None): component_fullname = str(Path(self.component_type, self.modules_repo.repo_path, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. Cannot compute patch" ) @@ -112,7 +114,7 @@ def patch(self, component=None): # Write the patch to a temporary location (otherwise it is printed to the screen later) patch_temp_path = tempfile.mktemp() try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( patch_temp_path, component, self.modules_repo.repo_path, @@ -127,11 +129,13 @@ def patch(self, component=None): raise UserWarning(f"{self.component_type[:-1]} '{component_fullname}' is unchanged. No patch to compute") # Write changes to modules.json - self.modules_json.add_patch_entry(component, self.modules_repo.remote_url, component_dir, patch_relpath) + self.modules_json.add_patch_entry( + self.component_type, component, self.modules_repo.remote_url, component_dir, patch_relpath + ) log.debug(f"Wrote patch path for {self.component_type[:-1]} {component} to modules.json") # Show the changes made to the module - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, self.modules_repo.repo_path, component_install_dir, @@ -166,7 +170,9 @@ def remove(self, component): component_fullname = str(Path(self.component_type, component_dir, component)) # Verify that the component has an entry in the modules.json file - if not self.modules_json.module_present(component, self.modules_repo.remote_url, component_dir): + if not self.modules_json.component_present( + component, self.modules_repo.remote_url, component_dir, self.component_type + ): raise UserWarning( f"The '{component_fullname}' {self.component_type[:-1]} does not have an entry in the 'modules.json' file. 
Cannot compute patch" ) @@ -202,7 +208,7 @@ def remove(self, component): # Try to apply the patch in reverse and move resulting files to module dir temp_component_dir = self.modules_json.try_apply_patch_reverse( - component, self.modules_repo.repo_path, patch_relpath, component_path + self.component_type, component, self.modules_repo.repo_path, patch_relpath, component_path ) try: for file in Path(temp_component_dir).glob("*"): diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index c2c5843918..37208629c0 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -68,7 +68,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals if not component_dir.exists(): log.error(f"Installation directory '{component_dir}' does not exist.") - if modules_json.module_present(component, self.modules_repo.remote_url, repo_path): + if modules_json.component_present(component, self.modules_repo.remote_url, repo_path, self.component_type): log.error(f"Found entry for '{component}' in 'modules.json'. Removing...") modules_json.remove_entry(self.component_type, component, self.modules_repo.remote_url, repo_path) return False diff --git a/nf_core/components/update.py b/nf_core/components/update.py index bf176fb6d9..901a7f02fe 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -9,13 +9,13 @@ import nf_core.modules.modules_utils import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.components_utils import ( get_components_to_install, prompt_component_version_sha, ) from nf_core.components.install import ComponentInstall from nf_core.components.remove import ComponentRemove -from nf_core.modules.modules_differ import ModulesDiffer from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo from nf_core.utils import plural_es, plural_s, plural_y @@ -223,7 +223,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr f"Writing diff file for {self.component_type[:-1]} '{component_fullname}' to '{self.save_diff_fn}'" ) try: - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( self.save_diff_fn, component, modules_repo.repo_path, @@ -265,7 +265,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr self.manage_changes_in_linked_components(component, modules_to_update, subworkflows_to_update) elif self.show_diff: - ModulesDiffer.print_diff( + ComponentsDiffer.print_diff( component, modules_repo.repo_path, component_dir, @@ -313,7 +313,7 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr if self.save_diff_fn: # Write the modules.json diff to the file - ModulesDiffer.append_modules_json_diff( + ComponentsDiffer.append_modules_json_diff( self.save_diff_fn, old_modules_json, self.modules_json.get_modules_json(), @@ -449,7 +449,9 @@ def get_single_component_info(self, component): self.modules_repo.setup_branch(current_branch) # If there is a patch file, get its filename - patch_fn = self.modules_json.get_patch_fn(component, self.modules_repo.remote_url, install_dir) + patch_fn = self.modules_json.get_patch_fn( + self.component_type, component, self.modules_repo.remote_url, install_dir + ) return (self.modules_repo, component, sha, patch_fn) @@ -695,7 +697,12 @@ def get_all_components_info(self, branch=None): # Add patch filenames to the components 
that have them components_info = [ - (repo, comp, sha, self.modules_json.get_patch_fn(comp, repo.remote_url, repo.repo_path)) + ( + repo, + comp, + sha, + self.modules_json.get_patch_fn(self.component_type, comp, repo.remote_url, repo.repo_path), + ) for repo, comp, sha in components_info ] @@ -810,7 +817,9 @@ def try_apply_patch( shutil.copytree(component_install_dir, temp_component_dir) try: - new_files = ModulesDiffer.try_apply_patch(component, repo_path, patch_path, temp_component_dir) + new_files = ComponentsDiffer.try_apply_patch( + self.component_type, component, repo_path, patch_path, temp_component_dir + ) except LookupError: # Patch failed. Save the patch file by moving to the install dir shutil.move(patch_path, Path(component_install_dir, patch_path.relative_to(component_dir))) @@ -828,7 +837,7 @@ def try_apply_patch( # Create the new patch file log.debug("Regenerating patch file") - ModulesDiffer.write_diff_file( + ComponentsDiffer.write_diff_file( Path(temp_component_dir, patch_path.relative_to(component_dir)), component, repo_path, @@ -848,7 +857,12 @@ def try_apply_patch( # Add the patch file to the modules.json file self.modules_json.add_patch_entry( - component, self.modules_repo.remote_url, repo_path, patch_relpath, write_file=write_file + self.component_type, + component, + self.modules_repo.remote_url, + repo_path, + patch_relpath, + write_file=write_file, ) return True diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 78a528c19d..a0002ed424 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,7 +2,7 @@ # docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile . # See https://docs.renovatebot.com/docker/#digest-pinning for why a digest is used. -FROM gitpod/workspace-base@sha256:2cc134fe5bd7d8fdbe44cab294925d4bc6d2d178d94624f4c376584a22d1f7b6 +FROM gitpod/workspace-base@sha256:12853f7c901eb2b677a549cb112c85f9679d18feb30093bcc63aa252540ecad9 USER root diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index df5a48d5bf..848e17130e 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -15,8 +15,8 @@ import nf_core import nf_core.modules.modules_utils +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -50,7 +50,8 @@ def main_nf( # otherwise read the lines directly from the module lines: List[str] = [] if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index 4ad728d10b..d0268a40cc 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -6,9 +6,9 @@ import ruamel.yaml from jsonschema import exceptions, validators +from nf_core.components.components_differ import ComponentsDiffer from nf_core.components.lint import ComponentLint, LintExceptionError from nf_core.components.nfcore_component import NFCoreComponent -from nf_core.modules.modules_differ import ModulesDiffer log = logging.getLogger(__name__) @@ -46,7 +46,8 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None # Check if we have a patch file, get original file in that case meta_yaml = 
read_meta_yml(module_lint_object, module) if module.is_patched and module_lint_object.modules_repo.repo_path is not None: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, @@ -207,7 +208,8 @@ def read_meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> yaml.preserve_quotes = True # Check if we have a patch file, get original file in that case if module.is_patched: - lines = ModulesDiffer.try_apply_patch( + lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, module.patch_path, diff --git a/nf_core/modules/lint/module_changes.py b/nf_core/modules/lint/module_changes.py index eb76f4b88b..121de00c0a 100644 --- a/nf_core/modules/lint/module_changes.py +++ b/nf_core/modules/lint/module_changes.py @@ -7,7 +7,7 @@ from pathlib import Path import nf_core.modules.modules_repo -from nf_core.modules.modules_differ import ModulesDiffer +from nf_core.components.components_differ import ComponentsDiffer def module_changes(module_lint_object, module): @@ -30,7 +30,8 @@ def module_changes(module_lint_object, module): tempdir = tempdir_parent / "tmp_module_dir" shutil.copytree(module.component_dir, tempdir) try: - new_lines = ModulesDiffer.try_apply_patch( + new_lines = ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module.org, module.patch_path, diff --git a/nf_core/modules/lint/module_patch.py b/nf_core/modules/lint/module_patch.py index 29bf78a66b..6347c5c553 100644 --- a/nf_core/modules/lint/module_patch.py +++ b/nf_core/modules/lint/module_patch.py @@ -1,7 +1,7 @@ from pathlib import Path +from ...components.components_differ import ComponentsDiffer from ...components.nfcore_component import NFCoreComponent -from ..modules_differ import ModulesDiffer def module_patch(module_lint_obj, module: NFCoreComponent): @@ -66,11 +66,11 @@ def check_patch_valid(module, patch_path): continue topath = Path(line.split(" ")[1].strip("\n")) if frompath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CREATED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CREATED)) elif topath == Path("/dev/null"): - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.REMOVED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.REMOVED)) elif frompath == topath: - paths_in_patch.append((frompath, ModulesDiffer.DiffEnum.CHANGED)) + paths_in_patch.append((frompath, ComponentsDiffer.DiffEnum.CHANGED)) else: module.failed.append( ( @@ -105,7 +105,7 @@ def check_patch_valid(module, patch_path): # Warn about any created or removed files passed = True for path, diff_status in paths_in_patch: - if diff_status == ModulesDiffer.DiffEnum.CHANGED: + if diff_status == ComponentsDiffer.DiffEnum.CHANGED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -116,7 +116,7 @@ def check_patch_valid(module, patch_path): ) passed = False continue - elif diff_status == ModulesDiffer.DiffEnum.CREATED: + elif diff_status == ComponentsDiffer.DiffEnum.CREATED: if not Path(module.base_dir, path).exists(): module.failed.append( ( @@ -130,7 +130,7 @@ def check_patch_valid(module, patch_path): module.warned.append( ("patch", f"Patch file performs file creation of {path}. 
This is discouraged."), patch_path ) - elif diff_status == ModulesDiffer.DiffEnum.REMOVED: + elif diff_status == ComponentsDiffer.DiffEnum.REMOVED: if Path(module.base_dir, path).exists(): module.failed.append( ( @@ -161,7 +161,8 @@ def patch_reversible(module_lint_object, module, patch_path): (bool): False if any test failed, True otherwise """ try: - ModulesDiffer.try_apply_patch( + ComponentsDiffer.try_apply_patch( + module.component_type, module.component_name, module_lint_object.modules_repo.repo_path, patch_path, diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 05c64b6dee..15e98ffff8 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -19,7 +19,7 @@ from nf_core.modules.modules_repo import ModulesRepo from nf_core.pipelines.lint_utils import dump_json_with_prettier -from .modules_differ import ModulesDiffer +from ..components.components_differ import ComponentsDiffer log = logging.getLogger(__name__) @@ -308,7 +308,9 @@ def determine_branches_and_shas( # If the module/subworkflow is patched patch_file = component_path / f"{component}.diff" if patch_file.is_file(): - temp_module_dir = self.try_apply_patch_reverse(component, install_dir, patch_file, component_path) + temp_module_dir = self.try_apply_patch_reverse( + component_type, component, install_dir, patch_file, component_path + ) correct_commit_sha = self.find_correct_commit_sha( component_type, component, temp_module_dir, modules_repo ) @@ -805,7 +807,7 @@ def remove_entry(self, component_type, name, repo_url, install_dir, removed_by=N return False - def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, write_file=True): + def add_patch_entry(self, component_type, component_name, repo_url, install_dir, patch_filename, write_file=True): """ Adds (or replaces) the patch entry for a module """ @@ -815,9 +817,11 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if repo_url not in self.modules_json["repos"]: raise LookupError(f"Repo '{repo_url}' not present in 'modules.json'") - if module_name not in self.modules_json["repos"][repo_url]["modules"][install_dir]: - raise LookupError(f"Module '{install_dir}/{module_name}' not present in 'modules.json'") - self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] = str(patch_filename) + if component_name not in self.modules_json["repos"][repo_url][component_type][install_dir]: + raise LookupError( + f"{component_type[:-1].title()} '{install_dir}/{component_name}' not present in 'modules.json'" + ) + self.modules_json["repos"][repo_url][component_type][install_dir][component_name]["patch"] = str(patch_filename) if write_file: self.dump() @@ -833,17 +837,17 @@ def remove_patch_entry(self, module_name, repo_url, install_dir, write_file=True if write_file: self.dump() - def get_patch_fn(self, module_name, repo_url, install_dir): + def get_patch_fn(self, component_type, component_name, repo_url, install_dir): """ - Get the patch filename of a module + Get the patch filename of a component Args: - module_name (str): The name of the module - repo_url (str): The URL of the repository containing the module - install_dir (str): The name of the directory where modules are installed + component_name (str): The name of the component + repo_url (str): The URL of the repository containing the component + install_dir (str): The name of the directory where components are installed Returns: - (str): The patch filename for the module, None if not present + 
(str): The patch filename for the component, None if not present """ if self.modules_json is None: self.load() @@ -851,48 +855,53 @@ def get_patch_fn(self, module_name, repo_url, install_dir): path = ( self.modules_json["repos"] .get(repo_url, {}) - .get("modules") + .get(component_type) .get(install_dir) - .get(module_name, {}) + .get(component_name, {}) .get("patch") ) return Path(path) if path is not None else None - def try_apply_patch_reverse(self, module, repo_name, patch_relpath, module_dir): + def try_apply_patch_reverse(self, component_type, component, repo_name, patch_relpath, component_dir): """ - Try reverse applying a patch file to the modified module files + Try reverse applying a patch file to the modified module or subworkflow files Args: - module (str): The name of the module - repo_name (str): The name of the repository where the module resides + component_type (str): The type of component [modules, subworkflows] + component (str): The name of the module or subworkflow + repo_name (str): The name of the repository where the component resides patch_relpath (Path | str): The path to patch file in the pipeline - module_dir (Path | str): The module directory in the pipeline + component_dir (Path | str): The component directory in the pipeline Returns: - (Path | str): The path of the folder where the module patched files are + (Path | str): The path of the folder where the component patched files are Raises: LookupError: If patch was not applied """ - module_fullname = str(Path(repo_name, module)) + component_fullname = str(Path(repo_name, component)) patch_path = Path(self.directory / patch_relpath) try: - new_files = ModulesDiffer.try_apply_patch(module, repo_name, patch_path, module_dir, reverse=True) + new_files = ComponentsDiffer.try_apply_patch( + component_type, component, repo_name, patch_path, component_dir, reverse=True + ) except LookupError as e: - raise LookupError(f"Failed to apply patch in reverse for module '{module_fullname}' due to: {e}") + raise LookupError( + f"Failed to apply patch in reverse for {component_type[:-1]} '{component_fullname}' due to: {e}" + ) # Write the patched files to a temporary directory log.debug("Writing patched files to tmpdir") temp_dir = Path(tempfile.mkdtemp()) - temp_module_dir = temp_dir / module - temp_module_dir.mkdir(parents=True, exist_ok=True) + temp_component_dir = temp_dir / component + temp_component_dir.mkdir(parents=True, exist_ok=True) for file, new_content in new_files.items(): - fn = temp_module_dir / file + fn = temp_component_dir / file with open(fn, "w") as fh: fh.writelines(new_content) - return temp_module_dir + return temp_component_dir def repo_present(self, repo_name): """ @@ -908,20 +917,21 @@ def repo_present(self, repo_name): return repo_name in self.modules_json.get("repos", {}) - def module_present(self, module_name, repo_url, install_dir): + def component_present(self, module_name, repo_url, install_dir, component_type): """ Checks if a module is present in the modules.json file Args: module_name (str): Name of the module repo_url (str): URL of the repository install_dir (str): Name of the directory where modules are installed + component_type (str): Type of component [modules, subworkflows] Returns: (bool): Whether the module is present in the 'modules.json' file """ if self.modules_json is None: self.load() assert self.modules_json is not None # mypy - return module_name in self.modules_json.get("repos", {}).get(repo_url, {}).get("modules", {}).get( + return module_name in 
self.modules_json.get("repos", {}).get(repo_url, {}).get(component_type, {}).get( install_dir, {} ) @@ -1119,8 +1129,10 @@ def dump(self, run_prettier: bool = False) -> None: """ Sort the modules.json, and write it to file """ + # Sort the modules.json + if self.modules_json is None: + self.load() if self.modules_json is not None: - # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) if run_prettier: dump_json_with_prettier(self.modules_json_path, self.modules_json) diff --git a/nf_core/pipeline-template/.github/CONTRIBUTING.md b/nf_core/pipeline-template/.github/CONTRIBUTING.md index 0200ea26ce..37970c09e8 100644 --- a/nf_core/pipeline-template/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/.github/CONTRIBUTING.md @@ -1,4 +1,4 @@ -# {{ name }}: Contributing Guidelines +# `{{ name }}`: Contributing Guidelines Hi there! Many thanks for taking an interest in improving {{ name }}. @@ -66,7 +66,7 @@ These tests are run both with the latest available version of `Nextflow` and als - On your own fork, make a new branch `patch` based on `upstream/master`. - Fix the bug, and bump version (X.Y.Z+1). -- A PR should be made on `master` from patch to directly this particular bug. +- Open a pull-request from `patch` to `master` with the changes. {% if is_nfcore -%} @@ -78,13 +78,13 @@ For further information/help, please consult the [{{ name }} documentation](http ## Pipeline contribution conventions -To make the {{ name }} code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. +To make the `{{ name }}` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. ### Adding a new step If you wish to contribute a new step, please use the following coding standards: -1. Define the corresponding input channel into your new process from the expected previous process channel +1. Define the corresponding input channel into your new process from the expected previous process channel. 2. Write the process block (see below). 3. Define the output channel if needed (see below). 4. Add any new parameters to `nextflow.config` with a default (see below). @@ -99,7 +99,7 @@ If you wish to contribute a new step, please use the following coding standards: ### Default values -Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. +Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`. Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`. 
diff --git a/nf_core/pipeline-template/.github/workflows/linting_comment.yml b/nf_core/pipeline-template/.github/workflows/linting_comment.yml index 908dcea159..63b20bb311 100644 --- a/nf_core/pipeline-template/.github/workflows/linting_comment.yml +++ b/nf_core/pipeline-template/.github/workflows/linting_comment.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download lint results - uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6 + uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7 with: workflow: linting.yml workflow_conclusion: completed diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 67fda78658..16e6220aaf 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -79,9 +79,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than Pipeline settings can be provided in a `yaml` or `json` file via `-params-file <file>`. -:::warning -Do not use `-c <file>` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). -::: +> [!WARNING] +> Do not use `-c <file>` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). The above pipeline run specified with a params file in yaml format: @@ -110,7 +109,7 @@ nextflow pull {{ name }} ### Reproducibility -It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. +It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. @@ -118,15 +117,13 @@ This version number will be logged in reports when you run the pipeline, so that To further assist in reproducibility, you can use share and reuse [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. -:::tip -If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles.
-::: +> [!TIP] +> If you wish to share such a profile (such as uploading it as supplementary material for academic publications), make sure to NOT include cluster-specific paths to files, nor institution-specific profiles. ## Core Nextflow arguments -:::note -These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). -::: +> [!NOTE] +> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). ### `-profile` @@ -134,13 +131,12 @@ Use this parameter to choose a configuration profile. Profiles can give configur Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. -:::info -We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. -::: +> [!IMPORTANT] +> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility; however, when this is not possible, Conda is also supported. {%- if nf_core_configs %} -The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). +The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is supported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). {% else %} {% endif %} Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! @@ -185,13 +181,13 @@ Specify the path to a specific config file (this is a core Nextflow command). Se ### Resource requests -Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. +Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with a higher resource request (2 x original, then 3 x original).
If it still fails after the third attempt then the pipeline execution is stopped. To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website. ### Custom Containers -In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date. +In some cases, you may wish to change the container or conda environment used by a pipeline step for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline-specified version may be out of date. To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index f714eb1d93..90c5728d9a 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -8,7 +8,7 @@ {%- if fastqc %} "fastqc": { "branch": "master", - "git_sha": "666652151335353eef2fcd58880bcef5bc2928e1", + "git_sha": "dc94b6ee04a05ddb9f7ae050712ff30a13149164", "installed_by": ["modules"] }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { @@ -23,17 +23,17 @@ "nf-core": { "utils_nextflow_pipeline": { "branch": "master", - "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082", + "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b", "installed_by": ["subworkflows"] }, "utils_nfcore_pipeline": { "branch": "master", - "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba", + "git_sha": "9a1e8bb6a5d205cf7807dcefca872a3314b2f3e6", "installed_by": ["subworkflows"] }{% if nf_schema %}, "utils_nfschema_plugin": { "branch": "master", - "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c", + "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e", "installed_by": ["subworkflows"] }{% endif %} } diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf index d8989f4812..752c3a10c6 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/main.nf @@ -24,7 +24,7 @@ process FASTQC { // Make list of old name and new name pairs to use for renaming in the bash while loop def old_new_pairs = reads instanceof Path || reads.size() == 1 ?
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } def rename_to = old_new_pairs*.join(' ').join(' ') - def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') + def renamed_files = old_new_pairs.collect{ _old_name, new_name -> new_name }.join(' ') // The total amount of allocated RAM by FastQC is equal to the number of threads defined (--threads) time the amount of RAM defined (--memory) // https://github.com/s-andrews/FastQC/blob/1faeea0412093224d7f6a07f777fad60a5650795/fastqc#L211-L222 diff --git a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml index 4827da7af2..2b2e62b8ae 100644 --- a/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/fastqc/meta.yml @@ -11,6 +11,7 @@ tools: FastQC gives general quality metrics about your reads. It provides information about the quality score distribution across your reads, the per base sequence content (%A/C/G/T). + You get information about adapter contamination and other overrepresented sequences. homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 6970d05183..21174bbdc5 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -41,7 +41,7 @@ params { email_on_fail = null plaintext_email = false {%- endif %} - {%- if modules %} + {%- if modules or nf_schema %} monochrome_logs = false{% endif %} {%- if slackreport or adaptivecard %} hook_url = null{% endif %} @@ -52,6 +52,7 @@ params { version = false {%- if test_config %} pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'{% endif %} + trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') {%- if nf_core_configs -%} // Config options @@ -76,6 +77,11 @@ params { includeConfig 'conf/base.config' {%- else %} +{% if modules -%} +// Load modules.config for DSL2 module specific options +includeConfig 'conf/modules.config' +{%- endif %} + process { // TODO nf-core: Check the defaults for all processes cpus = { 1 * task.attempt } @@ -249,27 +255,39 @@ set -C # No clobber - prevent output redirection from overwriting files. // Disable process selector warnings by default. Use debug profile to enable warnings. 
nextflow.enable.configProcessNamesValidation = false -def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') timeline { enabled = true - file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html" } report { enabled = true - file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html" } trace { enabled = true - file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" + file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt" } dag { enabled = true - file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" + file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html" } manifest { name = '{{ name }}' - author = """{{ author }}""" + author = """{{ author }}""" // The author field is deprecated since Nextflow version 24.10.0, use contributors instead + contributors = [ + // TODO nf-core: Update the field with the details of the contributors to your pipeline. New with Nextflow version 24.10.0 + {%- for author_name in author.split(",") %} + [ + name: '{{ author_name }}', + affiliation: '', + email: '', + github: '', + contribution: [], // List of contribution types ('author', 'maintainer' or 'contributor') + orcid: '' + ], + {%- endfor %} + ] homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' @@ -286,6 +304,7 @@ plugins { validation { defaultIgnoreParams = ["genomes"] + monochromeLogs = params.monochrome_logs help { enabled = true command = "nextflow run {{ name }} -profile <docker/singularity/.../institute> --input samplesheet.csv --outdir <OUTDIR>" @@ -316,8 +335,3 @@ validation { }{% endif %} } {%- endif %} - -{% if modules -%} -// Load modules.config for DSL2 module specific options -includeConfig 'conf/modules.config' -{%- endif %} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 4136a0b490..3e59a8ba54 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -182,7 +182,7 @@ "fa_icon": "fas fa-file-upload", "hidden": true },{% endif %} - {%- if modules %} + {%- if modules or nf_schema %} "monochrome_logs": { "type": "boolean", "description": "Do not use coloured log outputs.", @@ -229,7 +229,13 @@ "description": "Base URL or local path to location of pipeline test dataset files", "default": "https://raw.githubusercontent.com/nf-core/test-datasets/", "hidden": true - }{% endif %} + }{% endif %}, + "trace_report_suffix": { + "type": "string", + "fa_icon": "far fa-calendar", + "description": "Suffix to add to the trace report filename.
Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.", + "hidden": true + } } } }, diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf index 0fcbf7b3f2..d6e593e852 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/main.nf @@ -92,10 +92,12 @@ def checkCondaChannels() { channels = config.channels } catch (NullPointerException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } catch (IOException e) { + log.debug(e) log.warn("Could not verify conda channel configuration.") return null } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test index ca964ce8e1..02dbf094cd 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test @@ -52,10 +52,12 @@ nextflow_workflow { } then { - assertAll( - { assert workflow.success }, - { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } - ) + expect { + with(workflow) { + assert success + assert "nextflow_workflow v9.9.9" in stdout + } + } } } diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf index 5cb7bafef3..228dbff897 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/main.nf @@ -56,21 +56,6 @@ def checkProfileProvided(nextflow_cli_args) { } } -// -// Citation string for pipeline -// -def workflowCitation() { - def temp_doi_ref = "" - def manifest_doi = workflow.manifest.doi.tokenize(",") - // Handling multiple DOIs - // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers - // Removing ` ` since the manifest.doi is a string and not a proper list - manifest_doi.each { doi_ref -> - temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n" - } - return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md" -} - // // Generate workflow version string // @@ -150,33 +135,6 @@ def paramsSummaryMultiqc(summary_params) { return yaml_file_text } -// -// nf-core logo -// -def nfCoreLogo(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - String.format( - """\n - ${dashedLine(monochrome_logs)} - ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset} - ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset} - ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset} - ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset} - ${colors.green}`._,._,\'${colors.reset} - ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset} - ${dashedLine(monochrome_logs)} - """.stripIndent() - ) -} - -// -// 
Return dashed line -// -def dashedLine(monochrome_logs=true) { - def colors = logColours(monochrome_logs) as Map - return "-${colors.dim}----------------------------------------------------${colors.reset}-" -} - // // ANSII colours used for terminal logging // @@ -261,7 +219,8 @@ def attachMultiqcReport(multiqc_report) { } } } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) if (multiqc_report) { log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email") } @@ -340,7 +299,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi def email_html = html_template.toString() // Render the sendmail template - def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit + def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as MemoryUnit def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()] def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) @@ -358,7 +317,9 @@ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') ['sendmail', '-t'].execute() << sendmail_html log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-") } - catch (Exception all) { + catch (Exception msg) { + log.debug(msg) + log.debug("Trying with mail instead of sendmail") // Catch failures and try with plaintext def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address] mail_cmd.execute() << email_html diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test index 1dc317f8f7..e43d208b1b 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test @@ -41,58 +41,6 @@ nextflow_function { } } - test("Test Function workflowCitation") { - - function "workflowCitation" - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function nfCoreLogo") { - - function "nfCoreLogo" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - - test("Test Function dashedLine") { - - function "dashedLine" - - when { - function { - """ - input[0] = false - """ - } - } - - then { - assertAll( - { assert function.success }, - { assert snapshot(function.result).match() } - ) - } - } - test("Test Function without logColours") { function "logColours" diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap index 1037232c9e..02c6701413 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap +++ 
b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap @@ -17,26 +17,6 @@ }, "timestamp": "2024-02-28T12:02:59.729647" }, - "Test Function nfCoreLogo": { - "content": [ - "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:10.562934" - }, - "Test Function workflowCitation": { - "content": [ - "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:07.019761" - }, "Test Function without logColours": { "content": [ { @@ -95,16 +75,6 @@ }, "timestamp": "2024-02-28T12:03:17.969323" }, - "Test Function dashedLine": { - "content": [ - "-\u001b[2m----------------------------------------------------\u001b[0m-" - ], - "meta": { - "nf-test": "0.8.4", - "nextflow": "23.10.1" - }, - "timestamp": "2024-02-28T12:03:14.366181" - }, "Test Function with logColours": { "content": [ { diff --git a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test index 842dc432af..8fb3016487 100644 --- a/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test +++ b/nf_core/pipeline-template/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test @@ -42,7 +42,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { @@ -94,7 +94,7 @@ nextflow_workflow { params { test_data = '' - outdir = 1 + outdir = null } workflow { diff --git a/nf_core/pipelines/create/create.py b/nf_core/pipelines/create/create.py index 8ab547c1cc..61e0b63ec3 100644 --- a/nf_core/pipelines/create/create.py +++ b/nf_core/pipelines/create/create.py @@ -8,7 +8,7 @@ import re import shutil from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Tuple, Union import git import git.config @@ -21,7 +21,8 @@ from nf_core.pipelines.create.utils import CreateConfig, features_yml_path, load_features_yaml from nf_core.pipelines.create_logo import create_logo from nf_core.pipelines.lint_utils import run_prettier_on_file -from nf_core.utils import LintConfigType, NFCoreTemplateConfig +from nf_core.pipelines.rocrate import ROCrate +from nf_core.utils import NFCoreTemplateConfig, NFCoreYamlLintConfig log = logging.getLogger(__name__) @@ -67,7 +68,7 @@ def __init__( _, config_yml = nf_core.utils.load_tools_config(outdir if outdir else Path().cwd()) # Obtain a CreateConfig object from `.nf-core.yml` config file if config_yml is not None and getattr(config_yml, "template", None) is not None: - self.config = CreateConfig(**config_yml["template"].model_dump()) + self.config = 
CreateConfig(**config_yml["template"].model_dump(exclude_none=True)) else: raise UserWarning("The template configuration was not provided in '.nf-core.yml'.") # Update the output directory @@ -205,7 +206,7 @@ def obtain_jinja_params_dict( config_yml = None # Set the parameters for the jinja template - jinja_params = self.config.model_dump() + jinja_params = self.config.model_dump(exclude_none=True) # Add template areas to jinja params and create list of areas with paths to skip skip_areas = [] @@ -356,6 +357,11 @@ def render_template(self) -> None: # Make a logo and save it, if it is a nf-core pipeline self.make_pipeline_logo() + if self.config.skip_features is None or "ro-crate" not in self.config.skip_features: + # Create the RO-Crate metadata file + rocrate_obj = ROCrate(self.outdir) + rocrate_obj.create_rocrate(json_path=self.outdir / "ro-crate-metadata.json") + # Update the .nf-core.yml with linting configurations self.fix_linting() @@ -363,8 +369,8 @@ def render_template(self) -> None: config_fn, config_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and config_yml is not None: with open(str(config_fn), "w") as fh: - config_yml.template = NFCoreTemplateConfig(**self.config.model_dump()) - yaml.safe_dump(config_yml.model_dump(), fh) + config_yml.template = NFCoreTemplateConfig(**self.config.model_dump(exclude_none=True)) + yaml.safe_dump(config_yml.model_dump(exclude_none=True), fh) log.debug(f"Dumping pipeline template yml to pipeline config file '{config_fn.name}'") # Run prettier on files @@ -395,9 +401,9 @@ def fix_linting(self): # Add the lint content to the preexisting nf-core config config_fn, nf_core_yml = nf_core.utils.load_tools_config(self.outdir) if config_fn is not None and nf_core_yml is not None: - nf_core_yml.lint = cast(LintConfigType, lint_config) + nf_core_yml.lint = NFCoreYamlLintConfig(**lint_config) with open(self.outdir / config_fn, "w") as fh: - yaml.dump(nf_core_yml.model_dump(), fh, default_flow_style=False, sort_keys=False) + yaml.dump(nf_core_yml.model_dump(exclude_none=True), fh, default_flow_style=False, sort_keys=False) def make_pipeline_logo(self): """Fetch a logo for the new pipeline from the nf-core website""" diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 5f23f5e03e..9841879e83 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -432,6 +432,19 @@ seqera_platform: You can extend this file adding any other desired configuration. nfcore_pipelines: False custom_pipelines: True +rocrate: + skippable_paths: + - "ro-crate-metadata.json" + short_description: "Add RO-Crate metadata" + description: "Add a RO-Crate metadata file to describe the pipeline" + help_text: | + RO-Crate is a metadata specification to describe research data and software. + This will add a `ro-crate-metadata.json` file to describe the pipeline. 
+ nfcore_pipelines: False + custom_pipelines: True + linting: + files_warn: + - "ro-crate-metadata.json" vscode: skippable_paths: - ".vscode" diff --git a/nf_core/pipelines/download.py b/nf_core/pipelines/download.py index 9a329aeaff..d37dce86d1 100644 --- a/nf_core/pipelines/download.py +++ b/nf_core/pipelines/download.py @@ -839,11 +839,12 @@ def rectify_raw_container_matches(self, raw_findings): url_regex = ( r"https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" ) + oras_regex = r"oras:\/\/[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_\+.~#?&//=]*)" # Thanks Stack Overflow for the regex: https://stackoverflow.com/a/39672069/713980 docker_regex = r"^(?:(?=[^:\/]{1,253})(?!-)[a-zA-Z0-9-]{1,63}(? List[str]: 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data' 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data' + Lastly, we want to remove at least a few Docker URIs for those modules that have an oras:// download link. """ d: Dict[str, str] = {} - seqera_containers: List[str] = [] + seqera_containers_http: List[str] = [] + seqera_containers_oras: List[str] = [] all_others: List[str] = [] for c in container_list: if bool(re.search(r"/data$", c)): - seqera_containers.append(c) + seqera_containers_http.append(c) + elif bool(re.search(r"^oras://", c)): + seqera_containers_oras.append(c) else: all_others.append(c) @@ -1016,8 +1021,47 @@ def prioritize_direct_download(self, container_list: List[str]) -> List[str]: log.debug(f"{c} matches and will be saved as {k}") d[k] = c - # combine deduplicated others and Seqera containers - return sorted(list(d.values()) + seqera_containers) + combined_with_oras = self.reconcile_seqera_container_uris(seqera_containers_oras, list(d.values())) + + # combine deduplicated others (Seqera containers oras, http others and Docker URI others) and Seqera containers http + return sorted(list(set(combined_with_oras + seqera_containers_http))) + + @staticmethod + def reconcile_seqera_container_uris(prioritized_container_list: List[str], other_list: List[str]) -> List[str]: + """ + Helper function that takes a list of Seqera container URIs, + extracts the software string and builds a regex from them to filter out + similar containers from the second container list. + + prioritized_container_list = [ + ... "oras://community.wave.seqera.io/library/multiqc:1.25.1--f0e743d16869c0bf", + ... "oras://community.wave.seqera.io/library/multiqc_pip_multiqc-plugins:e1f4877f1515d03c" + ...
] + + will be cleaned to + + ['library/multiqc:1.25.1', 'library/multiqc_pip_multiqc-plugins'] + + Subsequently, build a regex from those and filter out matching duplicates in other_list: + """ + if not prioritized_container_list: + return other_list + else: + # trim the URIs to the stem that contains the tool string, assign with walrus operator to account for non-matching patterns + trimmed_priority_list = [ + match.group() + for c in set(prioritized_container_list) + if (match := re.search(r"library/.*?:[\d.]+", c) if "--" in c else re.search(r"library/[^\s:]+", c)) + ] + + # build regex + prioritized_containers = re.compile("|".join(f"{re.escape(c)}" for c in trimmed_priority_list)) + + # filter out matches in other list + filtered_containers = [c for c in other_list if not re.search(prioritized_containers, c)] + + # combine prioritized and regular container lists + return sorted(list(set(prioritized_container_list + filtered_containers))) def gather_registries(self, workflow_directory: str) -> None: """Fetch the registries from the pipeline config and CLI arguments and store them in a set. @@ -1419,9 +1463,10 @@ def singularity_pull_image( # Sometimes, container still contain an explicit library specification, which # resulted in attempted pulls e.g. from docker://quay.io/quay.io/qiime2/core:2022.11 # Thus, if an explicit registry is specified, the provided -l value is ignored. + # Additionally, check if the container to be pulled is native Singularity: oras:// protocol. container_parts = container.split("/") if len(container_parts) > 2: - address = f"docker://{container}" + address = container if container.startswith("oras://") else f"docker://{container}" absolute_URI = True else: address = f"docker://{library}/{container.replace('docker://', '')}" @@ -1843,6 +1888,9 @@ def __init__( elif re.search(r"manifest\sunknown", line): self.error_type = self.InvalidTagError(self) break + elif re.search(r"ORAS\sSIF\simage\sshould\shave\sa\ssingle\slayer", line): + self.error_type = self.NoSingularityContainerError(self) + break elif re.search(r"Image\sfile\salready\sexists", line): self.error_type = self.ImageExistsError(self) break @@ -1907,6 +1955,17 @@ def __init__(self, error_log): self.helpmessage = f'Saving image of "{self.error_log.container}" failed, because "{self.error_log.out_path}" exists.\nPlease troubleshoot the command \n"{" ".join(self.error_log.singularity_command)}" manually.\n' super().__init__(self.message) + class NoSingularityContainerError(RuntimeError): + """The container image is not in native Singularity Image Format.""" + + def __init__(self, error_log): + self.error_log = error_log + self.message = ( + f'[bold red]"{self.error_log.container}" is not a valid Singularity Image Format container.[/]\n' + ) + self.helpmessage = f"Pulling \"{self.error_log.container}\" failed because it appears invalid.
To convert from Docker's OCI format, prefix the URI with 'docker://' instead of 'oras://'.\n" + super().__init__(self.message) + class OtherError(RuntimeError): """Undefined error with the container""" diff --git a/nf_core/pipelines/lint/__init__.py b/nf_core/pipelines/lint/__init__.py index 8cc7c37cb2..154e38aea6 100644 --- a/nf_core/pipelines/lint/__init__.py +++ b/nf_core/pipelines/lint/__init__.py @@ -27,8 +27,8 @@ from nf_core import __version__ from nf_core.components.lint import ComponentLint from nf_core.pipelines.lint_utils import console +from nf_core.utils import NFCoreYamlConfig, NFCoreYamlLintConfig, strip_ansi_codes from nf_core.utils import plural_s as _s -from nf_core.utils import strip_ansi_codes from .actions_awsfulltest import actions_awsfulltest from .actions_awstest import actions_awstest @@ -112,7 +112,7 @@ def __init__( # Initialise the parent object super().__init__(wf_path) - self.lint_config = {} + self.lint_config: Optional[NFCoreYamlLintConfig] = None self.release_mode = release_mode self.fail_ignored = fail_ignored self.fail_warned = fail_warned @@ -173,13 +173,12 @@ def _load_lint_config(self) -> bool: Add parsed config to the `self.lint_config` class attribute. """ _, tools_config = nf_core.utils.load_tools_config(self.wf_path) - self.lint_config = getattr(tools_config, "lint", {}) or {} + self.lint_config = getattr(tools_config, "lint", None) or None is_correct = True - # Check if we have any keys that don't match lint test names if self.lint_config is not None: - for k in self.lint_config: - if k != "nfcore_components" and k not in self.lint_tests: + for k, v in self.lint_config: + if v is not None and k != "nfcore_components" and k not in self.lint_tests: # nfcore_components is an exception to allow custom pipelines without nf-core components log.warning(f"Found unrecognised test name '{k}' in pipeline lint config") is_correct = False @@ -594,7 +593,7 @@ def run_linting( lint_obj._load_lint_config() lint_obj.load_pipeline_config() - if "nfcore_components" in lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: + if lint_obj.lint_config and not lint_obj.lint_config["nfcore_components"]: module_lint_obj = None subworkflow_lint_obj = None else: @@ -679,5 +678,4 @@ def run_linting( if len(lint_obj.failed) > 0: if release_mode: log.info("Reminder: Lint tests were run in --release mode.") - return lint_obj, module_lint_obj, subworkflow_lint_obj diff --git a/nf_core/pipelines/lint/files_exist.py b/nf_core/pipelines/lint/files_exist.py index 9dd307d8b5..19c2498263 100644 --- a/nf_core/pipelines/lint/files_exist.py +++ b/nf_core/pipelines/lint/files_exist.py @@ -66,6 +66,7 @@ def files_exist(self) -> Dict[str, List[str]]: conf/igenomes.config .github/workflows/awstest.yml .github/workflows/awsfulltest.yml + ro-crate-metadata.json Files that *must not* be present, due to being renamed or removed in the template: @@ -171,6 +172,7 @@ def files_exist(self) -> Dict[str, List[str]]: [Path(".github", "workflows", "awstest.yml")], [Path(".github", "workflows", "awsfulltest.yml")], [Path("modules.json")], + [Path("ro-crate-metadata.json")], ] # List of strings. Fails / warns if any of the strings exist. @@ -198,6 +200,12 @@ def files_exist(self) -> Dict[str, List[str]]: ] files_warn_ifexists = [Path(".travis.yml")] + files_hint = [ + [ + ["ro-crate-metadata.json"], + ". Run `nf-core rocrate` to generate this file. 
Read more about RO-Crates in the [nf-core/tools docs](https://nf-co.re/tools#create-a-ro-crate-metadata-file).", + ], + ] # Remove files that should be ignored according to the linting config ignore_files = self.lint_config.get("files_exist", []) if self.lint_config is not None else [] @@ -225,7 +233,11 @@ def pf(file_path: Union[str, Path]) -> Path: if any([pf(f).is_file() for f in files]): passed.append(f"File found: {self._wrap_quotes(files)}") else: - warned.append(f"File not found: {self._wrap_quotes(files)}") + hint = "" + for file_hint in files_hint: + if file_hint[0] == files: + hint = str(file_hint[1]) + warned.append(f"File not found: {self._wrap_quotes(files)}{hint}") # Files that cause an error if they exist for file in files_fail_ifexists: diff --git a/nf_core/pipelines/lint/multiqc_config.py b/nf_core/pipelines/lint/multiqc_config.py index 2b0fc7902e..fec5b518e3 100644 --- a/nf_core/pipelines/lint/multiqc_config.py +++ b/nf_core/pipelines/lint/multiqc_config.py @@ -31,6 +31,15 @@ def multiqc_config(self) -> Dict[str, List[str]]: lint: multiqc_config: False + To disable this test only for specific sections, you can specify a list of section names. + For example: + + .. code-block:: yaml + lint: + multiqc_config: + - report_section_order + - report_comment + """ passed: List[str] = [] diff --git a/nf_core/pipelines/lint/nfcore_yml.py b/nf_core/pipelines/lint/nfcore_yml.py index e0d5fb2005..3395696d1d 100644 --- a/nf_core/pipelines/lint/nfcore_yml.py +++ b/nf_core/pipelines/lint/nfcore_yml.py @@ -1,7 +1,8 @@ -import re from pathlib import Path from typing import Dict, List +from ruamel.yaml import YAML + from nf_core import __version__ REPOSITORY_TYPES = ["pipeline", "modules"] @@ -26,21 +27,23 @@ def nfcore_yml(self) -> Dict[str, List[str]]: failed: List[str] = [] ignored: List[str] = [] + yaml = YAML() + # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get(".nf-core", []) if self.lint_config is not None else [] - try: - with open(Path(self.wf_path, ".nf-core.yml")) as fh: - content = fh.read() - except FileNotFoundError: - with open(Path(self.wf_path, ".nf-core.yaml")) as fh: - content = fh.read() + for ext in (".yml", ".yaml"): + try: + nf_core_yml = yaml.load(Path(self.wf_path) / f".nf-core{ext}") + break + except FileNotFoundError: + continue + else: + raise FileNotFoundError("No `.nf-core.yml` file found.") if "repository_type" not in ignore_configs: # Check that the repository type is set in the .nf-core.yml - repo_type_re = r"repository_type: (.+)" - match = re.search(repo_type_re, content) - if match: - repo_type = match.group(1) + if "repository_type" in nf_core_yml: + repo_type = nf_core_yml["repository_type"] if repo_type not in REPOSITORY_TYPES: failed.append( f"Repository type in `.nf-core.yml` is not valid. " @@ -55,10 +58,8 @@ def nfcore_yml(self) -> Dict[str, List[str]]: if "nf_core_version" not in ignore_configs: # Check that the nf-core version is set in the .nf-core.yml - nf_core_version_re = r"nf_core_version: (.+)" - match = re.search(nf_core_version_re, content) - if match: - nf_core_version = match.group(1).strip('"') + if "nf_core_version" in nf_core_yml: + nf_core_version = nf_core_yml["nf_core_version"] if nf_core_version != __version__ and "dev" not in nf_core_version: warned.append( f"nf-core version in `.nf-core.yml` is not set to the latest version. 
" diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py index bdfad5200f..75b05f16ed 100644 --- a/nf_core/pipelines/lint/readme.py +++ b/nf_core/pipelines/lint/readme.py @@ -23,6 +23,21 @@ def readme(self): * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails + To disable this test, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + lint: + readme: False + + To disable subsets of these tests, add the following to the pipeline's ``.nf-core.yml`` file: + + .. code-block:: yaml + + lint: + readme: + - nextflow_badge + - zenodo_release + """ passed = [] warned = [] diff --git a/nf_core/pipelines/lint/template_strings.py b/nf_core/pipelines/lint/template_strings.py index 11c5e82516..0cb669e553 100644 --- a/nf_core/pipelines/lint/template_strings.py +++ b/nf_core/pipelines/lint/template_strings.py @@ -39,8 +39,8 @@ def template_strings(self): ignored = [] # Files that should be ignored according to the linting config ignore_files = self.lint_config.get("template_strings", []) if self.lint_config is not None else [] - files = self.list_files() + files = self.list_files() # Loop through files, searching for string num_matches = 0 for fn in files: diff --git a/nf_core/pipelines/rocrate.py b/nf_core/pipelines/rocrate.py new file mode 100644 index 0000000000..915f203f00 --- /dev/null +++ b/nf_core/pipelines/rocrate.py @@ -0,0 +1,362 @@ +#!/usr/bin/env python +"""Code to deal with pipeline RO (Research Object) Crates""" + +import logging +import os +import re +import sys +from datetime import datetime +from pathlib import Path +from typing import Optional, Set, Union + +import requests +import rocrate.rocrate +from git import GitCommandError, InvalidGitRepositoryError +from repo2rocrate.nextflow import NextflowCrateBuilder +from rich.progress import BarColumn, Progress +from rocrate.model.person import Person +from rocrate.rocrate import ROCrate as BaseROCrate + +from nf_core.utils import Pipeline + +log = logging.getLogger(__name__) + + +class CustomNextflowCrateBuilder(NextflowCrateBuilder): + DATA_ENTITIES = NextflowCrateBuilder.DATA_ENTITIES + [ + ("docs/usage.md", "File", "Usage documentation"), + ("docs/output.md", "File", "Output documentation"), + ("suborkflows/local", "Dataset", "Pipeline-specific suborkflows"), + ("suborkflows/nf-core", "Dataset", "nf-core suborkflows"), + (".nf-core.yml", "File", "nf-core configuration file, configuring template features and linting rules"), + (".pre-commit-config.yaml", "File", "Configuration file for pre-commit hooks"), + (".prettierignore", "File", "Ignore file for prettier"), + (".prettierrc", "File", "Configuration file for prettier"), + ] + + +def custom_make_crate( + root: Path, + workflow: Optional[Path] = None, + repo_url: Optional[str] = None, + wf_name: Optional[str] = None, + wf_version: Optional[str] = None, + lang_version: Optional[str] = None, + ci_workflow: Optional[str] = "ci.yml", + diagram: Optional[Path] = None, +) -> BaseROCrate: + builder = CustomNextflowCrateBuilder(root, repo_url=repo_url) + + return builder.build( + workflow, + wf_name=wf_name, + wf_version=wf_version, + lang_version=lang_version, + license=None, + ci_workflow=ci_workflow, + diagram=diagram, + ) + + +class ROCrate: + """ + Class to generate an RO Crate for a pipeline + + """ + + def __init__(self, pipeline_dir: Path, version="") -> None: + """ + Initialise the ROCrate object + + Args: + pipeline_dir (Path): Path to the pipeline directory + version (str): Version of the 
pipeline to check out + """ + from nf_core.utils import is_pipeline_directory, setup_requests_cachedir + + is_pipeline_directory(pipeline_dir) + self.pipeline_dir = pipeline_dir + self.version: str = version + self.crate: rocrate.rocrate.ROCrate + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + + setup_requests_cachedir() + + def create_rocrate(self, json_path: Union[None, Path] = None, zip_path: Union[None, Path] = None) -> bool: + """ + Create an RO Crate for a pipeline + + Args: + json_path (Path): Path to the metadata file + zip_path (Path): Path to the zip file + + """ + + # Check that the checked-out pipeline version is the same as the requested version + if self.version != "": + if self.version != self.pipeline_obj.nf_config.get("manifest.version"): + # using git checkout to get the requested version + log.info(f"Checking out pipeline version {self.version}") + if self.pipeline_obj.repo is None: + log.error(f"Pipeline repository not found in {self.pipeline_dir}") + sys.exit(1) + try: + self.pipeline_obj.repo.git.checkout(self.version) + self.pipeline_obj = Pipeline(self.pipeline_dir) + self.pipeline_obj._load() + except InvalidGitRepositoryError: + log.error(f"Could not find a git repository in {self.pipeline_dir}") + sys.exit(1) + except GitCommandError: + log.error(f"Could not check out version {self.version}") + sys.exit(1) + self.version = self.pipeline_obj.nf_config.get("manifest.version", "") + self.make_workflow_rocrate() + + # Save just the JSON metadata file + if json_path is not None: + if json_path.name == "ro-crate-metadata.json": + json_path = json_path.parent + + log.info(f"Saving metadata file to '{json_path}'") + self.crate.metadata.write(json_path) + + # Save the whole crate zip file + if zip_path is not None: + if zip_path.name != "ro-crate.crate.zip": + zip_path = zip_path / "ro-crate.crate.zip" + log.info(f"Saving zip file '{zip_path}'") + self.crate.write_zip(zip_path) + + if json_path is None and zip_path is None: + log.error("Please provide a path to save the ro-crate file or the zip file.") + return False + + return True + + def make_workflow_rocrate(self) -> None: + """ + Create an RO Crate for a pipeline + """ + if self.pipeline_obj is None: + raise ValueError("Pipeline object not loaded") + + diagram: Optional[Path] = None + # find files (metro|tube)_?(map)?.png in the pipeline directory or docs/ using pathlib + pattern = re.compile(r".*?(metro|tube|subway)_(map).*?\.png", re.IGNORECASE) + for file in self.pipeline_dir.rglob("*.png"): + if pattern.match(file.name): + log.debug(f"Found diagram: {file}") + diagram = file.relative_to(self.pipeline_dir) + break + + # Create the RO Crate object + + self.crate = custom_make_crate( + self.pipeline_dir, + self.pipeline_dir / "main.nf", + self.pipeline_obj.nf_config.get("manifest.homePage", ""), + self.pipeline_obj.nf_config.get("manifest.name", ""), + self.pipeline_obj.nf_config.get("manifest.version", ""), + self.pipeline_obj.nf_config.get("manifest.nextflowVersion", ""), + diagram=diagram, + ) + + # add readme as description + readme = self.pipeline_dir / "README.md" + + try: + self.crate.description = readme.read_text() + except FileNotFoundError: + log.error(f"Could not find README.md in {self.pipeline_dir}") + # get license from LICENSE file + license_file = self.pipeline_dir / "LICENSE" + try: + license = license_file.read_text() + if license.startswith("MIT"): + self.crate.license = "MIT" + else: + # prompt for license + log.info("Could
not determine license from LICENSE file") + self.crate.license = input("Please enter the license for this pipeline: ") + except FileNotFoundError: + log.error(f"Could not find LICENSE file in {self.pipeline_dir}") + + self.crate.add_jsonld( + {"@id": "https://nf-co.re/", "@type": "Organization", "name": "nf-core", "url": "https://nf-co.re/"} + ) + + # Set metadata for main entity file + self.set_main_entity("main.nf") + + def set_main_entity(self, main_entity_filename: str): + """ + Set the main.nf as the main entity of the crate and add necessary metadata + """ + if self.crate.mainEntity is None: + raise ValueError("Main entity not set") + + self.crate.mainEntity.append_to( + "dct:conformsTo", "https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/", compact=True + ) + # add dateCreated and dateModified, based on the current date + self.crate.mainEntity.append_to("dateCreated", self.crate.root_dataset.get("dateCreated", ""), compact=True) + self.crate.mainEntity.append_to( + "dateModified", str(datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")), compact=True + ) + self.crate.mainEntity.append_to("sdPublisher", {"@id": "https://nf-co.re/"}, compact=True) + if self.version.endswith("dev"): + url = "dev" + else: + url = self.version + self.crate.mainEntity.append_to( + "url", f"https://nf-co.re/{self.crate.name.replace('nf-core/','')}/{url}/", compact=True + ) + self.crate.mainEntity.append_to("version", self.version, compact=True) + + # get keywords from nf-core website + remote_workflows = requests.get("https://nf-co.re/pipelines.json").json()["remote_workflows"] + # go through all remote workflows and find the one that matches the pipeline name + topics = ["nf-core", "nextflow"] + for remote_wf in remote_workflows: + assert self.pipeline_obj.pipeline_name is not None # mypy + if remote_wf["name"] == self.pipeline_obj.pipeline_name.replace("nf-core/", ""): + topics = topics + remote_wf["topics"] + break + + log.debug(f"Adding topics: {topics}") + self.crate.mainEntity.append_to("keywords", topics) + + self.add_main_authors(self.crate.mainEntity) + + self.crate.mainEntity = self.crate.mainEntity + + self.crate.mainEntity.append_to("license", self.crate.license) + self.crate.mainEntity.append_to("name", self.crate.name) + + if "dev" in self.version: + self.crate.creativeWorkStatus = "InProgress" + else: + self.crate.creativeWorkStatus = "Stable" + if self.pipeline_obj.repo is None: + log.error(f"Pipeline repository not found in {self.pipeline_dir}") + else: + tags = self.pipeline_obj.repo.tags + if tags: + # get the tag for this version + for tag in tags: + if tag.commit.hexsha == self.pipeline_obj.repo.head.commit.hexsha: + self.crate.mainEntity.append_to( + "dateCreated", + tag.commit.committed_datetime.strftime("%Y-%m-%dT%H:%M:%SZ"), + compact=True, + ) + + def add_main_authors(self, wf_file: rocrate.model.entity.Entity) -> None: + """ + Add workflow authors to the crate + """ + # add author entity to crate + + try: + authors = self.pipeline_obj.nf_config["manifest.author"].split(",") + # remove spaces + authors = [a.strip() for a in authors] + # add manifest authors as maintainer to crate + + except KeyError: + log.error("No author field found in manifest of nextflow.config") + return + # look at git contributors for author names + try: + git_contributors: Set[str] = set() + if self.pipeline_obj.repo is None: + log.info("No git repository found.
No git contributors will be added as authors.") + return + commits_touching_path = list(self.pipeline_obj.repo.iter_commits(paths="main.nf")) + + for commit in commits_touching_path: + if commit.author.name is not None: + git_contributors.add(commit.author.name) + # exclude bots + contributors = {c for c in git_contributors if not c.endswith("bot") and c != "Travis CI User"} + + log.debug(f"Found {len(contributors)} git authors") + + progress_bar = Progress( + "[bold blue]{task.description}", + BarColumn(bar_width=None), + "[magenta]{task.completed} of {task.total}[reset] » [bold yellow]{task.fields[test_name]}", + transient=True, + disable=os.environ.get("HIDE_PROGRESS", None) is not None, + ) + with progress_bar: + bump_progress = progress_bar.add_task( + "Searching for author names on GitHub", total=len(contributors), test_name="" + ) + + for git_author in contributors: + progress_bar.update(bump_progress, advance=1, test_name=git_author) + git_author = ( + requests.get(f"https://api.github.com/users/{git_author}").json().get("name", git_author) + ) + if git_author is None: + log.debug(f"Could not find name for {git_author}") + continue + + except AttributeError: + log.debug("Could not find git contributors") + + # remove usernames (just keep names with spaces) + named_contributors = {c for c in contributors if " " in c} + + for author in named_contributors: + log.debug(f"Adding author: {author}") + + if self.pipeline_obj.repo is None: + log.info("No git repository found. No git contributors will be added as authors.") + return + # get email from git log + email = self.pipeline_obj.repo.git.log(f"--author={author}", "--pretty=format:%ae", "-1") + orcid = get_orcid(author) + author_entity = self.crate.add( + Person( + self.crate, orcid if orcid is not None else "#" + email, properties={"name": author, "email": email} + ) + ) + wf_file.append_to("creator", author_entity) + if author in authors: + wf_file.append_to("maintainer", author_entity) + + +def get_orcid(name: str) -> Optional[str]: + """ + Get the ORCID for a given name + + Args: + name (str): Name of the author + + Returns: + str: ORCID URI or None + """ + base_url = "https://pub.orcid.org/v3.0/search/" + headers = { + "Accept": "application/json", + } + params = {"q": f'family-name:"{name.split()[-1]}" AND given-names:"{name.split()[0]}"'} + response = requests.get(base_url, params=params, headers=headers) + + if response.status_code == 200: + json_response = response.json() + if json_response.get("num-found") == 1: + orcid_uri = json_response.get("result")[0].get("orcid-identifier", {}).get("uri") + log.info(f"Using found ORCID for {name}. Please double-check: {orcid_uri}") + return orcid_uri + else: + log.debug(f"No exact ORCID found for {name}. See {response.url}") + return None + else: + log.info(f"API request to ORCID unsuccessful.
Status code: {response.status_code}") + return None diff --git a/nf_core/pipelines/schema.py b/nf_core/pipelines/schema.py index 96ee3ffbb3..61fd6bc2d7 100644 --- a/nf_core/pipelines/schema.py +++ b/nf_core/pipelines/schema.py @@ -43,7 +43,7 @@ def __init__(self): self.schema_from_scratch = False self.no_prompts = False self.web_only = False - self.web_schema_build_url = "https://nf-co.re/pipeline_schema_builder" + self.web_schema_build_url = "https://oldsite.nf-co.re/pipeline_schema_builder" self.web_schema_build_web_url = None self.web_schema_build_api_url = None self.validation_plugin = None @@ -96,6 +96,7 @@ def _update_validation_plugin_from_config(self) -> None: conf.get("validation.help.shortParameter", "help"), conf.get("validation.help.fullParameter", "helpFull"), conf.get("validation.help.showHiddenParameter", "showHidden"), + "trace_report_suffix", # report suffix should be ignored by default as it is a Java Date object ] # Help parameter should be ignored by default ignored_params_config_str = conf.get("validation.defaultIgnoreParams", "") ignored_params_config = [ @@ -957,6 +958,7 @@ def launch_web_builder(self): """ Send pipeline schema to web builder and wait for response """ + content = { "post_content": "json_schema", "api": "true", @@ -965,6 +967,7 @@ def launch_web_builder(self): "schema": json.dumps(self.schema), } web_response = nf_core.utils.poll_nfcore_web_api(self.web_schema_build_url, content) + try: if "api_url" not in web_response: raise AssertionError('"api_url" not in web_response') diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index 12b29f15ec..781b4f5f00 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -6,7 +6,7 @@ import re import shutil from pathlib import Path -from typing import Dict, Optional, Union +from typing import Any, Dict, Optional, Tuple, Union import git import questionary @@ -105,7 +105,7 @@ def __init__( with open(template_yaml_path) as f: self.config_yml.template = yaml.safe_load(f) with open(self.config_yml_path, "w") as fh: - yaml.safe_dump(self.config_yml.model_dump(), fh) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), fh) log.info(f"Saved pipeline creation settings to '{self.config_yml_path}'") raise SystemExit( f"Please commit your changes and delete the {template_yaml_path} file. Then run the sync command again." @@ -120,7 +120,7 @@ def __init__( requests.auth.HTTPBasicAuth(self.gh_username, os.environ["GITHUB_AUTH_TOKEN"]) ) - def sync(self): + def sync(self) -> None: """Find workflow attributes, create a new template pipeline on TEMPLATE""" # Clear requests_cache so that we don't get stale API responses @@ -271,7 +271,7 @@ def make_template_pipeline(self): self.config_yml.template.force = True with open(self.config_yml_path, "w") as config_path: - yaml.safe_dump(self.config_yml.model_dump(), config_path) + yaml.safe_dump(self.config_yml.model_dump(exclude_none=True), config_path) try: pipeline_create_obj = nf_core.pipelines.create.create.PipelineCreate( @@ -291,7 +291,7 @@ def make_template_pipeline(self): self.config_yml.template.outdir = "." # Update nf-core version self.config_yml.nf_core_version = nf_core.__version__ - dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump()) + dump_yaml_with_prettier(self.config_yml_path, self.config_yml.model_dump(exclude_none=True)) except Exception as err: # Reset to where you were to prevent git getting messed up. 
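As a side note on the `get_orcid` helper added above: it queries the public ORCID search API and only trusts a result when exactly one record matches. Below is a minimal standalone sketch of that same request, reusing the endpoint, query syntax, and JSON fields from the function; the author name is example data, not taken from the source.

```python
import requests

# Example input; any author name can be substituted here.
name = "Jane Doe"
family_name, given_names = name.split()[-1], name.split()[0]

response = requests.get(
    "https://pub.orcid.org/v3.0/search/",
    params={"q": f'family-name:"{family_name}" AND given-names:"{given_names}"'},
    headers={"Accept": "application/json"},
)
if response.status_code == 200:
    data = response.json()
    # Mirror the num-found == 1 check above: only a unique hit is trusted.
    if data.get("num-found") == 1:
        print(data["result"][0]["orcid-identifier"]["uri"])
```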
@@ -416,12 +416,8 @@ def close_open_template_merge_prs(self): list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" with self.gh_api.cache_disabled(): list_prs_request = self.gh_api.get(list_prs_url) - try: - list_prs_json = json.loads(list_prs_request.content) - list_prs_pp = json.dumps(list_prs_json, indent=4) - except Exception: - list_prs_json = list_prs_request.content - list_prs_pp = list_prs_request.content + + list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") if list_prs_request.status_code != 200: @@ -462,12 +458,8 @@ def close_open_pr(self, pr) -> bool: # Update the PR status to be closed with self.gh_api.cache_disabled(): pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - try: - pr_request_json = json.loads(pr_request.content) - pr_request_pp = json.dumps(pr_request_json, indent=4) - except Exception: - pr_request_json = pr_request.content - pr_request_pp = pr_request.content + + pr_request_json, pr_request_pp = self._parse_json_response(pr_request) # PR update worked if pr_request.status_code == 200: @@ -481,6 +473,22 @@ def close_open_pr(self, pr) -> bool: log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") return False + @staticmethod + def _parse_json_response(response) -> Tuple[Any, str]: + """Helper method to parse JSON response and create pretty-printed string. + + Args: + response: requests.Response object + + Returns: + Tuple of (parsed_json, pretty_printed_str) + """ + try: + json_data = json.loads(response.content) + return json_data, json.dumps(json_data, indent=4) + except Exception: + return response.content, str(response.content) + def reset_target_dir(self): """ Reset the target pipeline directory. Check out the original branch. 
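To illustrate the `_parse_json_response` helper factored out above: it returns the parsed JSON plus a pretty-printed string, and degrades gracefully to the raw bytes when the GitHub API returns non-JSON content. A small hedged usage sketch follows; it assumes the helper's enclosing class in `nf_core/pipelines/sync.py` is importable as `PipelineSync`, and the stub objects are hypothetical stand-ins for `requests.Response`.

```python
from types import SimpleNamespace

from nf_core.pipelines.sync import PipelineSync  # assumed class name for the sync object

# Hypothetical stand-ins exposing only the .content attribute the helper reads.
ok = SimpleNamespace(content=b'{"state": "closed"}')
bad = SimpleNamespace(content=b"<html>rate limited</html>")

data, pretty = PipelineSync._parse_json_response(ok)
# data == {"state": "closed"}; pretty is the same JSON indented by 4 spaces.

raw, fallback = PipelineSync._parse_json_response(bad)
# On a parse failure the raw bytes come back unchanged, stringified for logging.
```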
diff --git a/nf_core/subworkflows/__init__.py b/nf_core/subworkflows/__init__.py index 88e8a09388..8e3c85a271 100644 --- a/nf_core/subworkflows/__init__.py +++ b/nf_core/subworkflows/__init__.py @@ -3,5 +3,6 @@ from .install import SubworkflowInstall from .lint import SubworkflowLint from .list import SubworkflowList +from .patch import SubworkflowPatch from .remove import SubworkflowRemove from .update import SubworkflowUpdate diff --git a/nf_core/subworkflows/lint/subworkflow_changes.py b/nf_core/subworkflows/lint/subworkflow_changes.py index a9c9616a21..cf0fd7211c 100644 --- a/nf_core/subworkflows/lint/subworkflow_changes.py +++ b/nf_core/subworkflows/lint/subworkflow_changes.py @@ -2,9 +2,12 @@ Check whether the content of a subworkflow has changed compared to the original repository """ +import shutil +import tempfile from pathlib import Path import nf_core.modules.modules_repo +from nf_core.components.components_differ import ComponentsDiffer def subworkflow_changes(subworkflow_lint_object, subworkflow): @@ -20,7 +23,29 @@ def subworkflow_changes(subworkflow_lint_object, subworkflow): Only runs when linting a pipeline, not the modules repository """ - tempdir = subworkflow.component_dir + if subworkflow.is_patched: + # If the subworkflow is patched, we need to apply + # the patch in reverse before comparing with the remote + tempdir_parent = Path(tempfile.mkdtemp()) + tempdir = tempdir_parent / "tmp_subworkflow_dir" + shutil.copytree(subworkflow.component_dir, tempdir) + try: + new_lines = ComponentsDiffer.try_apply_patch( + subworkflow.component_type, + subworkflow.component_name, + subworkflow.org, + subworkflow.patch_path, + tempdir, + reverse=True, + ) + for file, lines in new_lines.items(): + with open(tempdir / file, "w") as fh: + fh.writelines(lines) + except LookupError: + # This error is already reported by subworkflow_patch, so just return + return + else: + tempdir = subworkflow.component_dir subworkflow.branch = subworkflow_lint_object.modules_json.get_component_branch( "subworkflows", subworkflow.component_name, subworkflow.repo_url, subworkflow.org ) diff --git a/nf_core/subworkflows/patch.py b/nf_core/subworkflows/patch.py new file mode 100644 index 0000000000..3c8b3d5e4d --- /dev/null +++ b/nf_core/subworkflows/patch.py @@ -0,0 +1,10 @@ +import logging + +from nf_core.components.patch import ComponentPatch + +log = logging.getLogger(__name__) + + +class SubworkflowPatch(ComponentPatch): + def __init__(self, pipeline_dir, remote_url=None, branch=None, no_pull=False, installed_by=False): + super().__init__(pipeline_dir, "subworkflows", remote_url, branch, no_pull, installed_by) diff --git a/nf_core/utils.py b/nf_core/utils.py index 16125aed33..30b0743493 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -5,6 +5,7 @@ import concurrent.futures import datetime import errno +import fnmatch import hashlib import io import json @@ -19,7 +20,7 @@ import time from contextlib import contextmanager from pathlib import Path -from typing import Any, Callable, Dict, Generator, List, Literal, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Literal, Optional, Tuple, Union import git import prompt_toolkit.styles @@ -36,6 +37,9 @@ import nf_core +if TYPE_CHECKING: + from nf_core.pipelines.schema import PipelineSchema + log = logging.getLogger(__name__) # ASCII nf-core logo @@ -52,14 +56,29 @@ [ ("qmark", "fg:ansiblue bold"), # token in front of the question ("question", "bold"), # question text - ("answer", "fg:ansigreen nobold 
bg:"), # submitted answer text behind the question - ("pointer", "fg:ansiyellow bold"), # pointer used in select and checkbox prompts - ("highlighted", "fg:ansiblue bold"), # pointed-at choice in select and checkbox prompts - ("selected", "fg:ansiyellow noreverse bold"), # style for a selected item of a checkbox + ( + "answer", + "fg:ansigreen nobold bg:", + ), # submitted answer text behind the question + ( + "pointer", + "fg:ansiyellow bold", + ), # pointer used in select and checkbox prompts + ( + "highlighted", + "fg:ansiblue bold", + ), # pointed-at choice in select and checkbox prompts + ( + "selected", + "fg:ansiyellow noreverse bold", + ), # style for a selected item of a checkbox ("separator", "fg:ansiblack"), # separator in lists ("instruction", ""), # user instructions for select, rawselect, checkbox ("text", ""), # plain text - ("disabled", "fg:gray italic"), # disabled choices for select and checkbox prompts + ( + "disabled", + "fg:gray italic", + ), # disabled choices for select and checkbox prompts ("choice-default", "fg:ansiblack"), ("choice-default-changed", "fg:ansiyellow"), ("choice-required", "fg:ansired"), @@ -79,7 +98,11 @@ def fetch_remote_version(source_url): return remote_version -def check_if_outdated(current_version=None, remote_version=None, source_url="https://nf-co.re/tools_version"): +def check_if_outdated( + current_version=None, + remote_version=None, + source_url="https://nf-co.re/tools_version", +): """ Check if the current version of nf-core is outdated """ @@ -146,11 +169,12 @@ def __init__(self, wf_path: Path) -> None: self.wf_path = Path(wf_path) self.pipeline_name: Optional[str] = None self.pipeline_prefix: Optional[str] = None - self.schema_obj: Optional[Dict] = None + self.schema_obj: Optional[PipelineSchema] = None + self.repo: Optional[git.Repo] = None try: - repo = git.Repo(self.wf_path) - self.git_sha = repo.head.object.hexsha + self.repo = git.Repo(self.wf_path) + self.git_sha = self.repo.head.object.hexsha except Exception as e: log.debug(f"Could not find git hash for pipeline: {self.wf_path}. 
{e}") @@ -254,7 +278,7 @@ def fetch_wf_config(wf_path: Path, cache_config: bool = True) -> dict: """ log.debug(f"Got '{wf_path}' as path") - + wf_path = Path(wf_path) config = {} cache_fn = None cache_basedir = None @@ -441,6 +465,7 @@ def poll_nfcore_web_api(api_url: str, post_data: Optional[Dict] = None) -> Dict: if post_data is None: response = requests.get(api_url, headers={"Cache-Control": "no-cache"}) else: + log.debug(f"requesting {api_url} with {post_data}") response = requests.post(url=api_url, data=post_data) except requests.exceptions.Timeout: raise AssertionError(f"URL timed out: {api_url}") @@ -526,7 +551,8 @@ def __call__(self, r): with open(gh_cli_config_fn) as fh: gh_cli_config = yaml.safe_load(fh) self.auth = requests.auth.HTTPBasicAuth( - gh_cli_config["github.com"]["user"], gh_cli_config["github.com"]["oauth_token"] + gh_cli_config["github.com"]["user"], + gh_cli_config["github.com"]["oauth_token"], ) self.auth_mode = f"gh CLI config: {gh_cli_config['github.com']['user']}" except Exception: @@ -794,12 +820,18 @@ def get_tag_date(tag_date): # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_docker[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_docker[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } elif img["image_type"] == "Singularity": # Obtain version and build match = re.search(r"(?::)+([A-Za-z\d\-_.]+)", img["image_name"]) if match is not None: - all_singularity[match.group(1)] = {"date": get_tag_date(img["updated"]), "image": img} + all_singularity[match.group(1)] = { + "date": get_tag_date(img["updated"]), + "image": img, + } # Obtain common builds from Docker and Singularity images common_keys = list(all_docker.keys() & all_singularity.keys()) current_date = None @@ -929,13 +961,19 @@ def prompt_pipeline_release_branch( # Releases if len(wf_releases) > 0: for tag in map(lambda release: release.get("tag_name"), wf_releases): - tag_display = [("fg:ansiblue", f"{tag} "), ("class:choice-default", "[release]")] + tag_display = [ + ("fg:ansiblue", f"{tag} "), + ("class:choice-default", "[release]"), + ] choices.append(questionary.Choice(title=tag_display, value=tag)) tag_set.append(str(tag)) # Branches for branch in wf_branches.keys(): - branch_display = [("fg:ansiyellow", f"{branch} "), ("class:choice-default", "[branch]")] + branch_display = [ + ("fg:ansiyellow", f"{branch} "), + ("class:choice-default", "[branch]"), + ] choices.append(questionary.Choice(title=branch_display, value=branch)) tag_set.append(branch) @@ -966,7 +1004,8 @@ def validate(self, value): return True else: raise questionary.ValidationError( - message="Invalid remote cache index file", cursor_position=len(value.text) + message="Invalid remote cache index file", + cursor_position=len(value.text), ) else: return True @@ -996,7 +1035,13 @@ def get_repo_releases_branches(pipeline, wfs): pipeline = wf.full_name # Store releases and stop loop - wf_releases = list(sorted(wf.releases, key=lambda k: k.get("published_at_timestamp", 0), reverse=True)) + wf_releases = list( + sorted( + wf.releases, + key=lambda k: k.get("published_at_timestamp", 0), + reverse=True, + ) + ) break # Arbitrary GitHub repo @@ -1016,7 +1061,13 @@ def get_repo_releases_branches(pipeline, wfs): raise AssertionError(f"Not able to find pipeline '{pipeline}'") except AttributeError: # Success! 
We have a list, which doesn't work with .get() which is looking for a dict key
-        wf_releases = list(sorted(rel_r.json(), key=lambda k: k.get("published_at_timestamp", 0), reverse=True))
+        wf_releases = list(
+            sorted(
+                rel_r.json(),
+                key=lambda k: k.get("published_at_timestamp", 0),
+                reverse=True,
+            )
+        )

     # Get release tag commit hashes
     if len(wf_releases) > 0:
@@ -1088,7 +1139,102 @@ def get(self, item: str, default: Any = None) -> Any:
         return getattr(self, item, default)


-LintConfigType = Optional[Dict[str, Union[List[str], List[Dict[str, List[str]]], bool]]]
+class NFCoreYamlLintConfig(BaseModel):
+    """
+    Schema for the linting config in `.nf-core.yml`. It should cover:
+
+    .. code-block:: yaml
+
+        files_unchanged:
+            - .github/workflows/branch.yml
+        # modules_config: False
+        modules_config:
+            - fastqc
+        # merge_markers: False
+        merge_markers:
+            - docs/my_pdf.pdf
+        # nextflow_config: False
+        nextflow_config:
+            - manifest.name
+            - config_defaults:
+                - params.annotation_db
+                - params.multiqc_comment_headers
+                - params.custom_table_headers
+        # multiqc_config: False
+        multiqc_config:
+            - report_section_order
+            - report_comment
+        files_exist:
+            - .github/CONTRIBUTING.md
+            - CITATIONS.md
+        # template_strings: False
+        template_strings:
+            - docs/my_pdf.pdf
+        nfcore_components: False
+    """
+
+    files_unchanged: Optional[Union[bool, List[str]]] = None
+    """ List of files that should not be changed """
+    modules_config: Optional[Union[bool, List[str]]] = None
+    """ List of modules whose config should not be checked """
+    merge_markers: Optional[Union[bool, List[str]]] = None
+    """ List of files that should not be checked for merge markers """
+    nextflow_config: Optional[Union[bool, List[Union[str, Dict[str, List[str]]]]]] = None
+    """ List of Nextflow config settings that should not be checked """
+    multiqc_config: Optional[Union[bool, List[str]]] = None
+    """ List of MultiQC config options that can be changed """
+    files_exist: Optional[Union[bool, List[str]]] = None
+    """ List of files that are not required to exist """
+    template_strings: Optional[Union[bool, List[str]]] = None
+    """ List of files that can contain template strings """
+    readme: Optional[Union[bool, List[str]]] = None
+    """ Lint the README.md file """
+    nfcore_components: Optional[bool] = None
+    """ Lint all required files to use nf-core modules and subworkflows """
+    actions_ci: Optional[bool] = None
+    """ Lint all required files to use GitHub Actions CI """
+    actions_awstest: Optional[bool] = None
+    """ Lint all required files to run tests on AWS """
+    actions_awsfulltest: Optional[bool] = None
+    """ Lint all required files to run full tests on AWS """
+    pipeline_todos: Optional[bool] = None
+    """ Lint for TODO statements """
+    plugin_includes: Optional[bool] = None
+    """ Lint for Nextflow plugin includes """
+    pipeline_name_conventions: Optional[bool] = None
+    """ Lint for pipeline name conventions """
+    schema_lint: Optional[bool] = None
+    """ Lint the nextflow_schema.json file """
+    schema_params: Optional[bool] = None
+    """ Lint the schema for all params """
+    system_exit: Optional[bool] = None
+    """ Lint for System.exit calls in Groovy/Nextflow code """
+    schema_description: Optional[bool] = None
+    """ Check that every parameter in the schema has a description. """
""" + actions_schema_validation: Optional[bool] = None + """ Lint GitHub Action workflow files with schema""" + modules_json: Optional[bool] = None + """ Lint modules.json file """ + modules_structure: Optional[bool] = None + """ Lint modules structure """ + base_config: Optional[bool] = None + """ Lint base.config file """ + nfcore_yml: Optional[bool] = None + """ Lint nf-core.yml """ + version_consistency: Optional[bool] = None + """ Lint for version consistency """ + included_configs: Optional[bool] = None + """ Lint for included configs """ + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + def get(self, item: str, default: Any = None) -> Any: + if getattr(self, item, default) is None: + return default + return getattr(self, item, default) + + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) class NFCoreYamlConfig(BaseModel): @@ -1100,7 +1246,7 @@ class NFCoreYamlConfig(BaseModel): """ Version of nf-core/tools used to create/update the pipeline """ org_path: Optional[str] = None """ Path to the organisation's modules repository (used for modules repo_type only) """ - lint: Optional[LintConfigType] = None + lint: Optional[NFCoreYamlLintConfig] = None """ Pipeline linting configuration, see https://nf-co.re/docs/nf-core-tools/pipelines/lint#linting-config for examples and documentation """ template: Optional[NFCoreTemplateConfig] = None """ Pipeline template configuration """ @@ -1115,6 +1261,9 @@ def __getitem__(self, item: str) -> Any: def get(self, item: str, default: Any = None) -> Any: return getattr(self, item, default) + def __setitem__(self, item: str, value: Any) -> None: + setattr(self, item, value) + def model_dump(self, **kwargs) -> Dict[str, Any]: # Get the initial data config = super().model_dump(**kwargs) @@ -1170,7 +1319,7 @@ def load_tools_config(directory: Union[str, Path] = ".") -> Tuple[Optional[Path] except ValidationError as e: error_message = f"Config file '{config_fn}' is invalid" for error in e.errors(): - error_message += f"\n{error['loc'][0]}: {error['msg']}" + error_message += f"\n{error['loc'][0]}: {error['msg']}\ninput: {error['input']}" raise AssertionError(error_message) wf_config = fetch_wf_config(Path(directory)) @@ -1224,7 +1373,7 @@ def get_first_available_path(directory: Union[Path, str], paths: List[str]) -> U return None -def sort_dictionary(d): +def sort_dictionary(d: Dict) -> Dict: """Sorts a nested dictionary recursively""" result = {} for k, v in sorted(d.items()): @@ -1365,3 +1514,21 @@ def set_wd(path: Path) -> Generator[None, None, None]: yield finally: os.chdir(start_wd) + + +def get_wf_files(wf_path: Path): + """Return a list of all files in a directory (ignores .gitigore files)""" + + wf_files = [] + + with open(Path(wf_path, ".gitignore")) as f: + lines = f.read().splitlines() + ignore = [line for line in lines if line and not line.startswith("#")] + + for path in Path(wf_path).rglob("*"): + if any(fnmatch.fnmatch(str(path), pattern) for pattern in ignore): + continue + if path.is_file(): + wf_files.append(str(path)) + + return wf_files diff --git a/requirements.txt b/requirements.txt index b7f1c39cab..51259938a8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -18,6 +18,8 @@ requests requests_cache rich-click==1.8.* rich>=13.3.1 +rocrate +repo2rocrate tabulate textual==0.71.0 trogon diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf similarity index 100% rename from 
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_http.nf
similarity index 100%
rename from tests/data/mock_module_containers/modules/mock_seqera_container.nf
rename to tests/data/mock_module_containers/modules/mock_seqera_container_http.nf
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf
new file mode 100644
index 0000000000..8278ac7917
--- /dev/null
+++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras.nf
@@ -0,0 +1,11 @@
+process UMI_TRANSFER {
+    label 'process_single'
+
+    conda "${moduleDir}/environment.yml"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6' :
+        'community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1' }"
+
+    // truncated
+
+}
diff --git a/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf
new file mode 100644
index 0000000000..234ca04a45
--- /dev/null
+++ b/tests/data/mock_module_containers/modules/mock_seqera_container_oras_mulled.nf
@@ -0,0 +1,11 @@
+process UMI_TRANSFER_MULLED {
+    label 'process_single'
+
+    conda "${moduleDir}/environment.yml"
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3' :
+        'community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd' }"
+
+    // truncated
+
+}
diff --git a/tests/modules/test_modules_json.py b/tests/modules/test_modules_json.py
index 0368c146c4..325a8073b7 100644
--- a/tests/modules/test_modules_json.py
+++ b/tests/modules/test_modules_json.py
@@ -175,14 +175,17 @@ def test_mod_json_repo_present(self):
         assert mod_json_obj.repo_present(NF_CORE_MODULES_REMOTE) is True
         assert mod_json_obj.repo_present("INVALID_REPO") is False

-    def test_mod_json_module_present(self):
-        """Tests the module_present function"""
+    def test_mod_json_component_present(self):
+        """Tests the component_present function"""
         mod_json_obj = ModulesJson(self.pipeline_dir)
-        assert mod_json_obj.module_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is True
-        assert mod_json_obj.module_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME) is False
-        assert mod_json_obj.module_present("fastqc", "INVALID_REPO", "INVALID_DIR") is False
-        assert mod_json_obj.module_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR") is False
+        assert mod_json_obj.component_present("fastqc", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules") is True
+        assert (
+            mod_json_obj.component_present("INVALID_MODULE", NF_CORE_MODULES_REMOTE, NF_CORE_MODULES_NAME, "modules")
+            is False
+        )
+        assert mod_json_obj.component_present("fastqc", "INVALID_REPO", "INVALID_DIR", "modules") is False
+        assert mod_json_obj.component_present("INVALID_MODULE", "INVALID_REPO", "INVALID_DIR", "modules") is False

     def test_mod_json_get_module_version(self):
         """Test the get_module_version function"""
diff --git a/tests/modules/test_patch.py b/tests/modules/test_patch.py
index 2f60cd4a20..f608278618 100644
--- a/tests/modules/test_patch.py
+++ b/tests/modules/test_patch.py
@@ -76,11 +76,11 @@ def test_create_patch_no_change(self):
         module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN)

         # Check that no patch file has been added to the directory
-        assert set(os.listdir(module_path)) == {"main.nf",
"meta.yml", "environment.yml"} + assert not (module_path / "bismark-align.diff").exists() # Check the 'modules.json' contains no patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None def test_create_patch_change(self): """Test creating a patch when there is a change to the module""" @@ -94,11 +94,11 @@ def test_create_patch_change(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -127,11 +127,11 @@ def test_create_patch_try_apply_successful(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -153,11 +153,11 @@ def test_create_patch_try_apply_successful(self): update_obj.move_files_from_tmp_dir(BISMARK_ALIGN, install_dir, REPO_NAME, SUCCEED_SHA) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -195,11 +195,11 @@ def test_create_patch_try_apply_failed(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -234,11 +234,11 @@ def test_create_patch_update_success(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == 
{"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -254,13 +254,13 @@ def test_create_patch_update_success(self): assert update_obj.update(BISMARK_ALIGN) # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn - ), modules_json_obj.get_patch_fn(BISMARK_ALIGN, GITLAB_URL, REPO_NAME) + ), modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, GITLAB_URL, REPO_NAME) # Check that the correct lines are in the patch file with open(module_path / patch_fn) as fh: @@ -295,11 +295,11 @@ def test_create_patch_update_fail(self): patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" # Check that a patch file with the correct name has been created - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -349,11 +349,11 @@ def test_remove_patch(self): # Check that a patch file with the correct name has been created patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml", patch_fn} + assert (module_path / patch_fn).exists() # Check the 'modules.json' contains a patch file for the module modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( "modules", REPO_NAME, BISMARK_ALIGN, patch_fn ) @@ -361,8 +361,8 @@ def test_remove_patch(self): mock_questionary.unsafe_ask.return_value = True patch_obj.remove(BISMARK_ALIGN) # Check that the diff file has been removed - assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", "environment.yml"} + assert not (module_path / patch_fn).exists() # Check that the 'modules.json' entry has been removed modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) - assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None + assert modules_json_obj.get_patch_fn("modules", BISMARK_ALIGN, REPO_URL, REPO_NAME) is None diff --git a/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg b/tests/pipelines/__snapshots__/test_create_app/test_customisation_help.svg index 
diff --git a/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg b/tests/pipelines/__snapshots__/test_create_app/test_type_custom.svg
index 6e178ba840..b8dea05604 100644
[regenerated Textual snapshot SVG: terminal CSS class hashes renamed from terminal-3425198753-* to terminal-1727160999-*; the rendered "nf-core pipelines create" Template features screen is otherwise unchanged apart from scrollbar glyph positions]
diff --git a/tests/pipelines/lint/test_files_exist.py b/tests/pipelines/lint/test_files_exist.py
index 97dd346cdf..ebc529247e 100644
--- a/tests/pipelines/lint/test_files_exist.py
+++ b/tests/pipelines/lint/test_files_exist.py
@@ -1,5 +1,7 @@
 from pathlib import Path

+from ruamel.yaml import YAML
+
 import nf_core.pipelines.lint

 from ..test_lint import TestLint
@@ -9,17 +11,17 @@ class TestLintFilesExist(TestLint):
     def setUp(self) -> None:
         super().setUp()
         self.new_pipeline = self._make_pipeline_copy()
+        self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)

     def test_files_exist_missing_config(self):
         """Lint test: critical files missing FAIL"""

         Path(self.new_pipeline, "CHANGELOG.md").unlink()

-        lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline)
-        lint_obj._load()
-        lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline"
+        assert self.lint_obj._load()
+        self.lint_obj.nf_config["manifest.name"] = "nf-core/testpipeline"

-        results = lint_obj.files_exist()
+        results = self.lint_obj.files_exist()
         assert "File not found: `CHANGELOG.md`" in results["failed"]

     def
test_files_exist_missing_main(self): @@ -27,31 +29,27 @@ def test_files_exist_missing_main(self): Path(self.new_pipeline, "main.nf").unlink() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert "File not found: `main.nf`" in results["warned"] def test_files_exist_deprecated_file(self): """Check whether deprecated file issues warning""" - nf = Path(self.new_pipeline, "parameters.settings.json") - nf.touch() + Path(self.new_pipeline, "parameters.settings.json").touch() - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == ["File must be removed: `parameters.settings.json`"] def test_files_exist_pass(self): """Lint check should pass if all files are there""" - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() + assert self.lint_obj._load() - results = lint_obj.files_exist() + results = self.lint_obj.files_exist() assert results["failed"] == [] def test_files_exist_pass_conditional_nfschema(self): @@ -62,9 +60,58 @@ def test_files_exist_pass_conditional_nfschema(self): with open(Path(self.new_pipeline, "nextflow.config"), "w") as f: f.write(config) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - lint_obj.nf_config["manifest.schema"] = "nf-core" - results = lint_obj.files_exist() + assert self.lint_obj._load() + self.lint_obj.nf_config["manifest.schema"] = "nf-core" + results = self.lint_obj.files_exist() assert results["failed"] == [] assert results["ignored"] == [] + + def test_files_exists_pass_nf_core_yml_config(self): + """Check if linting passes with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + """ + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `.github/CONTRIBUTING.md`" in results["ignored"] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] + + def test_files_exists_fail_nf_core_yml_config(self): + """Check if linting fails with a valid nf-core.yml config""" + valid_yaml = """ + files_exist: + - CITATIONS.md + """ + + # remove CITATIONS.md + Path(self.new_pipeline, "CITATIONS.md").unlink() + assert self.lint_obj._load() + # test first if linting fails correctly + results = self.lint_obj.files_exist() + assert "File not found: `CITATIONS.md`" in results["failed"] + + yaml = YAML() + nf_core_yml_path = Path(self.new_pipeline, ".nf-core.yml") + nf_core_yml = yaml.load(nf_core_yml_path) + + nf_core_yml["lint"] = yaml.load(valid_yaml) + yaml.dump(nf_core_yml, nf_core_yml_path) + + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) + assert self.lint_obj._load() + + results = self.lint_obj.files_exist() + assert results["failed"] == [] + assert "File is ignored: `CITATIONS.md`" in results["ignored"] diff --git a/tests/pipelines/lint/test_nextflow_config.py b/tests/pipelines/lint/test_nextflow_config.py index a655fb8ace..f8c3c1f31f 100644 --- 
a/tests/pipelines/lint/test_nextflow_config.py +++ b/tests/pipelines/lint/test_nextflow_config.py @@ -6,7 +6,6 @@ import nf_core.pipelines.create.create import nf_core.pipelines.lint -from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -125,23 +124,30 @@ def test_allow_params_reference_in_main_nf(self): def test_default_values_ignored(self): """Test ignoring linting of default values.""" + valid_yaml = """ + nextflow_config: + - manifest.name + - config_defaults: + - params.custom_config_version + """ # Add custom_config_version to the ignore list nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" - nf_core_yml = NFCoreYamlConfig( - repository_type="pipeline", - lint={"nextflow_config": [{"config_defaults": ["params.custom_config_version"]}]}, - ) + + with open(nf_core_yml_path) as f: + nf_core_yml = yaml.safe_load(f) + nf_core_yml["lint"] = yaml.safe_load(valid_yaml) with open(nf_core_yml_path, "w") as f: - yaml.dump(nf_core_yml.model_dump(), f) + yaml.dump(nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj.load_pipeline_config() lint_obj._load_lint_config() result = lint_obj.nextflow_config() assert len(result["failed"]) == 0 - assert len(result["ignored"]) == 1 + assert len(result["ignored"]) == 2 assert "Config default value correct: params.custom_config_version" not in str(result["passed"]) assert "Config default ignored: params.custom_config_version" in str(result["ignored"]) + assert "Config variable ignored: `manifest.name`" in str(result["ignored"]) def test_default_values_float(self): """Test comparing two float values.""" diff --git a/tests/pipelines/lint/test_nfcore_yml.py b/tests/pipelines/lint/test_nfcore_yml.py index b49b60436d..2ac36ffe0c 100644 --- a/tests/pipelines/lint/test_nfcore_yml.py +++ b/tests/pipelines/lint/test_nfcore_yml.py @@ -1,8 +1,9 @@ -import re from pathlib import Path -import nf_core.pipelines.create +from ruamel.yaml import YAML + import nf_core.pipelines.lint +from nf_core.utils import NFCoreYamlConfig from ..test_lint import TestLint @@ -11,11 +12,14 @@ class TestLintNfCoreYml(TestLint): def setUp(self) -> None: super().setUp() self.new_pipeline = self._make_pipeline_copy() - self.nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + self.yaml = YAML() + self.nf_core_yml: NFCoreYamlConfig = self.yaml.load(self.nf_core_yml_path) + self.lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) def test_nfcore_yml_pass(self): """Lint test: nfcore_yml - PASS""" - self.lint_obj._load() + assert self.lint_obj._load() results = self.lint_obj.nfcore_yml() assert "Repository type in `.nf-core.yml` is valid" in str(results["passed"]) @@ -27,33 +31,95 @@ def test_nfcore_yml_pass(self): def test_nfcore_yml_fail_repo_type(self): """Lint test: nfcore_yml - FAIL - repository type not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = content.replace("repository_type: pipeline", "repository_type: foo") - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - # assert that it raises assertion error + self.nf_core_yml["repository_type"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) with self.assertRaises(AssertionError): - lint_obj._load() - results = lint_obj.nfcore_yml() - assert "Repository type in `.nf-core.yml` is not valid." 
in str(results["failed"]) - assert len(results.get("warned", [])) == 0 - assert len(results.get("passed", [])) >= 0 - assert len(results.get("ignored", [])) == 0 + self.lint_obj._load() def test_nfcore_yml_fail_nfcore_version(self): """Lint test: nfcore_yml - FAIL - nf-core version not set""" - with open(self.nf_core_yml) as fh: - content = fh.read() - new_content = re.sub(r"nf_core_version:.+", "nf_core_version: foo", content) - with open(self.nf_core_yml, "w") as fh: - fh.write(new_content) - lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) - lint_obj._load() - results = lint_obj.nfcore_yml() + self.nf_core_yml["nf_core_version"] = "foo" + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() assert "nf-core version in `.nf-core.yml` is not set to the latest version." in str(results["warned"]) assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) >= 0 assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + # modules_config: False + modules_config: + - fastqc + # merge_markers: False + merge_markers: + - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + # template_strings: False + template_strings: + - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 + + def test_nfcore_yml_nested_lint_config_bool(self) -> None: + """Lint test: nfcore_yml with nested lint config - PASS""" + valid_yaml = """ + lint: + files_unchanged: + - .github/workflows/branch.yml + modules_config: False + # modules_config: + # - fastqc + merge_markers: False + # merge_markers: + # - docs/my_pdf.pdf + # nextflow_config: False + nextflow_config: + - manifest.name + - config_defaults: + - params.annotation_db + - params.multiqc_comment_headers + - params.custom_table_headers + multiqc_config: + - report_section_order + - report_comment + files_exist: + - .github/CONTRIBUTING.md + - CITATIONS.md + template_strings: False + # template_strings: + # - docs/my_pdf.pdf + """ + self.nf_core_yml["lint"] = self.yaml.load(valid_yaml) + self.yaml.dump(self.nf_core_yml, self.nf_core_yml_path) + + assert self.lint_obj._load() + results = self.lint_obj.nfcore_yml() + assert len(results.get("failed", [])) == 0 + assert len(results.get("warned", [])) == 0 + assert len(results.get("ignored", [])) == 0 diff --git a/tests/pipelines/lint/test_template_strings.py b/tests/pipelines/lint/test_template_strings.py index 406ba63e0c..37b7604806 100644 --- a/tests/pipelines/lint/test_template_strings.py +++ b/tests/pipelines/lint/test_template_strings.py @@ -1,6 +1,8 @@ import subprocess from pathlib import Path +import yaml + import nf_core.pipelines.create import nf_core.pipelines.lint @@ -11,6 +13,9 @@ class TestLintTemplateStrings(TestLint): def setUp(self) -> None: super().setUp() 
self.new_pipeline = self._make_pipeline_copy() + self.nf_core_yml_path = Path(self.new_pipeline) / ".nf-core.yml" + with open(self.nf_core_yml_path) as f: + self.nf_core_yml = yaml.safe_load(f) def test_template_strings(self): """Tests finding a template string in a file fails linting.""" @@ -28,9 +33,12 @@ def test_template_strings(self): def test_template_strings_ignored(self): """Tests ignoring template_strings""" # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings: False") + valid_yaml = """ + template_strings: false + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() lint_obj._lint_pipeline() @@ -43,13 +51,21 @@ def test_template_strings_ignore_file(self): txt_file = Path(self.new_pipeline) / "docs" / "test.txt" with open(txt_file, "w") as f: f.write("my {{ template_string }}") + subprocess.check_output(["git", "add", "docs"], cwd=self.new_pipeline) + # Ignore template_strings test - nf_core_yml = Path(self.new_pipeline) / ".nf-core.yml" - with open(nf_core_yml, "w") as f: - f.write("repository_type: pipeline\nlint:\n template_strings:\n - docs/test.txt") + valid_yaml = """ + template_strings: + - docs/test.txt + """ + self.nf_core_yml["lint"] = yaml.safe_load(valid_yaml) + with open(self.nf_core_yml_path, "w") as f: + yaml.safe_dump(self.nf_core_yml, f) + lint_obj = nf_core.pipelines.lint.PipelineLint(self.new_pipeline) lint_obj._load() result = lint_obj.template_strings() + assert len(result["failed"]) == 0 assert len(result["ignored"]) == 1 diff --git a/tests/pipelines/test_download.py b/tests/pipelines/test_download.py index 86b07ef7f8..d1e2c41a68 100644 --- a/tests/pipelines/test_download.py +++ b/tests/pipelines/test_download.py @@ -257,7 +257,20 @@ def test_find_container_images_modules(self, tmp_path, mock_fetch_wf_config): not in download_obj.containers ) - # mock_seqera_container.nf + # mock_seqera_container_oras.nf + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in download_obj.containers + assert "community.wave.seqera.io/library/umi-transfer:1.0.0--d30e8812ea280fa1" not in download_obj.containers + + # mock_seqera_container_oras_mulled.nf + assert ( + "oras://community.wave.seqera.io/library/umi-transfer_umicollapse:796a995ff53da9e3" + in download_obj.containers + ) + assert ( + "community.wave.seqera.io/library/umi-transfer_umicollapse:3298d4f1b49e33bd" not in download_obj.containers + ) + + # mock_seqera_container_http.nf assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in download_obj.containers @@ -294,6 +307,7 @@ def test_prioritize_direct_download(self, tmp_path): "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data", "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data", ] result = 
download_obj.prioritize_direct_download(test_container) @@ -316,7 +330,7 @@ def test_prioritize_direct_download(self, tmp_path): assert "https://depot.galaxyproject.org/singularity/sortmerna:4.3.7--hdbdd923_0" in result assert "https://depot.galaxyproject.org/singularity/sortmerna:4.2.0--h9ee0642_1" in result - # Verify that Seqera containers are not deduplicated + # Verify that Seqera containers are not deduplicated... assert ( "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/63/6397750e9730a3fbcc5b4c43f14bd141c64c723fd7dad80e47921a68a7c3cd21/data" in result @@ -325,6 +339,58 @@ def test_prioritize_direct_download(self, tmp_path): "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" in result ) + # ...but identical ones are. + assert ( + result.count( + "https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/c2/c262fc09eca59edb5a724080eeceb00fb06396f510aefb229c2d2c6897e63975/data" + ) + == 1 + ) + + # + # Test for 'reconcile_seqera_container_uris' + # + @with_temporary_folder + def test_reconcile_seqera_container_uris(self, tmp_path): + download_obj = DownloadWorkflow(pipeline="dummy", outdir=tmp_path) + + prioritized_container = [ + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649", + ] + + test_container = [ + "https://depot.galaxyproject.org/singularity/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "nf-core/ubuntu:22.04", + "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81", + "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373", + "biocontainers/sylph:0.6.1--b97274cdc1caa649", + ] + + # test that the test_container list is returned as it is, if no prioritized_containers are specified + result_empty = download_obj.reconcile_seqera_container_uris([], test_container) + assert result_empty == test_container + + result = download_obj.reconcile_seqera_container_uris(prioritized_container, test_container) + + # Verify that unrelated images are retained + assert "https://depot.galaxyproject.org/singularity/ubuntu:22.04" in result + assert "nf-core/ubuntu:22.04" in result + + # Verify that the priority works for regular Seqera container (Native Singularity over Docker, but only for Seqera registry) + assert "oras://community.wave.seqera.io/library/sylph:0.6.1--b97274cdc1caa649" in result + assert "community.wave.seqera.io/library/sylph:0.6.1--a21713a57a65a373" not in result + assert "biocontainers/sylph:0.6.1--b97274cdc1caa649" in result + + # Verify that version strings are respected: Version 1.0.0 does not replace version 1.5.0 + assert "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6" in result + assert "community.wave.seqera.io/library/umi-transfer:1.5.0--73c1a6b65e5b0b81" in result + + # assert that the deduplication works + assert test_container.count("nf-core/ubuntu:22.04") == 3 + assert result.count("nf-core/ubuntu:22.04") == 1 # # Tests for 'singularity_pull_image' @@ -356,11 +422,30 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute oras:// URI + download_obj.singularity_pull_image( + "oras://community.wave.seqera.io/library/umi-transfer:1.0.0--e5b0c1a65b8173b6", + f"{tmp_dir}/umi-transfer-oras.sif", + None, + 
"docker.io", + mock_rich_progress, + ) + + # try pulling Docker container image with oras:// + with pytest.raises(ContainerError.NoSingularityContainerError): + download_obj.singularity_pull_image( + "oras://ghcr.io/matthiaszepper/umi-transfer:dev", + f"{tmp_dir}/umi-transfer-oras_impostor.sif", + None, + "docker.io", + mock_rich_progress, + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExistsError is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFoundError): download_obj.singularity_pull_image( "hello-world", - f"{tmp_dir}/hello-world_new.sif", + f"{tmp_dir}/break_the_registry_test.sif", None, "register-this-domain-to-break-the-test.io", mock_rich_progress, @@ -396,7 +481,7 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p with pytest.raises(ContainerError.InvalidTagError): download_obj.singularity_pull_image( "ewels/multiqc:go-rewrite", - f"{tmp_dir}/umi-transfer.sif", + f"{tmp_dir}/multiqc-go.sif", None, "ghcr.io", mock_rich_progress, diff --git a/tests/pipelines/test_lint.py b/tests/pipelines/test_lint.py index 9ca29d249f..ca7353d50d 100644 --- a/tests/pipelines/test_lint.py +++ b/tests/pipelines/test_lint.py @@ -48,7 +48,8 @@ def test_init_pipeline_lint(self): def test_load_lint_config_not_found(self): """Try to load a linting config file that doesn't exist""" assert self.lint_obj._load_lint_config() - assert self.lint_obj.lint_config == {} + assert self.lint_obj.lint_config is not None + assert self.lint_obj.lint_config.model_dump(exclude_none=True) == {} def test_load_lint_config_ignore_all_tests(self): """Try to load a linting config file that ignores all tests""" @@ -64,7 +65,8 @@ def test_load_lint_config_ignore_all_tests(self): # Load the new lint config file and check lint_obj._load_lint_config() - assert sorted(list(lint_obj.lint_config.keys())) == sorted(lint_obj.lint_tests) + assert lint_obj.lint_config is not None + assert sorted(list(lint_obj.lint_config.model_dump(exclude_none=True))) == sorted(lint_obj.lint_tests) # Try running linting and make sure that all tests are ignored lint_obj._lint_pipeline() diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py new file mode 100644 index 0000000000..01a77ecd76 --- /dev/null +++ b/tests/pipelines/test_rocrate.py @@ -0,0 +1,127 @@ +"""Test the nf-core pipelines rocrate command""" + +import shutil +import tempfile +from pathlib import Path + +import git +import rocrate.rocrate +from git import Repo + +import nf_core.pipelines.create +import nf_core.pipelines.create.create +import nf_core.pipelines.rocrate +import nf_core.utils + +from ..test_pipelines import TestPipelines + + +class TestROCrate(TestPipelines): + """Class for lint tests""" + + def setUp(self) -> None: + super().setUp() + # add fake metro map + Path(self.pipeline_dir, "docs", "images", "nf-core-testpipeline_metro_map.png").touch() + # commit the changes + repo = Repo(self.pipeline_dir) + repo.git.add(A=True) + repo.index.commit("Initial commit") + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir) + + def tearDown(self): + """Clean up temporary files and folders""" + + if self.tmp_dir.exists(): + shutil.rmtree(self.tmp_dir) + + def test_rocrate_creation(self): + """Run the nf-core rocrate command""" + + # Run the command + self.rocrate_obj + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, 
"ro-crate-metadata.json").exists()) + + # Check that the entries in the crate are correct + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + + # Check if the correct entities are set: + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "./": + self.assertEqual(entity_json.get("name"), "nf-core/testpipeline") + self.assertEqual(entity_json["mainEntity"], {"@id": "main.nf"}) + elif entity_json["@id"] == "#main.nf": + self.assertEqual(entity_json["programmingLanguage"], [{"@id": "#nextflow"}]) + self.assertEqual(entity_json["image"], [{"@id": "nf-core-testpipeline_metro_map.png"}]) + # assert there is a metro map + # elif entity_json["@id"] == "nf-core-testpipeline_metro_map.png": # FIXME waiting for https://github.com/ResearchObject/ro-crate-py/issues/174 + # self.assertEqual(entity_json["@type"], ["File", "ImageObject"]) + # assert that author is set as a person + elif "name" in entity_json and entity_json["name"] == "Test McTestFace": + self.assertEqual(entity_json["@type"], "Person") + # check that it is set as author of the main entity + if crate.mainEntity is not None: + self.assertEqual(crate.mainEntity["author"][0].id, entity_json["@id"]) + + def test_rocrate_creation_wrong_pipeline_dir(self): + """Run the nf-core rocrate command with a wrong pipeline directory""" + # Run the command + + # Check that it raises a UserWarning + with self.assertRaises(UserWarning): + nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir / "bad_dir") + + # assert that the crate was not created + self.assertFalse(Path(self.pipeline_dir / "bad_dir", "ro-crate-metadata.json").exists()) + + def test_rocrate_creation_with_wrong_version(self): + """Run the nf-core rocrate command with a pipeline version""" + # Run the command + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + + # Check that the crate was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_without_git(self): + """Run the nf-core rocrate command with a pipeline version""" + + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(self.pipeline_dir, version="1.0.0") + # remove git repo + shutil.rmtree(self.pipeline_dir / ".git") + # Check that the crate was created + with self.assertRaises(SystemExit): + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, self.pipeline_dir) + + def test_rocrate_creation_to_zip(self): + """Run the nf-core rocrate command with a zip output""" + assert self.rocrate_obj.create_rocrate(self.pipeline_dir, zip_path=self.pipeline_dir) + # Check that the crate was created + self.assertTrue(Path(self.pipeline_dir, "ro-crate.crate.zip").exists()) + + def test_rocrate_creation_for_fetchngs(self): + """Run the nf-core rocrate command with nf-core/fetchngs""" + tmp_dir = Path(tempfile.mkdtemp()) + # git clone nf-core/fetchngs + git.Repo.clone_from("https://github.com/nf-core/fetchngs", tmp_dir / "fetchngs") + # Run the command + self.rocrate_obj = nf_core.pipelines.rocrate.ROCrate(tmp_dir / "fetchngs", version="1.12.0") + assert self.rocrate_obj.create_rocrate(tmp_dir / "fetchngs", self.pipeline_dir) + + # Check that Sateesh Peri is mentioned in creator field + + crate = rocrate.rocrate.ROCrate(self.pipeline_dir) + entities = crate.get_entities() + for entity in entities: + entity_json = entity.as_jsonld() + if entity_json["@id"] == "#main.nf": + assert "https://orcid.org/0000-0002-9879-9070" in 
entity_json["creator"] + + # Clean up + shutil.rmtree(tmp_dir) diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index ffbe75510b..8bf8a3c4ec 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -56,6 +56,8 @@ def mocked_requests_get(url) -> MockResponse: for branch_no in range(3, 7) ] return MockResponse(response_data, 200, url) + if url == "https://nf-co.re/pipelines.json": + return MockResponse({"remote_workflows": [{"name": "testpipeline", "topics": ["test", "pipeline"]}]}, 200, url) return MockResponse([{"html_url": url}], 404, url) @@ -398,3 +400,33 @@ def test_reset_target_dir_fake_branch(self): with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: psync.reset_target_dir() assert exc_info.value.args[0].startswith("Could not reset to original branch `fake_branch`") + + def test_sync_no_changes(self): + """Test pipeline sync when no changes are needed""" + with mock.patch("requests.get", side_effect=mocked_requests_get), mock.patch( + "requests.post", side_effect=mocked_requests_post + ) as mock_post: + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + # Mock that no changes were made + psync.made_changes = False + + # Run sync + psync.sync() + + # Verify no PR was created + mock_post.assert_not_called() + + def test_sync_no_github_token(self): + """Test sync fails appropriately when GitHub token is missing""" + # Ensure GitHub token is not set + if "GITHUB_AUTH_TOKEN" in os.environ: + del os.environ["GITHUB_AUTH_TOKEN"] + + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir, make_pr=True) + psync.made_changes = True # Force changes to trigger PR attempt + + # Run sync and check for appropriate error + with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: + psync.sync() + self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception)) diff --git a/tests/subworkflows/test_patch.py b/tests/subworkflows/test_patch.py new file mode 100644 index 0000000000..5bb6a6798e --- /dev/null +++ b/tests/subworkflows/test_patch.py @@ -0,0 +1,307 @@ +import os +import tempfile +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.components.components_command +import nf_core.components.patch +import nf_core.subworkflows + +from ..test_subworkflows import TestSubworkflows +from ..utils import GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL + +OLD_SHA = "dbb12457e32d3da8eea7dc4ae096201fff4747c5" +SUCCEED_SHA = "0a33e6a0d730ad22a0ec9f7f9a7540af6e943221" +FAIL_SHA = "b6e5e8739de9a1a0c4f85267144e43dbaf8f1461" + + +class TestSubworkflowsPatch(TestSubworkflows): + """ + Test the 'nf-core subworkflows patch' command + """ + + def modify_main_nf(self, path): + """Modify a file to test patch creation""" + with open(path) as fh: + lines = fh.readlines() + # We want a patch file that looks something like: + # - ch_fasta // channel: [ fasta ] + for line_index in range(len(lines)): + if lines[line_index] == " ch_fasta // channel: [ fasta ]\n": + to_pop = line_index + lines.pop(to_pop) + with open(path, "w") as fh: + fh.writelines(lines) + + def setup_patch(self, pipeline_dir, modify_subworkflow): + # Install the subworkflow bam_sort_stats_samtools + install_obj = nf_core.subworkflows.SubworkflowInstall( + pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + sha=OLD_SHA, + ) + + # Install the module + install_obj.install("bam_sort_stats_samtools") + + if modify_subworkflow: + # Modify the 
+            subworkflow_path = Path(pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+            self.modify_main_nf(subworkflow_path / "main.nf")
+
+    def test_create_patch_no_change(self):
+        """Test creating a patch when there is no change to the subworkflow"""
+        self.setup_patch(self.pipeline_dir, False)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        with pytest.raises(UserWarning):
+            patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that no patch file has been added to the directory
+        assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+    def test_create_patch_change(self):
+        """Test creating a patch when there is a change to the subworkflow"""
+        self.setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        # Check that the correct lines are in the patch file
+        with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh:
+            patch_lines = fh.readlines()
+        print(patch_lines)
+        subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir)
+        assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf"
+        assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines
+        assert "-        ch_fasta // channel: [ fasta ]\n" in patch_lines
+
+    def test_create_patch_try_apply_successful(self):
+        """Test creating a patch file and applying it to a new version of the files"""
+        self.setup_patch(self.pipeline_dir, True)
+        subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+        subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        update_obj = nf_core.subworkflows.SubworkflowUpdate(
+            self.pipeline_dir, sha=OLD_SHA, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH
+        )
+
+        # Install the new files
+        install_dir = Path(tempfile.mkdtemp())
+        update_obj.install_component_files("bam_sort_stats_samtools", OLD_SHA, update_obj.modules_repo, install_dir)
+
+        # Try applying the patch
+        subworkflow_install_dir = install_dir / "bam_sort_stats_samtools"
+        patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff"
+        assert (
+            update_obj.try_apply_patch(
+                "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir
+            )
+            is True
+        )
+
+        # Move the files from the temporary directory
+        update_obj.move_files_from_tmp_dir("bam_sort_stats_samtools", install_dir, GITLAB_REPO, OLD_SHA)
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        # Check that the correct lines are in the patch file
+        with open(subworkflow_path / "bam_sort_stats_samtools.diff") as fh:
"bam_sort_stats_samtools.diff") as fh: + patch_lines = fh.readlines() + subworkflow_relpath = subworkflow_path.relative_to(self.pipeline_dir) + assert f"--- {subworkflow_relpath / 'main.nf'}\n" in patch_lines, subworkflow_relpath / "main.nf" + assert f"+++ {subworkflow_relpath / 'main.nf'}\n" in patch_lines + assert "- ch_fasta // channel: [ fasta ]\n" in patch_lines + + # Check that 'main.nf' is updated correctly + with open(subworkflow_path / "main.nf") as fh: + main_nf_lines = fh.readlines() + # These lines should have been removed by the patch + assert "- ch_fasta // channel: [ fasta ]\n" not in main_nf_lines + + def test_create_patch_try_apply_failed(self): + """Test creating a patch file and applying it to a new version of the the files""" + self.setup_patch(self.pipeline_dir, True) + subworkflow_relpath = Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + subworkflow_path = Path(self.pipeline_dir, subworkflow_relpath) + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists() + + update_obj = nf_core.subworkflows.SubworkflowUpdate( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH + ) + + # Install the new files + install_dir = Path(tempfile.mkdtemp()) + update_obj.install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, install_dir) + + # Try applying the patch + subworkflow_install_dir = install_dir / "bam_sort_stats_samtools" + patch_relpath = subworkflow_relpath / "bam_sort_stats_samtools.diff" + assert ( + update_obj.try_apply_patch( + "bam_sort_stats_samtools", GITLAB_REPO, patch_relpath, subworkflow_path, subworkflow_install_dir + ) + is False + ) + + def test_create_patch_update_success(self): + """ + Test creating a patch file and the updating the subworkflow + + Should have the same effect as 'test_create_patch_try_apply_successful' + but uses higher level api + """ + self.setup_patch(self.pipeline_dir, True) + swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools") + + # Try creating a patch file + patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH) + patch_obj.patch("bam_sort_stats_samtools") + + patch_fn = "bam_sort_stats_samtools.diff" + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn( + "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO + ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn) + + # Update the subworkflow + update_obj = nf_core.subworkflows.update.SubworkflowUpdate( + self.pipeline_dir, + sha=OLD_SHA, + show_diff=False, + update_deps=True, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_BRANCH, + ) + assert update_obj.update("bam_sort_stats_samtools") + + # Check that a patch file with the correct name has been created + assert (swf_path / patch_fn).exists() + + # Check the 'modules.json' contains a patch file for the subworkflow + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert 
+
+        # Check that the correct lines are in the patch file
+        with open(swf_path / patch_fn) as fh:
+            patch_lines = fh.readlines()
+        swf_relpath = swf_path.relative_to(self.pipeline_dir)
+        assert f"--- {swf_relpath / 'main.nf'}\n" in patch_lines
+        assert f"+++ {swf_relpath / 'main.nf'}\n" in patch_lines
+        assert "-        ch_fasta // channel: [ fasta ]\n" in patch_lines
+
+        # Check that 'main.nf' is updated correctly
+        with open(swf_path / "main.nf") as fh:
+            main_nf_lines = fh.readlines()
+        # This line should have been removed by the patch
+        assert "        ch_fasta // channel: [ fasta ]\n" not in main_nf_lines
+
+    def test_create_patch_update_fail(self):
+        """
+        Test creating a patch file and updating a subworkflow when there is a diff conflict
+        """
+        self.setup_patch(self.pipeline_dir, True)
+        swf_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        patch_fn = "bam_sort_stats_samtools.diff"
+        # Check that a patch file with the correct name has been created
+        assert (swf_path / patch_fn).exists()
+
+        # Check the 'modules.json' contains a patch file for the subworkflow
+        modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir)
+        assert modules_json_obj.get_patch_fn(
+            "subworkflows", "bam_sort_stats_samtools", GITLAB_URL, GITLAB_REPO
+        ) == Path("subworkflows", GITLAB_REPO, "bam_sort_stats_samtools", patch_fn)
+
+        # Save the file contents for downstream comparison
+        with open(swf_path / patch_fn) as fh:
+            patch_contents = fh.read()
+
+        update_obj = nf_core.subworkflows.update.SubworkflowUpdate(
+            self.pipeline_dir,
+            sha=FAIL_SHA,
+            show_diff=False,
+            update_deps=True,
+            remote_url=GITLAB_URL,
+            branch=GITLAB_SUBWORKFLOWS_BRANCH,
+        )
+        update_obj.update("bam_sort_stats_samtools")
+
+        # Check that the installed files have not been affected by the attempted patch
+        temp_dir = Path(tempfile.mkdtemp())
+        nf_core.components.components_command.ComponentCommand(
+            "subworkflows", self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH
+        ).install_component_files("bam_sort_stats_samtools", FAIL_SHA, update_obj.modules_repo, temp_dir)
+
+        temp_module_dir = temp_dir / "bam_sort_stats_samtools"
+        for file in os.listdir(temp_module_dir):
+            assert file in os.listdir(swf_path)
+            with open(swf_path / file) as fh:
+                installed = fh.read()
+            with open(temp_module_dir / file) as fh:
+                shouldbe = fh.read()
+            assert installed == shouldbe
+
+        # Check that the patch file is unaffected
+        with open(swf_path / patch_fn) as fh:
+            new_patch_contents = fh.read()
+        assert patch_contents == new_patch_contents
+
+    def test_remove_patch(self):
+        """Test removing a patch file from a subworkflow"""
+        self.setup_patch(self.pipeline_dir, True)
+
+        # Try creating a patch file
+        patch_obj = nf_core.subworkflows.SubworkflowPatch(self.pipeline_dir, GITLAB_URL, GITLAB_SUBWORKFLOWS_BRANCH)
+        patch_obj.patch("bam_sort_stats_samtools")
+
+        subworkflow_path = Path(self.pipeline_dir, "subworkflows", GITLAB_REPO, "bam_sort_stats_samtools")
+
+        # Check that a patch file with the correct name has been created
+        assert (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
+
+        with mock.patch.object(nf_core.components.patch.questionary, "confirm") as mock_questionary:
+            mock_questionary.unsafe_ask.return_value = True
+            patch_obj.remove("bam_sort_stats_samtools")
+        # Check that the diff file has been removed
+        assert not (subworkflow_path / "bam_sort_stats_samtools.diff").exists()
diff --git a/tests/subworkflows/test_update.py b/tests/subworkflows/test_update.py
index 153038cd1d..9f5d1939f3 100644
--- a/tests/subworkflows/test_update.py
+++ b/tests/subworkflows/test_update.py
@@ -98,7 +98,7 @@ def test_install_at_hash_and_update_and_save_diff_to_file(self):
         with open(patch_path) as fh:
             line = fh.readline()
         assert line.startswith(
-            "Changes in module 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and"
+            "Changes in component 'nf-core/fastq_align_bowtie2' between (f3c078809a2513f1c95de14f6633fe1f03572fdb) and"
         )

     def test_install_at_hash_and_update_and_save_diff_limit_output(self):