diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml new file mode 100644 index 00000000..5e48a0b0 --- /dev/null +++ b/.github/workflows/contrib.yml @@ -0,0 +1,25 @@ +name: Contribution checks +on: [push, pull_request] + +defaults: + run: + shell: bash + +jobs: + stable: + name: Run ruff + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + - name: Set up Python 3 + uses: actions/setup-python@v5 + with: + python-version: 3 + - name: Lint EddyMotion + run: pipx run ruff check --diff + - name: Format EddyMotion + run: pipx run ruff format --diff diff --git a/.maint/update_authors.py b/.maint/update_authors.py index 66448a29..b270be0c 100644 --- a/.maint/update_authors.py +++ b/.maint/update_authors.py @@ -1,8 +1,10 @@ #!/usr/bin/env python3 """Update and sort the creators list of the zenodo record.""" + +import json import sys from pathlib import Path -import json + import click from fuzzywuzzy import fuzz, process @@ -36,10 +38,7 @@ def read_md_table(md_text): retval = [] for line in md_text.splitlines(): if line.strip().startswith("| --- |"): - keys = ( - k.replace("*", "").strip() - for k in prev.split("|") - ) + keys = (k.replace("*", "").strip() for k in prev.split("|")) keys = [k.lower() for k in keys if k] continue elif not keys: @@ -60,19 +59,13 @@ def sort_contributors(entries, git_lines, exclude=None, last=None): last = last or [] sorted_authors = sorted(entries, key=lambda i: i["name"]) - first_last = [ - " ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors - ] - first_last_excl = [ - " ".join(val["name"].split(",")[::-1]).strip() for val in exclude or [] - ] + first_last = [" ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors] + first_last_excl = [" ".join(val["name"].split(",")[::-1]).strip() for val in exclude or []] unmatched = [] author_matches = [] for ele in git_lines: - matches = process.extract( - ele, first_last, scorer=fuzz.token_sort_ratio, limit=2 - ) + matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, limit=2) # matches is a list [('First match', % Match), ('Second match', % Match)] if matches[0][1] > 80: val = sorted_authors[first_last.index(matches[0][0])] @@ -152,8 +145,9 @@ def cli(): @cli.command() @click.option("-z", "--zenodo-file", type=click.Path(exists=True), default=".zenodo.json") @click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") +@click.option( + "-c", "--contributors", type=click.Path(exists=True), default=".maint/CONTRIBUTORS.md" +) @click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") @click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") def zenodo( @@ -176,15 +170,13 @@ def zenodo( ) zen_contributors, miss_contributors = sort_contributors( - _namelast(read_md_table(Path(contributors).read_text())), - data, - exclude=former + _namelast(read_md_table(Path(contributors).read_text())), data, exclude=former ) zen_pi = _namelast( sorted( read_md_table(Path(pi).read_text()), - key=lambda v: (int(v.get("position", -1)), v.get("lastname")) + key=lambda v: (int(v.get("position", -1)), v.get("lastname")), ) ) @@ -194,8 +186,7 @@ def zenodo( misses = set(miss_creators).intersection(miss_contributors) if misses: print( - "Some people made commits, but are missing in .maint/ " - f"files: {', 
'.join(misses)}", + "Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}", file=sys.stderr, ) @@ -214,15 +205,14 @@ def zenodo( if isinstance(creator["affiliation"], list): creator["affiliation"] = creator["affiliation"][0] - Path(zenodo_file).write_text( - "%s\n" % json.dumps(zenodo, indent=2) - ) + Path(zenodo_file).write_text("%s\n" % json.dumps(zenodo, indent=2)) @cli.command() @click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") +@click.option( + "-c", "--contributors", type=click.Path(exists=True), default=".maint/CONTRIBUTORS.md" +) @click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") @click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") def publication( @@ -232,9 +222,8 @@ def publication( former_file, ): """Generate the list of authors and affiliations for papers.""" - members = ( - _namelast(read_md_table(Path(maintainers).read_text())) - + _namelast(read_md_table(Path(contributors).read_text())) + members = _namelast(read_md_table(Path(maintainers).read_text())) + _namelast( + read_md_table(Path(contributors).read_text()) ) hits, misses = sort_contributors( @@ -246,15 +235,12 @@ def publication( pi_hits = _namelast( sorted( read_md_table(Path(pi).read_text()), - key=lambda v: (int(v.get("position", -1)), v.get("lastname")) + key=lambda v: (int(v.get("position", -1)), v.get("lastname")), ) ) pi_names = [pi["name"] for pi in pi_hits] - hits = [ - hit for hit in hits - if hit["name"] not in pi_names - ] + pi_hits + hits = [hit for hit in hits if hit["name"] not in pi_names] + pi_hits def _aslist(value): if isinstance(value, (list, tuple)): @@ -281,27 +267,19 @@ def _aslist(value): if misses: print( - "Some people made commits, but are missing in .maint/ " - f"files: {', '.join(misses)}", + "Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}", file=sys.stderr, ) print("Authors (%d):" % len(hits)) print( "%s." - % "; ".join( - [ - "%s \\ :sup:`%s`\\ " % (i["name"], idx) - for i, idx in zip(hits, aff_indexes) - ] - ) + % "; ".join(["%s \\ :sup:`%s`\\ " % (i["name"], idx) for i, idx in zip(hits, aff_indexes)]) ) print( "\n\nAffiliations:\n%s" - % "\n".join( - ["{0: >2}. {1}".format(i + 1, a) for i, a in enumerate(affiliations)] - ) + % "\n".join(["{0: >2}. 
{1}".format(i + 1, a) for i, a in enumerate(affiliations)]) ) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 31c2bcbf..73e2ce76 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,26 @@ +# To install the git pre-commit hook run: +# pre-commit install +# To update the pre-commit hooks run: +# pre-commit install-hooks + repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.2.0 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-json - - id: check-toml - - id: check-case-conflict - - id: check-docstring-first - - id: check-merge-conflict - - id: check-vcs-permalinks - - id: pretty-format-json - args: ['--autofix'] -- repo: https://github.com/psf/black - rev: 22.3.0 + - id: trailing-whitespace + - id: end-of-file-fixer + - id: debug-statements + - id: check-yaml + - id: check-json + - id: check-toml + - id: check-case-conflict + - id: check-docstring-first + - id: check-merge-conflict + - id: check-vcs-permalinks + - id: pretty-format-json + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.4 hooks: - - id: black -- repo: https://github.com/pycqa/isort - rev: 5.10.1 - hooks: - - id: isort + - id: ruff + args: [ --fix ] + - id: ruff-format diff --git a/nireports/__init__.py b/nireports/__init__.py index c71de6b1..97d3f1fe 100644 --- a/nireports/__init__.py +++ b/nireports/__init__.py @@ -21,13 +21,15 @@ # https://www.nipreps.org/community/licensing/ # """Add metadata on import.""" + __packagename__ = "nireports" __copyright__ = "2023, The NiPreps developers" try: from ._version import __version__ except ModuleNotFoundError: - from importlib.metadata import version, PackageNotFoundError + from importlib.metadata import PackageNotFoundError, version + try: __version__ = version(__packagename__) except PackageNotFoundError: diff --git a/nireports/assembler/data/__init__.py b/nireports/assembler/data/__init__.py index fab31927..5f59bfb6 100644 --- a/nireports/assembler/data/__init__.py +++ b/nireports/assembler/data/__init__.py @@ -10,6 +10,7 @@ .. autoclass:: Loader """ + from __future__ import annotations import atexit diff --git a/nireports/assembler/misc.py b/nireports/assembler/misc.py index d5f6439a..6677fb35 100644 --- a/nireports/assembler/misc.py +++ b/nireports/assembler/misc.py @@ -23,10 +23,11 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. 
"""Miscellaneous utilities.""" + from collections import defaultdict from pathlib import Path -from bids.utils import listify +from bids.utils import listify from nipype.utils.filemanip import loadcrash @@ -242,7 +243,7 @@ def unfold_columns(indict, prefix=None, delimiter="_"): """ prefix = listify(prefix) if prefix is not None else [] - keys = sorted(set(list(indict.keys()))) + keys = sorted(set(indict.keys())) data = [] subdict = defaultdict(dict, {}) @@ -254,7 +255,7 @@ def unfold_columns(indict, prefix=None, delimiter="_"): subdict[col[0]][col[1]] = indict[key] if subdict: - for skey in sorted(list(subdict.keys())): + for skey in sorted(subdict.keys()): sskeys = list(subdict[skey].keys()) # If there is only one subkey, merge back diff --git a/nireports/assembler/report.py b/nireports/assembler/report.py index eb858601..0555fc6c 100644 --- a/nireports/assembler/report.py +++ b/nireports/assembler/report.py @@ -23,6 +23,7 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. """Core objects representing reports.""" + import re from collections import defaultdict from itertools import compress @@ -36,7 +37,6 @@ from nireports.assembler import data from nireports.assembler.reportlet import Reportlet - # Add a new figures spec try: add_config_paths(figures=data.load("nipreps.json")) @@ -270,8 +270,7 @@ def __init__( metadata = metadata or {} if "filename" not in metadata: metadata["filename"] = Path(out_filename).name.replace( - "".join(Path(out_filename).suffixes), - "" + "".join(Path(out_filename).suffixes), "" ) # Initialize structuring elements @@ -287,9 +286,7 @@ def __init__( "out_dir": str(out_dir), "reportlets_dir": str(root), } - meta_repl.update({ - kk: vv for kk, vv in metadata.items() if isinstance(vv, str) - }) + meta_repl.update({kk: vv for kk, vv in metadata.items() if isinstance(vv, str)}) meta_repl.update(bids_filters) expr = re.compile(f'{{({"|".join(meta_repl.keys())})}}') @@ -308,7 +305,8 @@ def __init__( # Path to the Jinja2 template self.template_path = ( - Path(settings["template_path"]) if "template_path" in settings + Path(settings["template_path"]) + if "template_path" in settings else data.load("report.tpl").absolute() ) @@ -383,7 +381,8 @@ def index(self, config): # do not display entities with the value None. c_filt = [ f'{key} {c_value}' - for key, c_value in zip(entities, c) if c_value is not None + for key, c_value in zip(entities, c) + if c_value is not None ] # Set a common title for this particular combination c title = "Reports for: %s." 
% ", ".join(c_filt) @@ -420,11 +419,11 @@ def process_plugins(self, config, metadata=None): self.footer = [] plugins = config.get("plugins", None) - for plugin in (plugins or []): + for plugin in plugins or []: env = jinja2.Environment( - loader=jinja2.FileSystemLoader(searchpath=str( - Path(__file__).parent / "data" / f"{plugin['type']}" - )), + loader=jinja2.FileSystemLoader( + searchpath=str(Path(__file__).parent / "data" / f"{plugin['type']}") + ), trim_blocks=True, lstrip_blocks=True, autoescape=False, @@ -434,12 +433,17 @@ def process_plugins(self, config, metadata=None): plugin_meta.update((metadata or {}).get(plugin["type"], {})) for member in ("header", "navbar", "footer"): old_value = getattr(self, member) - setattr(self, member, old_value + [ - env.get_template(f"{member}.tpl").render( - config=plugin, - metadata=plugin_meta, - ) - ]) + setattr( + self, + member, + old_value + + [ + env.get_template(f"{member}.tpl").render( + config=plugin, + metadata=plugin_meta, + ) + ], + ) def generate_report(self): """Once the Report has been indexed, the final HTML can be generated""" diff --git a/nireports/assembler/reportlet.py b/nireports/assembler/reportlet.py index a69c421a..d31d1231 100644 --- a/nireports/assembler/reportlet.py +++ b/nireports/assembler/reportlet.py @@ -23,14 +23,16 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. """The reporting visualization unit or *reportlet*.""" + +import re from pathlib import Path from uuid import uuid4 -import re + from nipype.utils.filemanip import copyfile + from nireports.assembler import data from nireports.assembler.misc import dict2html, read_crashfile - SVG_SNIPPET = [ """\
@@ -104,7 +106,7 @@
""" -HTML_BOILER_STYLE = ' font-family: \'Bitstream Charter\', \'Georgia\', Times;' +HTML_BOILER_STYLE = " font-family: 'Bitstream Charter', 'Georgia', Times;" class Reportlet: @@ -231,7 +233,6 @@ def __init__(self, layout, config=None, out_dir=None, bids_filters=None, metadat if ext == ".html": contents = src.read_text().strip() elif ext == ".svg": - entities = dict(bidsfile.entities) if desc_text: desc_text = desc_text.format(**entities) @@ -258,9 +259,7 @@ def __init__(self, layout, config=None, out_dir=None, bids_filters=None, metadat if line.strip().startswith("' - f'Could not find metadata for reportlet "{meta_id}"' - '

', - "" - )) + self.components.append( + ( + '", + "", + ) + ) return # meta_folded = meta_settings.get("folded", None) @@ -412,7 +413,7 @@ def __init__(self, layout, config=None, out_dir=None, bids_filters=None, metadat self.components.append( ( '', + "Failed to generate the boilerplate

", desc_text, ) ) diff --git a/nireports/assembler/tests/test_report.py b/nireports/assembler/tests/test_report.py index b8abde33..24d1b156 100644 --- a/nireports/assembler/tests/test_report.py +++ b/nireports/assembler/tests/test_report.py @@ -23,7 +23,7 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. """Exercising the visual report system (VRS).""" -import os + import tempfile from itertools import product from pathlib import Path @@ -34,17 +34,14 @@ from bids.layout import BIDSLayout from bids.layout.writing import build_path -from nireports.assembler.report import Report - from nireports.assembler import data - +from nireports.assembler.report import Report summary_meta = { "Summary": { "Structural images": 1, "FreeSurfer reconstruction": "Pre-existing directory", - "Output spaces": - "MNI152NLin2009cAsym, fsaverage5", + "Output spaces": "MNI152NLin2009cAsym, fsaverage5", } } @@ -294,9 +291,7 @@ def test_generated_reportlets(bids_sessions, ordering): def test_subject(tmp_path, subject, out_html): reports = tmp_path / "reports" Path( - reports - / "nireports" - / (subject if subject.startswith("sub-") else f"sub-{subject}") + reports / "nireports" / (subject if subject.startswith("sub-") else f"sub-{subject}") ).mkdir(parents=True) report = Report( @@ -319,11 +314,7 @@ def test_subject(tmp_path, subject, out_html): ) def test_session(tmp_path, subject, session, out_html): reports = tmp_path / "reports" - p = Path( - reports - / "nireports" - / (subject if subject.startswith("sub-") else f"sub-{subject}") - ) + p = Path(reports / "nireports" / (subject if subject.startswith("sub-") else f"sub-{subject}")) if session: p = p / (session if session.startswith("ses-") else f"ses-{session}") p.mkdir(parents=True) diff --git a/nireports/assembler/tools.py b/nireports/assembler/tools.py index 75b5b4d0..859cd59e 100644 --- a/nireports/assembler/tools.py +++ b/nireports/assembler/tools.py @@ -23,7 +23,9 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. 
"""Utilities for the :mod:`~nireports.assembler` module.""" + from pathlib import Path + from nireports.assembler.report import Report diff --git a/nireports/conftest.py b/nireports/conftest.py index de82b989..c67ff257 100644 --- a/nireports/conftest.py +++ b/nireports/conftest.py @@ -21,22 +21,22 @@ # https://www.nipreps.org/community/licensing/ # """py.test configuration""" + import os -from sys import version_info +import tempfile from pathlib import Path -import numpy as np +from sys import version_info + import nibabel as nb +import numpy as np import pandas as pd import pytest -import tempfile # disable ET -os.environ['NO_ET'] = '1' +os.environ["NO_ET"] = "1" _datadir = (Path(__file__).parent / "tests" / "data").absolute() -niprepsdev_path = os.getenv( - "TEST_DATA_HOME", str(Path.home() / ".cache" / "nipreps-dev") -) +niprepsdev_path = os.getenv("TEST_DATA_HOME", str(Path.home() / ".cache" / "nipreps-dev")) test_output_dir = os.getenv("TEST_OUTPUT_DIR") test_workdir = os.getenv("TEST_WORK_DIR") @@ -60,8 +60,7 @@ def expand_namespace(doctest_namespace): doctest_namespace["tmpdir"] = tmpdir.name doctest_namespace["output_dir"] = ( - Path(test_output_dir) if test_output_dir is not None - else Path(tmpdir.name) + Path(test_output_dir) if test_output_dir is not None else Path(tmpdir.name) ) cwd = os.getcwd() diff --git a/nireports/interfaces/__init__.py b/nireports/interfaces/__init__.py index 24bdf11d..e190ef32 100644 --- a/nireports/interfaces/__init__.py +++ b/nireports/interfaces/__init__.py @@ -21,9 +21,10 @@ # https://www.nipreps.org/community/licensing/ # """NiPype interfaces to generate reportlets.""" + from nireports.interfaces.fmri import FMRISummary -from nireports.interfaces.nuisance import CompCorVariancePlot, ConfoundsCorrelationPlot from nireports.interfaces.mosaic import PlotContours, PlotMosaic, PlotSpikes +from nireports.interfaces.nuisance import CompCorVariancePlot, ConfoundsCorrelationPlot __all__ = ( "CompCorVariancePlot", diff --git a/nireports/interfaces/base.py b/nireports/interfaces/base.py index 6c6191d1..deca7519 100644 --- a/nireports/interfaces/base.py +++ b/nireports/interfaces/base.py @@ -26,6 +26,7 @@ # https://github.com/nipreps/mriqc/blob/1ffd4c8d1a20b44ebfea648a7b12bb32a425d4ec/ # mriqc/interfaces/viz.py """NiPype interface -- basic tooling.""" + from nipype.interfaces.base import ( BaseInterfaceInputSpec, File, diff --git a/nireports/interfaces/dmri.py b/nireports/interfaces/dmri.py index 83240376..abd6008f 100644 --- a/nireports/interfaces/dmri.py +++ b/nireports/interfaces/dmri.py @@ -21,18 +21,19 @@ # https://www.nipreps.org/community/licensing/ # """Diffusion MRI -specific visualization.""" -import numpy as np -import nibabel as nb -from nipype.utils.filemanip import fname_presuffix +import nibabel as nb +import numpy as np from nipype.interfaces.base import ( - File, BaseInterfaceInputSpec, - TraitedSpec, + File, SimpleInterface, - traits, + TraitedSpec, isdefined, + traits, ) +from nipype.utils.filemanip import fname_presuffix + from nireports.reportlets.modality.dwi import plot_heatmap diff --git a/nireports/interfaces/fmri.py b/nireports/interfaces/fmri.py index 4d752e39..6bf8bf22 100644 --- a/nireports/interfaces/fmri.py +++ b/nireports/interfaces/fmri.py @@ -21,20 +21,21 @@ # https://www.nipreps.org/community/licensing/ # """Functional MRI -specific visualization.""" -import numpy as np -import nibabel as nb -from nipype.utils.filemanip import fname_presuffix +import nibabel as nb +import numpy as np from nipype.interfaces.base import ( - 
File, BaseInterfaceInputSpec, - TraitedSpec, + File, SimpleInterface, - traits, + TraitedSpec, isdefined, + traits, ) -from nireports.tools.timeseries import cifti_timeseries, get_tr, nifti_timeseries +from nipype.utils.filemanip import fname_presuffix + from nireports.reportlets.modality.func import fMRIPlot +from nireports.tools.timeseries import cifti_timeseries, get_tr, nifti_timeseries class _FMRISummaryInputSpec(BaseInterfaceInputSpec): @@ -69,40 +70,41 @@ def _run_interface(self, runtime): newpath=runtime.cwd, ) - dataframe = pd.DataFrame({ - "outliers": np.loadtxt(self.inputs.outliers, usecols=[0]).tolist(), - # Pick non-standardize dvars (col 1) - # First timepoint is NaN (difference) - "DVARS": [np.nan] - + np.loadtxt(self.inputs.dvars, skiprows=1, usecols=[1]).tolist(), - # First timepoint is zero (reference volume) - "FD": [0.0] - + np.loadtxt(self.inputs.fd, skiprows=1, usecols=[0]).tolist(), - }) if ( - isdefined(self.inputs.outliers) - and isdefined(self.inputs.dvars) - and isdefined(self.inputs.fd) - ) else None + dataframe = ( + pd.DataFrame( + { + "outliers": np.loadtxt(self.inputs.outliers, usecols=[0]).tolist(), + # Pick non-standardize dvars (col 1) + # First timepoint is NaN (difference) + "DVARS": [np.nan] + + np.loadtxt(self.inputs.dvars, skiprows=1, usecols=[1]).tolist(), + # First timepoint is zero (reference volume) + "FD": [0.0] + np.loadtxt(self.inputs.fd, skiprows=1, usecols=[0]).tolist(), + } + ) + if ( + isdefined(self.inputs.outliers) + and isdefined(self.inputs.dvars) + and isdefined(self.inputs.fd) + ) + else None + ) input_data = nb.load(self.inputs.in_func) seg_file = self.inputs.in_segm if isdefined(self.inputs.in_segm) else None dataset, segments = ( cifti_timeseries(input_data) - if isinstance(input_data, nb.Cifti2Image) else - nifti_timeseries(input_data, seg_file) + if isinstance(input_data, nb.Cifti2Image) + else nifti_timeseries(input_data, seg_file) ) fig = fMRIPlot( dataset, segments=segments, spikes_files=( - [self.inputs.in_spikes_bg] - if isdefined(self.inputs.in_spikes_bg) else None - ), - tr=( - self.inputs.tr if isdefined(self.inputs.tr) else - get_tr(input_data) + [self.inputs.in_spikes_bg] if isdefined(self.inputs.in_spikes_bg) else None ), + tr=(self.inputs.tr if isdefined(self.inputs.tr) else get_tr(input_data)), confounds=dataframe, units={"outliers": "%", "FD": "mm"}, vlines={"FD": [self.inputs.fd_thres]}, diff --git a/nireports/interfaces/mosaic.py b/nireports/interfaces/mosaic.py index 3648e219..6c4e55c4 100644 --- a/nireports/interfaces/mosaic.py +++ b/nireports/interfaces/mosaic.py @@ -26,6 +26,7 @@ # https://github.com/nipreps/mriqc/blob/1ffd4c8d1a20b44ebfea648a7b12bb32a425d4ec/ # mriqc/interfaces/viz.py """Visualization of n-D images with mosaics cutting through planes.""" + from pathlib import Path import numpy as np @@ -127,7 +128,6 @@ class _PlotMosaicOutputSpec(TraitedSpec): class PlotMosaic(SimpleInterface): - """ Plots slices of a 3D volume into a pdf file """ @@ -136,10 +136,7 @@ class PlotMosaic(SimpleInterface): output_spec = _PlotMosaicOutputSpec def _run_interface(self, runtime): - mask = ( - self.inputs.bbox_mask_file if isdefined(self.inputs.bbox_mask_file) - else None - ) + mask = self.inputs.bbox_mask_file if isdefined(self.inputs.bbox_mask_file) else None title = self.inputs.title if isdefined(self.inputs.title) else None diff --git a/nireports/interfaces/nuisance.py b/nireports/interfaces/nuisance.py index 7783e42f..734767f4 100644 --- a/nireports/interfaces/nuisance.py +++ b/nireports/interfaces/nuisance.py 
@@ -21,15 +21,17 @@ # https://www.nipreps.org/community/licensing/ # """Screening nuisance signals.""" -from nipype.utils.filemanip import fname_presuffix + from nipype.interfaces.base import ( - File, BaseInterfaceInputSpec, - TraitedSpec, + File, SimpleInterface, - traits, + TraitedSpec, isdefined, + traits, ) +from nipype.utils.filemanip import fname_presuffix + from nireports.reportlets.nuisance import confounds_correlation_plot from nireports.reportlets.xca import compcor_variance_plot @@ -53,9 +55,7 @@ class _CompCorVariancePlotInputSpec(BaseInterfaceInputSpec): usedefault=True, desc="Levels of explained variance to include in plot", ) - out_file = traits.Either( - None, File, value=None, usedefault=True, desc="Path to save plot" - ) + out_file = traits.Either(None, File, value=None, usedefault=True, desc="Path to save plot") class _CompCorVariancePlotOutputSpec(TraitedSpec): @@ -88,12 +88,8 @@ def _run_interface(self, runtime): class _ConfoundsCorrelationPlotInputSpec(BaseInterfaceInputSpec): - confounds_file = File( - exists=True, mandatory=True, desc="File containing confound regressors" - ) - out_file = traits.Either( - None, File, value=None, usedefault=True, desc="Path to save plot" - ) + confounds_file = File(exists=True, mandatory=True, desc="File containing confound regressors") + out_file = traits.Either(None, File, value=None, usedefault=True, desc="Path to save plot") reference_column = traits.Str( "global_signal", usedefault=True, @@ -101,10 +97,7 @@ class _ConfoundsCorrelationPlotInputSpec(BaseInterfaceInputSpec): "which all correlation magnitudes " "should be ranked and plotted", ) - columns = traits.List( - traits.Str, - desc="Filter out all regressors not found in this list." - ) + columns = traits.List(traits.Str, desc="Filter out all regressors not found in this list.") max_dim = traits.Int( 20, usedefault=True, @@ -116,8 +109,7 @@ class _ConfoundsCorrelationPlotInputSpec(BaseInterfaceInputSpec): ignore_initial_volumes = traits.Int( 0, usedefault=True, - desc="Number of non-steady-state volumes at the beginning of the scan " - "to ignore.", + desc="Number of non-steady-state volumes at the beginning of the scan " "to ignore.", ) diff --git a/nireports/interfaces/reporting/base.py b/nireports/interfaces/reporting/base.py index 788937b2..3db2c49d 100644 --- a/nireports/interfaces/reporting/base.py +++ b/nireports/interfaces/reporting/base.py @@ -21,12 +21,15 @@ # https://www.nipreps.org/community/licensing/ # """class mixin and utilities for enabling reports for nipype interfaces.""" + from pathlib import Path + from nipype import logging -from nipype.utils.filemanip import fname_presuffix -from nipype.interfaces.base import File, traits, isdefined +from nipype.interfaces.base import File, isdefined, traits from nipype.interfaces.mixins import reporting -from nireports.reportlets.utils import cuts_from_bbox, compose_view +from nipype.utils.filemanip import fname_presuffix + +from nireports.reportlets.utils import compose_view, cuts_from_bbox _LOGGER = logging.getLogger("nipype.interface") @@ -47,10 +50,16 @@ class _SVGReportCapableInputSpec(reporting.ReportCapableInputSpec): class _RegistrationRCInputSpecRPT(_SVGReportCapableInputSpec): fixed_params = traits.Dict( - traits.Str, value={}, usedefault=True, desc="pass parameters to plotter", + traits.Str, + value={}, + usedefault=True, + desc="pass parameters to plotter", ) moving_params = traits.Dict( - traits.Str, value={}, usedefault=True, desc="pass parameters to plotter", + traits.Str, + value={}, + 
usedefault=True, + desc="pass parameters to plotter", ) @@ -69,8 +78,9 @@ class RegistrationRC(reporting.ReportCapableInterface): def _generate_report(self): """Generate the visual report.""" - from nilearn.image import threshold_img, load_img + from nilearn.image import load_img, threshold_img from nilearn.masking import apply_mask, unmask + from nireports.reportlets.mosaic import plot_registration _LOGGER.info("Generating visual report") @@ -157,8 +167,9 @@ class SurfaceSegmentationRC(reporting.ReportCapableInterface): def _generate_report(self): """Generate the visual report.""" - from nilearn.image import threshold_img, load_img + from nilearn.image import load_img, threshold_img from nilearn.masking import apply_mask, unmask + from nireports.reportlets.mosaic import plot_registration _LOGGER.info("Generating visual report") diff --git a/nireports/reportlets/modality/dwi.py b/nireports/reportlets/modality/dwi.py index e80f5d4b..117b16f5 100644 --- a/nireports/reportlets/modality/dwi.py +++ b/nireports/reportlets/modality/dwi.py @@ -21,9 +21,10 @@ # https://www.nipreps.org/community/licensing/ # """Visualizations for diffusion MRI data.""" + +import matplotlib as mpl import nibabel as nb import numpy as np -import matplotlib as mpl from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import art3d from nilearn.plotting import plot_anat @@ -114,7 +115,7 @@ def plot_heatmap( figsize=(20, 1.6 * (len(b_indices) + 1)), ) axs[-1].spines[:].set_visible(False) - axs[-1].grid(which="minor", color="w", linestyle='-', linewidth=1) + axs[-1].grid(which="minor", color="w", linestyle="-", linewidth=1) for i, shelldata in enumerate(shells): x = shelldata[shelldata < imax] y = np.array([scalar[mask]] * len(b_indices[i])).reshape(-1)[shelldata < imax] @@ -127,8 +128,8 @@ def plot_heatmap( histdata, _, _ = np.histogram2d(x, y, bins=bins, range=((0, int(imax)), (0, 1))) axs[i].imshow( histdata.T, - interpolation='nearest', - origin='lower', + interpolation="nearest", + origin="lower", aspect="auto", cmap=cmap, ) @@ -144,7 +145,7 @@ def plot_heatmap( # axs[i].set_xticks(np.arange(bins[0] + 1) - .5, minor=True) axs[i].set_yticks(np.arange(bins[1] + 1) - 0.5, minor=True) - axs[i].grid(which="minor", color="w", linestyle='-', linewidth=1) + axs[i].grid(which="minor", color="w", linestyle="-", linewidth=1) axs[i].tick_params(which="minor", bottom=False, left=False) axs[i].set_ylabel(f"$b$ = {bvals[i]}\n($n$ = {len(b_indices[i])})", fontsize=15) @@ -174,8 +175,7 @@ def plot_heatmap( axs[-1].legend([f"{b}" for b in bvals], ncol=len(bvals), title="$b$ value") axs[-1].set_yticks([], labels=[]) axs[-1].set_xlabel( - f"SNR [noise floor estimated at {sigma:0.2f}]" if sigma is not None - else "DWI intensity", + f"SNR [noise floor estimated at {sigma:0.2f}]" if sigma is not None else "DWI intensity", fontsize=20, ) fig.supylabel(scalar_label, fontsize=20, y=0.65) @@ -222,7 +222,7 @@ def rotation_matrix(u, v): u[0] * v[1] - u[1] * v[0], ] ) - if (w ** 2).sum() < (np.finfo(w.dtype).eps * 10): + if (w**2).sum() < (np.finfo(w.dtype).eps * 10): # The vectors u and v are collinear return np.eye(3) diff --git a/nireports/reportlets/modality/func.py b/nireports/reportlets/modality/func.py index 152a4fb4..73aabc83 100644 --- a/nireports/reportlets/modality/func.py +++ b/nireports/reportlets/modality/func.py @@ -23,10 +23,11 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from # other NiPreps projects licensed under the Apache-2.0 terms. 
"""Visualizations specific to functional imaging.""" -import numpy as np + import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import numpy as np import pandas as pd +from matplotlib.gridspec import GridSpec from nireports.reportlets.nuisance import confoundplot, plot_carpet, spikesplot diff --git a/nireports/reportlets/mosaic.py b/nireports/reportlets/mosaic.py index b098890c..0e084163 100644 --- a/nireports/reportlets/mosaic.py +++ b/nireports/reportlets/mosaic.py @@ -23,20 +23,21 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from # NiPreps projects licensed under the Apache-2.0 terms. """Base components to generate mosaic-like reportlets.""" -from warnings import warn -from uuid import uuid4 -from os import path as op + import math -import numpy as np -import nibabel as nb +from os import path as op +from uuid import uuid4 +from warnings import warn + import matplotlib as mpl import matplotlib.pyplot as plt +import nibabel as nb +import numpy as np from matplotlib.gridspec import GridSpec -from svgutils.transform import fromstring -from nilearn.plotting import plot_anat from nilearn import image as nlimage +from nilearn.plotting import plot_anat +from svgutils.transform import fromstring -from nireports.tools.ndimage import rotate_affine, rotation2canonical from nireports.reportlets.utils import ( _3d_in_file, _bbox, @@ -46,6 +47,7 @@ get_parula, robust_set_limits, ) +from nireports.tools.ndimage import rotate_affine, rotation2canonical def plot_segs( @@ -179,7 +181,6 @@ def plot_registration( def _plot_anat_with_contours(image, segs=None, compress="auto", **plot_params): - nsegs = len(segs or []) plot_params = plot_params or {} # plot_params' values can be None, however they MUST NOT @@ -293,8 +294,7 @@ def plot_slice( vmin=vmin, vmax=vmax, cmap=cmap, - extent=[0, dslice.shape[1] * spacing[1], - 0, dslice.shape[0] * spacing[0]], + extent=[0, dslice.shape[1] * spacing[1], 0, dslice.shape[0] * spacing[0]], interpolation="none", origin="lower", ) @@ -316,7 +316,7 @@ def plot_slice( horizontalalignment="center", verticalalignment="top", size=14, - bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor), + bbox={"boxstyle": "square,pad=0", "ec": bgcolor, "fc": bgcolor}, ) ax.text( 0.05, @@ -327,7 +327,7 @@ def plot_slice( horizontalalignment="center", verticalalignment="top", size=14, - bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor), + bbox={"boxstyle": "square,pad=0", "ec": bgcolor, "fc": bgcolor}, ) if label is not None: @@ -340,7 +340,7 @@ def plot_slice( horizontalalignment="right", verticalalignment="bottom", size=14, - bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor), + bbox={"boxstyle": "square,pad=0", "ec": bgcolor, "fc": bgcolor}, ) return ax @@ -357,7 +357,6 @@ def plot_slice_tern( vmax=None, vmin=None, ): - if isinstance(cmap, (str, bytes)): cmap = mpl.colormaps[cmap] @@ -405,7 +404,7 @@ def plot_slice_tern( horizontalalignment="center", verticalalignment="top", size=14, - bbox=dict(boxstyle="square,pad=0", ec="k", fc="k"), + bbox={"boxstyle": "square,pad=0", "ec": "k", "fc": "k"}, color="w", ) @@ -537,15 +536,13 @@ def plot_mosaic( out_file = "mosaic.svg" if plot_sagittal and views[1] is None and views[0] != "sagittal": - warn("Argument ``plot_sagittal`` for plot_mosaic() should not be used.") + warn("Argument ``plot_sagittal`` for plot_mosaic() should not be used.", stacklevel=2) views = (views[0], "sagittal", None) # Create mask for bounding box bbox_data = None if bbox_mask_file is not None: - 
bbox_data = np.asanyarray( - nb.as_closest_canonical(nb.load(bbox_mask_file)).dataobj - ) > 1e-3 + bbox_data = np.asanyarray(nb.as_closest_canonical(nb.load(bbox_mask_file)).dataobj) > 1e-3 elif img_data.shape[-1] > (ncols * maxrows): lowthres = np.percentile(img_data, 5) bbox_data = np.ones_like(img_data) @@ -575,13 +572,19 @@ def plot_mosaic( overlay_data = _bbox(overlay_data, bbox_data) # Decimate if too many values - z_vals = np.unique(np.linspace( - 0, shape[-1] - 1, num=(ncols * nrows), dtype=int, endpoint=True, - )) + z_vals = np.unique( + np.linspace( + 0, + shape[-1] - 1, + num=(ncols * nrows), + dtype=int, + endpoint=True, + ) + ) n_gs = sum(bool(v) for v in views) - main_mosaic_idx = np.full((nrows * ncols, ), -1, dtype=int) - main_mosaic_idx[:len(z_vals)] = z_vals + main_mosaic_idx = np.full((nrows * ncols,), -1, dtype=int) + main_mosaic_idx[: len(z_vals)] = z_vals main_mosaic_idx = main_mosaic_idx.reshape(nrows, ncols) fig_height = [] @@ -613,7 +616,7 @@ def plot_mosaic( # top=0.96, # bottom=0.01, hspace=0.001, - height_ratios=np.array(fig_height) / fig_height[0] + height_ratios=np.array(fig_height) / fig_height[0], ) est_vmin, est_vmax = _get_limits(img_data, only_plot_noise=only_plot_noise) @@ -665,7 +668,7 @@ def plot_mosaic( ) if overlay_mask: - msk_cmap = mpl.colormaps['Reds'] + msk_cmap = mpl.colormaps["Reds"] msk_cmap._init() alphas = np.linspace(0, 0.75, msk_cmap.N + 3) msk_cmap._lut[:, -1] = alphas diff --git a/nireports/reportlets/notebook.py b/nireports/reportlets/notebook.py index d0ff7501..6c534d37 100644 --- a/nireports/reportlets/notebook.py +++ b/nireports/reportlets/notebook.py @@ -26,11 +26,14 @@ # https://github.com/nipreps/niworkflows/blob/fa273d004c362d9562616253180e95694f07be3b/ # niworkflows/viz/notebook.py """Visualization component for Jupyter Notebooks.""" + from pathlib import Path -import numpy as np + import nibabel as nb -from nireports.reportlets.utils import compose_view, cuts_from_bbox +import numpy as np + from nireports.reportlets.mosaic import plot_registration +from nireports.reportlets.utils import compose_view, cuts_from_bbox def display( @@ -42,7 +45,8 @@ def display( moving_label="M", ): """Plot the flickering panels to show a registration process.""" - from IPython.display import SVG, display as _disp + from IPython.display import SVG + from IPython.display import display as _disp if isinstance(fixed_image, (str, Path)): fixed_image = nb.load(str(fixed_image)) diff --git a/nireports/reportlets/nuisance.py b/nireports/reportlets/nuisance.py index 9653780a..444104ec 100644 --- a/nireports/reportlets/nuisance.py +++ b/nireports/reportlets/nuisance.py @@ -23,17 +23,19 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from # other NiPreps projects licensed under the Apache-2.0 terms. 
"""Plotting distributions.""" + import math import os.path as op -import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt +import numpy as np import seaborn as sns from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas -from matplotlib.gridspec import GridSpec, GridSpecFromSubplotSpec -from matplotlib.colors import Normalize from matplotlib.colorbar import ColorbarBase +from matplotlib.colors import Normalize +from matplotlib.gridspec import GridSpec, GridSpecFromSubplotSpec + from nireports.tools.ndimage import _get_values_inside_a_mask DEFAULT_DPI = 300 @@ -42,7 +44,6 @@ def plot_fd(fd_file, fd_radius, mean_fd_dist=None, figsize=DINA4_LANDSCAPE): - fd_power = _calc_fd(fd_file, fd_radius) fig = plt.Figure(figsize=figsize) @@ -70,7 +71,7 @@ def plot_fd(fd_file, fd_radius, mean_fd_dist=None, figsize=DINA4_LANDSCAPE): sns.distplot(mean_fd_dist, ax=ax) ax.set_xlabel("Mean Frame Displacement (over all subjects) [mm]") mean_fd = fd_power.mean() - label = fr"$\overline{{\text{{FD}}}}$ = {mean_fd:g}" + label = rf"$\overline{{\text{{FD}}}}$ = {mean_fd:g}" plot_vline(mean_fd, label, ax=ax) return fig @@ -297,7 +298,7 @@ def plot_carpet( # Cluster segments (if argument enabled) if sort_rows: - from scipy.cluster.hierarchy import linkage, dendrogram + from scipy.cluster.hierarchy import dendrogram, linkage from sklearn.cluster import ward_tree for seg_label, seg_idx in segments.items(): @@ -337,6 +338,7 @@ def plot_carpet( height_ratios=[len(v) for v in segments.values()], ) + label = "" for i, (label, indices) in enumerate(segments.items()): # Carpet plot ax = plt.subplot(gs[i]) @@ -403,7 +405,7 @@ def plot_carpet( ax, width="100%", height=0.01, - loc='lower center', + loc="lower center", borderpad=-4.1, ) axlegend.grid(False) @@ -415,14 +417,16 @@ def plot_carpet( axlegend.spines[loc].set_visible(False) axlegend.legend( - handles=[Patch(color=colors[i], label=l) for i, l in enumerate(segments.keys())], + handles=[ + Patch(color=colors[i], label=_label) for i, _label in enumerate(segments.keys()) + ], loc="upper center", bbox_to_anchor=(0.5, 0), shadow=False, fancybox=False, ncol=min(len(segments.keys()), 5), frameon=False, - prop={'size': 8}, + prop={"size": 8}, ) if output_file is not None: @@ -725,8 +729,8 @@ def confoundplot( units = units or "" stats_label = ( - fr"max: {maxv:.3f}{units} $\bullet$ mean: {mean:.3f}{units} " - fr"$\bullet$ $\sigma$: {stdv:.3f}" + rf"max: {maxv:.3f}{units} $\bullet$ mean: {mean:.3f}{units} " + rf"$\bullet$ $\sigma$: {stdv:.3f}" ) ax_ts.annotate( stats_label, @@ -764,7 +768,7 @@ def confoundplot( if cutoff is None: cutoff = [] - for i, thr in enumerate(cutoff): + for thr in cutoff: ax_ts.plot((0, ntsteps - 1), [thr] * 2, linewidth=0.2, color="dimgray") ax_ts.annotate( diff --git a/nireports/reportlets/surface.py b/nireports/reportlets/surface.py index 38928346..37d36764 100644 --- a/nireports/reportlets/surface.py +++ b/nireports/reportlets/surface.py @@ -23,11 +23,12 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from # NiPreps projects licensed under the Apache-2.0 terms. 
"""Plotting surface-supported data.""" -import numpy as np + import matplotlib.pyplot as plt -from matplotlib.colors import Normalize -from matplotlib import cm import nibabel as nb +import numpy as np +from matplotlib import cm +from matplotlib.colors import Normalize def cifti_surfaces_plot( @@ -119,25 +120,25 @@ def get_surface_meshes(density, surface_type): if mx is None: mx = np.max(data) - cmap = kwargs.pop('cmap', 'YlOrRd_r') + cmap = kwargs.pop("cmap", "YlOrRd_r") cbar_map = cm.ScalarMappable(norm=Normalize(mn, mx), cmap=cmap) # Make background maps that rescale to a medium gray - lh_bg = np.zeros(lh_data.shape, 'int8') - rh_bg = np.zeros(rh_data.shape, 'int8') + lh_bg = np.zeros(lh_data.shape, "int8") + rh_bg = np.zeros(rh_data.shape, "int8") lh_bg[:2] = [3, -2] rh_bg[:2] = [3, -2] lh_mesh, rh_mesh = get_surface_meshes(density, surface_type) - lh_kwargs = dict(surf_mesh=lh_mesh, surf_map=lh_data, bg_map=lh_bg) - rh_kwargs = dict(surf_mesh=rh_mesh, surf_map=rh_data, bg_map=rh_bg) + lh_kwargs = {"surf_mesh": lh_mesh, "surf_map": lh_data, "bg_map": lh_bg} + rh_kwargs = {"surf_mesh": rh_mesh, "surf_map": rh_data, "bg_map": rh_bg} # Build the figure figure = plt.figure(figsize=plt.figaspect(0.25), constrained_layout=True) - for i, view in enumerate(('lateral', 'medial')): - for j, hemi in enumerate(('left', 'right')): - title = f'{hemi.title()} - {view.title()}' - ax = figure.add_subplot(1, 4, i * 2 + j + 1, projection='3d', rasterized=True) + for i, view in enumerate(("lateral", "medial")): + for j, hemi in enumerate(("left", "right")): + title = f"{hemi.title()} - {view.title()}" + ax = figure.add_subplot(1, 4, i * 2 + j + 1, projection="3d", rasterized=True) hemi_kwargs = (lh_kwargs, rh_kwargs)[j] plot_surf( hemi=hemi, @@ -153,7 +154,7 @@ def get_surface_meshes(density, surface_type): # plot_surf sets this to 8, which seems a little far out, but 6 starts clipping ax.dist = 7 - figure.colorbar(cbar_map, shrink=0.2, ax=figure.axes, location='bottom') + figure.colorbar(cbar_map, shrink=0.2, ax=figure.axes, location="bottom") if output_file is not None: figure.savefig(output_file, bbox_inches="tight", dpi=400) diff --git a/nireports/reportlets/utils.py b/nireports/reportlets/utils.py index 4ca7b17c..7b15adee 100644 --- a/nireports/reportlets/utils.py +++ b/nireports/reportlets/utils.py @@ -26,22 +26,21 @@ # https://github.com/nipreps/niworkflows/blob/fa273d004c362d9562616253180e95694f07be3b/ # niworkflows/viz/utils.py """Helper tools for visualization purposes.""" -from pathlib import Path -from shutil import which -from tempfile import TemporaryDirectory -import subprocess + import base64 import re +import subprocess import warnings -from uuid import uuid4 from io import StringIO +from pathlib import Path +from shutil import which +from tempfile import TemporaryDirectory +from uuid import uuid4 -import numpy as np import nibabel as nb - +import numpy as np from nipype.utils import filemanip - SVGNS = "http://www.w3.org/2000/svg" @@ -175,7 +174,7 @@ def combine_svg(svg_list, axis="vertical"): if axis == "vertical": # Calculate the scale to fit all widths scales = [1.0] * len(svgs) - if not all([width[0] == sizes[0][0] for width in sizes[1:]]): + if not all(width[0] == sizes[0][0] for width in sizes[1:]): ref_size = sizes[0] for i, els in enumerate(sizes): scales[i] = ref_size[0] / els[0] @@ -186,7 +185,7 @@ def combine_svg(svg_list, axis="vertical"): elif axis == "horizontal": # Calculate the scale to fit all heights scales = [1.0] * len(svgs) - if not all([height[0] == sizes[0][1] for 
height in sizes[1:]]): + if not all(height[0] == sizes[0][1] for height in sizes[1:]): ref_size = sizes[0] for i, els in enumerate(sizes): scales[i] = ref_size[1] / els[1] @@ -231,7 +230,7 @@ def extract_svg(display_object, dpi=300, compress="auto"): end_tag = "" end_idx = image_svg.rfind(end_tag) if start_idx == -1 or end_idx == -1: - warnings.warn("svg tags not found in extract_svg") + warnings.warn("svg tags not found in extract_svg", stacklevel=2) # rfind gives the start index of the substr. We want this substr # included in our return value so we add its length to the index. end_idx += len(end_tag) @@ -340,7 +339,7 @@ def compose_view(bg_svgs, fg_svgs, ref=0, out_file="report.svg"): def _compose_view(bg_svgs, fg_svgs, ref=0): from svgutils.compose import Unit - from svgutils.transform import SVGFigure, GroupElement + from svgutils.transform import GroupElement, SVGFigure if fg_svgs is None: fg_svgs = [] diff --git a/nireports/reportlets/xca.py b/nireports/reportlets/xca.py index af0b1ef1..2476465a 100644 --- a/nireports/reportlets/xca.py +++ b/nireports/reportlets/xca.py @@ -23,11 +23,11 @@ # STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows, # another NiPreps project licensed under the Apache-2.0 terms, and has been changed since. """Plotting results of component decompositions (xCA -- P/I-CA).""" -import numpy as np -import nibabel as nb -import pandas as pd import matplotlib.pyplot as plt +import nibabel as nb +import numpy as np +import pandas as pd from nilearn.plotting.cm import cold_white_hot from nireports.reportlets.utils import transform_to_2d @@ -77,11 +77,13 @@ def plot_melodic_components( """ import os + import numpy as np - from matplotlib.gridspec import GridSpec import pylab as plt import seaborn as sns + from matplotlib.gridspec import GridSpec from nilearn.image import index_img, iter_img + try: from nilearn.maskers import NiftiMasker except ImportError: # nilearn < 0.9 @@ -152,7 +154,7 @@ def plot_melodic_components( textcoords="axes fraction", size=12, color="#ea8800", - bbox=dict(boxstyle="round", fc="#f7dcb7", ec="#FC990E"), + bbox={"boxstyle": "round", "fc": "#f7dcb7", "ec": "#FC990E"}, ) ax.axes.get_xaxis().set_visible(False) ax.axes.get_yaxis().set_visible(False) @@ -163,7 +165,6 @@ def plot_melodic_components( if ICs.ndim == 3: ICs = ICs.slicer[..., None] for i, img in enumerate(iter_img(ICs)): - col = i % 2 row = i // 2 l_row = row * 2 + warning_row diff --git a/nireports/tests/conftest.py b/nireports/tests/conftest.py index 9077f2e3..0a8d706b 100644 --- a/nireports/tests/conftest.py +++ b/nireports/tests/conftest.py @@ -20,7 +20,8 @@ # # https://www.nipreps.org/community/licensing/ # -""" py.test configuration file """ +"""py.test configuration file""" + import os import pytest diff --git a/nireports/tests/generate_data.py b/nireports/tests/generate_data.py index dd8ecf81..f0da774d 100644 --- a/nireports/tests/generate_data.py +++ b/nireports/tests/generate_data.py @@ -10,12 +10,12 @@ def _create_dtseries_cifti(timepoints, models): def create_series_map(): return ci.Cifti2MatrixIndicesMap( (0,), - 'CIFTI_INDEX_TYPE_SERIES', + "CIFTI_INDEX_TYPE_SERIES", number_of_series_points=timepoints, series_exponent=0, series_start=0, series_step=1, - series_unit='SECOND', + series_unit="SECOND", ) def create_geometry_map(): @@ -41,7 +41,7 @@ def create_geometry_map(): setattr(bm, attr, indices) if model_type == "CIFTI_MODEL_TYPE_SURFACE": # define total vertices for surface models - setattr(bm, "surface_number_of_vertices", 
32492) + bm.surface_number_of_vertices = 32492 index_offset += len(data) brain_models.append(bm) timeseries = np.column_stack((timeseries, data.T)) diff --git a/nireports/tests/test_dwi.py b/nireports/tests/test_dwi.py index efb6980e..6e43fea4 100644 --- a/nireports/tests/test_dwi.py +++ b/nireports/tests/test_dwi.py @@ -22,11 +22,9 @@ # """Test DWI reportlets.""" -import pytest -from pathlib import Path - import nibabel as nb import numpy as np +import pytest from matplotlib import pyplot as plt from nireports.reportlets.modality.dwi import plot_dwi, plot_gradients @@ -35,12 +33,12 @@ def test_plot_dwi(tmp_path, testdata_path, outdir): """Check the plot of DWI data.""" - stem = 'ds000114_sub-01_ses-test_desc-trunc_dwi' - dwi_img = nb.load(testdata_path / f'{stem}.nii.gz') + stem = "ds000114_sub-01_ses-test_desc-trunc_dwi" + dwi_img = nb.load(testdata_path / f"{stem}.nii.gz") affine = dwi_img.affine - bvecs = np.loadtxt(testdata_path / f'{stem}.bvec').T - bvals = np.loadtxt(testdata_path / f'{stem}.bval') + bvecs = np.loadtxt(testdata_path / f"{stem}.bvec").T + bvals = np.loadtxt(testdata_path / f"{stem}.bval") gradients = np.hstack([bvecs, bvals[:, None]]) @@ -51,18 +49,18 @@ def test_plot_dwi(tmp_path, testdata_path, outdir): _ = plot_dwi(dwi_img.get_fdata()[..., idx], affine, gradient=gradients[idx]) if outdir is not None: - plt.savefig(outdir / f'{stem}.svg', bbox_inches='tight') + plt.savefig(outdir / f"{stem}.svg", bbox_inches="tight") @pytest.mark.parametrize( - 'dwi_btable', - ['ds000114_singleshell', 'hcph_multishell', 'ds004737_dsi'], + "dwi_btable", + ["ds000114_singleshell", "hcph_multishell", "ds004737_dsi"], ) def test_plot_gradients(tmp_path, testdata_path, dwi_btable, outdir): """Check the plot of DWI gradients.""" - bvecs = np.loadtxt(testdata_path / f'{dwi_btable}.bvec').T - bvals = np.loadtxt(testdata_path / f'{dwi_btable}.bval') + bvecs = np.loadtxt(testdata_path / f"{dwi_btable}.bvec").T + bvals = np.loadtxt(testdata_path / f"{dwi_btable}.bval") b0s_mask = bvals < 50 @@ -70,4 +68,4 @@ def test_plot_gradients(tmp_path, testdata_path, dwi_btable, outdir): _ = plot_gradients(gradients) if outdir is not None: - plt.savefig(outdir / f'{dwi_btable}.svg', bbox_inches='tight') + plt.savefig(outdir / f"{dwi_btable}.svg", bbox_inches="tight") diff --git a/nireports/tests/test_interfaces.py b/nireports/tests/test_interfaces.py index 0db618c4..d361dd6b 100644 --- a/nireports/tests/test_interfaces.py +++ b/nireports/tests/test_interfaces.py @@ -21,6 +21,7 @@ # https://www.nipreps.org/community/licensing/ # """Tests plotting interfaces.""" + import os from shutil import copy @@ -45,7 +46,7 @@ def test_CompCorVariancePlot(datadir): _smoke_test_report(cc_rpt, "compcor_variance.svg") -@pytest.mark.parametrize('ignore_initial_volumes', (0, 1)) +@pytest.mark.parametrize("ignore_initial_volumes", (0, 1)) def test_ConfoundsCorrelationPlot(datadir, ignore_initial_volumes): """confounds correlation report test""" confounds_file = os.path.join(datadir, "confounds_test.tsv") diff --git a/nireports/tests/test_reportlets.py b/nireports/tests/test_reportlets.py index 85779baa..671334e0 100644 --- a/nireports/tests/test_reportlets.py +++ b/nireports/tests/test_reportlets.py @@ -21,22 +21,22 @@ # https://www.nipreps.org/community/licensing/ # """Test reportlets module.""" + import os -from pathlib import Path -from itertools import permutations from functools import partial +from itertools import permutations +from pathlib import Path import nibabel as nb import numpy as np import pandas as pd 
import pytest - from templateflow.api import get from nireports.reportlets.modality.func import fMRIPlot +from nireports.reportlets.mosaic import plot_mosaic from nireports.reportlets.nuisance import plot_carpet from nireports.reportlets.surface import cifti_surfaces_plot -from nireports.reportlets.mosaic import plot_mosaic from nireports.reportlets.xca import compcor_variance_plot, plot_melodic_components from nireports.tools.timeseries import cifti_timeseries as _cifti_timeseries from nireports.tools.timeseries import get_tr as _get_tr @@ -270,8 +270,8 @@ def create_surface_dtseries(): out_file = _create_dtseries_cifti( timepoints=10, models=[ - ('CIFTI_STRUCTURE_CORTEX_LEFT', np.random.rand(29696, 10)), - ('CIFTI_STRUCTURE_CORTEX_RIGHT', np.random.rand(29716, 10)), + ("CIFTI_STRUCTURE_CORTEX_LEFT", np.random.rand(29696, 10)), + ("CIFTI_STRUCTURE_CORTEX_RIGHT", np.random.rand(29716, 10)), ], ) yield str(out_file) @@ -328,10 +328,9 @@ def test_nifti_carpetplot(tmp_path, testdata_path, outdir): ) -_views = ( - list(permutations(("axial", "sagittal", "coronal", None), 3)) - + [(v, None, None) for v in ("axial", "sagittal", "coronal")] -) +_views = list(permutations(("axial", "sagittal", "coronal", None), 3)) + [ + (v, None, None) for v in ("axial", "sagittal", "coronal") +] @pytest.mark.parametrize("views", _views) @@ -339,9 +338,7 @@ def test_nifti_carpetplot(tmp_path, testdata_path, outdir): def test_mriqc_plot_mosaic(tmp_path, testdata_path, outdir, views, plot_sagittal): """Exercise the generation of mosaics.""" - fname = ( - f"mosaic_{'_'.join(v or 'none' for v in views)}_{plot_sagittal:d}.svg" - ) + fname = f"mosaic_{'_'.join(v or 'none' for v in views)}_{plot_sagittal:d}.svg" testfunc = partial( plot_mosaic, diff --git a/nireports/tools/timeseries.py b/nireports/tools/timeseries.py index 5f2e46f2..e184efff 100644 --- a/nireports/tools/timeseries.py +++ b/nireports/tools/timeseries.py @@ -26,8 +26,9 @@ # https://github.com/nipreps/niworkflows/blob/fa273d004c362d9562616253180e95694f07be3b/ # niworkflows/utils/timeseries.py """Extracting signals from NIfTI and CIFTI2 files.""" -import numpy as np + import nibabel as nb +import numpy as np def get_tr(img): @@ -72,13 +73,8 @@ def cifti_timeseries(dataset): } seg = {label: [] for label in list(labels.values()) + ["Other"]} for bm in matrix.get_index_map(1).brain_models: - label = ( - "Other" if bm.brain_structure not in labels else - labels[bm.brain_structure] - ) - seg[label] += list(range( - bm.index_offset, bm.index_offset + bm.index_count - )) + label = "Other" if bm.brain_structure not in labels else labels[bm.brain_structure] + seg[label] += list(range(bm.index_offset, bm.index_offset + bm.index_count)) return dataset.get_fdata(dtype="float32").T, seg @@ -108,9 +104,9 @@ def nifti_timeseries( if lut is None: lut = np.zeros((256,), dtype="uint8") lut[100:201] = 1 # Ctx GM - lut[30:99] = 2 # dGM - lut[1:11] = 3 # WM+CSF - lut[255] = 4 # Cerebellum + lut[30:99] = 2 # dGM + lut[1:11] = 3 # WM+CSF + lut[255] = 4 # Cerebellum # Apply lookup table segmentation = lut[segmentation] diff --git a/pyproject.toml b/pyproject.toml index 93aa541e..2db40460 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,10 +50,8 @@ doc = [ ] dev = [ - "black ~= 22.3.0", + "ruff", "pre-commit", - "isort ~= 5.10.1", - "flake8-pyproject", ] test = [ @@ -101,40 +99,49 @@ version-file = "nireports/_version.py" # Developer tool configurations # -[tool.black] +[tool.ruff] line-length = 99 -target-version = ['py39'] -skip-string-normalization = true -exclude = ''' 
-# Directories -/( - \.eggs - | \.git - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | venv - | _build - | build - | dist -)/ -''' - -[tool.isort] -profile = 'black' -skip_gitignore = true - -[tool.flake8] -max-line-length = "99" -doctests = "False" -exclude = "*build/" -ignore = ["W503", "E203"] -per-file-ignores = [ - "**/__init__.py : F401", - "docs/conf.py : E265", +target-version = "py39" +exclude = [ + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".tox", + ".venv", + "venv", + "_build", + "build", + "dist", +] + +[tool.ruff.lint] +select = [ + "F", + "E", + "C", + "W", + "B", + "I", +] +ignore = [ + "E203", ] +[tool.ruff.lint.flake8-quotes] +inline-quotes = "double" + +[tool.ruff.lint.extend-per-file-ignores] +"*/__init__.py" = ["F401"] +"docs/conf.py" = ["E265"] +"/^\\s*\\.\\. _.*?: http/" = ["E501"] + +[tool.ruff.format] +quote-style = "double" + +[tool.ruff.lint.isort] +known-first-party=["nireports"] + [tool.pytest.ini_options] norecursedirs = [".git"] addopts = "-svx --doctest-modules -n auto"
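
Local usage note: the patch above swaps black/isort/flake8 for ruff and wires the same checks into CI and pre-commit. A sketch of the equivalent local commands, assuming pipx and pre-commit are available on your PATH; the ruff invocations are the ones used in .github/workflows/contrib.yml and the pre-commit steps come from the comments added to .pre-commit-config.yaml:

    # one-time setup: register the git hook so the ruff checks run on each commit
    pre-commit install
    # run the same lint/format checks as the "Contribution checks" workflow
    pipx run ruff check --diff
    pipx run ruff format --diff

Both ruff commands only report differences; dropping the --diff flag would apply the fixes in place, which is what the ruff/ruff-format pre-commit hooks configured above do.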