Skip to content

Commit

Permalink
STYLE: Transition to ruff for code formatting
Browse files Browse the repository at this point in the history
Transition to `ruff` for code formatting in line with the rest of the
`nipreps` repositories.

Drop the flake8 `W503` rule, which is not implemented by `ruff`.

Add a GHA workflow to ensure that contributions not complying with the
style are detected.

Format the source code accordingly.
  • Loading branch information
jhlegarreta committed May 10, 2024
1 parent a8b3927 commit de36988
Show file tree
Hide file tree
Showing 33 changed files with 376 additions and 351 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/contrib.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# Contribution-style checks: run the repo's pre-commit hooks (ruff lint +
# format) on every push and pull request so non-compliant contributions are
# detected in CI.
# NOTE(review): the scraped page dropped all YAML indentation; this is the
# reconstructed, valid form of the 24 added lines.
name: Contribution checks
on: [push, pull_request]

defaults:
  run:
    shell: bash

jobs:
  stable:
    name: Run ruff
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          # Full history + submodules so hooks that inspect the repo work.
          submodules: recursive
          fetch-depth: 0
      - name: Set up Python 3
        uses: actions/setup-python@v5
        with:
          python-version: 3

      - name: Install and run pre-commit hooks
        uses: pre-commit/action@v3.0.1
70 changes: 24 additions & 46 deletions .maint/update_authors.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
#!/usr/bin/env python3
"""Update and sort the creators list of the zenodo record."""

import json
import sys
from pathlib import Path
import json

import click
from fuzzywuzzy import fuzz, process

Expand Down Expand Up @@ -36,10 +38,7 @@ def read_md_table(md_text):
retval = []
for line in md_text.splitlines():
if line.strip().startswith("| --- |"):
keys = (
k.replace("*", "").strip()
for k in prev.split("|")
)
keys = (k.replace("*", "").strip() for k in prev.split("|"))
keys = [k.lower() for k in keys if k]
continue
elif not keys:
Expand All @@ -60,19 +59,13 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
last = last or []
sorted_authors = sorted(entries, key=lambda i: i["name"])

first_last = [
" ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors
]
first_last_excl = [
" ".join(val["name"].split(",")[::-1]).strip() for val in exclude or []
]
first_last = [" ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors]
first_last_excl = [" ".join(val["name"].split(",")[::-1]).strip() for val in exclude or []]

unmatched = []
author_matches = []
for ele in git_lines:
matches = process.extract(
ele, first_last, scorer=fuzz.token_sort_ratio, limit=2
)
matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, limit=2)
# matches is a list [('First match', % Match), ('Second match', % Match)]
if matches[0][1] > 80:
val = sorted_authors[first_last.index(matches[0][0])]
Expand Down Expand Up @@ -152,8 +145,9 @@ def cli():
@cli.command()
@click.option("-z", "--zenodo-file", type=click.Path(exists=True), default=".zenodo.json")
@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md")
@click.option("-c", "--contributors", type=click.Path(exists=True),
default=".maint/CONTRIBUTORS.md")
@click.option(
"-c", "--contributors", type=click.Path(exists=True), default=".maint/CONTRIBUTORS.md"
)
@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md")
@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md")
def zenodo(
Expand All @@ -176,15 +170,13 @@ def zenodo(
)

zen_contributors, miss_contributors = sort_contributors(
_namelast(read_md_table(Path(contributors).read_text())),
data,
exclude=former
_namelast(read_md_table(Path(contributors).read_text())), data, exclude=former
)

zen_pi = _namelast(
sorted(
read_md_table(Path(pi).read_text()),
key=lambda v: (int(v.get("position", -1)), v.get("lastname"))
key=lambda v: (int(v.get("position", -1)), v.get("lastname")),
)
)

Expand All @@ -194,8 +186,7 @@ def zenodo(
misses = set(miss_creators).intersection(miss_contributors)
if misses:
print(
"Some people made commits, but are missing in .maint/ "
f"files: {', '.join(misses)}",
"Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}",
file=sys.stderr,
)

Expand All @@ -214,15 +205,14 @@ def zenodo(
if isinstance(creator["affiliation"], list):
creator["affiliation"] = creator["affiliation"][0]

Path(zenodo_file).write_text(
"%s\n" % json.dumps(zenodo, indent=2)
)
Path(zenodo_file).write_text("%s\n" % json.dumps(zenodo, indent=2))


@cli.command()
@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md")
@click.option("-c", "--contributors", type=click.Path(exists=True),
default=".maint/CONTRIBUTORS.md")
@click.option(
"-c", "--contributors", type=click.Path(exists=True), default=".maint/CONTRIBUTORS.md"
)
@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md")
@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md")
def publication(
Expand All @@ -232,9 +222,8 @@ def publication(
former_file,
):
"""Generate the list of authors and affiliations for papers."""
members = (
_namelast(read_md_table(Path(maintainers).read_text()))
+ _namelast(read_md_table(Path(contributors).read_text()))
members = _namelast(read_md_table(Path(maintainers).read_text())) + _namelast(
read_md_table(Path(contributors).read_text())
)

hits, misses = sort_contributors(
Expand All @@ -246,15 +235,12 @@ def publication(
pi_hits = _namelast(
sorted(
read_md_table(Path(pi).read_text()),
key=lambda v: (int(v.get("position", -1)), v.get("lastname"))
key=lambda v: (int(v.get("position", -1)), v.get("lastname")),
)
)

pi_names = [pi["name"] for pi in pi_hits]
hits = [
hit for hit in hits
if hit["name"] not in pi_names
] + pi_hits
hits = [hit for hit in hits if hit["name"] not in pi_names] + pi_hits

def _aslist(value):
if isinstance(value, (list, tuple)):
Expand All @@ -281,27 +267,19 @@ def _aslist(value):

if misses:
print(
"Some people made commits, but are missing in .maint/ "
f"files: {', '.join(misses)}",
"Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}",
file=sys.stderr,
)

print("Authors (%d):" % len(hits))
print(
"%s."
% "; ".join(
[
"%s \\ :sup:`%s`\\ " % (i["name"], idx)
for i, idx in zip(hits, aff_indexes)
]
)
% "; ".join(["%s \\ :sup:`%s`\\ " % (i["name"], idx) for i, idx in zip(hits, aff_indexes)])
)

print(
"\n\nAffiliations:\n%s"
% "\n".join(
["{0: >2}. {1}".format(i + 1, a) for i, a in enumerate(affiliations)]
)
% "\n".join(["{0: >2}. {1}".format(i + 1, a) for i, a in enumerate(affiliations)])
)


Expand Down
43 changes: 23 additions & 20 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,23 +1,26 @@
# To install the git pre-commit hook run:
# pre-commit install
# To update the pre-commit hooks run:
# pre-commit install-hooks

repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.1.0
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-json
- id: check-toml
- id: check-case-conflict
- id: check-docstring-first
- id: check-merge-conflict
- id: check-vcs-permalinks
- id: pretty-format-json
args: ['--autofix']
- repo: https://github.com/psf/black
rev: 22.3.0
- id: trailing-whitespace
- id: end-of-file-fixer
- id: debug-statements
- id: check-yaml
- id: check-json
- id: check-toml
- id: check-case-conflict
- id: check-docstring-first
- id: check-merge-conflict
- id: check-vcs-permalinks
- id: pretty-format-json
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.4
hooks:
- id: black
- repo: https://github.com/pycqa/isort
rev: 5.10.1
hooks:
- id: isort
- id: ruff
args: [ --fix ]
- id: ruff-format
4 changes: 3 additions & 1 deletion nireports/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,15 @@
# https://www.nipreps.org/community/licensing/
#
"""Add metadata on import."""

__packagename__ = "nireports"
__copyright__ = "2023, The NiPreps developers"

try:
from ._version import __version__
except ModuleNotFoundError:
from importlib.metadata import version, PackageNotFoundError
from importlib.metadata import PackageNotFoundError, version

try:
__version__ = version(__packagename__)
except PackageNotFoundError:
Expand Down
1 change: 1 addition & 0 deletions nireports/assembler/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
.. autoclass:: Loader
"""

from __future__ import annotations

import atexit
Expand Down
7 changes: 4 additions & 3 deletions nireports/assembler/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,11 @@
# STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows,
# another NiPreps project licensed under the Apache-2.0 terms, and has been changed since.
"""Miscellaneous utilities."""

from collections import defaultdict
from pathlib import Path
from bids.utils import listify

from bids.utils import listify
from nipype.utils.filemanip import loadcrash


Expand Down Expand Up @@ -242,7 +243,7 @@ def unfold_columns(indict, prefix=None, delimiter="_"):
"""
prefix = listify(prefix) if prefix is not None else []
keys = sorted(set(list(indict.keys())))
keys = sorted(set(indict.keys()))

data = []
subdict = defaultdict(dict, {})
Expand All @@ -254,7 +255,7 @@ def unfold_columns(indict, prefix=None, delimiter="_"):
subdict[col[0]][col[1]] = indict[key]

if subdict:
for skey in sorted(list(subdict.keys())):
for skey in sorted(subdict.keys()):
sskeys = list(subdict[skey].keys())

# If there is only one subkey, merge back
Expand Down
40 changes: 22 additions & 18 deletions nireports/assembler/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
# STATEMENT OF CHANGES: This file was ported carrying over full git history from niworkflows,
# another NiPreps project licensed under the Apache-2.0 terms, and has been changed since.
"""Core objects representing reports."""

import re
from collections import defaultdict
from itertools import compress
Expand All @@ -36,7 +37,6 @@
from nireports.assembler import data
from nireports.assembler.reportlet import Reportlet


# Add a new figures spec
try:
add_config_paths(figures=data.load("nipreps.json"))
Expand Down Expand Up @@ -270,8 +270,7 @@ def __init__(
metadata = metadata or {}
if "filename" not in metadata:
metadata["filename"] = Path(out_filename).name.replace(
"".join(Path(out_filename).suffixes),
""
"".join(Path(out_filename).suffixes), ""
)

# Initialize structuring elements
Expand All @@ -287,9 +286,7 @@ def __init__(
"out_dir": str(out_dir),
"reportlets_dir": str(root),
}
meta_repl.update({
kk: vv for kk, vv in metadata.items() if isinstance(vv, str)
})
meta_repl.update({kk: vv for kk, vv in metadata.items() if isinstance(vv, str)})
meta_repl.update(bids_filters)
expr = re.compile(f'{{({"|".join(meta_repl.keys())})}}')

Expand All @@ -308,7 +305,8 @@ def __init__(

# Path to the Jinja2 template
self.template_path = (
Path(settings["template_path"]) if "template_path" in settings
Path(settings["template_path"])
if "template_path" in settings
else data.load("report.tpl").absolute()
)

Expand Down Expand Up @@ -383,7 +381,8 @@ def index(self, config):
# do not display entities with the value None.
c_filt = [
f'{key} <span class="bids-entity">{c_value}</span>'
for key, c_value in zip(entities, c) if c_value is not None
for key, c_value in zip(entities, c)
if c_value is not None
]
# Set a common title for this particular combination c
title = "Reports for: %s." % ", ".join(c_filt)
Expand Down Expand Up @@ -420,11 +419,11 @@ def process_plugins(self, config, metadata=None):
self.footer = []

plugins = config.get("plugins", None)
for plugin in (plugins or []):
for plugin in plugins or []:
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(searchpath=str(
Path(__file__).parent / "data" / f"{plugin['type']}"
)),
loader=jinja2.FileSystemLoader(
searchpath=str(Path(__file__).parent / "data" / f"{plugin['type']}")
),
trim_blocks=True,
lstrip_blocks=True,
autoescape=False,
Expand All @@ -434,12 +433,17 @@ def process_plugins(self, config, metadata=None):
plugin_meta.update((metadata or {}).get(plugin["type"], {}))
for member in ("header", "navbar", "footer"):
old_value = getattr(self, member)
setattr(self, member, old_value + [
env.get_template(f"{member}.tpl").render(
config=plugin,
metadata=plugin_meta,
)
])
setattr(
self,
member,
old_value
+ [
env.get_template(f"{member}.tpl").render(
config=plugin,
metadata=plugin_meta,
)
],
)

def generate_report(self):
"""Once the Report has been indexed, the final HTML can be generated"""
Expand Down
Loading

0 comments on commit de36988

Please sign in to comment.