diff --git a/.maint/paper_author_list.py b/.maint/paper_author_list.py
index 678ddeddc..55ed989fa 100644
--- a/.maint/paper_author_list.py
+++ b/.maint/paper_author_list.py
@@ -1,10 +1,10 @@
 #!/usr/bin/env python3
 """Generate an author list for a new paper or abstract."""
+import json
 import sys
 from pathlib import Path
-import json
-from update_zenodo import get_git_lines, sort_contributors
+from update_zenodo import get_git_lines, sort_contributors
 
 # These authors should go last
 AUTHORS_LAST = ['Gorgolewski, Krzysztof J.', 'Poldrack, Russell A.', 'Esteban, Oscar']
 
@@ -21,9 +21,11 @@ def _aslist(inlist):
     contribs = json.loads(Path('.maint/contributors.json').read_text())
 
     author_matches, unmatched = sort_contributors(
-        devs + contribs, get_git_lines(),
+        devs + contribs,
+        get_git_lines(),
         exclude=json.loads(Path('.maint/former.json').read_text()),
-        last=AUTHORS_LAST)
+        last=AUTHORS_LAST,
+    )
     # Remove position
     affiliations = []
     for item in author_matches:
@@ -32,18 +34,34 @@ def _aslist(inlist):
         if a not in affiliations:
             affiliations.append(a)
 
-    aff_indexes = [', '.join(['%d' % (affiliations.index(a) + 1)
-                              for a in _aslist(author.get('affiliation', 'Unaffiliated'))])
-                   for author in author_matches]
+    aff_indexes = [
+        ', '.join(
+            [
+                '%d' % (affiliations.index(a) + 1)
+                for a in _aslist(author.get('affiliation', 'Unaffiliated'))
+            ]
+        )
+        for author in author_matches
+    ]
 
-    print("Some people made commits, but are missing in .maint/ "
-          "files: %s." % ', '.join(unmatched), file=sys.stderr)
+    print(
+        'Some people made commits, but are missing in .maint/ '
+        'files: %s.' % ', '.join(unmatched),
+        file=sys.stderr,
+    )
 
     print('Authors (%d):' % len(author_matches))
-    print("%s." % '; '.join([
-        '%s \\ :sup:`%s`\\ ' % (i['name'], idx)
-        for i, idx in zip(author_matches, aff_indexes)
-    ]))
+    print(
+        '%s.'
+        % '; '.join(
+            [
+                '%s \\ :sup:`%s`\\ ' % (i['name'], idx)
+                for i, idx in zip(author_matches, aff_indexes, strict=False)
+            ]
+        )
+    )
 
-    print('\n\nAffiliations:\n%s' % '\n'.join(['{0: >2}. {1}'.format(i + 1, a)
-                                               for i, a in enumerate(affiliations)]))
+    print(
+        '\n\nAffiliations:\n%s'
+        % '\n'.join([f'{i + 1: >2}. {a}' for i, a in enumerate(affiliations)])
+    )
diff --git a/.maint/update_authors.py b/.maint/update_authors.py
index 38982575f..59d5ff2bc 100644
--- a/.maint/update_authors.py
+++ b/.maint/update_authors.py
@@ -1,8 +1,9 @@
 #!/usr/bin/env python3
 """Update and sort the creators list of the zenodo record."""
+import json
 import sys
 from pathlib import Path
-import json
+
 import click
 from fuzzywuzzy import fuzz, process
 
@@ -35,22 +36,19 @@ def read_md_table(md_text):
     keys = None
     retval = []
     for line in md_text.splitlines():
-        if line.strip().startswith("| --- |"):
-            keys = (
-                k.replace("*", "").strip()
-                for k in prev.split("|")
-            )
+        if line.strip().startswith('| --- |'):
+            keys = (k.replace('*', '').strip() for k in prev.split('|'))
             keys = [k.lower() for k in keys if k]
             continue
         elif not keys:
             prev = line
             continue
 
-        if not line or not line.strip().startswith("|"):
+        if not line or not line.strip().startswith('|'):
             break
 
-        values = [v.strip() or None for v in line.split("|")][1:-1]
-        retval.append({k: v for k, v in zip(keys, values) if v})
+        values = [v.strip() or None for v in line.split('|')][1:-1]
+        retval.append({k: v for k, v in zip(keys, values, strict=False) if v})
 
     return retval
 
@@ -58,21 +56,15 @@ def read_md_table(md_text):
 def sort_contributors(entries, git_lines, exclude=None, last=None):
     """Return a list of author dictionaries, ordered by contribution."""
     last = last or []
-    sorted_authors = sorted(entries, key=lambda i: i["name"])
+    sorted_authors = sorted(entries, key=lambda i: i['name'])
 
-    first_last = [
-        " ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors
-    ]
-    first_last_excl = [
-        " ".join(val["name"].split(",")[::-1]).strip() for val in exclude or []
-    ]
+    first_last = [' '.join(val['name'].split(',')[::-1]).strip() for val in sorted_authors]
+    first_last_excl = [' '.join(val['name'].split(',')[::-1]).strip() for val in exclude or []]
 
     unmatched = []
     author_matches = []
     for ele in git_lines:
-        matches = process.extract(
-            ele, first_last, scorer=fuzz.token_sort_ratio, limit=2
-        )
+        matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, limit=2)
         # matches is a list [('First match', % Match), ('Second match', % Match)]
         if matches[0][1] > 80:
             val = sorted_authors[first_last.index(matches[0][0])]
@@ -85,7 +77,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
         if val not in author_matches:
             author_matches.append(val)
 
-    names = {" ".join(val["name"].split(",")[::-1]).strip() for val in author_matches}
+    names = {' '.join(val['name'].split(',')[::-1]).strip() for val in author_matches}
     for missing_name in first_last:
         if missing_name not in names:
             missing = sorted_authors[first_last.index(missing_name)]
@@ -93,7 +85,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
 
     position_matches = []
     for i, item in enumerate(author_matches):
-        pos = item.pop("position", None)
+        pos = item.pop('position', None)
         if pos is not None:
             position_matches.append((i, int(pos)))
 
@@ -105,7 +97,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
     return author_matches, unmatched
 
 
-def get_git_lines(fname="line-contributors.txt"):
+def get_git_lines(fname='line-contributors.txt'):
    """Run git-line-summary."""
     import shutil
     import subprocess as sp
 
@@ -114,15 +106,15 @@ def get_git_lines(fname='line-contributors.txt'):
 
     lines = []
     if contrib_file.exists():
-        print("WARNING: Reusing existing line-contributors.txt file.", file=sys.stderr)
+        print('WARNING: Reusing existing line-contributors.txt file.',
file=sys.stderr) lines = contrib_file.read_text().splitlines() - git_line_summary_path = shutil.which("git-line-summary") + git_line_summary_path = shutil.which('git-line-summary') if not lines and git_line_summary_path: - print("Running git-line-summary on repo") + print('Running git-line-summary on repo') lines = sp.check_output([git_line_summary_path]).decode().splitlines() - lines = [line for line in lines if "Not Committed Yet" not in line] - contrib_file.write_text("\n".join(lines)) + lines = [line for line in lines if 'Not Committed Yet' not in line] + contrib_file.write_text('\n'.join(lines)) if not lines: raise RuntimeError( @@ -132,13 +124,13 @@ def get_git_lines(fname="line-contributors.txt"): git-line-summary not found, please install git-extras. """ * (git_line_summary_path is None) ) - return [" ".join(line.strip().split()[1:-1]) for line in lines if "%" in line] + return [' '.join(line.strip().split()[1:-1]) for line in lines if '%' in line] def _namelast(inlist): retval = [] for i in inlist: - i["name"] = (f"{i.pop('name', '')} {i.pop('lastname', '')}").strip() + i['name'] = (f"{i.pop('name', '')} {i.pop('lastname', '')}").strip() retval.append(i) return retval @@ -150,12 +142,13 @@ def cli(): @cli.command() -@click.option("-z", "--zenodo-file", type=click.Path(exists=True), default=".zenodo.json") -@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") -@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") -@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") +@click.option('-z', '--zenodo-file', type=click.Path(exists=True), default='.zenodo.json') +@click.option('-m', '--maintainers', type=click.Path(exists=True), default='.maint/MAINTAINERS.md') +@click.option( + '-c', '--contributors', type=click.Path(exists=True), default='.maint/CONTRIBUTORS.md' +) +@click.option('--pi', type=click.Path(exists=True), default='.maint/PIs.md') +@click.option('-f', '--former-file', type=click.Path(exists=True), default='.maint/FORMER.md') def zenodo( zenodo_file, maintainers, @@ -176,55 +169,51 @@ def zenodo( ) zen_contributors, miss_contributors = sort_contributors( - _namelast(read_md_table(Path(contributors).read_text())), - data, - exclude=former + _namelast(read_md_table(Path(contributors).read_text())), data, exclude=former ) zen_pi = _namelast( sorted( read_md_table(Path(pi).read_text()), - key=lambda v: (int(v.get("position", -1)), v.get("lastname")) + key=lambda v: (int(v.get('position', -1)), v.get('lastname')), ) ) - zenodo["creators"] = zen_creators - zenodo["contributors"] = zen_contributors + zen_pi + zenodo['creators'] = zen_creators + zenodo['contributors'] = zen_contributors + zen_pi misses = set(miss_creators).intersection(miss_contributors) if misses: print( - "Some people made commits, but are missing in .maint/ " - f"files: {', '.join(misses)}", + "Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}", file=sys.stderr, ) # Remove position - for creator in zenodo["creators"]: - creator.pop("position", None) - creator.pop("handle", None) - if isinstance(creator["affiliation"], list): - creator["affiliation"] = creator["affiliation"][0] - - for creator in zenodo["contributors"]: - creator.pop("handle", None) - creator["type"] = "Researcher" - creator.pop("position", None) - - if isinstance(creator["affiliation"], list): - 
creator["affiliation"] = creator["affiliation"][0] - - Path(zenodo_file).write_text( - "%s\n" % json.dumps(zenodo, indent=2) - ) + for creator in zenodo['creators']: + creator.pop('position', None) + creator.pop('handle', None) + if isinstance(creator['affiliation'], list): + creator['affiliation'] = creator['affiliation'][0] + + for creator in zenodo['contributors']: + creator.pop('handle', None) + creator['type'] = 'Researcher' + creator.pop('position', None) + + if isinstance(creator['affiliation'], list): + creator['affiliation'] = creator['affiliation'][0] + + Path(zenodo_file).write_text('%s\n' % json.dumps(zenodo, indent=2)) @cli.command() -@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") -@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") -@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") +@click.option('-m', '--maintainers', type=click.Path(exists=True), default='.maint/MAINTAINERS.md') +@click.option( + '-c', '--contributors', type=click.Path(exists=True), default='.maint/CONTRIBUTORS.md' +) +@click.option('--pi', type=click.Path(exists=True), default='.maint/PIs.md') +@click.option('-f', '--former-file', type=click.Path(exists=True), default='.maint/FORMER.md') def publication( maintainers, contributors, @@ -232,9 +221,8 @@ def publication( former_file, ): """Generate the list of authors and affiliations for papers.""" - members = ( - _namelast(read_md_table(Path(maintainers).read_text())) - + _namelast(read_md_table(Path(contributors).read_text())) + members = _namelast(read_md_table(Path(maintainers).read_text())) + _namelast( + read_md_table(Path(contributors).read_text()) ) hits, misses = sort_contributors( @@ -246,15 +234,12 @@ def publication( pi_hits = _namelast( sorted( read_md_table(Path(pi).read_text()), - key=lambda v: (int(v.get("position", -1)), v.get("lastname")) + key=lambda v: (int(v.get('position', -1)), v.get('lastname')), ) ) - pi_names = [pi["name"] for pi in pi_hits] - hits = [ - hit for hit in hits - if hit["name"] not in pi_names - ] + pi_hits + pi_names = [pi['name'] for pi in pi_hits] + hits = [hit for hit in hits if hit['name'] not in pi_names] + pi_hits def _aslist(value): if isinstance(value, (list, tuple)): @@ -264,16 +249,16 @@ def _aslist(value): # Remove position affiliations = [] for item in hits: - item.pop("position", None) - for a in _aslist(item.get("affiliation", "Unaffiliated")): + item.pop('position', None) + for a in _aslist(item.get('affiliation', 'Unaffiliated')): if a not in affiliations: affiliations.append(a) aff_indexes = [ - ", ".join( + ', '.join( [ - "%d" % (affiliations.index(a) + 1) - for a in _aslist(author.get("affiliation", "Unaffiliated")) + '%d' % (affiliations.index(a) + 1) + for a in _aslist(author.get('affiliation', 'Unaffiliated')) ] ) for author in hits @@ -281,30 +266,27 @@ def _aslist(value): if misses: print( - "Some people made commits, but are missing in .maint/ " - f"files: {', '.join(misses)}", + "Some people made commits, but are missing in .maint/ " f"files: {', '.join(misses)}", file=sys.stderr, ) - print("Authors (%d):" % len(hits)) + print('Authors (%d):' % len(hits)) print( - "%s." - % "; ".join( + '%s.' 
+ % '; '.join( [ - "%s \\ :sup:`%s`\\ " % (i["name"], idx) - for i, idx in zip(hits, aff_indexes) + '%s \\ :sup:`%s`\\ ' % (i['name'], idx) + for i, idx in zip(hits, aff_indexes, strict=False) ] ) ) print( - "\n\nAffiliations:\n%s" - % "\n".join( - ["{0: >2}. {1}".format(i + 1, a) for i, a in enumerate(affiliations)] - ) + '\n\nAffiliations:\n%s' + % '\n'.join([f'{i + 1: >2}. {a}' for i, a in enumerate(affiliations)]) ) -if __name__ == "__main__": +if __name__ == '__main__': """ Install entry-point """ cli() diff --git a/.maint/update_zenodo.py b/.maint/update_zenodo.py index 5e05e0b99..bbde815fb 100755 --- a/.maint/update_zenodo.py +++ b/.maint/update_zenodo.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 """Update and sort the creators list of the zenodo record.""" +import json import sys from pathlib import Path -import json + from fuzzywuzzy import fuzz, process # These ORCIDs should go last @@ -15,17 +16,14 @@ def sort_contributors(entries, git_lines, exclude=None, last=None): last = last or [] sorted_authors = sorted(entries, key=lambda i: i['name']) - first_last = [' '.join(val['name'].split(',')[::-1]).strip() - for val in sorted_authors] - first_last_excl = [' '.join(val['name'].split(',')[::-1]).strip() - for val in exclude or []] + first_last = [' '.join(val['name'].split(',')[::-1]).strip() for val in sorted_authors] + first_last_excl = [' '.join(val['name'].split(',')[::-1]).strip() for val in exclude or []] unmatched = [] author_matches = [] position = 1 for ele in git_lines: - matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, - limit=2) + matches = process.extract(ele, first_last, scorer=fuzz.token_sort_ratio, limit=2) # matches is a list [('First match', % Match), ('Second match', % Match)] if matches[0][1] > 80: val = sorted_authors[first_last.index(matches[0][0])] @@ -62,6 +60,7 @@ def get_git_lines(fname='line-contributors.txt'): """Run git-line-summary.""" import shutil import subprocess as sp + contrib_file = Path(fname) lines = [] @@ -71,17 +70,19 @@ def get_git_lines(fname='line-contributors.txt'): cmd = [shutil.which('git-line-summary')] if cmd == [None]: - cmd = [shutil.which('git-summary'), "--line"] + cmd = [shutil.which('git-summary'), '--line'] if not lines and cmd[0]: print(f"Running {' '.join(cmd)!r} on repo") lines = sp.check_output(cmd).decode().splitlines() - lines = [line for line in lines if "Not Committed Yet" not in line] + lines = [line for line in lines if 'Not Committed Yet' not in line] contrib_file.write_text('\n'.join(lines)) if not lines: - raise RuntimeError("""\ -Could not find line-contributors from git repository.%s""" % """ \ -git-(line-)summary not found, please install git-extras. """ * (cmd[0] is None)) + raise RuntimeError( + 'Could not find line-contributors from git repository.%s' + % 'git-(line-)summary not found, please install git-extras.' 
+ * (cmd[0] is None) + ) return [' '.join(line.strip().split()[1:-1]) for line in lines if '%' in line] @@ -90,14 +91,14 @@ def loads_table_from_markdown(s): table = [] header = None for line in s.splitlines(): - if line.startswith("|"): + if line.startswith('|'): if not header: # read header and strip bold - header = [item.strip("* ") for item in line.split('|')[1:-1]] + header = [item.strip('* ') for item in line.split('|')[1:-1]] else: values = [item.strip() for item in line.split('|')[1:-1]] - if any(any(c != "-" for c in item) for item in values): - table.append(dict(zip(header, values))) + if any(any(c != '-' for c in item) for item in values): + table.append(dict(zip(header, values, strict=False))) elif header: # we have already seen a table, we're past the end of that table break @@ -108,10 +109,11 @@ def loads_contributors(s): """Reformat contributors read from the Markdown table.""" return [ { - "affiliation": contributor["Affiliation"], - "name": "{}, {}".format(contributor["Lastname"], contributor["Name"]), - "orcid": contributor["ORCID"], - } for contributor in loads_table_from_markdown(s) + 'affiliation': contributor['Affiliation'], + 'name': '{}, {}'.format(contributor['Lastname'], contributor['Name']), + 'orcid': contributor['ORCID'], + } + for contributor in loads_table_from_markdown(s) ] @@ -123,20 +125,26 @@ def loads_contributors(s): creators = json.loads(Path('.maint/developers.json').read_text()) zen_creators, miss_creators = sort_contributors( - creators, data, + creators, + data, exclude=json.loads(Path('.maint/former.json').read_text()), - last=CREATORS_LAST) + last=CREATORS_LAST, + ) contributors = loads_contributors(Path('.maint/CONTRIBUTORS.md').read_text()) zen_contributors, miss_contributors = sort_contributors( - contributors, data, + contributors, + data, exclude=json.loads(Path('.maint/former.json').read_text()), - last=CONTRIBUTORS_LAST) + last=CONTRIBUTORS_LAST, + ) zenodo['creators'] = zen_creators zenodo['contributors'] = zen_contributors - print("Some people made commits, but are missing in .maint/ " - "files: %s." % ', '.join(set(miss_creators).intersection(miss_contributors)), - file=sys.stderr) + print( + 'Some people made commits, but are missing in .maint/ ' + 'files: %s.' % ', '.join(set(miss_creators).intersection(miss_contributors)), + file=sys.stderr, + ) # Remove position for creator in zenodo['creators']: diff --git a/docs/conf.py b/docs/conf.py index 35915dbf1..264ae6305 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,47 +19,47 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath("sphinxext")) -sys.path.insert(0, os.path.abspath("../wrapper")) +sys.path.append(os.path.abspath('sphinxext')) +sys.path.insert(0, os.path.abspath('../wrapper')) from github_link import make_linkcode_resolve # noqa: E402 # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.5.3" +needs_sphinx = '1.5.3' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named "sphinx.ext.*") or your custom # ones. 
extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.doctest", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.mathjax", - "sphinx.ext.linkcode", - "sphinx.ext.napoleon", - "sphinxarg.ext", # argparse extension - "nipype.sphinxext.plot_workflow", + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.linkcode', + 'sphinx.ext.napoleon', + 'sphinxarg.ext', # argparse extension + 'nipype.sphinxext.plot_workflow', ] # Mock modules in autodoc: autodoc_mock_imports = [ - "numpy", - "nitime", - "matplotlib", + 'numpy', + 'nitime', + 'matplotlib', ] -if pver.parse(sphinxversion) >= pver.parse("1.7.0"): +if pver.parse(sphinxversion) >= pver.parse('1.7.0'): autodoc_mock_imports += [ - "pandas", - "nilearn", - "seaborn", + 'pandas', + 'nilearn', + 'seaborn', ] # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] +templates_path = ['_templates'] # Accept custom section names to be parsed for numpy-style docstrings # of parameters. @@ -67,34 +67,34 @@ # https://github.com/sphinx-contrib/napoleon/pull/10 is merged. napoleon_use_param = False napoleon_custom_sections = [ - ("Inputs", "Parameters"), - ("Outputs", "Parameters"), + ('Inputs', 'Parameters'), + ('Outputs', 'Parameters'), ] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = [".rst", ".md"] -source_suffix = ".rst" +source_suffix = '.rst' # The encoding of source files. # source_encoding = "utf-8-sig" # The master toctree document. -master_doc = "index" +master_doc = 'index' # General information about the project. -project = "fmriprep" -author = "The fMRIPrep developers" -copyright = f"2016-{datetime.now().year}, {author}" +project = 'fmriprep' +author = 'The fMRIPrep developers' +copyright = f'2016-{datetime.now().year}, {author}' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = "version" +version = 'version' # The full version, including alpha/beta/rc tags. -release = "version" +release = 'version' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -112,7 +112,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -130,7 +130,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" +pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -146,7 +146,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "sphinx_rtd_theme" +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -175,7 +175,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -240,20 +240,17 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = "fmriprepdoc" +htmlhelp_basename = 'fmriprepdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # 'preamble': '', - # Latex figure (float) alignment # 'figure_align': 'htbp', } @@ -262,9 +259,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, "fmriprep.tex", "fMRIprep Documentation", - author, - "manual"), + (master_doc, 'fmriprep.tex', 'fMRIprep Documentation', author, 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -292,10 +287,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, "fmriprep", "fmriprep Documentation", - [author], 1) -] +man_pages = [(master_doc, 'fmriprep', 'fmriprep Documentation', [author], 1)] # If true, show URL addresses after external links. # man_show_urls = False @@ -307,9 +299,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, "fmriprep", "fMRIprep Documentation", - author, "fmriprep", "One line description of project.", - "Miscellaneous"), + ( + master_doc, + 'fmriprep', + 'fMRIprep Documentation', + author, + 'fmriprep', + 'One line description of project.', + 'Miscellaneous', + ), ] # Documents to append as an appendix to all manuals. @@ -325,31 +323,31 @@ # texinfo_no_detailmenu = False # The following is used by sphinx.ext.linkcode to provide links to github -linkcode_resolve = make_linkcode_resolve("fmriprep", - "https://github.com/nipreps/" - "fmriprep/blob/{revision}/" - "{package}/{path}#L{lineno}") +linkcode_resolve = make_linkcode_resolve( + 'fmriprep', + 'https://github.com/nipreps/' 'fmriprep/blob/{revision}/' '{package}/{path}#L{lineno}', +) # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("https://docs.python.org/3/", None), - "numpy": ("https://numpy.org/doc/stable/", None), - "scipy": ("https://docs.scipy.org/doc/scipy/", None), - "matplotlib": ("https://matplotlib.org/stable/", None), - "bids": ("https://bids-standard.github.io/pybids/", None), - "nibabel": ("https://nipy.org/nibabel/", None), - "nipype": ("https://nipype.readthedocs.io/en/latest/", None), - "niworkflows": ("https://www.nipreps.org/niworkflows/", None), - "sdcflows": ("https://www.nipreps.org/sdcflows/", None), - "smriprep": ("https://www.nipreps.org/smriprep/", None), - "templateflow": ("https://www.templateflow.org/python-client", None), - "tedana": ("https://tedana.readthedocs.io/en/latest/", None), + 'python': ('https://docs.python.org/3/', None), + 'numpy': ('https://numpy.org/doc/stable/', None), + 'scipy': ('https://docs.scipy.org/doc/scipy/', None), + 'matplotlib': ('https://matplotlib.org/stable/', None), + 'bids': ('https://bids-standard.github.io/pybids/', None), + 'nibabel': ('https://nipy.org/nibabel/', None), + 'nipype': ('https://nipype.readthedocs.io/en/latest/', None), + 'niworkflows': ('https://www.nipreps.org/niworkflows/', None), + 'sdcflows': ('https://www.nipreps.org/sdcflows/', None), + 'smriprep': ('https://www.nipreps.org/smriprep/', None), + 'templateflow': ('https://www.templateflow.org/python-client', None), + 'tedana': ('https://tedana.readthedocs.io/en/latest/', None), } -suppress_warnings = ["image.nonlocal_uri"] +suppress_warnings = ['image.nonlocal_uri'] def setup(app): - app.add_css_file("theme_overrides.css") + app.add_css_file('theme_overrides.css') # We need this for the boilerplate script - app.add_js_file("https://cdn.rawgit.com/chrisfilo/zenodo.js/v0.1/zenodo.js") + app.add_js_file('https://cdn.rawgit.com/chrisfilo/zenodo.js/v0.1/zenodo.js') diff --git a/docs/sphinxext/github_link.py b/docs/sphinxext/github_link.py index 54b5c2f8d..2fe41b66b 100644 --- a/docs/sphinxext/github_link.py +++ b/docs/sphinxext/github_link.py @@ -9,16 +9,16 @@ from functools import partial from operator import attrgetter -REVISION_CMD = "git rev-parse --short HEAD" +REVISION_CMD = 'git rev-parse --short HEAD' def _get_git_revision(): try: revision = subprocess.check_output(REVISION_CMD.split()).strip() except (subprocess.CalledProcessError, OSError): - print("Failed to execute git to get revision") + print('Failed to execute git to get revision') return None - return revision.decode("utf-8") + return revision.decode('utf-8') def _linkcode_resolve(domain, info, package, url_fmt, revision): @@ -38,14 +38,14 @@ def _linkcode_resolve(domain, info, package, url_fmt, revision): if revision is None: return - if domain not in ("py", "pyx"): + if domain not in ('py', 'pyx'): return - if not info.get("module") or not info.get("fullname"): + if not info.get('module') or not info.get('fullname'): return - class_name = info["fullname"].split(".")[0] - module = __import__(info["module"], fromlist=[class_name]) - obj = attrgetter(info["fullname"])(module) + class_name = info['fullname'].split('.')[0] + module = __import__(info['module'], fromlist=[class_name]) + obj = attrgetter(info['fullname'])(module) # Unwrap the object to get the correct source # file in case that is wrapped by a decorator @@ -67,7 +67,7 @@ def _linkcode_resolve(domain, info, package, url_fmt, revision): try: lineno = inspect.getsourcelines(obj)[1] except Exception: - lineno = "" + lineno = '' return url_fmt.format(revision=revision, package=package, path=fn, lineno=lineno) @@ -83,6 +83,4 @@ def 
make_linkcode_resolve(package, url_fmt): '{path}#L{lineno}') """ revision = _get_git_revision() - return partial( - _linkcode_resolve, revision=revision, package=package, url_fmt=url_fmt - ) + return partial(_linkcode_resolve, revision=revision, package=package, url_fmt=url_fmt) diff --git a/fmriprep/__about__.py b/fmriprep/__about__.py index de60eeb90..a8ce6b4f4 100644 --- a/fmriprep/__about__.py +++ b/fmriprep/__about__.py @@ -25,7 +25,7 @@ try: from ._version import __version__ except ImportError: - __version__ = "0+unknown" + __version__ = '0+unknown' __packagename__ = 'fmriprep' __copyright__ = 'Copyright 2023, The NiPreps Developers' @@ -35,6 +35,4 @@ ) __url__ = 'https://github.com/nipreps/fmriprep' -DOWNLOAD_URL = 'https://github.com/nipreps/{name}/archive/{ver}.tar.gz'.format( - name=__packagename__, ver=__version__ -) +DOWNLOAD_URL = f'https://github.com/nipreps/{__packagename__}/archive/{__version__}.tar.gz' diff --git a/fmriprep/_warnings.py b/fmriprep/_warnings.py index eb36378f1..70a32cc7e 100644 --- a/fmriprep/_warnings.py +++ b/fmriprep/_warnings.py @@ -24,7 +24,7 @@ import logging import warnings -_wlog = logging.getLogger("py.warnings") +_wlog = logging.getLogger('py.warnings') _wlog.addHandler(logging.NullHandler()) @@ -32,9 +32,9 @@ def _warn(message, category=None, stacklevel=1, source=None): """Redefine the warning function.""" if category is not None: category = type(category).__name__ - category = category.replace("type", "WARNING") + category = category.replace('type', 'WARNING') - logging.getLogger("py.warnings").warning(f"{category or 'WARNING'}: {message}") + logging.getLogger('py.warnings').warning(f"{category or 'WARNING'}: {message}") def _showwarning(message, category, filename, lineno, file=None, line=None): diff --git a/fmriprep/cli/parser.py b/fmriprep/cli/parser.py index 5da912240..920b37833 100644 --- a/fmriprep/cli/parser.py +++ b/fmriprep/cli/parser.py @@ -43,14 +43,14 @@ def _build_parser(**kwargs): def _path_exists(path, parser): """Ensure a given path exists.""" if path is None or not Path(path).exists(): - raise parser.error(f"Path does not exist: <{path}>.") + raise parser.error(f'Path does not exist: <{path}>.') return Path(path).absolute() def _is_file(path, parser): """Ensure a given path exists and it is a file.""" path = _path_exists(path, parser) if not path.is_file(): - raise parser.error(f"Path should point to a file (or symlink of file): <{path}>.") + raise parser.error(f'Path should point to a file (or symlink of file): <{path}>.') return path def _min_one(value, parser): @@ -61,19 +61,19 @@ def _min_one(value, parser): return value def _to_gb(value): - scale = {"G": 1, "T": 10**3, "M": 1e-3, "K": 1e-6, "B": 1e-9} - digits = "".join([c for c in value if c.isdigit()]) - units = value[len(digits) :] or "M" + scale = {'G': 1, 'T': 10**3, 'M': 1e-3, 'K': 1e-6, 'B': 1e-9} + digits = ''.join([c for c in value if c.isdigit()]) + units = value[len(digits) :] or 'M' return int(digits) * scale[units[0]] def _drop_sub(value): - return value[4:] if value.startswith("sub-") else value + return value[4:] if value.startswith('sub-') else value def _filter_pybids_none_any(dct): import bids return { - k: bids.layout.Query.NONE if v is None else (bids.layout.Query.ANY if v == "*" else v) + k: bids.layout.Query.NONE if v is None else (bids.layout.Query.ANY if v == '*' else v) for k, v in dct.items() } @@ -85,33 +85,31 @@ def _bids_filter(value, parser): try: return loads(Path(value).read_text(), object_hook=_filter_pybids_none_any) except JSONDecodeError: 
- raise parser.error(f"JSON syntax error in: <{value}>.") + raise parser.error(f'JSON syntax error in: <{value}>.') else: - raise parser.error(f"Path does not exist: <{value}>.") + raise parser.error(f'Path does not exist: <{value}>.') def _slice_time_ref(value, parser): - if value == "start": + if value == 'start': value = 0 - elif value == "middle": + elif value == 'middle': value = 0.5 try: value = float(value) except ValueError: raise parser.error( - "Slice time reference must be number, 'start', or 'middle'. " f"Received {value}." + "Slice time reference must be number, 'start', or 'middle'. " f'Received {value}.' ) if not 0 <= value <= 1: - raise parser.error(f"Slice time reference must be in range 0-1. Received {value}.") + raise parser.error(f'Slice time reference must be in range 0-1. Received {value}.') return value - verstr = f"fMRIPrep v{config.environment.version}" + verstr = f'fMRIPrep v{config.environment.version}' currentv = Version(config.environment.version) is_release = not any((currentv.is_devrelease, currentv.is_prerelease, currentv.is_postrelease)) parser = ArgumentParser( - description="fMRIPrep: fMRI PREProcessing workflows v{}".format( - config.environment.version - ), + description=f'fMRIPrep: fMRI PREProcessing workflows v{config.environment.version}', formatter_class=ArgumentDefaultsHelpFormatter, **kwargs, ) @@ -125,41 +123,41 @@ def _slice_time_ref(value, parser): # required, positional arguments # IMPORTANT: they must go directly with the parser object parser.add_argument( - "bids_dir", - action="store", + 'bids_dir', + action='store', type=PathExists, - help="The root folder of a BIDS valid dataset (sub-XXXXX folders should " - "be found at the top level in this folder).", + help='The root folder of a BIDS valid dataset (sub-XXXXX folders should ' + 'be found at the top level in this folder).', ) parser.add_argument( - "output_dir", - action="store", + 'output_dir', + action='store', type=Path, - help="The output path for the outcomes of preprocessing and visual reports", + help='The output path for the outcomes of preprocessing and visual reports', ) parser.add_argument( - "analysis_level", - choices=["participant"], + 'analysis_level', + choices=['participant'], help='Processing stage to be run, only "participant" in the case of ' - "fMRIPrep (see BIDS-Apps specification).", + 'fMRIPrep (see BIDS-Apps specification).', ) - g_bids = parser.add_argument_group("Options for filtering BIDS queries") + g_bids = parser.add_argument_group('Options for filtering BIDS queries') g_bids.add_argument( - "--skip_bids_validation", - "--skip-bids-validation", - action="store_true", + '--skip_bids_validation', + '--skip-bids-validation', + action='store_true', default=False, - help="Assume the input dataset is BIDS compliant and skip the validation", + help='Assume the input dataset is BIDS compliant and skip the validation', ) g_bids.add_argument( - "--participant-label", - "--participant_label", - action="store", - nargs="+", + '--participant-label', + '--participant_label', + action='store', + nargs='+', type=_drop_sub, - help="A space delimited list of participant identifiers or a single " - "identifier (the sub- prefix can be removed)", + help='A space delimited list of participant identifiers or a single ' + 'identifier (the sub- prefix can be removed)', ) # Re-enable when option is actually implemented # g_bids.add_argument('-s', '--session-id', action='store', default='single_session', @@ -168,130 +166,130 @@ def _slice_time_ref(value, parser): # 
g_bids.add_argument('-r', '--run-id', action='store', default='single_run', # help='Select a specific run to be processed') g_bids.add_argument( - "-t", "--task-id", action="store", help="Select a specific task to be processed" + '-t', '--task-id', action='store', help='Select a specific task to be processed' ) g_bids.add_argument( - "--echo-idx", - action="store", + '--echo-idx', + action='store', type=int, - help="Select a specific echo to be processed in a multiecho series", + help='Select a specific echo to be processed in a multiecho series', ) g_bids.add_argument( - "--bids-filter-file", - dest="bids_filters", - action="store", + '--bids-filter-file', + dest='bids_filters', + action='store', type=BIDSFilter, - metavar="FILE", - help="A JSON file describing custom BIDS input filters using PyBIDS. " - "For further details, please check out " - "https://fmriprep.readthedocs.io/en/%s/faq.html#" - "how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep" - % (currentv.base_version if is_release else "latest"), + metavar='FILE', + help='A JSON file describing custom BIDS input filters using PyBIDS. ' + 'For further details, please check out ' + 'https://fmriprep.readthedocs.io/en/%s/faq.html#' + 'how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep' + % (currentv.base_version if is_release else 'latest'), ) g_bids.add_argument( - "-d", - "--derivatives", - action="store", - metavar="PATH", + '-d', + '--derivatives', + action='store', + metavar='PATH', type=Path, - nargs="*", - help="Search PATH(s) for pre-computed derivatives.", + nargs='*', + help='Search PATH(s) for pre-computed derivatives.', ) g_bids.add_argument( - "--bids-database-dir", - metavar="PATH", + '--bids-database-dir', + metavar='PATH', type=Path, - help="Path to a PyBIDS database folder, for faster indexing (especially " - "useful for large datasets). Will be created if not present.", + help='Path to a PyBIDS database folder, for faster indexing (especially ' + 'useful for large datasets). 
Will be created if not present.', ) - g_perfm = parser.add_argument_group("Options to handle performance") + g_perfm = parser.add_argument_group('Options to handle performance') g_perfm.add_argument( - "--nprocs", - "--nthreads", - "--n_cpus", - "--n-cpus", + '--nprocs', + '--nthreads', + '--n_cpus', + '--n-cpus', dest='nprocs', - action="store", + action='store', type=PositiveInt, - help="Maximum number of threads across all processes", + help='Maximum number of threads across all processes', ) g_perfm.add_argument( - "--omp-nthreads", - action="store", + '--omp-nthreads', + action='store', type=PositiveInt, - help="Maximum number of threads per-process", + help='Maximum number of threads per-process', ) g_perfm.add_argument( - "--mem", - "--mem_mb", - "--mem-mb", - dest="memory_gb", - action="store", + '--mem', + '--mem_mb', + '--mem-mb', + dest='memory_gb', + action='store', type=_to_gb, - metavar="MEMORY_MB", - help="Upper bound memory limit for fMRIPrep processes", + metavar='MEMORY_MB', + help='Upper bound memory limit for fMRIPrep processes', ) g_perfm.add_argument( - "--low-mem", - action="store_true", - help="Attempt to reduce memory usage (will increase disk usage in working directory)", + '--low-mem', + action='store_true', + help='Attempt to reduce memory usage (will increase disk usage in working directory)', ) g_perfm.add_argument( - "--use-plugin", - "--nipype-plugin-file", - action="store", - metavar="FILE", + '--use-plugin', + '--nipype-plugin-file', + action='store', + metavar='FILE', type=IsFile, - help="Nipype plugin configuration file", + help='Nipype plugin configuration file', ) g_perfm.add_argument( - "--sloppy", - action="store_true", + '--sloppy', + action='store_true', default=False, - help="Use low-quality tools for speed - TESTING ONLY", + help='Use low-quality tools for speed - TESTING ONLY', ) - g_subset = parser.add_argument_group("Options for performing only a subset of the workflow") - g_subset.add_argument("--anat-only", action="store_true", help="Run anatomical workflows only") + g_subset = parser.add_argument_group('Options for performing only a subset of the workflow') + g_subset.add_argument('--anat-only', action='store_true', help='Run anatomical workflows only') g_subset.add_argument( - "--level", - action="store", - default="full", - choices=["minimal", "resampling", "full"], + '--level', + action='store', + default='full', + choices=['minimal', 'resampling', 'full'], help="Processing level; may be 'minimal' (nothing that can be recomputed), " "'resampling' (recomputable targets that aid in resampling) " "or 'full' (all target outputs).", ) g_subset.add_argument( - "--boilerplate-only", - "--boilerplate_only", - action="store_true", + '--boilerplate-only', + '--boilerplate_only', + action='store_true', default=False, - help="Generate boilerplate only", + help='Generate boilerplate only', ) g_subset.add_argument( - "--reports-only", - action="store_true", + '--reports-only', + action='store_true', default=False, help="Only generate reports, don't run workflows. 
This will only rerun report " - "aggregation, not reportlet generation for specific nodes.", + 'aggregation, not reportlet generation for specific nodes.', ) - g_conf = parser.add_argument_group("Workflow configuration") + g_conf = parser.add_argument_group('Workflow configuration') g_conf.add_argument( - "--ignore", + '--ignore', required=False, - action="store", - nargs="+", + action='store', + nargs='+', default=[], - choices=["fieldmaps", "slicetiming", "sbref", "t2w", "flair", "fmap-jacobian"], - help="Ignore selected aspects of the input dataset to disable corresponding " - "parts of the workflow (a space delimited list)", + choices=['fieldmaps', 'slicetiming', 'sbref', 't2w', 'flair', 'fmap-jacobian'], + help='Ignore selected aspects of the input dataset to disable corresponding ' + 'parts of the workflow (a space delimited list)', ) g_conf.add_argument( - "--output-spaces", - nargs="*", + '--output-spaces', + nargs='*', action=OutputReferencesAction, help="""\ Standard and non-standard spaces to resample anatomical and functional images to. \ @@ -304,366 +302,366 @@ def _slice_time_ref(value, parser): the spatial normalization. To generate no BOLD outputs, use this option without specifying \ any spatial references. For further details, please check out \ https://fmriprep.readthedocs.io/en/%s/spaces.html""" - % (currentv.base_version if is_release else "latest"), + % (currentv.base_version if is_release else 'latest'), ) g_conf.add_argument( - "--longitudinal", - action="store_true", - help="Treat dataset as longitudinal - may increase runtime", + '--longitudinal', + action='store_true', + help='Treat dataset as longitudinal - may increase runtime', ) g_conf.add_argument( - "--bold2t1w-init", - action="store", - default="register", - choices=["register", "header"], + '--bold2t1w-init', + action='store', + default='register', + choices=['register', 'header'], help='Either "register" (the default) to initialize volumes at center or "header"' - " to use the header information when coregistering BOLD to T1w images.", + ' to use the header information when coregistering BOLD to T1w images.', ) g_conf.add_argument( - "--bold2t1w-dof", - action="store", + '--bold2t1w-dof', + action='store', default=6, choices=[6, 9, 12], type=int, - help="Degrees of freedom when registering BOLD to T1w images. " - "6 degrees (rotation and translation) are used by default.", + help='Degrees of freedom when registering BOLD to T1w images. ' + '6 degrees (rotation and translation) are used by default.', ) g_conf.add_argument( - "--force-bbr", - action="store_true", - dest="use_bbr", + '--force-bbr', + action='store_true', + dest='use_bbr', default=None, - help="Always use boundary-based registration (no goodness-of-fit checks)", + help='Always use boundary-based registration (no goodness-of-fit checks)', ) g_conf.add_argument( - "--force-no-bbr", - action="store_false", - dest="use_bbr", + '--force-no-bbr', + action='store_false', + dest='use_bbr', default=None, - help="Do not use boundary-based registration (no goodness-of-fit checks)", + help='Do not use boundary-based registration (no goodness-of-fit checks)', ) g_conf.add_argument( - "--slice-time-ref", + '--slice-time-ref', required=False, - action="store", + action='store', default=None, type=SliceTimeRef, - help="The time of the reference slice to correct BOLD values to, as a fraction " - "acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end " - "of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. 
" - "The default value is 0.5.", + help='The time of the reference slice to correct BOLD values to, as a fraction ' + 'acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end ' + 'of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. ' + 'The default value is 0.5.', ) g_conf.add_argument( - "--dummy-scans", + '--dummy-scans', required=False, - action="store", + action='store', default=None, type=int, - help="Number of nonsteady-state volumes. Overrides automatic detection.", + help='Number of nonsteady-state volumes. Overrides automatic detection.', ) g_conf.add_argument( - "--random-seed", - dest="_random_seed", - action="store", + '--random-seed', + dest='_random_seed', + action='store', type=int, default=None, - help="Initialize the random seed for the workflow", + help='Initialize the random seed for the workflow', ) g_conf.add_argument( - "--me-t2s-fit-method", - action="store", - default="curvefit", - choices=["curvefit", "loglin"], + '--me-t2s-fit-method', + action='store', + default='curvefit', + choices=['curvefit', 'loglin'], help=( - "The method by which to estimate T2* and S0 for multi-echo data. " + 'The method by which to estimate T2* and S0 for multi-echo data. ' "'curvefit' uses nonlinear regression. " "It is more memory intensive, but also may be more accurate, than 'loglin'. " "'loglin' uses log-linear regression. " - "It is faster and less memory intensive, but may be less accurate." + 'It is faster and less memory intensive, but may be less accurate.' ), ) - g_outputs = parser.add_argument_group("Options for modulating outputs") + g_outputs = parser.add_argument_group('Options for modulating outputs') g_outputs.add_argument( - "--output-layout", - action="store", - default="bids", - choices=("bids", "legacy"), - help="Organization of outputs. \"bids\" (default) places fMRIPrep derivatives " - "directly in the output directory, and defaults to placing FreeSurfer " - "derivatives in /sourcedata/freesurfer. \"legacy\" creates " - "derivative datasets as subdirectories of outputs.", + '--output-layout', + action='store', + default='bids', + choices=('bids', 'legacy'), + help='Organization of outputs. "bids" (default) places fMRIPrep derivatives ' + 'directly in the output directory, and defaults to placing FreeSurfer ' + 'derivatives in /sourcedata/freesurfer. "legacy" creates ' + 'derivative datasets as subdirectories of outputs.', ) g_outputs.add_argument( - "--me-output-echos", - action="store_true", + '--me-output-echos', + action='store_true', default=False, - help="Output individual echo time series with slice, motion and susceptibility " - "correction. Useful for further Tedana processing post-fMRIPrep.", + help='Output individual echo time series with slice, motion and susceptibility ' + 'correction. Useful for further Tedana processing post-fMRIPrep.', ) g_outputs.add_argument( - "--medial-surface-nan", + '--medial-surface-nan', required=False, - action="store_true", + action='store_true', default=False, - help="Replace medial wall values with NaNs on functional GIFTI files. Only " - "performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).", + help='Replace medial wall values with NaNs on functional GIFTI files. 
Only ' + 'performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).', ) g_conf.add_argument( - "--project-goodvoxels", + '--project-goodvoxels', required=False, - action="store_true", + action='store_true', default=False, - help="Exclude voxels whose timeseries have locally high coefficient of variation " - "from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject " - "(fsaverage or fsnative).", + help='Exclude voxels whose timeseries have locally high coefficient of variation ' + 'from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject ' + '(fsaverage or fsnative).', ) g_outputs.add_argument( - "--md-only-boilerplate", - action="store_true", + '--md-only-boilerplate', + action='store_true', default=False, - help="Skip generation of HTML and LaTeX formatted citation with pandoc", + help='Skip generation of HTML and LaTeX formatted citation with pandoc', ) g_outputs.add_argument( - "--cifti-output", - nargs="?", - const="91k", + '--cifti-output', + nargs='?', + const='91k', default=False, - choices=("91k", "170k"), + choices=('91k', '170k'), type=str, - help="Output preprocessed BOLD as a CIFTI dense timeseries. " - "Optionally, the number of grayordinate can be specified " - "(default is 91k, which equates to 2mm resolution)", + help='Output preprocessed BOLD as a CIFTI dense timeseries. ' + 'Optionally, the number of grayordinate can be specified ' + '(default is 91k, which equates to 2mm resolution)', ) g_outputs.add_argument( - "--no-msm", - action="store_false", - dest="run_msmsulc", - help="Disable Multimodal Surface Matching surface registration.", + '--no-msm', + action='store_false', + dest='run_msmsulc', + help='Disable Multimodal Surface Matching surface registration.', ) - g_aroma = parser.add_argument_group("[DEPRECATED] Options for running ICA_AROMA") + g_aroma = parser.add_argument_group('[DEPRECATED] Options for running ICA_AROMA') g_aroma.add_argument( - "--use-aroma", - action="store_true", + '--use-aroma', + action='store_true', default=False, - help="Deprecated. Will raise an error in 24.0.", + help='Deprecated. Will raise an error in 24.0.', ) g_aroma.add_argument( - "--aroma-melodic-dimensionality", - dest="aroma_melodic_dim", - action="store", + '--aroma-melodic-dimensionality', + dest='aroma_melodic_dim', + action='store', default=0, type=int, - help="Deprecated. Will raise an error in 24.0.", + help='Deprecated. Will raise an error in 24.0.', ) g_aroma.add_argument( - "--error-on-aroma-warnings", - action="store_true", - dest="aroma_err_on_warn", + '--error-on-aroma-warnings', + action='store_true', + dest='aroma_err_on_warn', default=False, - help="Deprecated. Will raise an error in 24.0.", + help='Deprecated. 
Will raise an error in 24.0.', ) - g_confounds = parser.add_argument_group("Options relating to confounds") + g_confounds = parser.add_argument_group('Options relating to confounds') g_confounds.add_argument( - "--return-all-components", - dest="regressors_all_comps", + '--return-all-components', + dest='regressors_all_comps', required=False, - action="store_true", + action='store_true', default=False, - help="Include all components estimated in CompCor decomposition in the confounds " - "file instead of only the components sufficient to explain 50 percent of " - "BOLD variance in each CompCor mask", + help='Include all components estimated in CompCor decomposition in the confounds ' + 'file instead of only the components sufficient to explain 50 percent of ' + 'BOLD variance in each CompCor mask', ) g_confounds.add_argument( - "--fd-spike-threshold", - dest="regressors_fd_th", + '--fd-spike-threshold', + dest='regressors_fd_th', required=False, - action="store", + action='store', default=0.5, type=float, - help="Threshold for flagging a frame as an outlier on the basis of framewise " - "displacement", + help='Threshold for flagging a frame as an outlier on the basis of framewise ' + 'displacement', ) g_confounds.add_argument( - "--dvars-spike-threshold", - dest="regressors_dvars_th", + '--dvars-spike-threshold', + dest='regressors_dvars_th', required=False, - action="store", + action='store', default=1.5, type=float, - help="Threshold for flagging a frame as an outlier on the basis of standardised DVARS", + help='Threshold for flagging a frame as an outlier on the basis of standardised DVARS', ) # ANTs options - g_ants = parser.add_argument_group("Specific options for ANTs registrations") + g_ants = parser.add_argument_group('Specific options for ANTs registrations') g_ants.add_argument( - "--skull-strip-template", - default="OASIS30ANTs", + '--skull-strip-template', + default='OASIS30ANTs', type=Reference.from_string, - help="Select a template for skull-stripping with antsBrainExtraction " - "(OASIS30ANTs, by default)", + help='Select a template for skull-stripping with antsBrainExtraction ' + '(OASIS30ANTs, by default)', ) g_ants.add_argument( - "--skull-strip-fixed-seed", - action="store_true", - help="Do not use a random seed for skull-stripping - will ensure " - "run-to-run replicability when used with --omp-nthreads 1 and " - "matching --random-seed ", + '--skull-strip-fixed-seed', + action='store_true', + help='Do not use a random seed for skull-stripping - will ensure ' + 'run-to-run replicability when used with --omp-nthreads 1 and ' + 'matching --random-seed ', ) g_ants.add_argument( - "--skull-strip-t1w", - action="store", - choices=("auto", "skip", "force"), - default="force", + '--skull-strip-t1w', + action='store', + choices=('auto', 'skip', 'force'), + default='force', help="Perform T1-weighted skull stripping ('force' ensures skull " "stripping, 'skip' ignores skull stripping, and 'auto' applies brain extraction " - "based on the outcome of a heuristic to check whether the brain is already masked).", + 'based on the outcome of a heuristic to check whether the brain is already masked).', ) # Fieldmap options - g_fmap = parser.add_argument_group("Specific options for handling fieldmaps") + g_fmap = parser.add_argument_group('Specific options for handling fieldmaps') g_fmap.add_argument( - "--fmap-bspline", - action="store_true", + '--fmap-bspline', + action='store_true', default=False, - help="Fit a B-Spline field using least-squares (experimental)", + help='Fit a B-Spline 
field using least-squares (experimental)', ) g_fmap.add_argument( - "--fmap-no-demean", - action="store_false", + '--fmap-no-demean', + action='store_false', default=True, - help="Do not remove median (within mask) from fieldmap", + help='Do not remove median (within mask) from fieldmap', ) # SyN-unwarp options - g_syn = parser.add_argument_group("Specific options for SyN distortion correction") + g_syn = parser.add_argument_group('Specific options for SyN distortion correction') g_syn.add_argument( - "--use-syn-sdc", - nargs="?", - choices=["warn", "error"], - action="store", - const="error", + '--use-syn-sdc', + nargs='?', + choices=['warn', 'error'], + action='store', + const='error', default=False, - help="Use fieldmap-less distortion correction based on anatomical image; " - "if unable, error (default) or warn based on optional argument.", + help='Use fieldmap-less distortion correction based on anatomical image; ' + 'if unable, error (default) or warn based on optional argument.', ) g_syn.add_argument( - "--force-syn", - action="store_true", + '--force-syn', + action='store_true', default=False, - help="EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to " - "fieldmap correction, if available", + help='EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to ' + 'fieldmap correction, if available', ) # FreeSurfer options - g_fs = parser.add_argument_group("Specific options for FreeSurfer preprocessing") + g_fs = parser.add_argument_group('Specific options for FreeSurfer preprocessing') g_fs.add_argument( - "--fs-license-file", - metavar="FILE", + '--fs-license-file', + metavar='FILE', type=IsFile, - help="Path to FreeSurfer license key file. Get it (for free) by registering" - " at https://surfer.nmr.mgh.harvard.edu/registration.html", + help='Path to FreeSurfer license key file. Get it (for free) by registering' + ' at https://surfer.nmr.mgh.harvard.edu/registration.html', ) g_fs.add_argument( - "--fs-subjects-dir", - metavar="PATH", + '--fs-subjects-dir', + metavar='PATH', type=Path, - help="Path to existing FreeSurfer subjects directory to reuse. " - "(default: OUTPUT_DIR/freesurfer)", + help='Path to existing FreeSurfer subjects directory to reuse. 
' + '(default: OUTPUT_DIR/freesurfer)', ) g_fs.add_argument( - "--no-submm-recon", - action="store_false", - dest="hires", - help="Disable sub-millimeter (hires) reconstruction", + '--no-submm-recon', + action='store_false', + dest='hires', + help='Disable sub-millimeter (hires) reconstruction', ) g_fs.add_argument( - "--fs-no-reconall", - action="store_false", - dest="run_reconall", - help="Disable FreeSurfer surface preprocessing.", + '--fs-no-reconall', + action='store_false', + dest='run_reconall', + help='Disable FreeSurfer surface preprocessing.', ) - g_carbon = parser.add_argument_group("Options for carbon usage tracking") + g_carbon = parser.add_argument_group('Options for carbon usage tracking') g_carbon.add_argument( - "--track-carbon", - action="store_true", - help="Tracks power draws using CodeCarbon package", + '--track-carbon', + action='store_true', + help='Tracks power draws using CodeCarbon package', ) g_carbon.add_argument( - "--country-code", - action="store", - default="CAN", + '--country-code', + action='store', + default='CAN', type=str, - help="Country ISO code used by carbon trackers", + help='Country ISO code used by carbon trackers', ) - g_other = parser.add_argument_group("Other options") - g_other.add_argument("--version", action="version", version=verstr) + g_other = parser.add_argument_group('Other options') + g_other.add_argument('--version', action='version', version=verstr) g_other.add_argument( - "-v", - "--verbose", - dest="verbose_count", - action="count", + '-v', + '--verbose', + dest='verbose_count', + action='count', default=0, - help="Increases log verbosity for each occurrence, debug level is -vvv", + help='Increases log verbosity for each occurrence, debug level is -vvv', ) g_other.add_argument( - "-w", - "--work-dir", - action="store", + '-w', + '--work-dir', + action='store', type=Path, - default=Path("work").absolute(), - help="Path where intermediate results should be stored", + default=Path('work').absolute(), + help='Path where intermediate results should be stored', ) g_other.add_argument( - "--clean-workdir", - action="store_true", + '--clean-workdir', + action='store_true', default=False, - help="Clears working directory of contents. Use of this flag is not " - "recommended when running concurrent processes of fMRIPrep.", + help='Clears working directory of contents. Use of this flag is not ' + 'recommended when running concurrent processes of fMRIPrep.', ) g_other.add_argument( - "--resource-monitor", - action="store_true", + '--resource-monitor', + action='store_true', default=False, help="Enable Nipype's resource monitoring to keep track of memory and CPU usage", ) g_other.add_argument( - "--config-file", - action="store", - metavar="FILE", - help="Use pre-generated configuration file. Values in file will be overridden " - "by command-line arguments.", + '--config-file', + action='store', + metavar='FILE', + help='Use pre-generated configuration file. 
Values in file will be overridden ' + 'by command-line arguments.', ) g_other.add_argument( - "--write-graph", - action="store_true", + '--write-graph', + action='store_true', default=False, - help="Write workflow graph.", + help='Write workflow graph.', ) g_other.add_argument( - "--stop-on-first-crash", - action="store_true", + '--stop-on-first-crash', + action='store_true', default=False, - help="Force stopping on first crash, even if a work directory was specified.", + help='Force stopping on first crash, even if a work directory was specified.', ) g_other.add_argument( - "--notrack", - action="store_true", + '--notrack', + action='store_true', default=False, - help="Opt-out of sending tracking information of this run to " - "the FMRIPREP developers. This information helps to " - "improve FMRIPREP and provides an indicator of real " - "world usage crucial for obtaining funding.", + help='Opt-out of sending tracking information of this run to ' + 'the FMRIPREP developers. This information helps to ' + 'improve FMRIPREP and provides an indicator of real ' + 'world usage crucial for obtaining funding.', ) g_other.add_argument( - "--debug", - action="store", - nargs="+", - choices=config.DEBUG_MODES + ("all",), + '--debug', + action='store', + nargs='+', + choices=config.DEBUG_MODES + ('all',), help="Debug mode(s) to enable. 'all' is alias for all available modes.", ) @@ -680,7 +678,7 @@ def _slice_time_ref(value, parser): _blist = is_flagged() if _blist[0]: - _reason = _blist[1] or "unknown" + _reason = _blist[1] or 'unknown' print( """\ WARNING: Version %s of fMRIPrep (current) has been FLAGGED @@ -704,9 +702,9 @@ def parse_args(args=None, namespace=None): opts = parser.parse_args(args, namespace) if opts.config_file: - skip = {} if opts.reports_only else {"execution": ("run_uuid",)} + skip = {} if opts.reports_only else {'execution': ('run_uuid',)} config.load(opts.config_file, skip=skip, init=False) - config.loggers.cli.info(f"Loaded previous configuration file {opts.config_file}") + config.loggers.cli.info(f'Loaded previous configuration file {opts.config_file}') config.execution.log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG)) config.from_dict(vars(opts), init=['nipype']) @@ -714,19 +712,19 @@ def parse_args(args=None, namespace=None): if not config.execution.notrack: import pkgutil - if pkgutil.find_loader("sentry_sdk") is None: + if pkgutil.find_loader('sentry_sdk') is None: config.execution.notrack = True - config.loggers.cli.warning("Telemetry disabled because sentry_sdk is not installed.") + config.loggers.cli.warning('Telemetry disabled because sentry_sdk is not installed.') else: config.loggers.cli.info( - "Telemetry system to collect crashes and errors is enabled " - "- thanks for your feedback!. Use option ``--notrack`` to opt out." + 'Telemetry system to collect crashes and errors is enabled ' + '- thanks for your feedback!. Use option ``--notrack`` to opt out.' 
) # Initialize --output-spaces if not defined if config.execution.output_spaces is None: config.execution.output_spaces = SpatialReferences( - [Reference("MNI152NLin2009cAsym", {"res": "native"})] + [Reference('MNI152NLin2009cAsym', {'res': 'native'})] ) # Retrieve logging level @@ -738,12 +736,12 @@ def parse_args(args=None, namespace=None): with open(opts.use_plugin) as f: plugin_settings = yaml.load(f, Loader=yaml.FullLoader) - _plugin = plugin_settings.get("plugin") + _plugin = plugin_settings.get('plugin') if _plugin: config.nipype.plugin = _plugin - config.nipype.plugin_args = plugin_settings.get("plugin_args", {}) + config.nipype.plugin_args = plugin_settings.get('plugin_args', {}) config.nipype.nprocs = opts.nprocs or config.nipype.plugin_args.get( - "n_procs", config.nipype.nprocs + 'n_procs', config.nipype.nprocs ) # Resource management options @@ -751,12 +749,12 @@ def parse_args(args=None, namespace=None): # This may need to be revisited if people try to use batch plugins if 1 < config.nipype.nprocs < config.nipype.omp_nthreads: build_log.warning( - f"Per-process threads (--omp-nthreads={config.nipype.omp_nthreads}) exceed " - f"total threads (--nthreads/--n_cpus={config.nipype.nprocs})" + f'Per-process threads (--omp-nthreads={config.nipype.omp_nthreads}) exceed ' + f'total threads (--nthreads/--n_cpus={config.nipype.nprocs})' ) # Inform the user about the risk of using brain-extracted images - if config.workflow.skull_strip_t1w == "auto": + if config.workflow.skull_strip_t1w == 'auto': build_log.warning( """\ Option ``--skull-strip-t1w`` was set to 'auto'. A heuristic will be \ @@ -777,23 +775,23 @@ def parse_args(args=None, namespace=None): output_layout = config.execution.output_layout if config.execution.fs_subjects_dir is None: - if output_layout == "bids": - config.execution.fs_subjects_dir = output_dir / "sourcedata" / "freesurfer" - elif output_layout == "legacy": - config.execution.fs_subjects_dir = output_dir / "freesurfer" + if output_layout == 'bids': + config.execution.fs_subjects_dir = output_dir / 'sourcedata' / 'freesurfer' + elif output_layout == 'legacy': + config.execution.fs_subjects_dir = output_dir / 'freesurfer' if config.execution.fmriprep_dir is None: - if output_layout == "bids": + if output_layout == 'bids': config.execution.fmriprep_dir = output_dir - elif output_layout == "legacy": - config.execution.fmriprep_dir = output_dir / "fmriprep" + elif output_layout == 'legacy': + config.execution.fmriprep_dir = output_dir / 'fmriprep' # Wipe out existing work_dir if opts.clean_workdir and work_dir.exists(): from niworkflows.utils.misc import clean_directory - build_log.info(f"Clearing previous fMRIPrep working directory: {work_dir}") + build_log.info(f'Clearing previous fMRIPrep working directory: {work_dir}') if not clean_directory(work_dir): - build_log.warning(f"Could not clear all contents of working directory: {work_dir}") + build_log.warning(f'Could not clear all contents of working directory: {work_dir}') # Update the config with an empty dict to trigger initialization of all config # sections (we used `init=False` above). @@ -804,17 +802,17 @@ def parse_args(args=None, namespace=None): # Ensure input and output folders are not the same if output_dir == bids_dir: parser.error( - "The selected output folder is the same as the input BIDS folder. " - "Please modify the output path (suggestion: %s)." + 'The selected output folder is the same as the input BIDS folder. ' + 'Please modify the output path (suggestion: %s).' 
% bids_dir - / "derivatives" - / ("fmriprep-%s" % version.split("+")[0]) + / 'derivatives' + / ('fmriprep-%s' % version.split('+')[0]) ) if bids_dir in work_dir.parents: parser.error( - "The selected working directory is a subdirectory of the input BIDS folder. " - "Please modify the output path." + 'The selected working directory is a subdirectory of the input BIDS folder. ' + 'Please modify the output path.' ) # Validate inputs @@ -822,13 +820,13 @@ def parse_args(args=None, namespace=None): from ..utils.bids import validate_input_dir build_log.info( - "Making sure the input data is BIDS compliant (warnings can be ignored in most " - "cases)." + 'Making sure the input data is BIDS compliant (warnings can be ignored in most ' + 'cases).' ) validate_input_dir(config.environment.exec_env, opts.bids_dir, opts.participant_label) # Setup directories - config.execution.log_dir = config.execution.fmriprep_dir / "logs" + config.execution.log_dir = config.execution.fmriprep_dir / 'logs' # Check and create output and working directories config.execution.log_dir.mkdir(exist_ok=True, parents=True) work_dir.mkdir(exist_ok=True, parents=True) @@ -843,8 +841,8 @@ def parse_args(args=None, namespace=None): missing_subjects = participant_label - set(all_subjects) if missing_subjects: parser.error( - "One or more participant labels were not found in the BIDS directory: " - "%s." % ", ".join(missing_subjects) + 'One or more participant labels were not found in the BIDS directory: ' + '%s.' % ', '.join(missing_subjects) ) config.execution.participant_label = sorted(participant_label) diff --git a/fmriprep/cli/run.py b/fmriprep/cli/run.py index 891284463..559cb8777 100644 --- a/fmriprep/cli/run.py +++ b/fmriprep/cli/run.py @@ -48,8 +48,8 @@ def main(): ) ): config.loggers.cli.warning( - "ICA-AROMA was removed in fMRIPrep 23.1.0. The --use-aroma, --aroma-err-on-warn, " - "and --aroma-melodic-dim flags will error in fMRIPrep 24.0.0." + 'ICA-AROMA was removed in fMRIPrep 23.1.0. The --use-aroma, --aroma-err-on-warn, ' + 'and --aroma-melodic-dim flags will error in fMRIPrep 24.0.0.' ) # Code Carbon @@ -57,20 +57,20 @@ def main(): from codecarbon import OfflineEmissionsTracker country_iso_code = config.execution.country_code - config.loggers.workflow.log(25, "CodeCarbon tracker started ...") - config.loggers.workflow.log(25, f"Using country_iso_code: {country_iso_code}") - config.loggers.workflow.log(25, f"Saving logs at: {config.execution.log_dir}") + config.loggers.workflow.log(25, 'CodeCarbon tracker started ...') + config.loggers.workflow.log(25, f'Using country_iso_code: {country_iso_code}') + config.loggers.workflow.log(25, f'Saving logs at: {config.execution.log_dir}') tracker = OfflineEmissionsTracker( output_dir=config.execution.log_dir, country_iso_code=country_iso_code ) tracker.start() - if "pdb" in config.execution.debug: + if 'pdb' in config.execution.debug: from fmriprep.utils.debug import setup_exceptionhook setup_exceptionhook() - config.nipype.plugin = "Linear" + config.nipype.plugin = 'Linear' sentry_sdk = None if not config.execution.notrack and not config.execution.debug: @@ -84,14 +84,14 @@ def main(): # CRITICAL Save the config to a file. This is necessary because the execution graph # is built as a separate process to keep the memory footprint low. The most # straightforward way to communicate with the child process is via the filesystem. 
- config_file = config.execution.work_dir / config.execution.run_uuid / "config.toml" + config_file = config.execution.work_dir / config.execution.run_uuid / 'config.toml' config_file.parent.mkdir(exist_ok=True, parents=True) config.to_filename(config_file) # CRITICAL Call build_workflow(config_file, retval) in a subprocess. # Because Python on Linux does not ever free virtual memory (VM), running the # workflow construction jailed within a process preempts excessive VM buildup. - if "pdb" not in config.execution.debug: + if 'pdb' not in config.execution.debug: with Manager() as mgr: retval = mgr.dict() p = Process(target=build_workflow, args=(str(config_file), retval)) @@ -100,13 +100,13 @@ def main(): retval = dict(retval.items()) # Convert to base dictionary if p.exitcode: - retval["return_code"] = p.exitcode + retval['return_code'] = p.exitcode else: retval = build_workflow(str(config_file), {}) - exitcode = retval.get("return_code", 0) - fmriprep_wf = retval.get("workflow", None) + exitcode = retval.get('return_code', 0) + fmriprep_wf = retval.get('workflow', None) # CRITICAL Load the config from the file. This is necessary because the ``build_workflow`` # function executed constrained in a process may change the config (and thus the global @@ -117,7 +117,7 @@ def main(): sys.exit(int(exitcode > 0)) if fmriprep_wf and config.execution.write_graph: - fmriprep_wf.write_graph(graph2use="colored", format="svg", simple_form=True) + fmriprep_wf.write_graph(graph2use='colored', format='svg', simple_form=True) exitcode = exitcode or (fmriprep_wf is None) * EX_SOFTWARE if exitcode != 0: @@ -140,16 +140,16 @@ def main(): # Sentry tracking if sentry_sdk is not None: with sentry_sdk.configure_scope() as scope: - scope.set_tag("run_uuid", config.execution.run_uuid) - scope.set_tag("npart", len(config.execution.participant_label)) - sentry_sdk.add_breadcrumb(message="fMRIPrep started", level="info") - sentry_sdk.capture_message("fMRIPrep started", level="info") + scope.set_tag('run_uuid', config.execution.run_uuid) + scope.set_tag('npart', len(config.execution.participant_label)) + sentry_sdk.add_breadcrumb(message='fMRIPrep started', level='info') + sentry_sdk.capture_message('fMRIPrep started', level='info') config.loggers.workflow.log( 15, - "\n".join(["fMRIPrep config:"] + ["\t\t%s" % s for s in config.dumps().splitlines()]), + '\n'.join(['fMRIPrep config:'] + ['\t\t%s' % s for s in config.dumps().splitlines()]), ) - config.loggers.workflow.log(25, "fMRIPrep started!") + config.loggers.workflow.log(25, 'fMRIPrep started!') errno = 1 # Default is error exit unless otherwise set try: fmriprep_wf.run(**config.nipype.get_plugin()) @@ -158,48 +158,48 @@ def main(): from ..utils.telemetry import process_crashfile crashfolders = [ - config.execution.fmriprep_dir / f"sub-{s}" / "log" / config.execution.run_uuid + config.execution.fmriprep_dir / f'sub-{s}' / 'log' / config.execution.run_uuid for s in config.execution.participant_label ] for crashfolder in crashfolders: - for crashfile in crashfolder.glob("crash*.*"): + for crashfile in crashfolder.glob('crash*.*'): process_crashfile(crashfile) - if sentry_sdk is not None and "Workflow did not execute cleanly" not in str(e): + if sentry_sdk is not None and 'Workflow did not execute cleanly' not in str(e): sentry_sdk.capture_exception(e) - config.loggers.workflow.critical("fMRIPrep failed: %s", e) + config.loggers.workflow.critical('fMRIPrep failed: %s', e) raise else: - config.loggers.workflow.log(25, "fMRIPrep finished successfully!") + 
config.loggers.workflow.log(25, 'fMRIPrep finished successfully!') if sentry_sdk is not None: - success_message = "fMRIPrep finished without errors" - sentry_sdk.add_breadcrumb(message=success_message, level="info") - sentry_sdk.capture_message(success_message, level="info") + success_message = 'fMRIPrep finished without errors' + sentry_sdk.add_breadcrumb(message=success_message, level='info') + sentry_sdk.capture_message(success_message, level='info') # Bother users with the boilerplate only iff the workflow went okay. - boiler_file = config.execution.fmriprep_dir / "logs" / "CITATION.md" + boiler_file = config.execution.fmriprep_dir / 'logs' / 'CITATION.md' if boiler_file.exists(): if config.environment.exec_env in ( - "singularity", - "docker", - "fmriprep-docker", + 'singularity', + 'docker', + 'fmriprep-docker', ): - boiler_file = Path("") / boiler_file.relative_to( + boiler_file = Path('') / boiler_file.relative_to( config.execution.output_dir ) config.loggers.workflow.log( 25, - "Works derived from this fMRIPrep execution should include the " - f"boilerplate text found in {boiler_file}.", + 'Works derived from this fMRIPrep execution should include the ' + f'boilerplate text found in {boiler_file}.', ) if config.workflow.run_reconall: from niworkflows.utils.misc import _copy_any from templateflow import api - dseg_tsv = str(api.get("fsaverage", suffix="dseg", extension=[".tsv"])) - _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / "desc-aseg_dseg.tsv")) - _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / "desc-aparcaseg_dseg.tsv")) + dseg_tsv = str(api.get('fsaverage', suffix='dseg', extension=['.tsv'])) + _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / 'desc-aseg_dseg.tsv')) + _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / 'desc-aparcaseg_dseg.tsv')) errno = 0 finally: from .. 
import data @@ -207,9 +207,9 @@ def main(): # Code Carbon if config.execution.track_carbon: emissions: float = tracker.stop() - config.loggers.workflow.log(25, "CodeCarbon tracker has stopped.") - config.loggers.workflow.log(25, f"Saving logs at: {config.execution.log_dir}") - config.loggers.workflow.log(25, f"Carbon emissions: {emissions} kg") + config.loggers.workflow.log(25, 'CodeCarbon tracker has stopped.') + config.loggers.workflow.log(25, f'Saving logs at: {config.execution.log_dir}') + config.loggers.workflow.log(25, f'Carbon emissions: {emissions} kg') from fmriprep.reports.core import generate_reports @@ -218,22 +218,22 @@ def main(): config.execution.participant_label, config.execution.fmriprep_dir, config.execution.run_uuid, - config=data.load("reports-spec.yml"), - packagename="fmriprep", + config=data.load('reports-spec.yml'), + packagename='fmriprep', ) write_derivative_description(config.execution.bids_dir, config.execution.fmriprep_dir) write_bidsignore(config.execution.fmriprep_dir) if sentry_sdk is not None and failed_reports: sentry_sdk.capture_message( - "Report generation failed for %d subjects" % failed_reports, - level="error", + 'Report generation failed for %d subjects' % failed_reports, + level='error', ) sys.exit(int((errno + failed_reports) > 0)) -if __name__ == "__main__": +if __name__ == '__main__': raise RuntimeError( - "fmriprep/cli/run.py should not be run directly;\n" - "Please `pip install` fmriprep and use the `fmriprep` command" + 'fmriprep/cli/run.py should not be run directly;\n' + 'Please `pip install` fmriprep and use the `fmriprep` command' ) diff --git a/fmriprep/cli/tests/test_parser.py b/fmriprep/cli/tests/test_parser.py index ef39c1a7c..46b295ad8 100644 --- a/fmriprep/cli/tests/test_parser.py +++ b/fmriprep/cli/tests/test_parser.py @@ -32,16 +32,16 @@ from .. 
import version as _version from ..parser import _build_parser, parse_args -MIN_ARGS = ["data/", "out/", "participant"] +MIN_ARGS = ['data/', 'out/', 'participant'] @pytest.mark.parametrize( - "args,code", + 'args,code', [ ([], 2), (MIN_ARGS, 2), # bids_dir does not exist - (MIN_ARGS + ["--fs-license-file"], 2), - (MIN_ARGS + ["--fs-license-file", "fslicense.txt"], 2), + (MIN_ARGS + ['--fs-license-file'], 2), + (MIN_ARGS + ['--fs-license-file', 'fslicense.txt'], 2), ], ) def test_parser_errors(args, code): @@ -52,17 +52,17 @@ def test_parser_errors(args, code): assert error.value.code == code -@pytest.mark.parametrize("args", [MIN_ARGS, MIN_ARGS + ["--fs-license-file"]]) +@pytest.mark.parametrize('args', [MIN_ARGS, MIN_ARGS + ['--fs-license-file']]) def test_parser_valid(tmp_path, args): """Check valid arguments.""" - datapath = tmp_path / "data" + datapath = tmp_path / 'data' datapath.mkdir(exist_ok=True) args[0] = str(datapath) - if "--fs-license-file" in args: - _fs_file = tmp_path / "license.txt" - _fs_file.write_text("") - args.insert(args.index("--fs-license-file") + 1, str(_fs_file.absolute())) + if '--fs-license-file' in args: + _fs_file = tmp_path / 'license.txt' + _fs_file.write_text('') + args.insert(args.index('--fs-license-file') + 1, str(_fs_file.absolute())) opts = _build_parser().parse_args(args) @@ -70,36 +70,36 @@ def test_parser_valid(tmp_path, args): @pytest.mark.parametrize( - "argval,gb", + 'argval,gb', [ - ("1G", 1), - ("1GB", 1), - ("1000", 1), # Default units are MB - ("32000", 32), # Default units are MB - ("4000", 4), # Default units are MB - ("1000M", 1), - ("1000MB", 1), - ("1T", 1000), - ("1TB", 1000), - ("%dK" % 1e6, 1), - ("%dKB" % 1e6, 1), - ("%dB" % 1e9, 1), + ('1G', 1), + ('1GB', 1), + ('1000', 1), # Default units are MB + ('32000', 32), # Default units are MB + ('4000', 4), # Default units are MB + ('1000M', 1), + ('1000MB', 1), + ('1T', 1000), + ('1TB', 1000), + ('%dK' % 1e6, 1), + ('%dKB' % 1e6, 1), + ('%dB' % 1e9, 1), ], ) def test_memory_arg(tmp_path, argval, gb): """Check the correct parsing of the memory argument.""" - datapath = tmp_path / "data" + datapath = tmp_path / 'data' datapath.mkdir(exist_ok=True) - _fs_file = tmp_path / "license.txt" - _fs_file.write_text("") + _fs_file = tmp_path / 'license.txt' + _fs_file.write_text('') - args = [str(datapath)] + MIN_ARGS[1:] + ["--fs-license-file", str(_fs_file), "--mem", argval] + args = [str(datapath)] + MIN_ARGS[1:] + ['--fs-license-file', str(_fs_file), '--mem', argval] opts = _build_parser().parse_args(args) assert opts.memory_gb == gb -@pytest.mark.parametrize("current,latest", [("1.0.0", "1.3.2"), ("1.3.2", "1.3.2")]) +@pytest.mark.parametrize('current,latest', [('1.0.0', '1.3.2'), ('1.3.2', '1.3.2')]) def test_get_parser_update(monkeypatch, capsys, current, latest): """Make sure the out-of-date banner is shown.""" expectation = Version(current) < Version(latest) @@ -107,8 +107,8 @@ def test_get_parser_update(monkeypatch, capsys, current, latest): def _mock_check_latest(*args, **kwargs): return Version(latest) - monkeypatch.setattr(config.environment, "version", current) - monkeypatch.setattr(_version, "check_latest", _mock_check_latest) + monkeypatch.setattr(config.environment, 'version', current) + monkeypatch.setattr(_version, 'check_latest', _mock_check_latest) _build_parser() captured = capsys.readouterr().err @@ -121,37 +121,37 @@ def _mock_check_latest(*args, **kwargs): assert (msg in captured) is expectation -@pytest.mark.parametrize("flagged", [(True, None), (True, "random reason"), 
(False, None)]) +@pytest.mark.parametrize('flagged', [(True, None), (True, 'random reason'), (False, None)]) def test_get_parser_blacklist(monkeypatch, capsys, flagged): """Make sure the blacklisting banner is shown.""" def _mock_is_bl(*args, **kwargs): return flagged - monkeypatch.setattr(_version, "is_flagged", _mock_is_bl) + monkeypatch.setattr(_version, 'is_flagged', _mock_is_bl) _build_parser() captured = capsys.readouterr().err - assert ("FLAGGED" in captured) is flagged[0] + assert ('FLAGGED' in captured) is flagged[0] if flagged[0]: - assert (flagged[1] or "reason: unknown") in captured + assert (flagged[1] or 'reason: unknown') in captured def test_parse_args(tmp_path, minimal_bids): """Basic smoke test showing that our parse_args() function implements the BIDS App protocol""" - out_dir = tmp_path / "out" - work_dir = tmp_path / "work" + out_dir = tmp_path / 'out' + work_dir = tmp_path / 'work' parse_args( args=[ str(minimal_bids), str(out_dir), - "participant", # BIDS App - "-w", + 'participant', # BIDS App + '-w', str(work_dir), # Don't pollute CWD - "--skip-bids-validation", # Empty files make BIDS sad + '--skip-bids-validation', # Empty files make BIDS sad ] ) assert config.execution.layout.root == str(minimal_bids) @@ -159,10 +159,10 @@ def test_parse_args(tmp_path, minimal_bids): def test_bids_filter_file(tmp_path, capsys): - bids_path = tmp_path / "data" - out_path = tmp_path / "out" - bff = tmp_path / "filter.json" - args = [str(bids_path), str(out_path), "participant", "--bids-filter-file", str(bff)] + bids_path = tmp_path / 'data' + out_path = tmp_path / 'out' + bff = tmp_path / 'filter.json' + args = [str(bids_path), str(out_path), 'participant', '--bids-filter-file', str(bff)] bids_path.mkdir() parser = _build_parser() @@ -171,7 +171,7 @@ def test_bids_filter_file(tmp_path, capsys): parser.parse_args(args) err = capsys.readouterr().err - assert "Path does not exist:" in err + assert 'Path does not exist:' in err bff.write_text('{"invalid json": }') @@ -179,17 +179,17 @@ def test_bids_filter_file(tmp_path, capsys): parser.parse_args(args) err = capsys.readouterr().err - assert "JSON syntax error in:" in err + assert 'JSON syntax error in:' in err _reset_config() -@pytest.mark.parametrize("st_ref", (None, "0", "1", "0.5", "start", "middle")) +@pytest.mark.parametrize('st_ref', (None, '0', '1', '0.5', 'start', 'middle')) def test_slice_time_ref(tmp_path, st_ref): - bids_path = tmp_path / "data" - out_path = tmp_path / "out" - args = [str(bids_path), str(out_path), "participant"] + bids_path = tmp_path / 'data' + out_path = tmp_path / 'out' + args = [str(bids_path), str(out_path), 'participant'] if st_ref: - args.extend(["--slice-time-ref", st_ref]) + args.extend(['--slice-time-ref', st_ref]) bids_path.mkdir() parser = _build_parser() @@ -199,19 +199,19 @@ def test_slice_time_ref(tmp_path, st_ref): @pytest.mark.parametrize( - "args, expectation", + 'args, expectation', ( ([], False), - (["--use-syn-sdc"], "error"), - (["--use-syn-sdc", "error"], "error"), - (["--use-syn-sdc", "warn"], "warn"), - (["--use-syn-sdc", "other"], (SystemExit, ArgumentError)), + (['--use-syn-sdc'], 'error'), + (['--use-syn-sdc', 'error'], 'error'), + (['--use-syn-sdc', 'warn'], 'warn'), + (['--use-syn-sdc', 'other'], (SystemExit, ArgumentError)), ), ) def test_use_syn_sdc(tmp_path, args, expectation): - bids_path = tmp_path / "data" - out_path = tmp_path / "out" - args = [str(bids_path), str(out_path), "participant"] + args + bids_path = tmp_path / 'data' + out_path = tmp_path / 'out' + args = 
[str(bids_path), str(out_path), 'participant'] + args bids_path.mkdir() parser = _build_parser() diff --git a/fmriprep/cli/tests/test_version.py b/fmriprep/cli/tests/test_version.py index 41d39998d..ea50a6563 100644 --- a/fmriprep/cli/tests/test_version.py +++ b/fmriprep/cli/tests/test_version.py @@ -36,7 +36,7 @@ class MockResponse: """Mocks the requests module so that Pypi is not actually queried.""" status_code = 200 - _json = {"releases": {"1.0.0": None, "1.0.1": None, "1.1.0": None, "1.1.1rc1": None}} + _json = {'releases': {'1.0.0': None, '1.0.1': None, '1.1.0': None, '1.1.1rc1': None}} def __init__(self, code=200, json=None): """Allow setting different response codes.""" @@ -52,52 +52,52 @@ def json(self): def test_check_latest1(tmpdir, monkeypatch): """Test latest version check.""" tmpdir.chdir() - monkeypatch.setenv("HOME", str(tmpdir)) + monkeypatch.setenv('HOME', str(tmpdir)) assert str(Path.home()) == str(tmpdir) def mock_get(*args, **kwargs): return MockResponse() - monkeypatch.setattr(requests, "get", mock_get) + monkeypatch.setattr(requests, 'get', mock_get) # Initially, cache should not exist - cachefile = Path.home() / ".cache" / "fmriprep" / "latest" + cachefile = Path.home() / '.cache' / 'fmriprep' / 'latest' assert not cachefile.exists() # First check actually fetches from pypi v = check_latest() assert cachefile.exists() assert isinstance(v, Version) - assert v == Version("1.1.0") - assert cachefile.read_text().split("|") == [ + assert v == Version('1.1.0') + assert cachefile.read_text().split('|') == [ str(v), datetime.now().strftime(DATE_FMT), ] # Second check - test the cache file is read - cachefile.write_text("|".join(("1.0.0", cachefile.read_text().split("|")[1]))) + cachefile.write_text('|'.join(('1.0.0', cachefile.read_text().split('|')[1]))) v = check_latest() assert isinstance(v, Version) - assert v == Version("1.0.0") + assert v == Version('1.0.0') # Third check - forced oudating of cache - cachefile.write_text("2.0.0|20180121") + cachefile.write_text('2.0.0|20180121') v = check_latest() assert isinstance(v, Version) - assert v == Version("1.1.0") + assert v == Version('1.1.0') # Mock timeouts def mock_get(*args, **kwargs): raise requests.exceptions.Timeout - monkeypatch.setattr(requests, "get", mock_get) + monkeypatch.setattr(requests, 'get', mock_get) - cachefile.write_text("|".join(("1.0.0", cachefile.read_text().split("|")[1]))) + cachefile.write_text('|'.join(('1.0.0', cachefile.read_text().split('|')[1]))) v = check_latest() assert isinstance(v, Version) - assert v == Version("1.0.0") + assert v == Version('1.0.0') - cachefile.write_text("2.0.0|20180121") + cachefile.write_text('2.0.0|20180121') v = check_latest() assert v is None @@ -107,24 +107,24 @@ def mock_get(*args, **kwargs): @pytest.mark.parametrize( - ("result", "code", "json"), + ('result', 'code', 'json'), [ (None, 404, None), - (None, 200, {"releases": {"1.0.0rc1": None}}), - (Version("1.1.0"), 200, None), - (Version("1.0.0"), 200, {"releases": {"1.0.0": None}}), + (None, 200, {'releases': {'1.0.0rc1': None}}), + (Version('1.1.0'), 200, None), + (Version('1.0.0'), 200, {'releases': {'1.0.0': None}}), ], ) def test_check_latest2(tmpdir, monkeypatch, result, code, json): """Test latest version check with varying server responses.""" tmpdir.chdir() - monkeypatch.setenv("HOME", str(tmpdir)) + monkeypatch.setenv('HOME', str(tmpdir)) assert str(Path.home()) == str(tmpdir) def mock_get(*args, **kwargs): return MockResponse(code=code, json=json) - monkeypatch.setattr(requests, "get", mock_get) + 
monkeypatch.setattr(requests, 'get', mock_get) v = check_latest() if result is None: @@ -135,62 +135,62 @@ def mock_get(*args, **kwargs): @pytest.mark.parametrize( - "bad_cache", + 'bad_cache', [ - "3laj#r???d|3akajdf#", - "2.0.0|3akajdf#", - "|".join(("2.0.0", datetime.now().strftime(DATE_FMT), "")), - "", + '3laj#r???d|3akajdf#', + '2.0.0|3akajdf#', + '|'.join(('2.0.0', datetime.now().strftime(DATE_FMT), '')), + '', ], ) def test_check_latest3(tmpdir, monkeypatch, bad_cache): """Test latest version check when the cache file is corrupted.""" tmpdir.chdir() - monkeypatch.setenv("HOME", str(tmpdir)) + monkeypatch.setenv('HOME', str(tmpdir)) assert str(Path.home()) == str(tmpdir) def mock_get(*args, **kwargs): return MockResponse() - monkeypatch.setattr(requests, "get", mock_get) + monkeypatch.setattr(requests, 'get', mock_get) # Initially, cache should not exist - cachefile = Path.home() / ".cache" / "fmriprep" / "latest" + cachefile = Path.home() / '.cache' / 'fmriprep' / 'latest' cachefile.parent.mkdir(parents=True, exist_ok=True) assert not cachefile.exists() cachefile.write_text(bad_cache) v = check_latest() assert isinstance(v, Version) - assert v == Version("1.1.0") + assert v == Version('1.1.0') @pytest.mark.parametrize( - ("result", "version", "code", "json"), + ('result', 'version', 'code', 'json'), [ - (False, "1.2.1", 200, {"flagged": {"1.0.0": None}}), - (True, "1.2.1", 200, {"flagged": {"1.2.1": None}}), - (True, "1.2.1", 200, {"flagged": {"1.2.1": "FATAL Bug!"}}), - (False, "1.2.1", 404, {"flagged": {"1.0.0": None}}), - (False, "1.2.1", 200, {"flagged": []}), - (False, "1.2.1", 200, {}), + (False, '1.2.1', 200, {'flagged': {'1.0.0': None}}), + (True, '1.2.1', 200, {'flagged': {'1.2.1': None}}), + (True, '1.2.1', 200, {'flagged': {'1.2.1': 'FATAL Bug!'}}), + (False, '1.2.1', 404, {'flagged': {'1.0.0': None}}), + (False, '1.2.1', 200, {'flagged': []}), + (False, '1.2.1', 200, {}), ], ) def test_is_flagged(monkeypatch, result, version, code, json): """Test that the flagged-versions check is correct.""" - monkeypatch.setattr(_version, "__version__", version) + monkeypatch.setattr(_version, '__version__', version) def mock_get(*args, **kwargs): return MockResponse(code=code, json=json) - monkeypatch.setattr(requests, "get", mock_get) + monkeypatch.setattr(requests, 'get', mock_get) val, reason = is_flagged() assert val is result test_reason = None if val: - test_reason = json.get("flagged", {}).get(version, None) + test_reason = json.get('flagged', {}).get(version, None) if test_reason is not None: assert reason == test_reason @@ -200,18 +200,18 @@ def mock_get(*args, **kwargs): def test_readonly(tmp_path, monkeypatch): """Test behavior when $HOME/.cache/fmriprep/latest can't be written out.""" - home_path = Path("/home/readonly") if getenv("TEST_READONLY_FILESYSTEM") else tmp_path - monkeypatch.setenv("HOME", str(home_path)) - cachedir = home_path / ".cache" + home_path = Path('/home/readonly') if getenv('TEST_READONLY_FILESYSTEM') else tmp_path + monkeypatch.setenv('HOME', str(home_path)) + cachedir = home_path / '.cache' - if getenv("TEST_READONLY_FILESYSTEM") is None: + if getenv('TEST_READONLY_FILESYSTEM') is None: if geteuid() == 0: - pytest.skip("Cannot mock being unable to create directories as root") + pytest.skip('Cannot mock being unable to create directories as root') cachedir.mkdir(mode=0o555, exist_ok=True) # Make sure creating the folder will raise the exception. 
with pytest.raises(OSError): - (cachedir / "fmriprep").mkdir(parents=True) + (cachedir / 'fmriprep').mkdir(parents=True) # Should not raise check_latest() diff --git a/fmriprep/cli/version.py b/fmriprep/cli/version.py index 747d02bdb..e3d8d5b1f 100644 --- a/fmriprep/cli/version.py +++ b/fmriprep/cli/version.py @@ -30,7 +30,7 @@ from .. import __version__ RELEASE_EXPIRY_DAYS = 14 -DATE_FMT = "%Y%m%d" +DATE_FMT = '%Y%m%d' def check_latest(): @@ -40,7 +40,7 @@ def check_latest(): latest = None date = None outdated = None - cachefile = Path.home() / ".cache" / "fmriprep" / "latest" + cachefile = Path.home() / '.cache' / 'fmriprep' / 'latest' try: cachefile.parent.mkdir(parents=True, exist_ok=True) except OSError: @@ -48,7 +48,7 @@ def check_latest(): if cachefile and cachefile.exists(): try: - latest, date = cachefile.read_text().split("|") + latest, date = cachefile.read_text().split('|') except Exception: pass else: @@ -63,12 +63,12 @@ def check_latest(): if latest is None or outdated is True: try: - response = requests.get(url="https://pypi.org/pypi/fmriprep/json", timeout=1.0) + response = requests.get(url='https://pypi.org/pypi/fmriprep/json', timeout=1.0) except Exception: response = None if response and response.status_code == 200: - versions = [Version(rel) for rel in response.json()["releases"].keys()] + versions = [Version(rel) for rel in response.json()['releases'].keys()] versions = [rel for rel in versions if not rel.is_prerelease] if versions: latest = sorted(versions)[-1] @@ -77,7 +77,7 @@ def check_latest(): if cachefile is not None and latest is not None: try: - cachefile.write_text("|".join(("%s" % latest, datetime.now().strftime(DATE_FMT)))) + cachefile.write_text('|'.join(('%s' % latest, datetime.now().strftime(DATE_FMT)))) except Exception: pass @@ -98,7 +98,7 @@ def is_flagged(): response = None if response and response.status_code == 200: - flagged = response.json().get("flagged", {}) or {} + flagged = response.json().get('flagged', {}) or {} if __version__ in flagged: return True, flagged[__version__] diff --git a/fmriprep/cli/workflow.py b/fmriprep/cli/workflow.py index 6d754a93d..cce82b2af 100644 --- a/fmriprep/cli/workflow.py +++ b/fmriprep/cli/workflow.py @@ -34,7 +34,6 @@ def build_workflow(config_file, retval): """Create the Nipype Workflow that supports the whole execution graph.""" - from pathlib import Path from niworkflows.utils.bids import collect_participants from niworkflows.utils.misc import check_valid_fs_license @@ -52,26 +51,26 @@ def build_workflow(config_file, retval): fmriprep_dir = config.execution.fmriprep_dir version = config.environment.version - retval["return_code"] = 1 - retval["workflow"] = None + retval['return_code'] = 1 + retval['workflow'] = None - banner = [f"Running fMRIPrep version {version}"] - notice_path = data.load.readable("NOTICE") + banner = [f'Running fMRIPrep version {version}'] + notice_path = data.load.readable('NOTICE') if notice_path.exists(): - banner[0] += "\n" + banner[0] += '\n' banner += [f"License NOTICE {'#' * 50}"] - banner += [f"fMRIPrep {version}"] + banner += [f'fMRIPrep {version}'] banner += notice_path.read_text().splitlines(keepends=False)[1:] - banner += ["#" * len(banner[1])] + banner += ['#' * len(banner[1])] build_log.log(25, f"\n{' ' * 9}".join(banner)) # warn if older results exist: check for dataset_description.json in output folder - msg = check_pipeline_version("fMRIPrep", version, fmriprep_dir / "dataset_description.json") + msg = check_pipeline_version('fMRIPrep', version, fmriprep_dir / 
'dataset_description.json') if msg is not None: build_log.warning(msg) # Please note this is the input folder's dataset_description.json - dset_desc_path = config.execution.bids_dir / "dataset_description.json" + dset_desc_path = config.execution.bids_dir / 'dataset_description.json' if dset_desc_path.exists(): from hashlib import sha256 @@ -85,34 +84,34 @@ def build_workflow(config_file, retval): # Called with reports only if config.execution.reports_only: - build_log.log(25, "Running --reports-only on participants %s", ", ".join(subject_list)) - retval["return_code"] = generate_reports( + build_log.log(25, 'Running --reports-only on participants %s', ', '.join(subject_list)) + retval['return_code'] = generate_reports( config.execution.participant_label, config.execution.fmriprep_dir, config.execution.run_uuid, - config=data.load("reports-spec.yml"), - packagename="fmriprep", + config=data.load('reports-spec.yml'), + packagename='fmriprep', ) return retval # Build main workflow init_msg = [ "Building fMRIPrep's workflow:", - f"BIDS dataset path: {config.execution.bids_dir}.", - f"Participant list: {subject_list}.", - f"Run identifier: {config.execution.run_uuid}.", - f"Output spaces: {config.execution.output_spaces}.", + f'BIDS dataset path: {config.execution.bids_dir}.', + f'Participant list: {subject_list}.', + f'Run identifier: {config.execution.run_uuid}.', + f'Output spaces: {config.execution.output_spaces}.', ] if config.execution.derivatives: - init_msg += [f"Searching for derivatives: {config.execution.derivatives}."] + init_msg += [f'Searching for derivatives: {config.execution.derivatives}.'] if config.execution.fs_subjects_dir: init_msg += [f"Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}."] build_log.log(25, f"\n{' ' * 11}* ".join(init_msg)) - retval["workflow"] = init_fmriprep_wf() + retval['workflow'] = init_fmriprep_wf() # Check for FS license after building the workflow if not check_valid_fs_license(): @@ -133,25 +132,25 @@ def build_workflow(config_file, retval): 2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \ (for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""" ) - retval["return_code"] = 126 # 126 == Command invoked cannot execute. + retval['return_code'] = 126 # 126 == Command invoked cannot execute. return retval # Check workflow for missing commands - missing = check_deps(retval["workflow"]) + missing = check_deps(retval['workflow']) if missing: build_log.critical( - "Cannot run fMRIPrep. Missing dependencies:%s", - "\n\t* ".join([""] + [f"{cmd} (Interface: {iface})" for iface, cmd in missing]), + 'Cannot run fMRIPrep. Missing dependencies:%s', + '\n\t* '.join([''] + [f'{cmd} (Interface: {iface})' for iface, cmd in missing]), ) - retval["return_code"] = 127 # 127 == command not found. + retval['return_code'] = 127 # 127 == command not found. return retval config.to_filename(config_file) build_log.info( - "fMRIPrep workflow graph with %d nodes built successfully.", - len(retval["workflow"]._get_all_nodes()), + 'fMRIPrep workflow graph with %d nodes built successfully.', + len(retval['workflow']._get_all_nodes()), ) - retval["return_code"] = 0 + retval['return_code'] = 0 return retval @@ -160,10 +159,10 @@ def build_boilerplate(config_file, workflow): from .. 
import config config.load(config_file) - logs_path = config.execution.fmriprep_dir / "logs" + logs_path = config.execution.fmriprep_dir / 'logs' boilerplate = workflow.visit_desc() citation_files = { - ext: logs_path / ("CITATION.%s" % ext) for ext in ("bib", "tex", "md", "html") + ext: logs_path / ('CITATION.%s' % ext) for ext in ('bib', 'tex', 'md', 'html') } if boilerplate: @@ -176,52 +175,51 @@ def build_boilerplate(config_file, workflow): except FileNotFoundError: pass - citation_files["md"].write_text(boilerplate) + citation_files['md'].write_text(boilerplate) - if not config.execution.md_only_boilerplate and citation_files["md"].exists(): - from pathlib import Path + if not config.execution.md_only_boilerplate and citation_files['md'].exists(): from subprocess import CalledProcessError, TimeoutExpired, check_call from .. import data - bib_text = data.load.readable("boilerplate.bib").read_text() - citation_files["bib"].write_text( - bib_text.replace("fMRIPrep ", f"fMRIPrep {config.environment.version}") + bib_text = data.load.readable('boilerplate.bib').read_text() + citation_files['bib'].write_text( + bib_text.replace('fMRIPrep ', f'fMRIPrep {config.environment.version}') ) # Generate HTML file resolving citations cmd = [ - "pandoc", - "-s", - "--bibliography", - str(citation_files["bib"]), - "--citeproc", - "--metadata", + 'pandoc', + '-s', + '--bibliography', + str(citation_files['bib']), + '--citeproc', + '--metadata', 'pagetitle="fMRIPrep citation boilerplate"', - str(citation_files["md"]), - "-o", - str(citation_files["html"]), + str(citation_files['md']), + '-o', + str(citation_files['html']), ] - config.loggers.cli.info("Generating an HTML version of the citation boilerplate...") + config.loggers.cli.info('Generating an HTML version of the citation boilerplate...') try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning("Could not generate CITATION.html file:\n%s", " ".join(cmd)) + config.loggers.cli.warning('Could not generate CITATION.html file:\n%s', ' '.join(cmd)) # Generate LaTex file resolving citations cmd = [ - "pandoc", - "-s", - "--bibliography", - str(citation_files["bib"]), - "--natbib", - str(citation_files["md"]), - "-o", - str(citation_files["tex"]), + 'pandoc', + '-s', + '--bibliography', + str(citation_files['bib']), + '--natbib', + str(citation_files['md']), + '-o', + str(citation_files['tex']), ] - config.loggers.cli.info("Generating a LaTeX version of the citation boilerplate...") + config.loggers.cli.info('Generating a LaTeX version of the citation boilerplate...') try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning("Could not generate CITATION.tex file:\n%s", " ".join(cmd)) + config.loggers.cli.warning('Could not generate CITATION.tex file:\n%s', ' '.join(cmd)) diff --git a/fmriprep/config.py b/fmriprep/config.py index f7ec4551b..abe353440 100644 --- a/fmriprep/config.py +++ b/fmriprep/config.py @@ -91,14 +91,14 @@ from multiprocessing import set_start_method # Disable NiPype etelemetry always -_disable_et = bool(os.getenv("NO_ET") is not None or os.getenv("NIPYPE_NO_ET") is not None) -os.environ["NIPYPE_NO_ET"] = "1" -os.environ["NO_ET"] = "1" +_disable_et = bool(os.getenv('NO_ET') is not None or os.getenv('NIPYPE_NO_ET') is not None) +os.environ['NIPYPE_NO_ET'] = '1' +os.environ['NO_ET'] = '1' -CONFIG_FILENAME = "fmriprep.toml" +CONFIG_FILENAME = 'fmriprep.toml' try: - set_start_method("forkserver") + 
set_start_method('forkserver') except RuntimeError: pass # context has been already set finally: @@ -115,28 +115,28 @@ from . import __version__ -if not hasattr(sys, "_is_pytest_session"): +if not hasattr(sys, '_is_pytest_session'): sys._is_pytest_session = False # Trick to avoid sklearn's FutureWarnings # Disable all warnings in main and children processes only on production versions if not any( ( - "+" in __version__, - __version__.endswith(".dirty"), - os.getenv("FMRIPREP_DEV", "0").lower() in ("1", "on", "true", "y", "yes"), + '+' in __version__, + __version__.endswith('.dirty'), + os.getenv('FMRIPREP_DEV', '0').lower() in ('1', 'on', 'true', 'y', 'yes'), ) ): from ._warnings import logging - os.environ["PYTHONWARNINGS"] = "ignore" -elif os.getenv("FMRIPREP_WARNINGS", "0").lower() in ("1", "on", "true", "y", "yes"): + os.environ['PYTHONWARNINGS'] = 'ignore' +elif os.getenv('FMRIPREP_WARNINGS', '0').lower() in ('1', 'on', 'true', 'y', 'yes'): # allow disabling warnings on development versions # https://github.com/nipreps/fmriprep/pull/2080#discussion_r409118765 from ._warnings import logging else: import logging -logging.addLevelName(25, "IMPORTANT") # Add a new level between INFO and WARNING -logging.addLevelName(15, "VERBOSE") # Add a new level between INFO and DEBUG +logging.addLevelName(25, 'IMPORTANT') # Add a new level between INFO and WARNING +logging.addLevelName(15, 'VERBOSE') # Add a new level between INFO and DEBUG DEFAULT_MEMORY_MIN_GB = 0.01 @@ -150,29 +150,29 @@ from requests import get as _get_url with suppress((ConnectionError, ReadTimeout)): - _get_url("https://rig.mit.edu/et/projects/nipy/nipype", timeout=0.05) + _get_url('https://rig.mit.edu/et/projects/nipy/nipype', timeout=0.05) # Execution environment _exec_env = os.name _docker_ver = None # special variable set in the container -if os.getenv("IS_DOCKER_8395080871"): - _exec_env = "singularity" - _cgroup = Path("/proc/1/cgroup") - if _cgroup.exists() and "docker" in _cgroup.read_text(): - _docker_ver = os.getenv("DOCKER_VERSION_8395080871") - _exec_env = "fmriprep-docker" if _docker_ver else "docker" +if os.getenv('IS_DOCKER_8395080871'): + _exec_env = 'singularity' + _cgroup = Path('/proc/1/cgroup') + if _cgroup.exists() and 'docker' in _cgroup.read_text(): + _docker_ver = os.getenv('DOCKER_VERSION_8395080871') + _exec_env = 'fmriprep-docker' if _docker_ver else 'docker' del _cgroup -_fs_license = os.getenv("FS_LICENSE") -if not _fs_license and os.getenv("FREESURFER_HOME"): - _fs_home = os.getenv("FREESURFER_HOME") - if _fs_home and (Path(_fs_home) / "license.txt").is_file(): - _fs_license = str(Path(_fs_home) / "license.txt") +_fs_license = os.getenv('FS_LICENSE') +if not _fs_license and os.getenv('FREESURFER_HOME'): + _fs_home = os.getenv('FREESURFER_HOME') + if _fs_home and (Path(_fs_home) / 'license.txt').is_file(): + _fs_license = str(Path(_fs_home) / 'license.txt') del _fs_home _templateflow_home = Path( - os.getenv("TEMPLATEFLOW_HOME", os.path.join(os.getenv("HOME"), ".cache", "templateflow")) + os.getenv('TEMPLATEFLOW_HOME', os.path.join(os.getenv('HOME'), '.cache', 'templateflow')) ) try: @@ -182,28 +182,28 @@ except Exception: _free_mem_at_start = None -_oc_limit = "n/a" -_oc_policy = "n/a" +_oc_limit = 'n/a' +_oc_policy = 'n/a' try: # Memory policy may have a large effect on types of errors experienced - _proc_oc_path = Path("/proc/sys/vm/overcommit_memory") + _proc_oc_path = Path('/proc/sys/vm/overcommit_memory') if _proc_oc_path.exists(): - _oc_policy = {"0": "heuristic", "1": "always", "2": 
"never"}.get( - _proc_oc_path.read_text().strip(), "unknown" + _oc_policy = {'0': 'heuristic', '1': 'always', '2': 'never'}.get( + _proc_oc_path.read_text().strip(), 'unknown' ) - if _oc_policy != "never": - _proc_oc_kbytes = Path("/proc/sys/vm/overcommit_kbytes") + if _oc_policy != 'never': + _proc_oc_kbytes = Path('/proc/sys/vm/overcommit_kbytes') if _proc_oc_kbytes.exists(): _oc_limit = _proc_oc_kbytes.read_text().strip() - if _oc_limit in ("0", "n/a") and Path("/proc/sys/vm/overcommit_ratio").exists(): - _oc_limit = "{}%".format(Path("/proc/sys/vm/overcommit_ratio").read_text().strip()) + if _oc_limit in ('0', 'n/a') and Path('/proc/sys/vm/overcommit_ratio').exists(): + _oc_limit = '{}%'.format(Path('/proc/sys/vm/overcommit_ratio').read_text().strip()) except Exception: pass # Debug modes are names that influence the exposure of internal details to # the user, either through additional derivatives or increased verbosity -DEBUG_MODES = ("compcor", "fieldmaps", "pdb") +DEBUG_MODES = ('compcor', 'fieldmaps', 'pdb') class _Config: @@ -213,7 +213,7 @@ class _Config: def __init__(self): """Avert instantiation.""" - raise RuntimeError("Configuration type is not instantiable.") + raise RuntimeError('Configuration type is not instantiable.') @classmethod def load(cls, settings, init=True, ignore=None): @@ -243,7 +243,7 @@ def get(cls): out = {} for k, v in cls.__dict__.items(): - if k.startswith("_") or v is None: + if k.startswith('_') or v is None: continue if callable(getattr(cls, k)): continue @@ -253,7 +253,7 @@ def get(cls): else: v = str(v) if isinstance(v, SpatialReferences): - v = " ".join(str(s) for s in v.references) or None + v = ' '.join(str(s) for s in v.references) or None if isinstance(v, Reference): v = str(v) or None out[k] = v @@ -297,7 +297,7 @@ class environment(_Config): class nipype(_Config): """Nipype settings.""" - crashfile_format = "txt" + crashfile_format = 'txt' """The file format for crashfiles, either text (txt) or pickle (pklz).""" get_linked_libs = False """Run NiPype's tool to enlist linked libraries for every interface.""" @@ -307,11 +307,11 @@ class nipype(_Config): """Number of processes (compute tasks) that can be run in parallel (multiprocessing only).""" omp_nthreads = None """Number of CPUs a single process can access for multithreaded execution.""" - plugin = "MultiProc" + plugin = 'MultiProc' """NiPype's execution plugin.""" plugin_args = { - "maxtasksperchild": 1, - "raise_insufficient": False, + 'maxtasksperchild': 1, + 'raise_insufficient': False, } """Settings for NiPype's execution plugin.""" remove_unnecessary_outputs = True @@ -325,13 +325,13 @@ class nipype(_Config): def get_plugin(cls): """Format a dictionary for Nipype consumption.""" out = { - "plugin": cls.plugin, - "plugin_args": cls.plugin_args, + 'plugin': cls.plugin, + 'plugin_args': cls.plugin_args, } - if cls.plugin in ("MultiProc", "LegacyMultiProc"): - out["plugin_args"]["n_procs"] = int(cls.nprocs) + if cls.plugin in ('MultiProc', 'LegacyMultiProc'): + out['plugin_args']['n_procs'] = int(cls.nprocs) if cls.memory_gb: - out["plugin_args"]["memory_gb"] = float(cls.memory_gb) + out['plugin_args']['memory_gb'] = float(cls.memory_gb) return out @classmethod @@ -343,10 +343,10 @@ def init(cls): if cls.resource_monitor: ncfg.update_config( { - "monitoring": { - "enabled": cls.resource_monitor, - "sample_frequency": "0.5", - "summary_append": True, + 'monitoring': { + 'enabled': cls.resource_monitor, + 'sample_frequency': '0.5', + 'summary_append': True, } } ) @@ -355,13 +355,13 @@ def 
init(cls): # Nipype config (logs and execution) ncfg.update_config( { - "execution": { - "crashdump_dir": str(execution.log_dir), - "crashfile_format": cls.crashfile_format, - "get_linked_libs": cls.get_linked_libs, - "remove_unnecessary_outputs": cls.remove_unnecessary_outputs, - "stop_on_first_crash": cls.stop_on_first_crash, - "check_version": False, # disable future telemetry + 'execution': { + 'crashdump_dir': str(execution.log_dir), + 'crashfile_format': cls.crashfile_format, + 'get_linked_libs': cls.get_linked_libs, + 'remove_unnecessary_outputs': cls.remove_unnecessary_outputs, + 'stop_on_first_crash': cls.stop_on_first_crash, + 'check_version': False, # disable future telemetry } } ) @@ -411,7 +411,7 @@ class execution(_Config): """Do not collect telemetry information for *fMRIPrep*.""" track_carbon = False """Tracks power draws using CodeCarbon package.""" - country_code = "CAN" + country_code = 'CAN' """Country ISO code used by carbon trackers.""" output_dir = None """Folder where derivatives will be stored.""" @@ -432,7 +432,7 @@ class execution(_Config): """Select a particular task from all available in the dataset.""" templateflow_home = _templateflow_home """The root folder of the TemplateFlow client.""" - work_dir = Path("work").absolute() + work_dir = Path('work').absolute() """Path to a working directory where intermediate results will be available.""" write_graph = False """Write out the computational graph corresponding to the planned preprocessing.""" @@ -440,24 +440,24 @@ class execution(_Config): _layout = None _paths = ( - "bids_dir", - "derivatives", - "bids_database_dir", - "fmriprep_dir", - "fs_license_file", - "fs_subjects_dir", - "layout", - "log_dir", - "output_dir", - "templateflow_home", - "work_dir", + 'bids_dir', + 'derivatives', + 'bids_database_dir', + 'fmriprep_dir', + 'fs_license_file', + 'fs_subjects_dir', + 'layout', + 'log_dir', + 'output_dir', + 'templateflow_home', + 'work_dir', ) @classmethod def init(cls): """Create a new BIDS Layout accessible with :attr:`~execution.layout`.""" if cls.fs_license_file and Path(cls.fs_license_file).is_file(): - os.environ["FS_LICENSE"] = str(cls.fs_license_file) + os.environ['FS_LICENSE'] = str(cls.fs_license_file) if cls._layout is None: import re @@ -465,20 +465,20 @@ def init(cls): from bids.layout import BIDSLayout from bids.layout.index import BIDSLayoutIndexer - _db_path = cls.bids_database_dir or (cls.work_dir / cls.run_uuid / "bids_db") + _db_path = cls.bids_database_dir or (cls.work_dir / cls.run_uuid / 'bids_db') _db_path.mkdir(exist_ok=True, parents=True) # Recommended after PyBIDS 12.1 _indexer = BIDSLayoutIndexer( validate=False, ignore=( - "code", - "stimuli", - "sourcedata", - "models", - re.compile(r"^\."), + 'code', + 'stimuli', + 'sourcedata', + 'models', + re.compile(r'^\.'), re.compile( - r"sub-[a-zA-Z0-9]+(/ses-[a-zA-Z0-9]+)?/(beh|dwi|eeg|ieeg|meg|perf)" + r'sub-[a-zA-Z0-9]+(/ses-[a-zA-Z0-9]+)?/(beh|dwi|eeg|ieeg|meg|perf)' ), ), ) @@ -496,11 +496,11 @@ def init(cls): # unserialize pybids Query enum values for acq, filters in cls.bids_filters.items(): cls.bids_filters[acq] = { - k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and "Query" in v else v + k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and 'Query' in v else v for k, v in filters.items() } - if "all" in cls.debug: + if 'all' in cls.debug: cls.debug = list(DEBUG_MODES) @@ -527,7 +527,7 @@ class workflow(_Config): (positive = exact, negative = maximum).""" bold2t1w_dof = None """Degrees of freedom of the BOLD-to-T1w 
registration steps.""" - bold2t1w_init = "register" + bold2t1w_init = 'register' """Whether to use standard coregistration ('register') or to initialize coregistration from the BOLD image-header ('header').""" cifti_output = None @@ -564,9 +564,9 @@ class workflow(_Config): """Run FreeSurfer's surface reconstruction.""" skull_strip_fixed_seed = False """Fix a seed for skull-stripping.""" - skull_strip_template = "OASIS30ANTs" + skull_strip_template = 'OASIS30ANTs' """Change default brain extraction template.""" - skull_strip_t1w = "force" + skull_strip_t1w = 'force' """Skip brain extraction of the T1w image (default is ``force``, meaning that *fMRIPrep* will run brain extraction of the T1w).""" slice_time_ref = 0.5 @@ -584,25 +584,25 @@ class workflow(_Config): use_syn_sdc = None """Run *fieldmap-less* susceptibility-derived distortions estimation in the absence of any alternatives.""" - me_t2s_fit_method = "curvefit" + me_t2s_fit_method = 'curvefit' """The method by which to estimate T2*/S0 for multi-echo data""" class loggers: """Keep loggers easily accessible (see :py:func:`init`).""" - _fmt = "%(asctime)s,%(msecs)d %(name)-2s " "%(levelname)-2s:\n\t %(message)s" - _datefmt = "%y%m%d-%H:%M:%S" + _fmt = '%(asctime)s,%(msecs)d %(name)-2s %(levelname)-2s:\n\t %(message)s' + _datefmt = '%y%m%d-%H:%M:%S' default = logging.getLogger() """The root logger.""" - cli = logging.getLogger("cli") + cli = logging.getLogger('cli') """Command-line interface logging.""" - workflow = logging.getLogger("nipype.workflow") + workflow = logging.getLogger('nipype.workflow') """NiPype's workflow logger.""" - interface = logging.getLogger("nipype.interface") + interface = logging.getLogger('nipype.interface') """NiPype's interface logger.""" - utils = logging.getLogger("nipype.utils") + utils = logging.getLogger('nipype.utils') """NiPype's utils logger.""" @classmethod @@ -627,7 +627,7 @@ def init(cls): cls.workflow.setLevel(execution.log_level) cls.utils.setLevel(execution.log_level) ncfg.update_config( - {"logging": {"log_directory": str(execution.log_dir), "log_to_file": True}} + {'logging': {'log_directory': str(execution.log_dir), 'log_to_file': True}} ) @@ -657,7 +657,7 @@ def init(cls): def _set_ants_seed(): """Fix random seed for antsRegistration, antsAI, antsMotionCorr""" val = random.randint(1, 65536) - os.environ["ANTS_RANDOM_SEED"] = str(val) + os.environ['ANTS_RANDOM_SEED'] = str(val) return val @@ -718,7 +718,7 @@ def initialize(x): filename = Path(filename) settings = loads(filename.read_text()) for sectionname, configs in settings.items(): - if sectionname != "environment": + if sectionname != 'environment': section = getattr(sys.modules[__name__], sectionname) ignore = skip.get(sectionname) section.load(configs, ignore=ignore, init=initialize(sectionname)) @@ -728,17 +728,17 @@ def initialize(x): def get(flat=False): """Get config as a dict.""" settings = { - "environment": environment.get(), - "execution": execution.get(), - "workflow": workflow.get(), - "nipype": nipype.get(), - "seeds": seeds.get(), + 'environment': environment.get(), + 'execution': execution.get(), + 'workflow': workflow.get(), + 'nipype': nipype.get(), + 'seeds': seeds.get(), } if not flat: return settings return { - ".".join((section, k)): v + '.'.join((section, k)): v for section, configs in settings.items() for k, v in configs.items() } @@ -764,15 +764,15 @@ def init_spaces(checkpoint=True): spaces = execution.output_spaces or SpatialReferences() if not isinstance(spaces, SpatialReferences): spaces = SpatialReferences( 
- [ref for s in spaces.split(" ") for ref in Reference.from_string(s)] + [ref for s in spaces.split(' ') for ref in Reference.from_string(s)] ) if checkpoint and not spaces.is_cached(): spaces.checkpoint() # Add the default standard space if not already present (required by several sub-workflows) - if "MNI152NLin2009cAsym" not in spaces.get_spaces(nonstandard=False, dim=(3,)): - spaces.add(Reference("MNI152NLin2009cAsym", {})) + if 'MNI152NLin2009cAsym' not in spaces.get_spaces(nonstandard=False, dim=(3,)): + spaces.add(Reference('MNI152NLin2009cAsym', {})) # Ensure user-defined spatial references for outputs are correctly parsed. # Certain options require normalization to a space not explicitly defined by users. @@ -780,8 +780,8 @@ def init_spaces(checkpoint=True): cifti_output = workflow.cifti_output if cifti_output: # CIFTI grayordinates to corresponding FSL-MNI resolutions. - vol_res = "2" if cifti_output == "91k" else "1" - spaces.add(Reference("MNI152NLin6Asym", {"res": vol_res})) + vol_res = '2' if cifti_output == '91k' else '1' + spaces.add(Reference('MNI152NLin6Asym', {'res': vol_res})) # Make the SpatialReferences object available workflow.spaces = spaces diff --git a/fmriprep/conftest.py b/fmriprep/conftest.py index c1ef44abe..8a2dd0bf0 100644 --- a/fmriprep/conftest.py +++ b/fmriprep/conftest.py @@ -18,12 +18,12 @@ def copytree_or_skip(source, target): data_dir = ir_files('fmriprep') / source if not data_dir.exists(): - pytest.skip(f"Cannot chdir into {data_dir!r}. Probably in a zipped distribution.") + pytest.skip(f'Cannot chdir into {data_dir!r}. Probably in a zipped distribution.') try: copytree(data_dir, target / data_dir.name) except Exception: - pytest.skip(f"Cannot copy {data_dir!r} into {target / data_dir.name}. Probably in a zip.") + pytest.skip(f'Cannot copy {data_dir!r} into {target / data_dir.name}. Probably in a zip.') @pytest.fixture(autouse=True) @@ -32,12 +32,12 @@ def populate_namespace(doctest_namespace, tmp_path): doctest_namespace['testdir'] = tmp_path -@pytest.fixture +@pytest.fixture() def minimal_bids(tmp_path): - bids = tmp_path / "bids" + bids = tmp_path / 'bids' bids.mkdir() Path.write_text( - bids / "dataset_description.json", json.dumps({"Name": "Test DS", "BIDSVersion": "1.8.0"}) + bids / 'dataset_description.json', json.dumps({'Name': 'Test DS', 'BIDSVersion': '1.8.0'}) ) T1w = bids / 'sub-01' / 'anat' / 'sub-01_T1w.nii.gz' T1w.parent.mkdir(parents=True) diff --git a/fmriprep/data/__init__.py b/fmriprep/data/__init__.py index 98ab4cfb9..42cb87292 100644 --- a/fmriprep/data/__init__.py +++ b/fmriprep/data/__init__.py @@ -35,7 +35,7 @@ except ImportError: from importlib_resources.abc import Traversable -__all__ = ["load"] +__all__ = ['load'] class Loader: @@ -127,19 +127,19 @@ def _doc(self): directory. 
""" top_level = sorted( - os.path.relpath(p, self.files) + "/"[: p.is_dir()] + os.path.relpath(p, self.files) + '/'[: p.is_dir()] for p in self.files.iterdir() - if p.name[0] not in (".", "_") and p.name != "tests" + if p.name[0] not in ('.', '_') and p.name != 'tests' ) doclines = [ - f"Load package files relative to ``{self._anchor}``.", - "", - "This package contains the following (top-level) files/directories:", - "", - *(f"* ``{path}``" for path in top_level), + f'Load package files relative to ``{self._anchor}``.', + '', + 'This package contains the following (top-level) files/directories:', + '', + *(f'* ``{path}``' for path in top_level), ] - return "\n".join(doclines) + return '\n'.join(doclines) def readable(self, *segments) -> Traversable: """Provide read access to a resource through a Path-like interface. diff --git a/fmriprep/interfaces/__init__.py b/fmriprep/interfaces/__init__.py index ba7b40e5d..9fe92fce9 100644 --- a/fmriprep/interfaces/__init__.py +++ b/fmriprep/interfaces/__init__.py @@ -4,7 +4,7 @@ class DerivativesDataSink(_DDSink): - out_path_base = "" + out_path_base = '' -__all__ = ("DerivativesDataSink",) +__all__ = ('DerivativesDataSink',) diff --git a/fmriprep/interfaces/confounds.py b/fmriprep/interfaces/confounds.py index b59b11534..17f16a389 100644 --- a/fmriprep/interfaces/confounds.py +++ b/fmriprep/interfaces/confounds.py @@ -54,18 +54,18 @@ class _aCompCorMasksInputSpec(BaseInterfaceInputSpec): - in_vfs = InputMultiObject(File(exists=True), desc="Input volume fractions.") + in_vfs = InputMultiObject(File(exists=True), desc='Input volume fractions.') is_aseg = traits.Bool( False, usedefault=True, desc="Whether the input volume fractions come from FS' aseg." ) bold_zooms = traits.Tuple( - traits.Float, traits.Float, traits.Float, mandatory=True, desc="BOLD series zooms" + traits.Float, traits.Float, traits.Float, mandatory=True, desc='BOLD series zooms' ) class _aCompCorMasksOutputSpec(TraitedSpec): out_masks = OutputMultiObject( - File(exists=True), desc="CSF, WM and combined masks, respectively" + File(exists=True), desc='CSF, WM and combined masks, respectively' ) @@ -78,7 +78,7 @@ class aCompCorMasks(SimpleInterface): def _run_interface(self, runtime): from ..utils.confounds import acompcor_masks - self._results["out_masks"] = acompcor_masks( + self._results['out_masks'] = acompcor_masks( self.inputs.in_vfs, self.inputs.is_aseg, self.inputs.bold_zooms, @@ -104,12 +104,12 @@ class FilterDropped(SimpleInterface): output_spec = _FilterDroppedOutputSpec def _run_interface(self, runtime): - self._results["out_file"] = fname_presuffix( + self._results['out_file'] = fname_presuffix( self.inputs.in_file, suffix='_filtered', use_ext=True, newpath=runtime.cwd ) metadata = pd.read_csv(self.inputs.in_file, sep='\t') - metadata[metadata.retained].to_csv(self._results["out_file"], sep='\t', index=False) + metadata[metadata.retained].to_csv(self._results['out_file'], sep='\t', index=False) return runtime @@ -143,42 +143,42 @@ def _run_interface(self, runtime): metadata = pd.read_csv(self.inputs.metadata_file, sep='\t') except pd.errors.EmptyDataError: # Can occur when testing on short datasets; otherwise rare - self._results["components_file"] = self.inputs.components_file - self._results["metadata_file"] = self.inputs.metadata_file + self._results['components_file'] = self.inputs.components_file + self._results['metadata_file'] = self.inputs.metadata_file return runtime - self._results["components_file"] = fname_presuffix( + self._results['components_file'] = 
fname_presuffix( self.inputs.components_file, suffix='_renamed', use_ext=True, newpath=runtime.cwd ) - self._results["metadata_file"] = fname_presuffix( + self._results['metadata_file'] = fname_presuffix( self.inputs.metadata_file, suffix='_renamed', use_ext=True, newpath=runtime.cwd ) - all_comp_cor = metadata[metadata["retained"]] + all_comp_cor = metadata[metadata['retained']] - c_comp_cor = all_comp_cor[all_comp_cor["mask"] == "CSF"] - w_comp_cor = all_comp_cor[all_comp_cor["mask"] == "WM"] - a_comp_cor = all_comp_cor[all_comp_cor["mask"] == "combined"] + c_comp_cor = all_comp_cor[all_comp_cor['mask'] == 'CSF'] + w_comp_cor = all_comp_cor[all_comp_cor['mask'] == 'WM'] + a_comp_cor = all_comp_cor[all_comp_cor['mask'] == 'combined'] - c_orig = c_comp_cor["component"] - c_new = [f"c_comp_cor_{i:02d}" for i in range(len(c_orig))] + c_orig = c_comp_cor['component'] + c_new = [f'c_comp_cor_{i:02d}' for i in range(len(c_orig))] - w_orig = w_comp_cor["component"] - w_new = [f"w_comp_cor_{i:02d}" for i in range(len(w_orig))] + w_orig = w_comp_cor['component'] + w_new = [f'w_comp_cor_{i:02d}' for i in range(len(w_orig))] - a_orig = a_comp_cor["component"] - a_new = [f"a_comp_cor_{i:02d}" for i in range(len(a_orig))] + a_orig = a_comp_cor['component'] + a_new = [f'a_comp_cor_{i:02d}' for i in range(len(a_orig))] - final_components = components.rename(columns=dict(zip(c_orig, c_new))) - final_components.rename(columns=dict(zip(w_orig, w_new)), inplace=True) - final_components.rename(columns=dict(zip(a_orig, a_new)), inplace=True) - final_components.to_csv(self._results["components_file"], sep='\t', index=False) + final_components = components.rename(columns=dict(zip(c_orig, c_new, strict=False))) + final_components.rename(columns=dict(zip(w_orig, w_new, strict=False)), inplace=True) + final_components.rename(columns=dict(zip(a_orig, a_new, strict=False)), inplace=True) + final_components.to_csv(self._results['components_file'], sep='\t', index=False) - metadata.loc[c_comp_cor.index, "component"] = c_new - metadata.loc[w_comp_cor.index, "component"] = w_new - metadata.loc[a_comp_cor.index, "component"] = a_new + metadata.loc[c_comp_cor.index, 'component'] = c_new + metadata.loc[w_comp_cor.index, 'component'] = w_new + metadata.loc[a_comp_cor.index, 'component'] = a_new - metadata.to_csv(self._results["metadata_file"], sep='\t', index=False) + metadata.to_csv(self._results['metadata_file'], sep='\t', index=False) return runtime @@ -221,6 +221,7 @@ class GatherConfounds(SimpleInterface): 0 0.1 0.2 """ + input_spec = GatherConfoundsInputSpec output_spec = GatherConfoundsOutputSpec @@ -275,7 +276,7 @@ def _gather_confounds( """ def less_breakable(a_string): - '''hardens the string to different envs (i.e., case insensitive, no whitespace, '#' ''' + """hardens the string to different envs (i.e., case insensitive, no whitespace, '#'""" return ''.join(a_string.split()).strip('#') # Taken from https://stackoverflow.com/questions/1175208/ @@ -315,7 +316,7 @@ def _adjust_indices(left_df, right_df): confounds_data = pd.DataFrame() for file_name in all_files: # assumes they all have headings already try: - new = pd.read_csv(file_name, sep="\t") + new = pd.read_csv(file_name, sep='\t') except pd.errors.EmptyDataError: # No data, nothing to concat continue @@ -337,9 +338,9 @@ def _adjust_indices(left_df, right_df): class _FMRISummaryInputSpec(BaseInterfaceInputSpec): - in_nifti = File(exists=True, mandatory=True, desc="input BOLD (4D NIfTI file)") - in_cifti = File(exists=True, desc="input BOLD (CIFTI dense 
timeseries)") - in_segm = File(exists=True, desc="volumetric segmentation corresponding to in_nifti") + in_nifti = File(exists=True, mandatory=True, desc='input BOLD (4D NIfTI file)') + in_cifti = File(exists=True, desc='input BOLD (CIFTI dense timeseries)') + in_segm = File(exists=True, desc='volumetric segmentation corresponding to in_nifti') confounds_file = File(exists=True, desc="BIDS' _confounds.tsv file") str_or_tuple = traits.Either( @@ -351,7 +352,7 @@ class _FMRISummaryInputSpec(BaseInterfaceInputSpec): str_or_tuple, minlen=1, desc='list of headers to extract from the confounds_file' ) tr = traits.Either(None, traits.Float, usedefault=True, desc='the repetition time') - drop_trs = traits.Int(0, usedefault=True, desc="dummy scans") + drop_trs = traits.Int(0, usedefault=True, desc='dummy scans') class _FMRISummaryOutputSpec(TraitedSpec): @@ -380,9 +381,9 @@ def _run_interface(self, runtime): nb.load(seg_file), remap_rois=False, labels=( - ("WM+CSF", "Edge") + ('WM+CSF', 'Edge') if has_cifti - else ("Ctx GM", "dGM", "sWM+sCSF", "dWM+dCSF", "Cb", "Edge") + else ('Ctx GM', 'dGM', 'sWM+sCSF', 'dWM+dCSF', 'Cb', 'Edge') ), ) @@ -402,7 +403,7 @@ def _run_interface(self, runtime): dataframe = pd.read_csv( self.inputs.confounds_file, - sep="\t", + sep='\t', index_col=None, dtype='float32', na_filter=True, @@ -441,5 +442,5 @@ def _run_interface(self, runtime): nskip=self.inputs.drop_trs, paired_carpet=has_cifti, ).plot() - fig.savefig(self._results["out_file"], bbox_inches="tight") + fig.savefig(self._results['out_file'], bbox_inches='tight') return runtime diff --git a/fmriprep/interfaces/conftest.py b/fmriprep/interfaces/conftest.py index 9b42b4002..a8511d04b 100644 --- a/fmriprep/interfaces/conftest.py +++ b/fmriprep/interfaces/conftest.py @@ -19,17 +19,17 @@ def _chdir(path): os.chdir(cwd) -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def data_dir(): - return Path(__file__).parent / "tests" / "data" + return Path(__file__).parent / 'tests' / 'data' @pytest.fixture(autouse=True) def _docdir(request, tmp_path): # Trigger ONLY for the doctests. - doctest_plugin = request.config.pluginmanager.getplugin("doctest") + doctest_plugin = request.config.pluginmanager.getplugin('doctest') if isinstance(request.node, doctest_plugin.DoctestItem): - copytree(Path(__file__).parent / "tests" / "data", tmp_path, dirs_exist_ok=True) + copytree(Path(__file__).parent / 'tests' / 'data', tmp_path, dirs_exist_ok=True) # Chdir only for the duration of the test. 
        with _chdir(tmp_path):
diff --git a/fmriprep/interfaces/gifti.py b/fmriprep/interfaces/gifti.py
index 0fe321d71..54a537acd 100644
--- a/fmriprep/interfaces/gifti.py
+++ b/fmriprep/interfaces/gifti.py
@@ -11,8 +11,8 @@ class CreateROIInputSpec(TraitedSpec):
     subject_id = traits.Str(desc='subject ID')
     hemisphere = traits.Enum(
-        "L",
-        "R",
+        'L',
+        'R',
         mandatory=True,
         desc='hemisphere',
     )
@@ -35,11 +35,11 @@ def _run_interface(self, runtime):
         subject = 'sub-XYZ'
         img = nb.GiftiImage.from_filename(self.inputs.thickness_file)
         # wb_command -set-structure
-        img.meta["AnatomicalStructurePrimary"] = {'L': 'CortexLeft', 'R': 'CortexRight'}[hemi]
+        img.meta['AnatomicalStructurePrimary'] = {'L': 'CortexLeft', 'R': 'CortexRight'}[hemi]
         darray = img.darrays[0]
         # wb_command -set-map-names
         meta = darray.meta
-        meta['Name'] = f"{subject}_{hemi}_ROI"
+        meta['Name'] = f'{subject}_{hemi}_ROI'
         # wb_command -metric-palette calls have no effect on ROI files

         # Compiling an odd sequence of math operations that works out to:
@@ -61,7 +61,7 @@ def _run_interface(self, runtime):

         img.darrays[0] = darray

-        out_filename = os.path.join(runtime.cwd, f"{subject}.{hemi}.roi.native.shape.gii")
+        out_filename = os.path.join(runtime.cwd, f'{subject}.{hemi}.roi.native.shape.gii')
         img.to_filename(out_filename)
-        self._results["roi_file"] = out_filename
+        self._results['roi_file'] = out_filename
         return runtime
diff --git a/fmriprep/interfaces/maths.py b/fmriprep/interfaces/maths.py
index 24231ae59..5de7cadb4 100644
--- a/fmriprep/interfaces/maths.py
+++ b/fmriprep/interfaces/maths.py
@@ -6,16 +6,16 @@ class ClipInputSpec(TraitedSpec):
-    in_file = File(exists=True, mandatory=True, desc="Input imaging file")
-    out_file = File(desc="Output file name")
+    in_file = File(exists=True, mandatory=True, desc='Input imaging file')
+    out_file = File(desc='Output file name')
     minimum = traits.Float(
-        -np.inf, usedefault=True, desc="Values under minimum are set to minimum"
+        -np.inf, usedefault=True, desc='Values under minimum are set to minimum'
     )
-    maximum = traits.Float(np.inf, usedefault=True, desc="Values over maximum are set to maximum")
+    maximum = traits.Float(np.inf, usedefault=True, desc='Values over maximum are set to maximum')


 class ClipOutputSpec(TraitedSpec):
-    out_file = File(desc="Output file name")
+    out_file = File(desc='Output file name')


 class Clip(SimpleInterface):
@@ -41,24 +41,24 @@ def _run_interface(self, runtime):
         if np.any((data < self.inputs.minimum) | (data > self.inputs.maximum)):
             if not out_file:
                 out_file = fname_presuffix(
-                    self.inputs.in_file, suffix="_clipped", newpath=runtime.cwd
+                    self.inputs.in_file, suffix='_clipped', newpath=runtime.cwd
                 )
             np.clip(data, self.inputs.minimum, self.inputs.maximum, out=data)
             img.__class__(data, img.affine, img.header).to_filename(out_file)
         elif not out_file:
             out_file = self.inputs.in_file

-        self._results["out_file"] = out_file
+        self._results['out_file'] = out_file
         return runtime


 class Label2MaskInputSpec(TraitedSpec):
-    in_file = File(exists=True, mandatory=True, desc="Input label file")
-    label_val = traits.Int(mandatory=True, dec="Label value to create mask from")
+    in_file = File(exists=True, mandatory=True, desc='Input label file')
+    label_val = traits.Int(mandatory=True, desc='Label value to create mask from')


 class Label2MaskOutputSpec(TraitedSpec):
-    out_file = File(desc="Output file name")
+    out_file = File(desc='Output file name')


 class Label2Mask(SimpleInterface):
@@ -76,9 +76,9 @@ def _run_interface(self, runtime):
         out_img = img.__class__(mask, img.affine,
img.header) out_img.set_data_dtype(np.uint8) - out_file = fname_presuffix(self.inputs.in_file, suffix="_mask", newpath=runtime.cwd) + out_file = fname_presuffix(self.inputs.in_file, suffix='_mask', newpath=runtime.cwd) out_img.to_filename(out_file) - self._results["out_file"] = out_file + self._results['out_file'] = out_file return runtime diff --git a/fmriprep/interfaces/reports.py b/fmriprep/interfaces/reports.py index 5108e6236..106c57eb2 100644 --- a/fmriprep/interfaces/reports.py +++ b/fmriprep/interfaces/reports.py @@ -222,7 +222,7 @@ class FunctionalSummaryInputSpec(TraitedSpec): tr = traits.Float(desc='Repetition time', mandatory=True) dummy_scans = traits.Either(traits.Int(), None, desc='number of dummy scans specified by user') algo_dummy_scans = traits.Int(desc='number of dummy scans determined by algorithm') - echo_idx = InputMultiObject(traits.Str, usedefault=True, desc="BIDS echo identifiers") + echo_idx = InputMultiObject(traits.Str, usedefault=True, desc='BIDS echo identifiers') orientation = traits.Str(mandatory=True, desc='Orientation of the voxel axes') @@ -255,30 +255,30 @@ def _generate_segment(self): pedir = get_world_pedir(self.inputs.orientation, self.inputs.pe_direction) - dummy_scan_tmp = "{n_dum}" + dummy_scan_tmp = '{n_dum}' if self.inputs.dummy_scans == self.inputs.algo_dummy_scans: dummy_scan_msg = ' '.join( - [dummy_scan_tmp, "(Confirmed: {n_alg} automatically detected)"] + [dummy_scan_tmp, '(Confirmed: {n_alg} automatically detected)'] ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was specified by the user and # it is not equal to the number detected by the algorithm elif self.inputs.dummy_scans is not None: dummy_scan_msg = ' '.join( - [dummy_scan_tmp, "(Warning: {n_alg} automatically detected)"] + [dummy_scan_tmp, '(Warning: {n_alg} automatically detected)'] ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was not specified by the user else: dummy_scan_msg = dummy_scan_tmp.format(n_dum=self.inputs.algo_dummy_scans) - multiecho = "Single-echo EPI sequence." + multiecho = 'Single-echo EPI sequence.' n_echos = len(self.inputs.echo_idx) if n_echos == 1: multiecho = ( - f"Multi-echo EPI sequence: only echo {self.inputs.echo_idx[0]} processed " - "in single-echo mode." + f'Multi-echo EPI sequence: only echo {self.inputs.echo_idx[0]} processed ' + 'in single-echo mode.' ) if n_echos > 2: - multiecho = f"Multi-echo EPI sequence: {n_echos} echoes." + multiecho = f'Multi-echo EPI sequence: {n_echos} echoes.' 
return FUNCTIONAL_TEMPLATE.format( pedir=pedir, @@ -305,18 +305,18 @@ def _generate_segment(self): return ABOUT_TEMPLATE.format( version=self.inputs.version, command=self.inputs.command, - date=time.strftime("%Y-%m-%d %H:%M:%S %z"), + date=time.strftime('%Y-%m-%d %H:%M:%S %z'), ) class LabeledHistogramInputSpec(nrb._SVGReportCapableInputSpec): - in_file = traits.File(exists=True, mandatory=True, desc="Image containing values to plot") + in_file = traits.File(exists=True, mandatory=True, desc='Image containing values to plot') label_file = traits.File( exists=True, - desc="Mask or label image where non-zero values will be used to extract data from in_file", + desc='Mask or label image where non-zero values will be used to extract data from in_file', ) - mapping = traits.Dict(desc="Map integer label values onto names of voxels") - xlabel = traits.Str("voxels", usedefault=True, desc="Description of values plotted") + mapping = traits.Dict(desc='Map integer label values onto names of voxels') + xlabel = traits.Str('voxels', usedefault=True, desc='Description of values plotted') class LabeledHistogram(nrb.ReportingInterface): @@ -336,7 +336,7 @@ def _generate_report(self): if self.inputs.label_file: label_img = nb.load(self.inputs.label_file) if label_img.shape != img.shape[:3] or not np.allclose(label_img.affine, img.affine): - label_img = resample_to_img(label_img, img, interpolation="nearest") + label_img = resample_to_img(label_img, img, interpolation='nearest') labels = np.uint16(label_img.dataobj) else: labels = np.uint8(data > 0) @@ -354,19 +354,19 @@ def _generate_report(self): def get_world_pedir(ornt, pe_direction): """Return world direction of phase encoding""" - axes = (("Right", "Left"), ("Anterior", "Posterior"), ("Superior", "Inferior")) - ax_idcs = {"i": 0, "j": 1, "k": 2} + axes = (('Right', 'Left'), ('Anterior', 'Posterior'), ('Superior', 'Inferior')) + ax_idcs = {'i': 0, 'j': 1, 'k': 2} if pe_direction is not None: axcode = ornt[ax_idcs[pe_direction[0]]] - inv = pe_direction[1:] == "-" + inv = pe_direction[1:] == '-' for ax in axes: for flip in (ax, ax[::-1]): if flip[not inv].startswith(axcode): - return "-".join(flip) + return '-'.join(flip) LOGGER.warning( - "Cannot determine world direction of phase encoding. " - f"Orientation: {ornt}; PE dir: {pe_direction}" + 'Cannot determine world direction of phase encoding. 
' + f'Orientation: {ornt}; PE dir: {pe_direction}' ) - return "Could not be determined - assuming Anterior-Posterior" + return 'Could not be determined - assuming Anterior-Posterior' diff --git a/fmriprep/interfaces/resampling.py b/fmriprep/interfaces/resampling.py index 6111623cb..6413b4a4d 100644 --- a/fmriprep/interfaces/resampling.py +++ b/fmriprep/interfaces/resampling.py @@ -25,46 +25,46 @@ class ResampleSeriesInputSpec(TraitedSpec): - in_file = File(exists=True, mandatory=True, desc="3D or 4D image file to resample") - ref_file = File(exists=True, mandatory=True, desc="File to resample in_file to") + in_file = File(exists=True, mandatory=True, desc='3D or 4D image file to resample') + ref_file = File(exists=True, mandatory=True, desc='File to resample in_file to') transforms = InputMultiObject( File(exists=True), mandatory=True, - desc="Transform files, from in_file to ref_file (image mode)", + desc='Transform files, from in_file to ref_file (image mode)', ) inverse = InputMultiObject( traits.Bool, value=[False], usedefault=True, - desc="Whether to invert each file in transforms", + desc='Whether to invert each file in transforms', ) - fieldmap = File(exists=True, desc="Fieldmap file resampled into reference space") - ro_time = traits.Float(desc="EPI readout time (s).") + fieldmap = File(exists=True, desc='Fieldmap file resampled into reference space') + ro_time = traits.Float(desc='EPI readout time (s).') pe_dir = traits.Enum( - "i", - "i-", - "j", - "j-", - "k", - "k-", - desc="the phase-encoding direction corresponding to in_data", + 'i', + 'i-', + 'j', + 'j-', + 'k', + 'k-', + desc='the phase-encoding direction corresponding to in_data', ) - jacobian = traits.Bool(mandatory=True, desc="Whether to apply Jacobian correction") - num_threads = traits.Int(1, usedefault=True, desc="Number of threads to use for resampling") - output_data_type = traits.Str("float32", usedefault=True, desc="Data type of output image") - order = traits.Int(3, usedefault=True, desc="Order of interpolation (0=nearest, 3=cubic)") + jacobian = traits.Bool(mandatory=True, desc='Whether to apply Jacobian correction') + num_threads = traits.Int(1, usedefault=True, desc='Number of threads to use for resampling') + output_data_type = traits.Str('float32', usedefault=True, desc='Data type of output image') + order = traits.Int(3, usedefault=True, desc='Order of interpolation (0=nearest, 3=cubic)') mode = traits.Str( 'constant', usedefault=True, - desc="How data is extended beyond its boundaries. " - "See scipy.ndimage.map_coordinates for more details.", + desc='How data is extended beyond its boundaries. 
' + 'See scipy.ndimage.map_coordinates for more details.', ) - cval = traits.Float(0.0, usedefault=True, desc="Value to fill past edges of data") - prefilter = traits.Bool(True, usedefault=True, desc="Spline-prefilter data if order > 1") + cval = traits.Float(0.0, usedefault=True, desc='Value to fill past edges of data') + prefilter = traits.Bool(True, usedefault=True, desc='Spline-prefilter data if order > 1') class ResampleSeriesOutputSpec(TraitedSpec): - out_file = File(desc="Resampled image or series") + out_file = File(desc='Resampled image or series') class ResampleSeries(SimpleInterface): @@ -91,12 +91,12 @@ def _run_interface(self, runtime): pe_info = None if pe_dir and ro_time: - pe_axis = "ijk".index(pe_dir[0]) - pe_flip = pe_dir.endswith("-") + pe_axis = 'ijk'.index(pe_dir[0]) + pe_flip = pe_dir.endswith('-') # Nitransforms displacements are positive source, axcodes = ensure_positive_cosines(source) - axis_flip = axcodes[pe_axis] in "LPI" + axis_flip = axcodes[pe_axis] in 'LPI' pe_info = [(pe_axis, -ro_time if (axis_flip ^ pe_flip) else ro_time)] * nvols @@ -122,29 +122,29 @@ def _run_interface(self, runtime): class ReconstructFieldmapInputSpec(TraitedSpec): in_coeffs = InputMultiObject( - File(exists=True), mandatory=True, desc="SDCflows-style spline coefficient files" + File(exists=True), mandatory=True, desc='SDCflows-style spline coefficient files' ) target_ref_file = File( - exists=True, mandatory=True, desc="Image to reconstruct the field in alignment with" + exists=True, mandatory=True, desc='Image to reconstruct the field in alignment with' ) fmap_ref_file = File( - exists=True, mandatory=True, desc="Reference file aligned with coefficients" + exists=True, mandatory=True, desc='Reference file aligned with coefficients' ) transforms = InputMultiObject( File(exists=True), mandatory=True, - desc="Transform files, from in_file to ref_file (image mode)", + desc='Transform files, from in_file to ref_file (image mode)', ) inverse = InputMultiObject( traits.Bool, value=[False], usedefault=True, - desc="Whether to invert each file in transforms", + desc='Whether to invert each file in transforms', ) class ReconstructFieldmapOutputSpec(TraitedSpec): - out_file = File(desc="Fieldmap reconstructed in target_ref_file space") + out_file = File(desc='Fieldmap reconstructed in target_ref_file space') class ReconstructFieldmap(SimpleInterface): @@ -181,13 +181,13 @@ def _run_interface(self, runtime): class DistortionParametersInputSpec(TraitedSpec): - in_file = File(exists=True, desc="EPI image corresponding to the metadata") - metadata = traits.Dict(mandatory=True, desc="metadata corresponding to the inputs") + in_file = File(exists=True, desc='EPI image corresponding to the metadata') + metadata = traits.Dict(mandatory=True, desc='metadata corresponding to the inputs') class DistortionParametersOutputSpec(TraitedSpec): readout_time = traits.Float - pe_direction = traits.Enum("i", "i-", "j", "j-", "k", "k-") + pe_direction = traits.Enum('i', 'i-', 'j', 'j-', 'k', 'k-') class DistortionParameters(SimpleInterface): @@ -204,11 +204,11 @@ def _run_interface(self, runtime): from sdcflows.utils.epimanip import get_trt try: - self._results["readout_time"] = get_trt( + self._results['readout_time'] = get_trt( self.inputs.metadata, self.inputs.in_file or None, ) - self._results["pe_direction"] = self.inputs.metadata["PhaseEncodingDirection"] + self._results['pe_direction'] = self.inputs.metadata['PhaseEncodingDirection'] except (KeyError, ValueError): pass @@ -547,7 +547,7 @@ def resample_image( 
else: if any(isinstance(xfm, nt.linear.LinearTransformsMapping) for xfm in transforms): classes = [xfm.__class__.__name__ for xfm in transforms] - raise ValueError(f"HMC transforms must come last. Found sequence: {classes}") + raise ValueError(f'HMC transforms must come last. Found sequence: {classes}') transform_list: list = transforms.transforms hmc = [] diff --git a/fmriprep/interfaces/tests/test_confounds.py b/fmriprep/interfaces/tests/test_confounds.py index b32cf278e..c42e5a163 100644 --- a/fmriprep/interfaces/tests/test_confounds.py +++ b/fmriprep/interfaces/tests/test_confounds.py @@ -6,14 +6,14 @@ def test_RenameACompCor(tmp_path, data_dir): - renamer = pe.Node(confounds.RenameACompCor(), name="renamer", base_dir=str(tmp_path)) - renamer.inputs.components_file = data_dir / "acompcor_truncated.tsv" - renamer.inputs.metadata_file = data_dir / "component_metadata_truncated.tsv" + renamer = pe.Node(confounds.RenameACompCor(), name='renamer', base_dir=str(tmp_path)) + renamer.inputs.components_file = data_dir / 'acompcor_truncated.tsv' + renamer.inputs.metadata_file = data_dir / 'component_metadata_truncated.tsv' res = renamer.run() - target_components = Path.read_text(data_dir / "acompcor_renamed.tsv") - target_meta = Path.read_text(data_dir / "component_metadata_renamed.tsv") + target_components = Path.read_text(data_dir / 'acompcor_renamed.tsv') + target_meta = Path.read_text(data_dir / 'component_metadata_renamed.tsv') renamed_components = Path(res.outputs.components_file).read_text() renamed_meta = Path(res.outputs.metadata_file).read_text() assert renamed_components == target_components @@ -21,11 +21,11 @@ def test_RenameACompCor(tmp_path, data_dir): def test_FilterDropped(tmp_path, data_dir): - filt = pe.Node(confounds.FilterDropped(), name="filt", base_dir=str(tmp_path)) - filt.inputs.in_file = data_dir / "component_metadata_truncated.tsv" + filt = pe.Node(confounds.FilterDropped(), name='filt', base_dir=str(tmp_path)) + filt.inputs.in_file = data_dir / 'component_metadata_truncated.tsv' res = filt.run() - target_meta = Path.read_text(data_dir / "component_metadata_filtered.tsv") + target_meta = Path.read_text(data_dir / 'component_metadata_filtered.tsv') filtered_meta = Path(res.outputs.out_file).read_text() assert filtered_meta == target_meta diff --git a/fmriprep/interfaces/tests/test_maths.py b/fmriprep/interfaces/tests/test_maths.py index 3fa98a9e0..209878cd5 100644 --- a/fmriprep/interfaces/tests/test_maths.py +++ b/fmriprep/interfaces/tests/test_maths.py @@ -6,19 +6,19 @@ def test_Clip(tmp_path): - in_file = str(tmp_path / "input.nii") + in_file = str(tmp_path / 'input.nii') data = np.array([[[-1.0, 1.0], [-2.0, 2.0]]]) nb.Nifti1Image(data, np.eye(4)).to_filename(in_file) - threshold = pe.Node(Clip(in_file=in_file, minimum=0), name="threshold", base_dir=tmp_path) + threshold = pe.Node(Clip(in_file=in_file, minimum=0), name='threshold', base_dir=tmp_path) ret = threshold.run() - assert ret.outputs.out_file == str(tmp_path / "threshold/input_clipped.nii") + assert ret.outputs.out_file == str(tmp_path / 'threshold/input_clipped.nii') out_img = nb.load(ret.outputs.out_file) assert np.allclose(out_img.get_fdata(), [[[0.0, 1.0], [0.0, 2.0]]]) - threshold2 = pe.Node(Clip(in_file=in_file, minimum=-3), name="threshold2", base_dir=tmp_path) + threshold2 = pe.Node(Clip(in_file=in_file, minimum=-3), name='threshold2', base_dir=tmp_path) ret = threshold2.run() @@ -26,18 +26,18 @@ def test_Clip(tmp_path): out_img = nb.load(ret.outputs.out_file) assert np.allclose(out_img.get_fdata(), 
[[[-1.0, 1.0], [-2.0, 2.0]]]) - clip = pe.Node(Clip(in_file=in_file, minimum=-1, maximum=1), name="clip", base_dir=tmp_path) + clip = pe.Node(Clip(in_file=in_file, minimum=-1, maximum=1), name='clip', base_dir=tmp_path) ret = clip.run() - assert ret.outputs.out_file == str(tmp_path / "clip/input_clipped.nii") + assert ret.outputs.out_file == str(tmp_path / 'clip/input_clipped.nii') out_img = nb.load(ret.outputs.out_file) assert np.allclose(out_img.get_fdata(), [[[-1.0, 1.0], [-1.0, 1.0]]]) - nonpositive = pe.Node(Clip(in_file=in_file, maximum=0), name="nonpositive", base_dir=tmp_path) + nonpositive = pe.Node(Clip(in_file=in_file, maximum=0), name='nonpositive', base_dir=tmp_path) ret = nonpositive.run() - assert ret.outputs.out_file == str(tmp_path / "nonpositive/input_clipped.nii") + assert ret.outputs.out_file == str(tmp_path / 'nonpositive/input_clipped.nii') out_img = nb.load(ret.outputs.out_file) assert np.allclose(out_img.get_fdata(), [[[-1.0, 0.0], [-2.0, 0.0]]]) diff --git a/fmriprep/interfaces/tests/test_reports.py b/fmriprep/interfaces/tests/test_reports.py index 02c9db823..662657f13 100644 --- a/fmriprep/interfaces/tests/test_reports.py +++ b/fmriprep/interfaces/tests/test_reports.py @@ -26,7 +26,7 @@ @pytest.mark.parametrize( - "orientation,pe_dir,expected", + 'orientation,pe_dir,expected', [ ('RAS', 'j', 'Posterior-Anterior'), ('RAS', 'j-', 'Anterior-Posterior'), diff --git a/fmriprep/interfaces/workbench.py b/fmriprep/interfaces/workbench.py index f13fb5b67..5c20dee51 100644 --- a/fmriprep/interfaces/workbench.py +++ b/fmriprep/interfaces/workbench.py @@ -14,11 +14,11 @@ ) from nipype.interfaces.workbench.base import WBCommand -iflogger = logging.getLogger("nipype.interface") +iflogger = logging.getLogger('nipype.interface') class OpenMPTraitedSpec(CommandLineInputSpec): - num_threads = traits.Int(desc="allows for specifying more threads") + num_threads = traits.Int(desc='allows for specifying more threads') class OpenMPCommandMixin(CommandLine): @@ -28,20 +28,20 @@ class OpenMPCommandMixin(CommandLine): def __init__(self, **inputs): super().__init__(**inputs) - self.inputs.on_trait_change(self._num_threads_update, "num_threads") + self.inputs.on_trait_change(self._num_threads_update, 'num_threads') if not self._num_threads: - self._num_threads = os.environ.get("OMP_NUM_THREADS", None) + self._num_threads = os.environ.get('OMP_NUM_THREADS', None) if not isdefined(self.inputs.num_threads) and self._num_threads: self.inputs.num_threads = int(self._num_threads) self._num_threads_update() def _num_threads_update(self): if self.inputs.num_threads: - self.inputs.environ.update({"OMP_NUM_THREADS": str(self.inputs.num_threads)}) + self.inputs.environ.update({'OMP_NUM_THREADS': str(self.inputs.num_threads)}) def run(self, **inputs): - if "num_threads" in inputs: - self.inputs.num_threads = inputs["num_threads"] + if 'num_threads' in inputs: + self.inputs.num_threads = inputs['num_threads'] self._num_threads_update() return super().run(**inputs) @@ -50,89 +50,89 @@ class MetricDilateInputSpec(OpenMPTraitedSpec): in_file = File( exists=True, mandatory=True, - argstr="%s ", + argstr='%s ', position=0, - desc="the metric to dilate", + desc='the metric to dilate', ) surf_file = File( exists=True, mandatory=True, - argstr="%s ", + argstr='%s ', position=1, - desc="the surface to compute on", + desc='the surface to compute on', ) distance = traits.Float( mandatory=True, - argstr="%f ", + argstr='%f ', position=2, - desc="distance in mm to dilate", + desc='distance in mm to dilate', ) 
out_file = File( - name_source=["in_file"], - name_template="%s.func.gii", + name_source=['in_file'], + name_template='%s.func.gii', keep_extension=False, - argstr="%s ", + argstr='%s ', position=3, - desc="output - the output metric", + desc='output - the output metric', ) bad_vertex_roi_file = File( - argstr="-bad-vertex-roi %s ", + argstr='-bad-vertex-roi %s ', position=4, - desc="metric file, positive values denote vertices to have their values replaced", + desc='metric file, positive values denote vertices to have their values replaced', ) data_roi_file = File( - argstr="-data-roi %s ", + argstr='-data-roi %s ', position=5, - desc="metric file, positive values denote vertices that have data", + desc='metric file, positive values denote vertices that have data', ) column = traits.Int( position=6, - argstr="-column %d ", - desc="the column number", + argstr='-column %d ', + desc='the column number', ) nearest = traits.Bool( position=7, - argstr="-nearest ", - desc="use the nearest good value instead of a weighted average", + argstr='-nearest ', + desc='use the nearest good value instead of a weighted average', ) linear = traits.Bool( position=8, - argstr="-linear ", - desc="fill in values with linear interpolation along strongest gradient", + argstr='-linear ', + desc='fill in values with linear interpolation along strongest gradient', ) exponent = traits.Float( - argstr="-exponent %f ", + argstr='-exponent %f ', position=9, default=6.0, - desc="exponent n to use in (area / (distance ^ n)) as the " - "weighting function (default 6)", + desc='exponent n to use in (area / (distance ^ n)) as the ' + 'weighting function (default 6)', ) corrected_areas = File( - argstr="-corrected-areas %s ", + argstr='-corrected-areas %s ', position=10, - desc="vertex areas to use instead of computing them from the surface", + desc='vertex areas to use instead of computing them from the surface', ) legacy_cutoff = traits.Bool( position=11, - argstr="-legacy-cutoff ", - desc="use the v1.3.2 method of choosing how many vertices to " - "use when calculating the dilated value with weighted method", + argstr='-legacy-cutoff ', + desc='use the v1.3.2 method of choosing how many vertices to ' + 'use when calculating the dilated value with weighted method', ) class MetricDilateOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="output file") + out_file = File(exists=True, desc='output file') class MetricDilate(WBCommand, OpenMPCommandMixin): @@ -158,98 +158,98 @@ class MetricDilate(WBCommand, OpenMPCommandMixin): input_spec = MetricDilateInputSpec output_spec = MetricDilateOutputSpec - _cmd = "wb_command -metric-dilate " + _cmd = 'wb_command -metric-dilate ' class MetricResampleInputSpec(OpenMPTraitedSpec): in_file = File( exists=True, mandatory=True, - argstr="%s", + argstr='%s', position=0, - desc="The metric file to resample", + desc='The metric file to resample', ) current_sphere = File( exists=True, mandatory=True, - argstr="%s", + argstr='%s', position=1, - desc="A sphere surface with the mesh that the metric is currently on", + desc='A sphere surface with the mesh that the metric is currently on', ) new_sphere = File( exists=True, mandatory=True, - argstr="%s", + argstr='%s', position=2, - desc="A sphere surface that is in register with and" - " has the desired output mesh", + desc='A sphere surface that is in register with and' + ' has the desired output mesh', ) method = traits.Enum( - "ADAP_BARY_AREA", - "BARYCENTRIC", - argstr="%s", + 'ADAP_BARY_AREA', + 'BARYCENTRIC', + argstr='%s', 
mandatory=True, position=3, - desc="The method name - ADAP_BARY_AREA method is recommended for" - " ordinary metric data, because it should use all data while" - " downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used," - " exactly one of area_surfs or area_metrics must be specified", + desc='The method name - ADAP_BARY_AREA method is recommended for' + ' ordinary metric data, because it should use all data while' + ' downsampling, unlike BARYCENTRIC. If ADAP_BARY_AREA is used,' + ' exactly one of area_surfs or area_metrics must be specified', ) out_file = File( - name_source=["new_sphere"], - name_template="%s.out", + name_source=['new_sphere'], + name_template='%s.out', keep_extension=True, - argstr="%s", + argstr='%s', position=4, - desc="The output metric", + desc='The output metric', ) area_surfs = traits.Bool( position=5, - argstr="-area-surfs", - xor=["area_metrics"], - desc="Specify surfaces to do vertex area correction based on", + argstr='-area-surfs', + xor=['area_metrics'], + desc='Specify surfaces to do vertex area correction based on', ) area_metrics = traits.Bool( position=5, - argstr="-area-metrics", - xor=["area_surfs"], - desc="Specify vertex area metrics to do area correction based on", + argstr='-area-metrics', + xor=['area_surfs'], + desc='Specify vertex area metrics to do area correction based on', ) current_area = File( exists=True, position=6, - argstr="%s", - desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh", + argstr='%s', + desc='A relevant anatomical surface with mesh OR' + ' a metric file with vertex areas for mesh', ) new_area = File( exists=True, position=7, - argstr="%s", - desc="A relevant anatomical surface with mesh OR" - " a metric file with vertex areas for mesh", + argstr='%s', + desc='A relevant anatomical surface with mesh OR' + ' a metric file with vertex areas for mesh', ) roi_metric = File( exists=True, position=8, - argstr="-current-roi %s", - desc="Input roi on the current mesh used to exclude non-data vertices", + argstr='-current-roi %s', + desc='Input roi on the current mesh used to exclude non-data vertices', ) valid_roi_out = traits.Bool( position=9, - argstr="-valid-roi-out", - desc="Output the ROI of vertices that got data from valid source vertices", + argstr='-valid-roi-out', + desc='Output the ROI of vertices that got data from valid source vertices', ) largest = traits.Bool( position=10, - argstr="-largest", - desc="Use only the value of the vertex with the largest weight", + argstr='-largest', + desc='Use only the value of the vertex with the largest weight', ) class MetricResampleOutputSpec(TraitedSpec): - out_file = File(exists=True, desc="the output metric") - roi_file = File(desc="ROI of vertices that got data from valid source vertices") + out_file = File(exists=True, desc='the output metric') + roi_file = File(desc='ROI of vertices that got data from valid source vertices') class MetricResample(WBCommand, OpenMPCommandMixin): @@ -276,184 +276,182 @@ class MetricResample(WBCommand, OpenMPCommandMixin): input_spec = MetricResampleInputSpec output_spec = MetricResampleOutputSpec - _cmd = "wb_command -metric-resample" + _cmd = 'wb_command -metric-resample' def _format_arg(self, opt, spec, val): - if opt in ("current_area", "new_area"): + if opt in ('current_area', 'new_area'): if not self.inputs.area_surfs and not self.inputs.area_metrics: - raise ValueError( - "{} was set but neither area_surfs or" " area_metrics were set".format(opt) - ) - if opt == "method": + raise 
ValueError(f'{opt} was set but neither area_surfs or area_metrics were set') + if opt == 'method': if ( - val == "ADAP_BARY_AREA" + val == 'ADAP_BARY_AREA' and not self.inputs.area_surfs and not self.inputs.area_metrics ): - raise ValueError("Exactly one of area_surfs or area_metrics" " must be specified") - if opt == "valid_roi_out" and val: + raise ValueError('Exactly one of area_surfs or area_metrics must be specified') + if opt == 'valid_roi_out' and val: # generate a filename and add it to argstr - roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") - iflogger.info("Setting roi output file as", roi_out) - spec.argstr += " " + roi_out + roi_out = self._gen_filename(self.inputs.in_file, suffix='_roi') + iflogger.info('Setting roi output file as', roi_out) + spec.argstr += ' ' + roi_out return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = super()._list_outputs() if self.inputs.valid_roi_out: - roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") - outputs["roi_file"] = os.path.abspath(roi_file) + roi_file = self._gen_filename(self.inputs.in_file, suffix='_roi') + outputs['roi_file'] = os.path.abspath(roi_file) return outputs class VolumeToSurfaceMappingInputSpec(OpenMPTraitedSpec): volume_file = File( exists=True, - argstr="%s", + argstr='%s', mandatory=True, position=1, - desc="the volume to map data from", + desc='the volume to map data from', ) surface_file = File( exists=True, - argstr="%s", + argstr='%s', mandatory=True, position=2, - desc="the surface to map the data onto", + desc='the surface to map the data onto', ) out_file = File( - name_source=["surface_file"], - name_template="%s_mapped.func.gii", + name_source=['surface_file'], + name_template='%s_mapped.func.gii', keep_extension=False, - argstr="%s", + argstr='%s', position=3, - desc="the output metric file", + desc='the output metric file', ) method = traits.Enum( - "trilinear", - "enclosing", - "cubic", - "ribbon-constrained", - "myelin-style", - argstr="-%s", + 'trilinear', + 'enclosing', + 'cubic', + 'ribbon-constrained', + 'myelin-style', + argstr='-%s', position=4, - desc="the interpolation method to use", + desc='the interpolation method to use', ) _ribbon_constrained = [ - "inner_surface", - "outer_surface", - "volume_roi", - "weighted", - "voxel_subdiv", - "gaussian", - "interpolate", - "bad_vertices_out", - "output_weights", - "output_weights_text", + 'inner_surface', + 'outer_surface', + 'volume_roi', + 'weighted', + 'voxel_subdiv', + 'gaussian', + 'interpolate', + 'bad_vertices_out', + 'output_weights', + 'output_weights_text', ] _myelin_style = [ - "ribbon_roi", - "thickness", - "sigma", - "legacy_bug", + 'ribbon_roi', + 'thickness', + 'sigma', + 'legacy_bug', ] inner_surface = File( exists=True, - argstr="%s", + argstr='%s', position=5, - desc="the inner surface of the ribbon [-ribbon-constrained]", + desc='the inner surface of the ribbon [-ribbon-constrained]', xor=_myelin_style, ) outer_surface = File( exists=True, - argstr="%s", + argstr='%s', position=6, - desc="the outer surface of the ribbon [-ribbon-constrained]", + desc='the outer surface of the ribbon [-ribbon-constrained]', xor=_myelin_style, ) volume_roi = File( exists=True, - argstr="-volume-roi %s", + argstr='-volume-roi %s', position=7, - desc="use a volume roi [-ribbon-constrained]", + desc='use a volume roi [-ribbon-constrained]', xor=_myelin_style, ) weighted = traits.Bool( - argstr="-weighted", + argstr='-weighted', position=8, - desc="treat the roi values as weightings rather than binary 
[-ribbon-constrained]", - requires=["volume_roi"], + desc='treat the roi values as weightings rather than binary [-ribbon-constrained]', + requires=['volume_roi'], xor=_myelin_style, ) voxel_subdiv = traits.Int( default_value=3, - argstr="-voxel-subdiv %d", - desc="voxel divisions while estimating voxel weights [-ribbon-constrained]", + argstr='-voxel-subdiv %d', + desc='voxel divisions while estimating voxel weights [-ribbon-constrained]', xor=_myelin_style, ) thin_columns = traits.Bool( - argstr="-thin-columns", - desc="use non-overlapping polyhedra [-ribbon-constrained]", + argstr='-thin-columns', + desc='use non-overlapping polyhedra [-ribbon-constrained]', xor=_myelin_style, ) gaussian = traits.Float( - argstr="-gaussian %g", + argstr='-gaussian %g', desc="reduce weight to voxels that aren't near [-ribbon-constrained]", xor=_myelin_style, ) interpolate = traits.Enum( - "CUBIC", - "TRILINEAR", - "ENCLOSING_VOXEL", - argstr="-interpolate %s", - desc="instead of a weighted average of voxels, " - "interpolate at subpoints inside the ribbon [-ribbon-constrained]", + 'CUBIC', + 'TRILINEAR', + 'ENCLOSING_VOXEL', + argstr='-interpolate %s', + desc='instead of a weighted average of voxels, ' + 'interpolate at subpoints inside the ribbon [-ribbon-constrained]', xor=_myelin_style, ) bad_vertices_out = File( - argstr="-bad-vertices-out %s", + argstr='-bad-vertices-out %s', desc="output an ROI of which vertices didn't intersect any valid voxels", xor=_myelin_style, ) output_weights = traits.Int( - argstr="-output-weights %(0)d output_weights.nii.gz", - desc="write the voxel weights for a vertex to a volume file", + argstr='-output-weights %(0)d output_weights.nii.gz', + desc='write the voxel weights for a vertex to a volume file', xor=_myelin_style, ) output_weights_text = traits.File( - argstr="-output-weights-text %s", - desc="write the voxel weights for all vertices to a text file", + argstr='-output-weights-text %s', + desc='write the voxel weights for all vertices to a text file', xor=_myelin_style, ) ribbon_roi = File( exists=True, - argstr="%s", + argstr='%s', position=5, - desc="an roi volume of the cortical ribbon for this hemisphere [-myelin-style]", + desc='an roi volume of the cortical ribbon for this hemisphere [-myelin-style]', xor=_ribbon_constrained, ) thickness = File( exists=True, - argstr="%s", + argstr='%s', position=6, - desc="the thickness metric file for this hemisphere [-myelin-style]", + desc='the thickness metric file for this hemisphere [-myelin-style]', xor=_ribbon_constrained, ) sigma = traits.Float( - argstr="%g", + argstr='%g', position=7, - desc="gaussian kernel in mm for weighting voxels within range [-myelin-style]", + desc='gaussian kernel in mm for weighting voxels within range [-myelin-style]', xor=_ribbon_constrained, ) legacy_bug = traits.Bool( - argstr="-legacy-bug", + argstr='-legacy-bug', position=8, - desc="use the old bug in the myelin-style algorithm [-myelin-style]", + desc='use the old bug in the myelin-style algorithm [-myelin-style]', xor=_ribbon_constrained, ) subvol_select = traits.Int( - argstr="-subvol-select %d", - desc="select a single subvolume to map", + argstr='-subvol-select %d', + desc='select a single subvolume to map', ) """\ @@ -521,10 +519,10 @@ class VolumeToSurfaceMappingInputSpec(OpenMPTraitedSpec): class VolumeToSurfaceMappingOutputSpec(TraitedSpec): - out_file = File(desc="the output metric file") - bad_vertices_file = File(desc="the output metric file of vertices that have no data") - weights_file = File(desc="volume to write 
the weights to") - weights_text_file = File(desc="the output text filename") + out_file = File(desc='the output metric file') + bad_vertices_file = File(desc='the output metric file of vertices that have no data') + weights_file = File(desc='volume to write the weights to') + weights_text_file = File(desc='the output text filename') class VolumeToSurfaceMapping(WBCommand, OpenMPCommandMixin): @@ -585,25 +583,25 @@ class VolumeToSurfaceMapping(WBCommand, OpenMPCommandMixin): input_spec = VolumeToSurfaceMappingInputSpec output_spec = VolumeToSurfaceMappingOutputSpec - _cmd = "wb_command -volume-to-surface-mapping" + _cmd = 'wb_command -volume-to-surface-mapping' def _format_arg(self, opt, spec, val): if opt in self.input_spec._ribbon_constrained: - if self.inputs.method != "ribbon-constrained": - return "" + if self.inputs.method != 'ribbon-constrained': + return '' elif opt in self.input_spec._myelin_style: - if self.inputs.method != "myelin-style": - return "" + if self.inputs.method != 'myelin-style': + return '' return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = super()._list_outputs() if isdefined(self.inputs.bad_vertices_out): - outputs["bad_vertices_file"] = os.path.abspath(self.inputs.bad_vertices_out) + outputs['bad_vertices_file'] = os.path.abspath(self.inputs.bad_vertices_out) if isdefined(self.inputs.output_weights): - outputs["weights_file"] = os.path.abspath(self.inputs.output_weights) + outputs['weights_file'] = os.path.abspath(self.inputs.output_weights) if isdefined(self.inputs.output_weights_text): - outputs["weights_text_file"] = os.path.abspath(self.inputs.output_weights_text) + outputs['weights_text_file'] = os.path.abspath(self.inputs.output_weights_text) return outputs @@ -624,36 +622,36 @@ class MetricMaskInputSpec(CommandLineInputSpec): in_file = File( exists=True, - argstr="%s", + argstr='%s', position=1, mandatory=True, - desc="input metric file", + desc='input metric file', ) mask = File( exists=True, - argstr="%s", + argstr='%s', position=2, mandatory=True, - desc="mask metric file", + desc='mask metric file', ) out_file = File( - name_template="%s_masked.func.gii", - name_source=["in_file"], + name_template='%s_masked.func.gii', + name_source=['in_file'], keep_extension=False, - argstr="%s", + argstr='%s', position=3, - desc="output metric file", + desc='output metric file', ) column = traits.Either( traits.Int, traits.String, - argstr="-column %s", - desc="select a single column by number or name", + argstr='-column %s', + desc='select a single column by number or name', ) class MetricMaskOutputSpec(TraitedSpec): - out_file = File(desc="output metric file") + out_file = File(desc='output metric file') class MetricMask(WBCommand): @@ -671,7 +669,7 @@ class MetricMask(WBCommand): input_spec = MetricMaskInputSpec output_spec = MetricMaskOutputSpec - _cmd = "wb_command -metric-mask" + _cmd = 'wb_command -metric-mask' class MetricFillHolesInputSpec(TraitedSpec): @@ -692,34 +690,34 @@ class MetricFillHolesInputSpec(TraitedSpec): surface_file = File( mandatory=True, exists=True, - argstr="%s", + argstr='%s', position=1, - desc="surface to use for neighbor information", + desc='surface to use for neighbor information', ) metric_file = File( mandatory=True, exists=True, - argstr="%s", + argstr='%s', position=2, - desc="input ROI metric", + desc='input ROI metric', ) out_file = File( - name_template="%s_filled.shape.gii", - name_source="metric_file", + name_template='%s_filled.shape.gii', + name_source='metric_file', keep_extension=False, - 
argstr="%s", + argstr='%s', position=3, - desc="output ROI metric", + desc='output ROI metric', ) corrected_areas = File( exists=True, - argstr="-corrected-areas %s", - desc="vertex areas to use instead of computing them from the surface", + argstr='-corrected-areas %s', + desc='vertex areas to use instead of computing them from the surface', ) class MetricFillHolesOutputSpec(TraitedSpec): - out_file = File(desc="output ROI metric") + out_file = File(desc='output ROI metric') class MetricFillHoles(WBCommand): @@ -738,7 +736,7 @@ class MetricFillHoles(WBCommand): input_spec = MetricFillHolesInputSpec output_spec = MetricFillHolesOutputSpec - _cmd = "wb_command -metric-fill-holes" + _cmd = 'wb_command -metric-fill-holes' class MetricRemoveIslandsInputSpec(TraitedSpec): @@ -759,34 +757,34 @@ class MetricRemoveIslandsInputSpec(TraitedSpec): surface_file = File( mandatory=True, exists=True, - argstr="%s", + argstr='%s', position=1, - desc="surface to use for neighbor information", + desc='surface to use for neighbor information', ) metric_file = File( mandatory=True, exists=True, - argstr="%s", + argstr='%s', position=2, - desc="input ROI metric", + desc='input ROI metric', ) out_file = File( - name_template="%s_noislands.shape.gii", - name_source="metric_file", + name_template='%s_noislands.shape.gii', + name_source='metric_file', keep_extension=False, - argstr="%s", + argstr='%s', position=3, - desc="output ROI metric", + desc='output ROI metric', ) corrected_areas = File( exists=True, - argstr="-corrected-areas %s", - desc="vertex areas to use instead of computing them from the surface", + argstr='-corrected-areas %s', + desc='vertex areas to use instead of computing them from the surface', ) class MetricRemoveIslandsOutputSpec(TraitedSpec): - out_file = File(desc="output ROI metric") + out_file = File(desc='output ROI metric') class MetricRemoveIslands(WBCommand): @@ -805,4 +803,4 @@ class MetricRemoveIslands(WBCommand): input_spec = MetricRemoveIslandsInputSpec output_spec = MetricRemoveIslandsOutputSpec - _cmd = "wb_command -metric-remove-islands" + _cmd = 'wb_command -metric-remove-islands' diff --git a/fmriprep/reports/core.py b/fmriprep/reports/core.py index 950524e50..c3a76c22d 100644 --- a/fmriprep/reports/core.py +++ b/fmriprep/reports/core.py @@ -34,18 +34,18 @@ def _load_config(self, config): from yaml import safe_load as load settings = load(config.read_text()) - self.packagename = self.packagename or settings.get("package", None) + self.packagename = self.packagename or settings.get('package', None) # Removed from here: Appending self.packagename to self.root and self.out_dir # In this version, pass reportlets_dir and out_dir with fmriprep in the path. 
if self.subject_id is not None: - self.root = self.root / f"sub-{self.subject_id}" + self.root = self.root / f'sub-{self.subject_id}' - if "template_path" in settings: - self.template_path = config.parent / settings["template_path"] + if 'template_path' in settings: + self.template_path = config.parent / settings['template_path'] - self.index(settings["sections"]) + self.index(settings['sections']) # @@ -92,7 +92,7 @@ def generate_reports( """Execute run_reports on a list of subjects.""" reportlets_dir = None if work_dir is not None: - reportlets_dir = Path(work_dir) / "reportlets" + reportlets_dir = Path(work_dir) / 'reportlets' report_errors = [ run_reports( output_dir, @@ -109,13 +109,15 @@ def generate_reports( if errno: import logging - logger = logging.getLogger("cli") - error_list = ", ".join( - f"{subid} ({err})" for subid, err in zip(subject_list, report_errors) if err + logger = logging.getLogger('cli') + error_list = ', '.join( + f'{subid} ({err})' + for subid, err in zip(subject_list, report_errors, strict=False) + if err ) logger.error( - "Preprocessing did not finish successfully. Errors occurred while processing " - "data from participants: %s. Check the HTML reports for details.", + 'Preprocessing did not finish successfully. Errors occurred while processing ' + 'data from participants: %s. Check the HTML reports for details.', error_list, ) return errno diff --git a/fmriprep/tests/test_config.py b/fmriprep/tests/test_config.py index 92b7f0703..8174d2bd9 100644 --- a/fmriprep/tests/test_config.py +++ b/fmriprep/tests/test_config.py @@ -22,7 +22,6 @@ # """Check the configuration module and file.""" import os -from pathlib import Path from unittest.mock import patch import pytest @@ -46,7 +45,7 @@ def _reset_config(): def test_reset_config(): execution = config.execution - setattr(execution, 'bids_dir', 'TESTING') + execution.bids_dir = 'TESTING' assert config.execution.bids_dir == 'TESTING' _reset_config() assert config.execution.bids_dir is None @@ -58,7 +57,7 @@ def test_reset_config(): def test_config_spaces(): """Check that all necessary spaces are recorded in the config.""" - settings = loads(data.load.readable("tests/config.toml").read_text()) + settings = loads(data.load.readable('tests/config.toml').read_text()) for sectionname, configs in settings.items(): if sectionname != 'environment': section = getattr(config, sectionname) @@ -68,9 +67,9 @@ def test_config_spaces(): config.init_spaces() spaces = config.workflow.spaces - assert "MNI152NLin6Asym:res-1" not in [str(s) for s in spaces.get_standard(full_spec=True)] + assert 'MNI152NLin6Asym:res-1' not in [str(s) for s in spaces.get_standard(full_spec=True)] - assert "MNI152NLin6Asym_res-1" not in [ + assert 'MNI152NLin6Asym_res-1' not in [ format_reference((s.fullname, s.spec)) for s in spaces.references if s.standard and s.dim == 3 @@ -80,9 +79,9 @@ def test_config_spaces(): config.init_spaces() spaces = config.workflow.spaces - assert "MNI152NLin6Asym:res-1" in [str(s) for s in spaces.get_standard(full_spec=True)] + assert 'MNI152NLin6Asym:res-1' in [str(s) for s in spaces.get_standard(full_spec=True)] - assert "MNI152NLin6Asym_res-1" in [ + assert 'MNI152NLin6Asym_res-1' in [ format_reference((s.fullname, s.spec)) for s in spaces.references if s.standard and s.dim == 3 @@ -104,17 +103,17 @@ def test_config_spaces(): @pytest.mark.parametrize( - "master_seed,ants_seed,numpy_seed", [(1, 17612, 8272), (100, 19094, 60232)] + 'master_seed,ants_seed,numpy_seed', [(1, 17612, 8272), (100, 19094, 60232)] ) def 
test_prng_seed(master_seed, ants_seed, numpy_seed): """Ensure seeds are properly tracked""" seeds = config.seeds with patch.dict(os.environ, {}): seeds.load({'_random_seed': master_seed}, init=True) - assert getattr(seeds, 'master') == master_seed + assert seeds.master == master_seed assert seeds.ants == ants_seed assert seeds.numpy == numpy_seed - assert os.getenv("ANTS_RANDOM_SEED") == str(ants_seed) + assert os.getenv('ANTS_RANDOM_SEED') == str(ants_seed) _reset_config() for seed in ('_random_seed', 'master', 'ants', 'numpy'): diff --git a/fmriprep/tests/test_fsl6.py b/fmriprep/tests/test_fsl6.py index 21b19e8e5..5c6cde597 100644 --- a/fmriprep/tests/test_fsl6.py +++ b/fmriprep/tests/test_fsl6.py @@ -7,25 +7,25 @@ from nipype.interfaces import fsl fslversion = fsl.Info.version() -TEMPLATE = tf.get("MNI152NLin2009cAsym", resolution=2, desc=None, suffix="T1w") +TEMPLATE = tf.get('MNI152NLin2009cAsym', resolution=2, desc=None, suffix='T1w') -@pytest.mark.skipif(fslversion is None, reason="fsl required") +@pytest.mark.skipif(fslversion is None, reason='fsl required') @pytest.mark.skipif( - fslversion and LooseVersion(fslversion) < LooseVersion("6.0.0"), reason="FSL6 test" + fslversion and LooseVersion(fslversion) < LooseVersion('6.0.0'), reason='FSL6 test' ) @pytest.mark.parametrize( - "path_parent,filename", + 'path_parent,filename', [ - (".", "brain.nii.gz"), + ('.', 'brain.nii.gz'), ( - "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification", - "supercalifragilisticexpialidocious.nii.gz", + 'pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification', + 'supercalifragilisticexpialidocious.nii.gz', ), ( - "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification/" - "antidisestablishmentarianism/pseudopseudohypoparathyroidism/sesquipedalian", - "brain.nii.gz", + 'pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification/' + 'antidisestablishmentarianism/pseudopseudohypoparathyroidism/sesquipedalian', + 'brain.nii.gz', ), ], ) @@ -33,7 +33,7 @@ def test_fsl6_long_filenames(tmp_path, path_parent, filename): test_dir = tmp_path / path_parent test_dir.mkdir(parents=True, exist_ok=True) in_file = test_dir / filename - out_file = test_dir / "output.nii.gz" + out_file = test_dir / 'output.nii.gz' shutil.copy(TEMPLATE, in_file) bet = fsl.BET(in_file=in_file, out_file=out_file).run() diff --git a/fmriprep/utils/asynctools.py b/fmriprep/utils/asynctools.py index c61016849..8d92a69ad 100644 --- a/fmriprep/utils/asynctools.py +++ b/fmriprep/utils/asynctools.py @@ -1,5 +1,6 @@ import asyncio -from typing import Callable, TypeVar +from collections.abc import Callable +from typing import TypeVar R = TypeVar('R') diff --git a/fmriprep/utils/bids.py b/fmriprep/utils/bids.py index b93b92d0e..1b44656a7 100644 --- a/fmriprep/utils/bids.py +++ b/fmriprep/utils/bids.py @@ -26,7 +26,6 @@ import json import os import sys -import typing as ty from collections import defaultdict from pathlib import Path @@ -43,12 +42,12 @@ def collect_derivatives( entities: dict, fieldmap_id: str | None, spec: dict | None = None, - patterns: ty.List[str] | None = None, + patterns: list[str] | None = None, ): """Gather existing derivatives and compose a cache.""" if spec is None or patterns is None: _spec, _patterns = tuple( - json.loads(load_data.readable("io_spec.json").read_text()).values() + json.loads(load_data.readable('io_spec.json').read_text()).values() ) if spec is None: @@ -57,21 +56,21 @@ def collect_derivatives( patterns = _patterns 
derivs_cache = defaultdict(list, {}) - layout = BIDSLayout(derivatives_dir, config=["bids", "derivatives"], validate=False) + layout = BIDSLayout(derivatives_dir, config=['bids', 'derivatives'], validate=False) derivatives_dir = Path(derivatives_dir) # search for both boldrefs - for k, q in spec["baseline"].items(): + for k, q in spec['baseline'].items(): query = {**q, **entities} item = layout.get(return_type='filename', **query) if not item: continue - derivs_cache["%s_boldref" % k] = item[0] if len(item) == 1 else item + derivs_cache['%s_boldref' % k] = item[0] if len(item) == 1 else item for xfm, q in spec['transforms'].items(): query = {**q, **entities} - if xfm == "boldref2fmap": - query["to"] = fieldmap_id + if xfm == 'boldref2fmap': + query['to'] = fieldmap_id item = layout.get(return_type='filename', **q) if not item: continue @@ -81,20 +80,20 @@ def collect_derivatives( def write_bidsignore(deriv_dir): bids_ignore = ( - "*.html", - "logs/", - "figures/", # Reports - "*_xfm.*", # Unspecified transform files - "*.surf.gii", # Unspecified structural outputs + '*.html', + 'logs/', + 'figures/', # Reports + '*_xfm.*', # Unspecified transform files + '*.surf.gii', # Unspecified structural outputs # Unspecified functional outputs - "*_boldref.nii.gz", - "*_bold.func.gii", - "*_mixing.tsv", - "*_timeseries.tsv", + '*_boldref.nii.gz', + '*_bold.func.gii', + '*_mixing.tsv', + '*_timeseries.tsv', ) - ignore_file = Path(deriv_dir) / ".bidsignore" + ignore_file = Path(deriv_dir) / '.bidsignore' - ignore_file.write_text("\n".join(bids_ignore) + "\n") + ignore_file.write_text('\n'.join(bids_ignore) + '\n') def write_derivative_description(bids_dir, deriv_dir): @@ -121,13 +120,13 @@ def write_derivative_description(bids_dir, deriv_dir): # Keys that can only be set by environment if 'FMRIPREP_DOCKER_TAG' in os.environ: desc['GeneratedBy'][0]['Container'] = { - "Type": "docker", - "Tag": f"nipreps/fmriprep:{os.environ['FMRIPREP_DOCKER_TAG']}", + 'Type': 'docker', + 'Tag': f"nipreps/fmriprep:{os.environ['FMRIPREP_DOCKER_TAG']}", } if 'FMRIPREP_SINGULARITY_URL' in os.environ: desc['GeneratedBy'][0]['Container'] = { - "Type": "singularity", - "URI": os.getenv('FMRIPREP_SINGULARITY_URL'), + 'Type': 'singularity', + 'URI': os.getenv('FMRIPREP_SINGULARITY_URL'), } # Keys deriving from source dataset @@ -152,52 +151,52 @@ def validate_input_dir(exec_env, bids_dir, participant_label): import tempfile validator_config_dict = { - "ignore": [ - "EVENTS_COLUMN_ONSET", - "EVENTS_COLUMN_DURATION", - "TSV_EQUAL_ROWS", - "TSV_EMPTY_CELL", - "TSV_IMPROPER_NA", - "VOLUME_COUNT_MISMATCH", - "BVAL_MULTIPLE_ROWS", - "BVEC_NUMBER_ROWS", - "DWI_MISSING_BVAL", - "INCONSISTENT_SUBJECTS", - "INCONSISTENT_PARAMETERS", - "BVEC_ROW_LENGTH", - "B_FILE", - "PARTICIPANT_ID_COLUMN", - "PARTICIPANT_ID_MISMATCH", - "TASK_NAME_MUST_DEFINE", - "PHENOTYPE_SUBJECTS_MISSING", - "STIMULUS_FILE_MISSING", - "DWI_MISSING_BVEC", - "EVENTS_TSV_MISSING", - "TSV_IMPROPER_NA", - "ACQTIME_FMT", - "Participants age 89 or higher", - "DATASET_DESCRIPTION_JSON_MISSING", - "FILENAME_COLUMN", - "WRONG_NEW_LINE", - "MISSING_TSV_COLUMN_CHANNELS", - "MISSING_TSV_COLUMN_IEEG_CHANNELS", - "MISSING_TSV_COLUMN_IEEG_ELECTRODES", - "UNUSED_STIMULUS", - "CHANNELS_COLUMN_SFREQ", - "CHANNELS_COLUMN_LOWCUT", - "CHANNELS_COLUMN_HIGHCUT", - "CHANNELS_COLUMN_NOTCH", - "CUSTOM_COLUMN_WITHOUT_DESCRIPTION", - "ACQTIME_FMT", - "SUSPICIOUSLY_LONG_EVENT_DESIGN", - "SUSPICIOUSLY_SHORT_EVENT_DESIGN", - "MALFORMED_BVEC", - "MALFORMED_BVAL", - 
"MISSING_TSV_COLUMN_EEG_ELECTRODES", - "MISSING_SESSION", + 'ignore': [ + 'EVENTS_COLUMN_ONSET', + 'EVENTS_COLUMN_DURATION', + 'TSV_EQUAL_ROWS', + 'TSV_EMPTY_CELL', + 'TSV_IMPROPER_NA', + 'VOLUME_COUNT_MISMATCH', + 'BVAL_MULTIPLE_ROWS', + 'BVEC_NUMBER_ROWS', + 'DWI_MISSING_BVAL', + 'INCONSISTENT_SUBJECTS', + 'INCONSISTENT_PARAMETERS', + 'BVEC_ROW_LENGTH', + 'B_FILE', + 'PARTICIPANT_ID_COLUMN', + 'PARTICIPANT_ID_MISMATCH', + 'TASK_NAME_MUST_DEFINE', + 'PHENOTYPE_SUBJECTS_MISSING', + 'STIMULUS_FILE_MISSING', + 'DWI_MISSING_BVEC', + 'EVENTS_TSV_MISSING', + 'TSV_IMPROPER_NA', + 'ACQTIME_FMT', + 'Participants age 89 or higher', + 'DATASET_DESCRIPTION_JSON_MISSING', + 'FILENAME_COLUMN', + 'WRONG_NEW_LINE', + 'MISSING_TSV_COLUMN_CHANNELS', + 'MISSING_TSV_COLUMN_IEEG_CHANNELS', + 'MISSING_TSV_COLUMN_IEEG_ELECTRODES', + 'UNUSED_STIMULUS', + 'CHANNELS_COLUMN_SFREQ', + 'CHANNELS_COLUMN_LOWCUT', + 'CHANNELS_COLUMN_HIGHCUT', + 'CHANNELS_COLUMN_NOTCH', + 'CUSTOM_COLUMN_WITHOUT_DESCRIPTION', + 'ACQTIME_FMT', + 'SUSPICIOUSLY_LONG_EVENT_DESIGN', + 'SUSPICIOUSLY_SHORT_EVENT_DESIGN', + 'MALFORMED_BVEC', + 'MALFORMED_BVAL', + 'MISSING_TSV_COLUMN_EEG_ELECTRODES', + 'MISSING_SESSION', ], - "error": ["NO_T1W"], - "ignoredFiles": ['/dataset_description.json', '/participants.tsv'], + 'error': ['NO_T1W'], + 'ignoredFiles': ['/dataset_description.json', '/participants.tsv'], } # Limit validation only to data from requested participants if participant_label: @@ -229,14 +228,14 @@ def validate_input_dir(exec_env, bids_dir, participant_label): ignored_subs = all_subs.difference(selected_subs) if ignored_subs: for sub in ignored_subs: - validator_config_dict["ignoredFiles"].append("/sub-%s/**" % sub) + validator_config_dict['ignoredFiles'].append('/sub-%s/**' % sub) with tempfile.NamedTemporaryFile(mode='w+', suffix='.json') as temp: temp.write(json.dumps(validator_config_dict)) temp.flush() try: subprocess.check_call(['bids-validator', str(bids_dir), '-c', temp.name]) except FileNotFoundError: - print("bids-validator does not appear to be installed", file=sys.stderr) + print('bids-validator does not appear to be installed', file=sys.stderr) def check_pipeline_version(pipeline_name, cvers, data_desc): @@ -282,15 +281,15 @@ def check_pipeline_version(pipeline_name, cvers, data_desc): desc = json.loads(data_desc.read_text()) generators = { - generator["Name"]: generator.get("Version", "0+unknown") - for generator in desc.get("GeneratedBy", []) + generator['Name']: generator.get('Version', '0+unknown') + for generator in desc.get('GeneratedBy', []) } dvers = generators.get(pipeline_name) if dvers is None: # Very old style - dvers = desc.get("PipelineDescription", {}).get("Version", "0+unknown") + dvers = desc.get('PipelineDescription', {}).get('Version', '0+unknown') if Version(cvers).public != Version(dvers).public: - return "Previous output generated by version {} found.".format(dvers) + return f'Previous output generated by version {dvers} found.' 
def extract_entities(file_list): @@ -334,6 +333,6 @@ def dismiss_echo(entities=None): echo_idx = config.execution.echo_idx if echo_idx is None or len(listify(echo_idx)) > 2: - entities.append("echo") + entities.append('echo') return entities diff --git a/fmriprep/utils/confounds.py b/fmriprep/utils/confounds.py index 7ba66d2f5..38e15f80a 100644 --- a/fmriprep/utils/confounds.py +++ b/fmriprep/utils/confounds.py @@ -133,7 +133,7 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None): csf_file = mask2vf( csf_file, zooms=zooms, - out_file=str(Path("acompcor_csf.nii.gz").absolute()), + out_file=str(Path('acompcor_csf.nii.gz').absolute()), ) csf_data = nb.load(csf_file).get_fdata() wm_data = mask2vf(in_files[1], zooms=zooms) @@ -145,8 +145,8 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None): gm_data = binary_dilation(gm_data, structure=ball(3)) # Output filenames - wm_file = str(Path("acompcor_wm.nii.gz").absolute()) - combined_file = str(Path("acompcor_wmcsf.nii.gz").absolute()) + wm_file = str(Path('acompcor_wm.nii.gz').absolute()) + combined_file = str(Path('acompcor_wmcsf.nii.gz').absolute()) # Prepare WM mask wm_data[gm_data] = 0 # Make sure voxel does not contain GM diff --git a/fmriprep/utils/debug.py b/fmriprep/utils/debug.py index c207bbb0a..befda6b88 100644 --- a/fmriprep/utils/debug.py +++ b/fmriprep/utils/debug.py @@ -71,7 +71,7 @@ def _pdb_excepthook(type, value, tb): from IPython.core import ultratb sys.excepthook = ultratb.FormattedTB( - mode="Verbose", + mode='Verbose', # color_scheme='Linux', call_pdb=is_interactive(), ) diff --git a/fmriprep/utils/misc.py b/fmriprep/utils/misc.py index be3233f1d..6f7973913 100644 --- a/fmriprep/utils/misc.py +++ b/fmriprep/utils/misc.py @@ -21,7 +21,6 @@ # https://www.nipreps.org/community/licensing/ # """Miscellaneous utilities.""" -import typing as ty from functools import cache @@ -45,12 +44,12 @@ def fips_enabled(): """ from pathlib import Path - fips = Path("/proc/sys/crypto/fips_enabled") - return fips.exists() and fips.read_text()[0] != "0" + fips = Path('/proc/sys/crypto/fips_enabled') + return fips.exists() and fips.read_text()[0] != '0' @cache -def estimate_bold_mem_usage(bold_fname: str) -> ty.Tuple[int, dict]: +def estimate_bold_mem_usage(bold_fname: str) -> tuple[int, dict]: import nibabel as nb import numpy as np @@ -60,9 +59,9 @@ def estimate_bold_mem_usage(bold_fname: str) -> ty.Tuple[int, dict]: bold_size_gb = 8 * nvox / (1024**3) bold_tlen = img.shape[-1] mem_gb = { - "filesize": bold_size_gb, - "resampled": bold_size_gb * 4, - "largemem": bold_size_gb * (max(bold_tlen / 100, 1.0) + 4), + 'filesize': bold_size_gb, + 'resampled': bold_size_gb * 4, + 'largemem': bold_size_gb * (max(bold_tlen / 100, 1.0) + 4), } return bold_tlen, mem_gb diff --git a/fmriprep/utils/telemetry.py b/fmriprep/utils/telemetry.py index 88f3d0854..c95067a68 100644 --- a/fmriprep/utils/telemetry.py +++ b/fmriprep/utils/telemetry.py @@ -29,49 +29,49 @@ from .. 
import __version__, config -sentry_sdk = optional_package("sentry_sdk")[0] -migas = optional_package("migas")[0] +sentry_sdk = optional_package('sentry_sdk')[0] +migas = optional_package('migas')[0] CHUNK_SIZE = 16384 # Group common events with pre specified fingerprints KNOWN_ERRORS = { - 'permission-denied': ["PermissionError: [Errno 13] Permission denied"], + 'permission-denied': ['PermissionError: [Errno 13] Permission denied'], 'memory-error': [ - "MemoryError", - "Cannot allocate memory", - "Return code: 134", + 'MemoryError', + 'Cannot allocate memory', + 'Return code: 134', ], - 'reconall-already-running': ["ERROR: it appears that recon-all is already running"], - 'no-disk-space': ["[Errno 28] No space left on device", "[Errno 122] Disk quota exceeded"], + 'reconall-already-running': ['ERROR: it appears that recon-all is already running'], + 'no-disk-space': ['[Errno 28] No space left on device', '[Errno 122] Disk quota exceeded'], 'segfault': [ - "Segmentation Fault", - "Segfault", - "Return code: 139", + 'Segmentation Fault', + 'Segfault', + 'Return code: 139', ], 'potential-race-condition': [ - "[Errno 39] Directory not empty", - "_unfinished.json", + '[Errno 39] Directory not empty', + '_unfinished.json', ], 'keyboard-interrupt': [ - "KeyboardInterrupt", + 'KeyboardInterrupt', ], } def sentry_setup(): """Set-up sentry.""" - release = config.environment.version or "dev" + release = config.environment.version or 'dev' environment = ( - "dev" + 'dev' if ( os.getenv('FMRIPREP_DEV', '').lower in ('1', 'on', 'yes', 'y', 'true') or ('+' in release) ) - else "prod" + else 'prod' ) sentry_sdk.init( - "https://d5a16b0c38d84d1584dfc93b9fb1ade6@sentry.io/1137693", + 'https://d5a16b0c38d84d1584dfc93b9fb1ade6@sentry.io/1137693', release=release, environment=environment, before_send=before_send, @@ -89,7 +89,7 @@ def process_crashfile(crashfile): # Extract node name node_name = crash_info.pop('node').split('.')[-1] - scope.set_tag("node_name", node_name) + scope.set_tag('node_name', node_name) # Massage the traceback, extract the gist traceback = crash_info.pop('traceback') @@ -134,12 +134,12 @@ def process_crashfile(crashfile): sentry_sdk.add_breadcrumb(message=fingerprint, level='fatal') else: # remove file paths - fingerprint = re.sub(r"(/[^/ ]*)+/?", '', message) + fingerprint = re.sub(r'(/[^/ ]*)+/?', '', message) # remove words containing numbers - fingerprint = re.sub(r"([a-zA-Z]*[0-9]+[a-zA-Z]*)+", '', fingerprint) + fingerprint = re.sub(r'([a-zA-Z]*[0-9]+[a-zA-Z]*)+', '', fingerprint) # adding the return code if it exists for line in message.splitlines(): - if line.startswith("Return code"): + if line.startswith('Return code'): fingerprint += line break @@ -151,11 +151,11 @@ def before_send(event, hints): """Filter log messages about crashed nodes.""" if 'logentry' in event and 'message' in event['logentry']: msg = event['logentry']['message'] - if msg.startswith("could not run node:"): + if msg.startswith('could not run node:'): return None - if msg.startswith("Saving crash info to "): + if msg.startswith('Saving crash info to '): return None - if re.match("Node .+ failed to run on host .+", msg): + if re.match('Node .+ failed to run on host .+', msg): return None if 'breadcrumbs' in event and isinstance(event['breadcrumbs'], list): @@ -212,4 +212,4 @@ def send_crumb(**kwargs) -> dict: """ Communicate with the migas telemetry server. This requires `migas.setup()` to be called. 
""" - return migas.add_breadcrumb("nipreps/fmriprep", __version__, **kwargs) + return migas.add_breadcrumb('nipreps/fmriprep', __version__, **kwargs) diff --git a/fmriprep/utils/transforms.py b/fmriprep/utils/transforms.py index 951d1d9f9..2bbecc994 100644 --- a/fmriprep/utils/transforms.py +++ b/fmriprep/utils/transforms.py @@ -17,10 +17,10 @@ def load_transforms(xfm_paths: list[Path], inverse: list[bool]) -> nt.base.Trans if len(inverse) == 1: inverse *= len(xfm_paths) elif len(inverse) != len(xfm_paths): - raise ValueError("Mismatched number of transforms and inverses") + raise ValueError('Mismatched number of transforms and inverses') chain = None - for path, inv in zip(xfm_paths[::-1], inverse[::-1]): + for path, inv in zip(xfm_paths[::-1], inverse[::-1], strict=False): path = Path(path) if path.suffix == '.h5': xfm = load_ants_h5(path) diff --git a/fmriprep/workflows/base.py b/fmriprep/workflows/base.py index 55ff76266..866342c26 100644 --- a/fmriprep/workflows/base.py +++ b/fmriprep/workflows/base.py @@ -82,7 +82,7 @@ def init_fmriprep_wf(): derivatives=config.execution.output_dir, freesurfer_home=os.getenv('FREESURFER_HOME'), spaces=config.workflow.spaces.get_fs_spaces(), - minimum_fs_version="7.0.0", + minimum_fs_version='7.0.0', ), name='fsdir_run_%s' % config.execution.run_uuid.replace('-', '_'), run_without_submitting=True, @@ -94,7 +94,7 @@ def init_fmriprep_wf(): single_subject_wf = init_single_subject_wf(subject_id) single_subject_wf.config['execution']['crashdump_dir'] = str( - config.execution.fmriprep_dir / f"sub-{subject_id}" / "log" / config.execution.run_uuid + config.execution.fmriprep_dir / f'sub-{subject_id}' / 'log' / config.execution.run_uuid ) for node in single_subject_wf._get_all_nodes(): node.config = deepcopy(single_subject_wf.config) @@ -105,7 +105,7 @@ def init_fmriprep_wf(): # Dump a copy of the config file into the log directory log_dir = ( - config.execution.fmriprep_dir / f"sub-{subject_id}" / 'log' / config.execution.run_uuid + config.execution.fmriprep_dir / f'sub-{subject_id}' / 'log' / config.execution.run_uuid ) log_dir.mkdir(exist_ok=True, parents=True) config.to_filename(log_dir / 'fmriprep.toml') @@ -156,7 +156,6 @@ def init_single_subject_wf(subject_id: str): from smriprep.workflows.outputs import ( init_ds_anat_volumes_wf, init_ds_grayord_metrics_wf, - init_ds_surface_metrics_wf, init_template_iterator_wf, ) from smriprep.workflows.surfaces import ( @@ -169,20 +168,18 @@ def init_single_subject_wf(subject_id: str): from fmriprep.workflows.bold.base import init_bold_wf workflow = Workflow(name=f'sub_{subject_id}_wf') - workflow.__desc__ = """ + workflow.__desc__ = f""" Results included in this manuscript come from preprocessing -performed using *fMRIPrep* {fmriprep_ver} +performed using *fMRIPrep* {config.environment.version} (@fmriprep1; @fmriprep2; RRID:SCR_016216), -which is based on *Nipype* {nipype_ver} +which is based on *Nipype* {config.environment.nipype_version} (@nipype1; @nipype2; RRID:SCR_002502). -""".format( - fmriprep_ver=config.environment.version, nipype_ver=config.environment.nipype_version - ) - workflow.__postdesc__ = """ +""" + workflow.__postdesc__ = f""" Many internal operations of *fMRIPrep* use -*Nilearn* {nilearn_ver} [@nilearn, RRID:SCR_001362], +*Nilearn* {NILEARN_VERSION} [@nilearn, RRID:SCR_001362], mostly within the functional processing workflow. 
For more details of the pipeline, see [the section corresponding to workflows in *fMRIPrep*'s documentation]\ @@ -200,9 +197,7 @@ def init_single_subject_wf(subject_id: str): ### References -""".format( - nilearn_ver=NILEARN_VERSION - ) +""" subject_data = collect_data( config.execution.layout, @@ -222,8 +217,8 @@ def init_single_subject_wf(subject_id: str): if not anat_only and not subject_data['bold']: task_id = config.execution.task_id raise RuntimeError( - "No BOLD images found for participant {} and task {}. " - "All workflows require BOLD images.".format( + 'No BOLD images found for participant {} and task {}. ' + 'All workflows require BOLD images.'.format( subject_id, task_id if task_id else '' ) ) @@ -252,7 +247,7 @@ def init_single_subject_wf(subject_id: str): from smriprep.utils.bids import collect_derivatives as collect_anat_derivatives std_spaces = spaces.get_spaces(nonstandard=False, dim=(3,)) - std_spaces.append("fsnative") + std_spaces.append('fsnative') for deriv_dir in config.execution.derivatives: anatomical_cache.update( collect_anat_derivatives( @@ -296,7 +291,7 @@ def init_single_subject_wf(subject_id: str): DerivativesDataSink( base_directory=config.execution.fmriprep_dir, desc='summary', - datatype="figures", + datatype='figures', dismiss_entities=dismiss_echo(), ), name='ds_report_summary', @@ -307,7 +302,7 @@ def init_single_subject_wf(subject_id: str): DerivativesDataSink( base_directory=config.execution.fmriprep_dir, desc='about', - datatype="figures", + datatype='figures', dismiss_entities=dismiss_echo(), ), name='ds_report_about', @@ -361,13 +356,13 @@ def init_single_subject_wf(subject_id: str): # Set up the template iterator once, if used template_iterator_wf = None select_MNI2009c_xfm = None - if config.workflow.level == "full": + if config.workflow.level == 'full': if spaces.cached.get_spaces(nonstandard=False, dim=(3,)): template_iterator_wf = init_template_iterator_wf(spaces=spaces) ds_std_volumes_wf = init_ds_anat_volumes_wf( bids_root=bids_root, output_dir=fmriprep_dir, - name="ds_std_volumes_wf", + name='ds_std_volumes_wf', ) workflow.connect([ (anat_fit_wf, template_iterator_wf, [ @@ -376,30 +371,30 @@ def init_single_subject_wf(subject_id: str): ]), (anat_fit_wf, ds_std_volumes_wf, [ ('outputnode.t1w_valid_list', 'inputnode.source_files'), - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ("outputnode.t1w_mask", "inputnode.t1w_mask"), - ("outputnode.t1w_dseg", "inputnode.t1w_dseg"), - ("outputnode.t1w_tpms", "inputnode.t1w_tpms"), + ('outputnode.t1w_preproc', 'inputnode.t1w_preproc'), + ('outputnode.t1w_mask', 'inputnode.t1w_mask'), + ('outputnode.t1w_dseg', 'inputnode.t1w_dseg'), + ('outputnode.t1w_tpms', 'inputnode.t1w_tpms'), ]), (template_iterator_wf, ds_std_volumes_wf, [ - ("outputnode.std_t1w", "inputnode.ref_file"), - ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), - ("outputnode.space", "inputnode.space"), - ("outputnode.cohort", "inputnode.cohort"), - ("outputnode.resolution", "inputnode.resolution"), + ('outputnode.std_t1w', 'inputnode.ref_file'), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.space', 'inputnode.space'), + ('outputnode.cohort', 'inputnode.cohort'), + ('outputnode.resolution', 'inputnode.resolution'), ]), ]) # fmt:skip if 'MNI152NLin2009cAsym' in spaces.get_spaces(): select_MNI2009c_xfm = pe.Node( - KeySelect(fields=["std2anat_xfm"], key="MNI152NLin2009cAsym"), - name="select_MNI2009c_xfm", + KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), + name='select_MNI2009c_xfm', 
run_without_submitting=True, ) workflow.connect([ (anat_fit_wf, select_MNI2009c_xfm, [ - ("outputnode.std2anat_xfm", "std2anat_xfm"), - ("outputnode.template", "keys"), + ('outputnode.std2anat_xfm', 'std2anat_xfm'), + ('outputnode.template', 'keys'), ]), ]) # fmt:skip @@ -411,23 +406,23 @@ def init_single_subject_wf(subject_id: str): from smriprep.interfaces.templateflow import TemplateFlowSelect ref = Reference( - "MNI152NLin6Asym", - {"res": 2 if config.workflow.cifti_output == "91k" else 1}, + 'MNI152NLin6Asym', + {'res': 2 if config.workflow.cifti_output == '91k' else 1}, ) select_MNI6_xfm = pe.Node( - KeySelect(fields=["anat2std_xfm"], key=ref.fullname), - name="select_MNI6", + KeySelect(fields=['anat2std_xfm'], key=ref.fullname), + name='select_MNI6', run_without_submitting=True, ) select_MNI6_tpl = pe.Node( TemplateFlowSelect(template=ref.fullname, resolution=ref.spec['res']), - name="select_MNI6_tpl", + name='select_MNI6_tpl', ) workflow.connect([ (anat_fit_wf, select_MNI6_xfm, [ - ("outputnode.anat2std_xfm", "anat2std_xfm"), - ("outputnode.template", "keys"), + ('outputnode.anat2std_xfm', 'anat2std_xfm'), + ('outputnode.template', 'keys'), ]), ]) # fmt:skip @@ -444,43 +439,43 @@ def init_single_subject_wf(subject_id: str): ds_grayord_metrics_wf = init_ds_grayord_metrics_wf( bids_root=bids_root, output_dir=fmriprep_dir, - metrics=["curv", "thickness", "sulc"], + metrics=['curv', 'thickness', 'sulc'], cifti_output=config.workflow.cifti_output, ) workflow.connect([ (anat_fit_wf, curv_wf, [ - ("outputnode.subject_id", "inputnode.subject_id"), - ("outputnode.subjects_dir", "inputnode.subjects_dir"), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (anat_fit_wf, hcp_morphometrics_wf, [ - ("outputnode.subject_id", "inputnode.subject_id"), - ("outputnode.thickness", "inputnode.thickness"), - ("outputnode.sulc", "inputnode.sulc"), - ("outputnode.midthickness", "inputnode.midthickness"), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.thickness', 'inputnode.thickness'), + ('outputnode.sulc', 'inputnode.sulc'), + ('outputnode.midthickness', 'inputnode.midthickness'), ]), (curv_wf, hcp_morphometrics_wf, [ - ("outputnode.curv", "inputnode.curv"), + ('outputnode.curv', 'inputnode.curv'), ]), (anat_fit_wf, resample_midthickness_wf, [ ('outputnode.midthickness', 'inputnode.midthickness'), ( f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", - "inputnode.sphere_reg_fsLR", + 'inputnode.sphere_reg_fsLR', ), ]), (anat_fit_wf, morph_grayords_wf, [ - ("outputnode.midthickness", "inputnode.midthickness"), + ('outputnode.midthickness', 'inputnode.midthickness'), ( f'outputnode.sphere_reg_{"msm" if msm_sulc else "fsLR"}', 'inputnode.sphere_reg_fsLR', ), ]), (hcp_morphometrics_wf, morph_grayords_wf, [ - ("outputnode.curv", "inputnode.curv"), - ("outputnode.thickness", "inputnode.thickness"), - ("outputnode.sulc", "inputnode.sulc"), - ("outputnode.roi", "inputnode.roi"), + ('outputnode.curv', 'inputnode.curv'), + ('outputnode.thickness', 'inputnode.thickness'), + ('outputnode.sulc', 'inputnode.sulc'), + ('outputnode.roi', 'inputnode.roi'), ]), (resample_midthickness_wf, morph_grayords_wf, [ ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), @@ -489,12 +484,12 @@ def init_single_subject_wf(subject_id: str): ('outputnode.t1w_valid_list', 'inputnode.source_files'), ]), (morph_grayords_wf, ds_grayord_metrics_wf, [ - ("outputnode.curv_fsLR", "inputnode.curv"), - ("outputnode.curv_metadata", 
"inputnode.curv_metadata"), - ("outputnode.thickness_fsLR", "inputnode.thickness"), - ("outputnode.thickness_metadata", "inputnode.thickness_metadata"), - ("outputnode.sulc_fsLR", "inputnode.sulc"), - ("outputnode.sulc_metadata", "inputnode.sulc_metadata"), + ('outputnode.curv_fsLR', 'inputnode.curv'), + ('outputnode.curv_metadata', 'inputnode.curv_metadata'), + ('outputnode.thickness_fsLR', 'inputnode.thickness'), + ('outputnode.thickness_metadata', 'inputnode.thickness_metadata'), + ('outputnode.sulc_fsLR', 'inputnode.sulc'), + ('outputnode.sulc_metadata', 'inputnode.sulc_metadata'), ]), ]) # fmt:skip @@ -505,7 +500,7 @@ def init_single_subject_wf(subject_id: str): layout=config.execution.layout, subject_id=subject_id, bold_data=bold_runs, - ignore_fieldmaps="fieldmaps" in config.workflow.ignore, + ignore_fieldmaps='fieldmaps' in config.workflow.ignore, use_syn=config.workflow.use_syn_sdc, force_syn=config.workflow.force_syn, filters=config.execution.get().get('bids_filters', {}).get('fmap'), @@ -513,16 +508,16 @@ def init_single_subject_wf(subject_id: str): if fmap_estimators: config.loggers.workflow.info( - "B0 field inhomogeneity map will be estimated with the following " - f"{len(fmap_estimators)} estimator(s): " - f"{[e.method for e in fmap_estimators]}." + 'B0 field inhomogeneity map will be estimated with the following ' + f'{len(fmap_estimators)} estimator(s): ' + f'{[e.method for e in fmap_estimators]}.' ) from sdcflows import fieldmaps as fm from sdcflows.workflows.base import init_fmap_preproc_wf fmap_wf = init_fmap_preproc_wf( - debug="fieldmaps" in config.execution.debug, + debug='fieldmaps' in config.execution.debug, estimators=fmap_estimators, omp_nthreads=omp_nthreads, output_dir=fmriprep_dir, @@ -538,20 +533,20 @@ def init_single_subject_wf(subject_id: str): # Overwrite ``out_path_base`` of sdcflows's DataSinks for node in fmap_wf.list_node_names(): - if node.split(".")[-1].startswith("ds_"): - fmap_wf.get_node(node).interface.out_path_base = "" + if node.split('.')[-1].startswith('ds_'): + fmap_wf.get_node(node).interface.out_path_base = '' fmap_select_std = pe.Node( - KeySelect(fields=["std2anat_xfm"], key="MNI152NLin2009cAsym"), - name="fmap_select_std", + KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), + name='fmap_select_std', run_without_submitting=True, ) if any(estimator.method == fm.EstimatorType.ANAT for estimator in fmap_estimators): # fmt:off workflow.connect([ (anat_fit_wf, fmap_select_std, [ - ("outputnode.std2anat_xfm", "std2anat_xfm"), - ("outputnode.template", "keys")]), + ('outputnode.std2anat_xfm', 'std2anat_xfm'), + ('outputnode.template', 'keys')]), ]) # fmt:on @@ -569,26 +564,26 @@ def init_single_subject_wf(subject_id: str): suffices = [s.suffix for s in estimator.sources] if estimator.method == fm.EstimatorType.PEPOLAR: - if len(suffices) == 2 and all(suf in ("epi", "bold", "sbref") for suf in suffices): - wf_inputs = getattr(fmap_wf.inputs, f"in_{estimator.bids_id}") + if len(suffices) == 2 and all(suf in ('epi', 'bold', 'sbref') for suf in suffices): + wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}') wf_inputs.in_data = [str(s.path) for s in estimator.sources] wf_inputs.metadata = [s.metadata for s in estimator.sources] else: - raise NotImplementedError("Sophisticated PEPOLAR schemes are unsupported.") + raise NotImplementedError('Sophisticated PEPOLAR schemes are unsupported.') elif estimator.method == fm.EstimatorType.ANAT: from sdcflows.workflows.fit.syn import init_syn_preprocessing_wf - sources = [str(s.path) 
for s in estimator.sources if s.suffix in ("bold", "sbref")] + sources = [str(s.path) for s in estimator.sources if s.suffix in ('bold', 'sbref')] source_meta = [ - s.metadata for s in estimator.sources if s.suffix in ("bold", "sbref") + s.metadata for s in estimator.sources if s.suffix in ('bold', 'sbref') ] syn_preprocessing_wf = init_syn_preprocessing_wf( omp_nthreads=omp_nthreads, debug=config.execution.sloppy, auto_bold_nss=True, t1w_inversion=False, - name=f"syn_preprocessing_{estimator.bids_id}", + name=f'syn_preprocessing_{estimator.bids_id}', ) syn_preprocessing_wf.inputs.inputnode.in_epis = sources syn_preprocessing_wf.inputs.inputnode.in_meta = source_meta @@ -596,32 +591,30 @@ def init_single_subject_wf(subject_id: str): # fmt:off workflow.connect([ (anat_fit_wf, syn_preprocessing_wf, [ - ("outputnode.t1w_preproc", "inputnode.in_anat"), - ("outputnode.t1w_mask", "inputnode.mask_anat"), + ('outputnode.t1w_preproc', 'inputnode.in_anat'), + ('outputnode.t1w_mask', 'inputnode.mask_anat'), ]), (fmap_select_std, syn_preprocessing_wf, [ - ("std2anat_xfm", "inputnode.std2anat_xfm"), + ('std2anat_xfm', 'inputnode.std2anat_xfm'), ]), (syn_preprocessing_wf, fmap_wf, [ - ("outputnode.epi_ref", f"in_{estimator.bids_id}.epi_ref"), - ("outputnode.epi_mask", f"in_{estimator.bids_id}.epi_mask"), - ("outputnode.anat_ref", f"in_{estimator.bids_id}.anat_ref"), - ("outputnode.anat_mask", f"in_{estimator.bids_id}.anat_mask"), - ("outputnode.sd_prior", f"in_{estimator.bids_id}.sd_prior"), + ('outputnode.epi_ref', f'in_{estimator.bids_id}.epi_ref'), + ('outputnode.epi_mask', f'in_{estimator.bids_id}.epi_mask'), + ('outputnode.anat_ref', f'in_{estimator.bids_id}.anat_ref'), + ('outputnode.anat_mask', f'in_{estimator.bids_id}.anat_mask'), + ('outputnode.sd_prior', f'in_{estimator.bids_id}.sd_prior'), ]), ]) # fmt:on # Append the functional section to the existing anatomical excerpt # That way we do not need to stream down the number of bold datasets - func_pre_desc = """ + func_pre_desc = f""" Functional data preprocessing -: For each of the {num_bold} BOLD runs found per subject (across all +: For each of the {len(bold_runs)} BOLD runs found per subject (across all tasks and sessions), the following preprocessing was performed. 
-""".format( - num_bold=len(bold_runs) - ) +""" for bold_series in bold_runs: bold_file = bold_series[0] @@ -650,7 +643,7 @@ def init_single_subject_wf(subject_id: str): if bold_wf is None: continue - bold_wf.__desc__ = func_pre_desc + (bold_wf.__desc__ or "") + bold_wf.__desc__ = func_pre_desc + (bold_wf.__desc__ or '') workflow.connect([ (anat_fit_wf, bold_wf, [ @@ -674,32 +667,32 @@ def init_single_subject_wf(subject_id: str): if fieldmap_id: workflow.connect([ (fmap_wf, bold_wf, [ - ("outputnode.fmap", "inputnode.fmap"), - ("outputnode.fmap_ref", "inputnode.fmap_ref"), - ("outputnode.fmap_coeff", "inputnode.fmap_coeff"), - ("outputnode.fmap_mask", "inputnode.fmap_mask"), - ("outputnode.fmap_id", "inputnode.fmap_id"), - ("outputnode.method", "inputnode.sdc_method"), + ('outputnode.fmap', 'inputnode.fmap'), + ('outputnode.fmap_ref', 'inputnode.fmap_ref'), + ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'), + ('outputnode.fmap_mask', 'inputnode.fmap_mask'), + ('outputnode.fmap_id', 'inputnode.fmap_id'), + ('outputnode.method', 'inputnode.sdc_method'), ]), ]) # fmt:skip - if config.workflow.level == "full": + if config.workflow.level == 'full': if template_iterator_wf is not None: workflow.connect([ (template_iterator_wf, bold_wf, [ - ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), - ("outputnode.space", "inputnode.std_space"), - ("outputnode.resolution", "inputnode.std_resolution"), - ("outputnode.cohort", "inputnode.std_cohort"), - ("outputnode.std_t1w", "inputnode.std_t1w"), - ("outputnode.std_mask", "inputnode.std_mask"), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.space', 'inputnode.std_space'), + ('outputnode.resolution', 'inputnode.std_resolution'), + ('outputnode.cohort', 'inputnode.std_cohort'), + ('outputnode.std_t1w', 'inputnode.std_t1w'), + ('outputnode.std_mask', 'inputnode.std_mask'), ]), ]) # fmt:skip if select_MNI2009c_xfm is not None: workflow.connect([ (select_MNI2009c_xfm, bold_wf, [ - ("std2anat_xfm", "inputnode.mni2009c2anat_xfm"), + ('std2anat_xfm', 'inputnode.mni2009c2anat_xfm'), ]), ]) # fmt:skip @@ -709,10 +702,10 @@ def init_single_subject_wf(subject_id: str): # want MNI152NLin6Asym outputs, but we'll live with it. if config.workflow.cifti_output: workflow.connect([ - (select_MNI6_xfm, bold_wf, [("anat2std_xfm", "inputnode.anat2mni6_xfm")]), - (select_MNI6_tpl, bold_wf, [("brain_mask", "inputnode.mni6_mask")]), + (select_MNI6_xfm, bold_wf, [('anat2std_xfm', 'inputnode.anat2mni6_xfm')]), + (select_MNI6_tpl, bold_wf, [('brain_mask', 'inputnode.mni6_mask')]), (hcp_morphometrics_wf, bold_wf, [ - ("outputnode.roi", "inputnode.cortex_mask"), + ('outputnode.roi', 'inputnode.cortex_mask'), ]), (resample_midthickness_wf, bold_wf, [ ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), @@ -753,11 +746,11 @@ def map_fieldmap_estimation( if not fmap_estimators: if use_syn: message = ( - "Fieldmap-less (SyN) estimation was requested, but PhaseEncodingDirection " - "information appears to be absent." + 'Fieldmap-less (SyN) estimation was requested, but PhaseEncodingDirection ' + 'information appears to be absent.' 
) config.loggers.workflow.error(message) - if use_syn == "error": + if use_syn == 'error': raise ValueError(message) return [], {} @@ -806,12 +799,12 @@ def clean_datasinks(workflow: pe.Workflow) -> pe.Workflow: # Overwrite ``out_path_base`` of smriprep's DataSinks for node in workflow.list_node_names(): if node.split('.')[-1].startswith('ds_'): - workflow.get_node(node).interface.out_path_base = "" + workflow.get_node(node).interface.out_path_base = '' return workflow def get_estimator(layout, fname): - field_source = layout.get_metadata(fname).get("B0FieldSource") + field_source = layout.get_metadata(fname).get('B0FieldSource') if isinstance(field_source, str): field_source = (field_source,) @@ -822,7 +815,7 @@ def get_estimator(layout, fname): from sdcflows.fieldmaps import get_identifier # Fallback to IntendedFor - intended_rel = re.sub(r"^sub-[a-zA-Z0-9]*/", "", str(Path(fname).relative_to(layout.root))) + intended_rel = re.sub(r'^sub-[a-zA-Z0-9]*/', '', str(Path(fname).relative_to(layout.root))) field_source = get_identifier(intended_rel) return field_source diff --git a/fmriprep/workflows/bold/apply.py b/fmriprep/workflows/bold/apply.py index c5ff2fd8b..65d50622c 100644 --- a/fmriprep/workflows/bold/apply.py +++ b/fmriprep/workflows/bold/apply.py @@ -92,30 +92,30 @@ def init_bold_volumetric_resample_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold_file", - "bold_ref_file", - "target_ref_file", - "target_mask", + 'bold_file', + 'bold_ref_file', + 'target_ref_file', + 'target_mask', # HMC - "motion_xfm", + 'motion_xfm', # SDC - "boldref2fmap_xfm", - "fmap_ref", - "fmap_coeff", - "fmap_id", + 'boldref2fmap_xfm', + 'fmap_ref', + 'fmap_coeff', + 'fmap_id', # Anatomical - "boldref2anat_xfm", + 'boldref2anat_xfm', # Template - "anat2std_xfm", + 'anat2std_xfm', # Entity for selecting target resolution - "resolution", + 'resolution', ], ), name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=["bold_file", "resampling_reference"]), + niu.IdentityInterface(fields=['bold_file', 'resampling_reference']), name='outputnode', ) @@ -125,7 +125,7 @@ def init_bold_volumetric_resample_wf( bold2target = pe.Node(niu.Merge(2), name='bold2target', run_without_submitting=True) resample = pe.Node( ResampleSeries(jacobian=jacobian), - name="resample", + name='resample', n_procs=omp_nthreads, mem_gb=mem_gb['resampled'], ) @@ -154,13 +154,13 @@ def init_bold_volumetric_resample_wf( return workflow fmap_select = pe.Node( - KeySelect(fields=["fmap_ref", "fmap_coeff"], key=fieldmap_id), - name="fmap_select", + KeySelect(fields=['fmap_ref', 'fmap_coeff'], key=fieldmap_id), + name='fmap_select', run_without_submitting=True, ) distortion_params = pe.Node( DistortionParameters(metadata=metadata), - name="distortion_params", + name='distortion_params', run_without_submitting=True, ) fmap2target = pe.Node(niu.Merge(2), name='fmap2target', run_without_submitting=True) @@ -170,13 +170,13 @@ def init_bold_volumetric_resample_wf( run_without_submitting=True, ) - fmap_recon = pe.Node(ReconstructFieldmap(), name="fmap_recon", mem_gb=1) + fmap_recon = pe.Node(ReconstructFieldmap(), name='fmap_recon', mem_gb=1) workflow.connect([ (inputnode, fmap_select, [ - ("fmap_ref", "fmap_ref"), - ("fmap_coeff", "fmap_coeff"), - ("fmap_id", "keys"), + ('fmap_ref', 'fmap_ref'), + ('fmap_coeff', 'fmap_coeff'), + ('fmap_id', 'keys'), ]), (inputnode, distortion_params, [('bold_file', 'in_file')]), (inputnode, fmap2target, [('boldref2fmap_xfm', 'in1')]), @@ -184,15 +184,15 @@ def 
init_bold_volumetric_resample_wf( (boldref2target, fmap2target, [('out', 'in2')]), (boldref2target, inverses, [('out', 'inlist')]), (fmap_select, fmap_recon, [ - ("fmap_coeff", "in_coeffs"), - ("fmap_ref", "fmap_ref_file"), + ('fmap_coeff', 'in_coeffs'), + ('fmap_ref', 'fmap_ref_file'), ]), (fmap2target, fmap_recon, [('out', 'transforms')]), (inverses, fmap_recon, [('out', 'inverse')]), # Inject fieldmap correction into resample node (distortion_params, resample, [ - ("readout_time", "ro_time"), - ("pe_direction", "pe_dir"), + ('readout_time', 'ro_time'), + ('pe_direction', 'pe_dir'), ]), (fmap_recon, resample, [('out_file', 'fieldmap')]), ]) # fmt:skip @@ -214,4 +214,4 @@ def _gen_inverses(inlist: list) -> list[bool]: def _is_native(value): - return value == "native" + return value == 'native' diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py index c7289c6d0..bd20a84b7 100644 --- a/fmriprep/workflows/bold/base.py +++ b/fmriprep/workflows/bold/base.py @@ -55,7 +55,7 @@ def init_bold_wf( *, - bold_series: ty.List[str], + bold_series: list[str], precomputed: dict = {}, fieldmap_id: ty.Optional[str] = None, ) -> pe.Workflow: @@ -177,21 +177,21 @@ def init_bold_wf( nvols, mem_gb = estimate_bold_mem_usage(bold_file) if nvols <= 5 - config.execution.sloppy: config.loggers.workflow.warning( - f"Too short BOLD series (<= 5 timepoints). Skipping processing of <{bold_file}>." + f'Too short BOLD series (<= 5 timepoints). Skipping processing of <{bold_file}>.' ) return config.loggers.workflow.debug( - "Creating bold processing workflow for <%s> (%.2f GB / %d TRs). " - "Memory resampled/largemem=%.2f/%.2f GB.", + 'Creating bold processing workflow for <%s> (%.2f GB / %d TRs). ' + 'Memory resampled/largemem=%.2f/%.2f GB.', bold_file, - mem_gb["filesize"], + mem_gb['filesize'], nvols, - mem_gb["resampled"], - mem_gb["largemem"], + mem_gb['resampled'], + mem_gb['largemem'], ) - workflow = Workflow(name=_get_wf_name(bold_file, "bold")) + workflow = Workflow(name=_get_wf_name(bold_file, 'bold')) workflow.__postdesc__ = """\ All resamplings can be performed with *a single interpolation step* by composing all the pertinent transformations (i.e. 
head-motion @@ -205,43 +205,43 @@ def init_bold_wf( niu.IdentityInterface( fields=[ # Anatomical coregistration - "t1w_preproc", - "t1w_mask", - "t1w_dseg", - "t1w_tpms", + 't1w_preproc', + 't1w_mask', + 't1w_dseg', + 't1w_tpms', # FreeSurfer outputs - "subjects_dir", - "subject_id", - "fsnative2t1w_xfm", - "white", - "midthickness", - "pial", - "sphere_reg_fsLR", - "midthickness_fsLR", - "cortex_mask", - "anat_ribbon", + 'subjects_dir', + 'subject_id', + 'fsnative2t1w_xfm', + 'white', + 'midthickness', + 'pial', + 'sphere_reg_fsLR', + 'midthickness_fsLR', + 'cortex_mask', + 'anat_ribbon', # Fieldmap registration - "fmap", - "fmap_ref", - "fmap_coeff", - "fmap_mask", - "fmap_id", - "sdc_method", + 'fmap', + 'fmap_ref', + 'fmap_coeff', + 'fmap_mask', + 'fmap_id', + 'sdc_method', # Volumetric templates - "anat2std_xfm", - "std_t1w", - "std_mask", - "std_space", - "std_resolution", - "std_cohort", + 'anat2std_xfm', + 'std_t1w', + 'std_mask', + 'std_space', + 'std_resolution', + 'std_cohort', # MNI152NLin6Asym warp, for CIFTI use - "anat2mni6_xfm", - "mni6_mask", + 'anat2mni6_xfm', + 'mni6_mask', # MNI152NLin2009cAsym inverse warp, for carpetplotting - "mni2009c2anat_xfm", + 'mni2009c2anat_xfm', ], ), - name="inputnode", + name='inputnode', ) # @@ -263,16 +263,16 @@ def init_bold_wf( ('subjects_dir', 'inputnode.subjects_dir'), ('subject_id', 'inputnode.subject_id'), ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'), - ("fmap", "inputnode.fmap"), - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_coeff", "inputnode.fmap_coeff"), - ("fmap_mask", "inputnode.fmap_mask"), - ("fmap_id", "inputnode.fmap_id"), - ("sdc_method", "inputnode.sdc_method"), + ('fmap', 'inputnode.fmap'), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_coeff', 'inputnode.fmap_coeff'), + ('fmap_mask', 'inputnode.fmap_mask'), + ('fmap_id', 'inputnode.fmap_id'), + ('sdc_method', 'inputnode.sdc_method'), ]), ]) # fmt:skip - if config.workflow.level == "minimal": + if config.workflow.level == 'minimal': return workflow # Now that we're resampling and combining, multiecho matters @@ -296,16 +296,16 @@ def init_bold_wf( workflow.connect([ (inputnode, bold_native_wf, [ - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_coeff", "inputnode.fmap_coeff"), - ("fmap_id", "inputnode.fmap_id"), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_coeff', 'inputnode.fmap_coeff'), + ('fmap_id', 'inputnode.fmap_id'), ]), (bold_fit_wf, bold_native_wf, [ - ("outputnode.coreg_boldref", "inputnode.boldref"), - ("outputnode.bold_mask", "inputnode.bold_mask"), - ("outputnode.motion_xfm", "inputnode.motion_xfm"), - ("outputnode.boldref2fmap_xfm", "inputnode.boldref2fmap_xfm"), - ("outputnode.dummy_scans", "inputnode.dummy_scans"), + ('outputnode.coreg_boldref', 'inputnode.boldref'), + ('outputnode.bold_mask', 'inputnode.bold_mask'), + ('outputnode.motion_xfm', 'inputnode.motion_xfm'), + ('outputnode.boldref2fmap_xfm', 'inputnode.boldref2fmap_xfm'), + ('outputnode.dummy_scans', 'inputnode.dummy_scans'), ]), ]) # fmt:skip @@ -340,21 +340,21 @@ def init_bold_wf( ds_report_t2scomp = pe.Node( DerivativesDataSink( - desc="t2scomp", - datatype="figures", + desc='t2scomp', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_report_t2scomp", + name='ds_report_t2scomp', run_without_submitting=True, ) ds_report_t2star_hist = pe.Node( DerivativesDataSink( - desc="t2starhist", - datatype="figures", + desc='t2starhist', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_report_t2star_hist", + name='ds_report_t2star_hist', 
run_without_submitting=True, ) @@ -368,10 +368,10 @@ def init_bold_wf( ('outputnode.t2star_map', 'inputnode.t2star_file'), ]), (t2s_reporting_wf, ds_report_t2scomp, [('outputnode.t2s_comp_report', 'in_file')]), - (t2s_reporting_wf, ds_report_t2star_hist, [("outputnode.t2star_hist", "in_file")]), + (t2s_reporting_wf, ds_report_t2star_hist, [('outputnode.t2star_hist', 'in_file')]), ]) # fmt:skip - if config.workflow.level == "resampling": + if config.workflow.level == 'resampling': return workflow # Resample to anatomical space @@ -383,24 +383,24 @@ def init_bold_wf( jacobian='fmap-jacobian' not in config.workflow.ignore, name='bold_anat_wf', ) - bold_anat_wf.inputs.inputnode.resolution = "native" + bold_anat_wf.inputs.inputnode.resolution = 'native' workflow.connect([ (inputnode, bold_anat_wf, [ - ("t1w_preproc", "inputnode.target_ref_file"), - ("t1w_mask", "inputnode.target_mask"), - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_coeff", "inputnode.fmap_coeff"), - ("fmap_id", "inputnode.fmap_id"), + ('t1w_preproc', 'inputnode.target_ref_file'), + ('t1w_mask', 'inputnode.target_mask'), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_coeff', 'inputnode.fmap_coeff'), + ('fmap_id', 'inputnode.fmap_id'), ]), (bold_fit_wf, bold_anat_wf, [ - ("outputnode.coreg_boldref", "inputnode.bold_ref_file"), - ("outputnode.boldref2fmap_xfm", "inputnode.boldref2fmap_xfm"), - ("outputnode.boldref2anat_xfm", "inputnode.boldref2anat_xfm"), + ('outputnode.coreg_boldref', 'inputnode.bold_ref_file'), + ('outputnode.boldref2fmap_xfm', 'inputnode.boldref2fmap_xfm'), + ('outputnode.boldref2anat_xfm', 'inputnode.boldref2anat_xfm'), ]), (bold_native_wf, bold_anat_wf, [ - ("outputnode.bold_minimal", "inputnode.bold_file"), - ("outputnode.motion_xfm", "inputnode.motion_xfm"), + ('outputnode.bold_minimal', 'inputnode.bold_file'), + ('outputnode.motion_xfm', 'inputnode.motion_xfm'), ]), ]) # fmt:skip @@ -452,22 +452,22 @@ def init_bold_wf( workflow.connect([ (inputnode, bold_std_wf, [ - ("std_t1w", "inputnode.target_ref_file"), - ("std_mask", "inputnode.target_mask"), - ("anat2std_xfm", "inputnode.anat2std_xfm"), + ('std_t1w', 'inputnode.target_ref_file'), + ('std_mask', 'inputnode.target_mask'), + ('anat2std_xfm', 'inputnode.anat2std_xfm'), ('std_resolution', 'inputnode.resolution'), - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_coeff", "inputnode.fmap_coeff"), - ("fmap_id", "inputnode.fmap_id"), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_coeff', 'inputnode.fmap_coeff'), + ('fmap_id', 'inputnode.fmap_id'), ]), (bold_fit_wf, bold_std_wf, [ - ("outputnode.coreg_boldref", "inputnode.bold_ref_file"), - ("outputnode.boldref2fmap_xfm", "inputnode.boldref2fmap_xfm"), - ("outputnode.boldref2anat_xfm", "inputnode.boldref2anat_xfm"), + ('outputnode.coreg_boldref', 'inputnode.bold_ref_file'), + ('outputnode.boldref2fmap_xfm', 'inputnode.boldref2fmap_xfm'), + ('outputnode.boldref2anat_xfm', 'inputnode.boldref2anat_xfm'), ]), (bold_native_wf, bold_std_wf, [ - ("outputnode.bold_minimal", "inputnode.bold_file"), - ("outputnode.motion_xfm", "inputnode.motion_xfm"), + ('outputnode.bold_minimal', 'inputnode.bold_file'), + ('outputnode.motion_xfm', 'inputnode.motion_xfm'), ]), (inputnode, ds_bold_std_wf, [ ('anat2std_xfm', 'inputnode.anat2std_xfm'), @@ -492,23 +492,23 @@ def init_bold_wf( Non-gridded (surface) resamplings were performed using `mri_vol2surf` (FreeSurfer). 
""" - config.loggers.workflow.debug("Creating BOLD surface-sampling workflow.") + config.loggers.workflow.debug('Creating BOLD surface-sampling workflow.') bold_surf_wf = init_bold_surf_wf( - mem_gb=mem_gb["resampled"], + mem_gb=mem_gb['resampled'], surface_spaces=freesurfer_spaces, medial_surface_nan=config.workflow.medial_surface_nan, metadata=all_metadata[0], output_dir=fmriprep_dir, - name="bold_surf_wf", + name='bold_surf_wf', ) bold_surf_wf.inputs.inputnode.source_file = bold_file workflow.connect([ (inputnode, bold_surf_wf, [ - ("subjects_dir", "inputnode.subjects_dir"), - ("subject_id", "inputnode.subject_id"), - ("fsnative2t1w_xfm", "inputnode.fsnative2t1w_xfm"), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'), ]), - (bold_anat_wf, bold_surf_wf, [("outputnode.bold_file", "inputnode.bold_t1w")]), + (bold_anat_wf, bold_surf_wf, [('outputnode.bold_file', 'inputnode.bold_t1w')]), ]) # fmt:skip if config.workflow.cifti_output: @@ -530,19 +530,19 @@ def init_bold_wf( bold_fsLR_resampling_wf = init_bold_fsLR_resampling_wf( grayord_density=config.workflow.cifti_output, omp_nthreads=omp_nthreads, - mem_gb=mem_gb["resampled"], + mem_gb=mem_gb['resampled'], ) if config.workflow.project_goodvoxels: - goodvoxels_bold_mask_wf = init_goodvoxels_bold_mask_wf(mem_gb["resampled"]) + goodvoxels_bold_mask_wf = init_goodvoxels_bold_mask_wf(mem_gb['resampled']) workflow.connect([ - (inputnode, goodvoxels_bold_mask_wf, [("anat_ribbon", "inputnode.anat_ribbon")]), + (inputnode, goodvoxels_bold_mask_wf, [('anat_ribbon', 'inputnode.anat_ribbon')]), (bold_anat_wf, goodvoxels_bold_mask_wf, [ - ("outputnode.bold_file", "inputnode.bold_file"), + ('outputnode.bold_file', 'inputnode.bold_file'), ]), (goodvoxels_bold_mask_wf, bold_fsLR_resampling_wf, [ - ("outputnode.goodvoxels_mask", "inputnode.volume_roi"), + ('outputnode.goodvoxels_mask', 'inputnode.volume_roi'), ]), ]) # fmt:skip @@ -554,7 +554,7 @@ def init_bold_wf( bold_grayords_wf = init_bold_grayords_wf( grayord_density=config.workflow.cifti_output, mem_gb=1, - repetition_time=all_metadata[0]["RepetitionTime"], + repetition_time=all_metadata[0]['RepetitionTime'], ) ds_bold_cifti = pe.Node( @@ -576,39 +576,39 @@ def init_bold_wf( workflow.connect([ # Resample BOLD to MNI152NLin6Asym, may duplicate bold_std_wf above (inputnode, bold_MNI6_wf, [ - ("mni6_mask", "inputnode.target_ref_file"), - ("mni6_mask", "inputnode.target_mask"), - ("anat2mni6_xfm", "inputnode.anat2std_xfm"), - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_coeff", "inputnode.fmap_coeff"), - ("fmap_id", "inputnode.fmap_id"), + ('mni6_mask', 'inputnode.target_ref_file'), + ('mni6_mask', 'inputnode.target_mask'), + ('anat2mni6_xfm', 'inputnode.anat2std_xfm'), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_coeff', 'inputnode.fmap_coeff'), + ('fmap_id', 'inputnode.fmap_id'), ]), (bold_fit_wf, bold_MNI6_wf, [ - ("outputnode.coreg_boldref", "inputnode.bold_ref_file"), - ("outputnode.boldref2fmap_xfm", "inputnode.boldref2fmap_xfm"), - ("outputnode.boldref2anat_xfm", "inputnode.boldref2anat_xfm"), + ('outputnode.coreg_boldref', 'inputnode.bold_ref_file'), + ('outputnode.boldref2fmap_xfm', 'inputnode.boldref2fmap_xfm'), + ('outputnode.boldref2anat_xfm', 'inputnode.boldref2anat_xfm'), ]), (bold_native_wf, bold_MNI6_wf, [ - ("outputnode.bold_minimal", "inputnode.bold_file"), - ("outputnode.motion_xfm", "inputnode.motion_xfm"), + ('outputnode.bold_minimal', 'inputnode.bold_file'), + ('outputnode.motion_xfm', 
'inputnode.motion_xfm'), ]), # Resample T1w-space BOLD to fsLR surfaces (inputnode, bold_fsLR_resampling_wf, [ - ("white", "inputnode.white"), - ("pial", "inputnode.pial"), - ("midthickness", "inputnode.midthickness"), - ("midthickness_fsLR", "inputnode.midthickness_fsLR"), - ("sphere_reg_fsLR", "inputnode.sphere_reg_fsLR"), - ("cortex_mask", "inputnode.cortex_mask"), + ('white', 'inputnode.white'), + ('pial', 'inputnode.pial'), + ('midthickness', 'inputnode.midthickness'), + ('midthickness_fsLR', 'inputnode.midthickness_fsLR'), + ('sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR'), + ('cortex_mask', 'inputnode.cortex_mask'), ]), (bold_anat_wf, bold_fsLR_resampling_wf, [ - ("outputnode.bold_file", "inputnode.bold_file"), + ('outputnode.bold_file', 'inputnode.bold_file'), ]), (bold_MNI6_wf, bold_grayords_wf, [ - ("outputnode.bold_file", "inputnode.bold_std"), + ('outputnode.bold_file', 'inputnode.bold_std'), ]), (bold_fsLR_resampling_wf, bold_grayords_wf, [ - ("outputnode.bold_fsLR", "inputnode.bold_fsLR"), + ('outputnode.bold_fsLR', 'inputnode.bold_fsLR'), ]), (bold_grayords_wf, ds_bold_cifti, [ ('outputnode.cifti_bold', 'in_file'), @@ -617,13 +617,13 @@ def init_bold_wf( ]) # fmt:skip bold_confounds_wf = init_bold_confs_wf( - mem_gb=mem_gb["largemem"], + mem_gb=mem_gb['largemem'], metadata=all_metadata[0], freesurfer=config.workflow.run_reconall, regressors_all_comps=config.workflow.regressors_all_comps, regressors_fd_th=config.workflow.regressors_fd_th, regressors_dvars_th=config.workflow.regressors_dvars_th, - name="bold_confounds_wf", + name='bold_confounds_wf', ) ds_confounds = pe.Node( @@ -633,7 +633,7 @@ def init_bold_wf( suffix='timeseries', dismiss_entities=dismiss_echo(), ), - name="ds_confounds", + name='ds_confounds', run_without_submitting=True, mem_gb=config.DEFAULT_MEMORY_MIN_GB, ) @@ -662,15 +662,15 @@ def init_bold_wf( if spaces.get_spaces(nonstandard=False, dim=(3,)): carpetplot_wf = init_carpetplot_wf( - mem_gb=mem_gb["resampled"], + mem_gb=mem_gb['resampled'], metadata=all_metadata[0], cifti_output=config.workflow.cifti_output, - name="carpetplot_wf", + name='carpetplot_wf', ) if config.workflow.cifti_output: workflow.connect( - bold_grayords_wf, "outputnode.cifti_bold", carpetplot_wf, "inputnode.cifti_bold", + bold_grayords_wf, 'outputnode.cifti_bold', carpetplot_wf, 'inputnode.cifti_bold', ) # fmt:skip def _last(inlist): @@ -678,26 +678,26 @@ def _last(inlist): workflow.connect([ (inputnode, carpetplot_wf, [ - ("mni2009c2anat_xfm", "inputnode.std2anat_xfm"), + ('mni2009c2anat_xfm', 'inputnode.std2anat_xfm'), ]), (bold_fit_wf, carpetplot_wf, [ - ("outputnode.dummy_scans", "inputnode.dummy_scans"), - ("outputnode.bold_mask", "inputnode.bold_mask"), + ('outputnode.dummy_scans', 'inputnode.dummy_scans'), + ('outputnode.bold_mask', 'inputnode.bold_mask'), ('outputnode.boldref2anat_xfm', 'inputnode.boldref2anat_xfm'), ]), (bold_native_wf, carpetplot_wf, [ - ("outputnode.bold_native", "inputnode.bold"), + ('outputnode.bold_native', 'inputnode.bold'), ]), (bold_confounds_wf, carpetplot_wf, [ - ("outputnode.confounds_file", "inputnode.confounds_file"), - ("outputnode.crown_mask", "inputnode.crown_mask"), - (("outputnode.acompcor_masks", _last), "inputnode.acompcor_mask"), + ('outputnode.confounds_file', 'inputnode.confounds_file'), + ('outputnode.crown_mask', 'inputnode.crown_mask'), + (('outputnode.acompcor_masks', _last), 'inputnode.acompcor_mask'), ]), ]) # fmt:skip # Fill-in datasinks of reportlets seen so far for node in workflow.list_node_names(): - if 
node.split(".")[-1].startswith("ds_report"): + if node.split('.')[-1].startswith('ds_report'): workflow.get_node(node).inputs.base_directory = fmriprep_dir workflow.get_node(node).inputs.source_file = bold_file @@ -720,7 +720,7 @@ def _get_wf_name(bold_fname, prefix): from nipype.utils.filemanip import split_filename fname = split_filename(bold_fname)[1] - fname_nosub = "_".join(fname.split("_")[1:-1]) + fname_nosub = '_'.join(fname.split('_')[1:-1]) return f'{prefix}_{fname_nosub.replace("-", "_")}_wf' diff --git a/fmriprep/workflows/bold/confounds.py b/fmriprep/workflows/bold/confounds.py index 5c5304be4..f16904633 100644 --- a/fmriprep/workflows/bold/confounds.py +++ b/fmriprep/workflows/bold/confounds.py @@ -50,7 +50,7 @@ def init_bold_confs_wf( regressors_dvars_th: float, regressors_fd_th: float, freesurfer: bool = False, - name: str = "bold_confs_wf", + name: str = 'bold_confs_wf', ): """ Build a workflow to generate and write out confounding signals. @@ -166,7 +166,7 @@ def init_bold_confs_wf( gm_desc = ( "dilating a GM mask extracted from the FreeSurfer's *aseg* segmentation" if freesurfer - else "thresholding the corresponding partial volume map at 0.05" + else 'thresholding the corresponding partial volume map at 0.05' ) workflow = Workflow(name=name) @@ -218,268 +218,268 @@ def init_bold_confs_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold", - "bold_mask", - "movpar_file", - "rmsd_file", - "skip_vols", - "t1w_mask", - "t1w_tpms", - "boldref2anat_xfm", + 'bold', + 'bold_mask', + 'movpar_file', + 'rmsd_file', + 'skip_vols', + 't1w_mask', + 't1w_tpms', + 'boldref2anat_xfm', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( fields=[ - "confounds_file", - "confounds_metadata", - "acompcor_masks", - "tcompcor_mask", - "crown_mask", + 'confounds_file', + 'confounds_metadata', + 'acompcor_masks', + 'tcompcor_mask', + 'crown_mask', ] ), - name="outputnode", + name='outputnode', ) # Project T1w mask into BOLD space and merge with BOLD brainmask t1w_mask_tfm = pe.Node( - ApplyTransforms(interpolation="MultiLabel", invert_transform_flags=[True]), - name="t1w_mask_tfm", + ApplyTransforms(interpolation='MultiLabel', invert_transform_flags=[True]), + name='t1w_mask_tfm', ) - union_mask = pe.Node(niu.Function(function=_binary_union), name="union_mask") + union_mask = pe.Node(niu.Function(function=_binary_union), name='union_mask') # Create the crown mask - dilated_mask = pe.Node(BinaryDilation(), name="dilated_mask") - subtract_mask = pe.Node(BinarySubtraction(), name="subtract_mask") + dilated_mask = pe.Node(BinaryDilation(), name='dilated_mask') + subtract_mask = pe.Node(BinarySubtraction(), name='subtract_mask') # DVARS dvars = pe.Node( nac.ComputeDVARS(save_nstd=True, save_std=True, remove_zerovariance=True), - name="dvars", + name='dvars', mem_gb=mem_gb, ) # Frame displacement - fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source="SPM"), name="fdisp", mem_gb=mem_gb) + fdisp = pe.Node(nac.FramewiseDisplacement(parameter_source='SPM'), name='fdisp', mem_gb=mem_gb) # Generate aCompCor probseg maps - acc_masks = pe.Node(aCompCorMasks(is_aseg=freesurfer), name="acc_masks") + acc_masks = pe.Node(aCompCorMasks(is_aseg=freesurfer), name='acc_masks') # Resample probseg maps in BOLD space via BOLD-to-T1w transform acc_msk_tfm = pe.MapNode( - ApplyTransforms(interpolation="Gaussian", invert_transform_flags=[True]), - iterfield=["input_image"], - name="acc_msk_tfm", + ApplyTransforms(interpolation='Gaussian', 
invert_transform_flags=[True]), + iterfield=['input_image'], + name='acc_msk_tfm', mem_gb=0.1, ) - acc_msk_brain = pe.MapNode(ApplyMask(), name="acc_msk_brain", iterfield=["in_file"]) - acc_msk_bin = pe.MapNode(Binarize(thresh_low=0.99), name="acc_msk_bin", iterfield=["in_file"]) + acc_msk_brain = pe.MapNode(ApplyMask(), name='acc_msk_brain', iterfield=['in_file']) + acc_msk_bin = pe.MapNode(Binarize(thresh_low=0.99), name='acc_msk_bin', iterfield=['in_file']) acompcor = pe.Node( ACompCor( - components_file="acompcor.tsv", - header_prefix="a_comp_cor_", - pre_filter="cosine", + components_file='acompcor.tsv', + header_prefix='a_comp_cor_', + pre_filter='cosine', save_pre_filter=True, save_metadata=True, - mask_names=["CSF", "WM", "combined"], - merge_method="none", - failure_mode="NaN", + mask_names=['CSF', 'WM', 'combined'], + merge_method='none', + failure_mode='NaN', ), - name="acompcor", + name='acompcor', mem_gb=mem_gb, ) crowncompcor = pe.Node( ACompCor( - components_file="crown_compcor.tsv", - header_prefix="edge_comp_", - pre_filter="cosine", + components_file='crown_compcor.tsv', + header_prefix='edge_comp_', + pre_filter='cosine', save_pre_filter=True, save_metadata=True, - mask_names=["Edge"], - merge_method="none", - failure_mode="NaN", + mask_names=['Edge'], + merge_method='none', + failure_mode='NaN', num_components=24, ), - name="crowncompcor", + name='crowncompcor', mem_gb=mem_gb, ) tcompcor = pe.Node( TCompCor( - components_file="tcompcor.tsv", - header_prefix="t_comp_cor_", - pre_filter="cosine", + components_file='tcompcor.tsv', + header_prefix='t_comp_cor_', + pre_filter='cosine', save_pre_filter=True, save_metadata=True, percentile_threshold=0.02, - failure_mode="NaN", + failure_mode='NaN', ), - name="tcompcor", + name='tcompcor', mem_gb=mem_gb, ) # Set number of components if regressors_all_comps: - acompcor.inputs.num_components = "all" - tcompcor.inputs.num_components = "all" + acompcor.inputs.num_components = 'all' + tcompcor.inputs.num_components = 'all' else: acompcor.inputs.variance_threshold = 0.5 tcompcor.inputs.variance_threshold = 0.5 # Set TR if present - if "RepetitionTime" in metadata: - tcompcor.inputs.repetition_time = metadata["RepetitionTime"] - acompcor.inputs.repetition_time = metadata["RepetitionTime"] - crowncompcor.inputs.repetition_time = metadata["RepetitionTime"] + if 'RepetitionTime' in metadata: + tcompcor.inputs.repetition_time = metadata['RepetitionTime'] + acompcor.inputs.repetition_time = metadata['RepetitionTime'] + crowncompcor.inputs.repetition_time = metadata['RepetitionTime'] # Split aCompCor results into a_comp_cor, c_comp_cor, w_comp_cor - rename_acompcor = pe.Node(RenameACompCor(), name="rename_acompcor") + rename_acompcor = pe.Node(RenameACompCor(), name='rename_acompcor') # Global and segment regressors signals_class_labels = [ - "global_signal", - "csf", - "white_matter", - "csf_wm", - "tcompcor", + 'global_signal', + 'csf', + 'white_matter', + 'csf_wm', + 'tcompcor', ] merge_rois = pe.Node( - niu.Merge(3, ravel_inputs=True), name="merge_rois", run_without_submitting=True + niu.Merge(3, ravel_inputs=True), name='merge_rois', run_without_submitting=True ) signals = pe.Node( - SignalExtraction(class_labels=signals_class_labels), name="signals", mem_gb=mem_gb + SignalExtraction(class_labels=signals_class_labels), name='signals', mem_gb=mem_gb ) # Arrange confounds add_dvars_header = pe.Node( - AddTSVHeader(columns=["dvars"]), - name="add_dvars_header", + AddTSVHeader(columns=['dvars']), + name='add_dvars_header', mem_gb=0.01, 
run_without_submitting=True, ) add_std_dvars_header = pe.Node( - AddTSVHeader(columns=["std_dvars"]), - name="add_std_dvars_header", + AddTSVHeader(columns=['std_dvars']), + name='add_std_dvars_header', mem_gb=0.01, run_without_submitting=True, ) add_motion_headers = pe.Node( - AddTSVHeader(columns=["trans_x", "trans_y", "trans_z", "rot_x", "rot_y", "rot_z"]), - name="add_motion_headers", + AddTSVHeader(columns=['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']), + name='add_motion_headers', mem_gb=0.01, run_without_submitting=True, ) add_rmsd_header = pe.Node( - AddTSVHeader(columns=["rmsd"]), - name="add_rmsd_header", + AddTSVHeader(columns=['rmsd']), + name='add_rmsd_header', mem_gb=0.01, run_without_submitting=True, ) - concat = pe.Node(GatherConfounds(), name="concat", mem_gb=0.01, run_without_submitting=True) + concat = pe.Node(GatherConfounds(), name='concat', mem_gb=0.01, run_without_submitting=True) # CompCor metadata - tcc_metadata_filter = pe.Node(FilterDropped(), name="tcc_metadata_filter") - acc_metadata_filter = pe.Node(FilterDropped(), name="acc_metadata_filter") + tcc_metadata_filter = pe.Node(FilterDropped(), name='tcc_metadata_filter') + acc_metadata_filter = pe.Node(FilterDropped(), name='acc_metadata_filter') tcc_metadata_fmt = pe.Node( TSV2JSON( - index_column="component", - drop_columns=["mask"], + index_column='component', + drop_columns=['mask'], output=None, - additional_metadata={"Method": "tCompCor"}, + additional_metadata={'Method': 'tCompCor'}, enforce_case=True, ), - name="tcc_metadata_fmt", + name='tcc_metadata_fmt', ) acc_metadata_fmt = pe.Node( TSV2JSON( - index_column="component", + index_column='component', output=None, - additional_metadata={"Method": "aCompCor"}, + additional_metadata={'Method': 'aCompCor'}, enforce_case=True, ), - name="acc_metadata_fmt", + name='acc_metadata_fmt', ) crowncc_metadata_fmt = pe.Node( TSV2JSON( - index_column="component", + index_column='component', output=None, - additional_metadata={"Method": "EdgeRegressor"}, + additional_metadata={'Method': 'EdgeRegressor'}, enforce_case=True, ), - name="crowncc_metadata_fmt", + name='crowncc_metadata_fmt', ) mrg_conf_metadata = pe.Node( - niu.Merge(3), name="merge_confound_metadata", run_without_submitting=True + niu.Merge(3), name='merge_confound_metadata', run_without_submitting=True ) - mrg_conf_metadata.inputs.in3 = {label: {"Method": "Mean"} for label in signals_class_labels} + mrg_conf_metadata.inputs.in3 = {label: {'Method': 'Mean'} for label in signals_class_labels} mrg_conf_metadata2 = pe.Node( - DictMerge(), name="merge_confound_metadata2", run_without_submitting=True + DictMerge(), name='merge_confound_metadata2', run_without_submitting=True ) # Expand model to include derivatives and quadratics model_expand = pe.Node( - ExpandModel(model_formula="(dd1(rps + wm + csf + gsr))^^2 + others"), - name="model_expansion", + ExpandModel(model_formula='(dd1(rps + wm + csf + gsr))^^2 + others'), + name='model_expansion', ) # Add spike regressors spike_regress = pe.Node( SpikeRegressors(fd_thresh=regressors_fd_th, dvars_thresh=regressors_dvars_th), - name="spike_regressors", + name='spike_regressors', ) # Generate reportlet (ROIs) mrg_compcor = pe.Node( - niu.Merge(3, ravel_inputs=True), name="mrg_compcor", run_without_submitting=True + niu.Merge(3, ravel_inputs=True), name='mrg_compcor', run_without_submitting=True ) rois_plot = pe.Node( - ROIsPlot(colors=["b", "magenta", "g"], generate_report=True), - name="rois_plot", + ROIsPlot(colors=['b', 'magenta', 'g'], 
generate_report=True), + name='rois_plot', mem_gb=mem_gb, ) ds_report_bold_rois = pe.Node( - DerivativesDataSink(desc="rois", datatype="figures", dismiss_entities=dismiss_echo()), - name="ds_report_bold_rois", + DerivativesDataSink(desc='rois', datatype='figures', dismiss_entities=dismiss_echo()), + name='ds_report_bold_rois', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) # Generate reportlet (CompCor) mrg_cc_metadata = pe.Node( - niu.Merge(2), name="merge_compcor_metadata", run_without_submitting=True + niu.Merge(2), name='merge_compcor_metadata', run_without_submitting=True ) compcor_plot = pe.Node( CompCorVariancePlot( variance_thresholds=(0.5, 0.7, 0.9), - metadata_sources=["tCompCor", "aCompCor", "crownCompCor"], + metadata_sources=['tCompCor', 'aCompCor', 'crownCompCor'], ), - name="compcor_plot", + name='compcor_plot', ) ds_report_compcor = pe.Node( DerivativesDataSink( - desc="compcorvar", datatype="figures", dismiss_entities=dismiss_echo() + desc='compcorvar', datatype='figures', dismiss_entities=dismiss_echo() ), - name="ds_report_compcor", + name='ds_report_compcor', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) # Generate reportlet (Confound correlation) conf_corr_plot = pe.Node( - ConfoundsCorrelationPlot(reference_column="global_signal", max_dim=20), - name="conf_corr_plot", + ConfoundsCorrelationPlot(reference_column='global_signal', max_dim=20), + name='conf_corr_plot', ) ds_report_conf_corr = pe.Node( DerivativesDataSink( - desc="confoundcorr", datatype="figures", dismiss_entities=dismiss_echo() + desc='confoundcorr', datatype='figures', dismiss_entities=dismiss_echo() ), - name="ds_report_conf_corr", + name='ds_report_conf_corr', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -493,108 +493,108 @@ def _select_cols(table): return [ col for col in pd.read_table(table, nrows=2).columns - if not col.startswith(("a_comp_cor_", "t_comp_cor_", "std_dvars")) + if not col.startswith(('a_comp_cor_', 't_comp_cor_', 'std_dvars')) ] # fmt:off workflow.connect([ # connect inputnode to each non-anatomical confound node - (inputnode, dvars, [("bold", "in_file"), - ("bold_mask", "in_mask")]), - (inputnode, fdisp, [("movpar_file", "in_file")]), + (inputnode, dvars, [('bold', 'in_file'), + ('bold_mask', 'in_mask')]), + (inputnode, fdisp, [('movpar_file', 'in_file')]), # Brain mask - (inputnode, t1w_mask_tfm, [("t1w_mask", "input_image"), - ("bold_mask", "reference_image"), - ("boldref2anat_xfm", "transforms")]), - (inputnode, union_mask, [("bold_mask", "mask1")]), - (t1w_mask_tfm, union_mask, [("output_image", "mask2")]), - (union_mask, dilated_mask, [("out", "in_mask")]), - (union_mask, subtract_mask, [("out", "in_subtract")]), - (dilated_mask, subtract_mask, [("out_mask", "in_base")]), - (subtract_mask, outputnode, [("out_mask", "crown_mask")]), + (inputnode, t1w_mask_tfm, [('t1w_mask', 'input_image'), + ('bold_mask', 'reference_image'), + ('boldref2anat_xfm', 'transforms')]), + (inputnode, union_mask, [('bold_mask', 'mask1')]), + (t1w_mask_tfm, union_mask, [('output_image', 'mask2')]), + (union_mask, dilated_mask, [('out', 'in_mask')]), + (union_mask, subtract_mask, [('out', 'in_subtract')]), + (dilated_mask, subtract_mask, [('out_mask', 'in_base')]), + (subtract_mask, outputnode, [('out_mask', 'crown_mask')]), # aCompCor - (inputnode, acompcor, [("bold", "realigned_file"), - ("skip_vols", "ignore_initial_volumes")]), - (inputnode, acc_masks, [("t1w_tpms", "in_vfs"), - (("bold", _get_zooms), "bold_zooms")]), - (inputnode, acc_msk_tfm, 
[("boldref2anat_xfm", "transforms"), - ("bold_mask", "reference_image")]), - (inputnode, acc_msk_brain, [("bold_mask", "in_mask")]), - (acc_masks, acc_msk_tfm, [("out_masks", "input_image")]), - (acc_msk_tfm, acc_msk_brain, [("output_image", "in_file")]), - (acc_msk_brain, acc_msk_bin, [("out_file", "in_file")]), - (acc_msk_bin, acompcor, [("out_file", "mask_files")]), - (acompcor, rename_acompcor, [("components_file", "components_file"), - ("metadata_file", "metadata_file")]), + (inputnode, acompcor, [('bold', 'realigned_file'), + ('skip_vols', 'ignore_initial_volumes')]), + (inputnode, acc_masks, [('t1w_tpms', 'in_vfs'), + (('bold', _get_zooms), 'bold_zooms')]), + (inputnode, acc_msk_tfm, [('boldref2anat_xfm', 'transforms'), + ('bold_mask', 'reference_image')]), + (inputnode, acc_msk_brain, [('bold_mask', 'in_mask')]), + (acc_masks, acc_msk_tfm, [('out_masks', 'input_image')]), + (acc_msk_tfm, acc_msk_brain, [('output_image', 'in_file')]), + (acc_msk_brain, acc_msk_bin, [('out_file', 'in_file')]), + (acc_msk_bin, acompcor, [('out_file', 'mask_files')]), + (acompcor, rename_acompcor, [('components_file', 'components_file'), + ('metadata_file', 'metadata_file')]), # crownCompCor - (inputnode, crowncompcor, [("bold", "realigned_file"), - ("skip_vols", "ignore_initial_volumes")]), - (subtract_mask, crowncompcor, [("out_mask", "mask_files")]), + (inputnode, crowncompcor, [('bold', 'realigned_file'), + ('skip_vols', 'ignore_initial_volumes')]), + (subtract_mask, crowncompcor, [('out_mask', 'mask_files')]), # tCompCor - (inputnode, tcompcor, [("bold", "realigned_file"), - ("skip_vols", "ignore_initial_volumes"), - ("bold_mask", "mask_files")]), + (inputnode, tcompcor, [('bold', 'realigned_file'), + ('skip_vols', 'ignore_initial_volumes'), + ('bold_mask', 'mask_files')]), # Global signals extraction (constrained by anatomy) - (inputnode, signals, [("bold", "in_file")]), - (inputnode, merge_rois, [("bold_mask", "in1")]), - (acc_msk_bin, merge_rois, [("out_file", "in2")]), - (tcompcor, merge_rois, [("high_variance_masks", "in3")]), - (merge_rois, signals, [("out", "label_files")]), + (inputnode, signals, [('bold', 'in_file')]), + (inputnode, merge_rois, [('bold_mask', 'in1')]), + (acc_msk_bin, merge_rois, [('out_file', 'in2')]), + (tcompcor, merge_rois, [('high_variance_masks', 'in3')]), + (merge_rois, signals, [('out', 'label_files')]), # Collate computed confounds together - (inputnode, add_motion_headers, [("movpar_file", "in_file")]), - (inputnode, add_rmsd_header, [("rmsd_file", "in_file")]), - (dvars, add_dvars_header, [("out_nstd", "in_file")]), - (dvars, add_std_dvars_header, [("out_std", "in_file")]), - (signals, concat, [("out_file", "signals")]), - (fdisp, concat, [("out_file", "fd")]), - (tcompcor, concat, [("components_file", "tcompcor"), - ("pre_filter_file", "cos_basis")]), - (rename_acompcor, concat, [("components_file", "acompcor")]), - (crowncompcor, concat, [("components_file", "crowncompcor")]), - (add_motion_headers, concat, [("out_file", "motion")]), - (add_rmsd_header, concat, [("out_file", "rmsd")]), - (add_dvars_header, concat, [("out_file", "dvars")]), - (add_std_dvars_header, concat, [("out_file", "std_dvars")]), + (inputnode, add_motion_headers, [('movpar_file', 'in_file')]), + (inputnode, add_rmsd_header, [('rmsd_file', 'in_file')]), + (dvars, add_dvars_header, [('out_nstd', 'in_file')]), + (dvars, add_std_dvars_header, [('out_std', 'in_file')]), + (signals, concat, [('out_file', 'signals')]), + (fdisp, concat, [('out_file', 'fd')]), + (tcompcor, concat, 
[('components_file', 'tcompcor'), + ('pre_filter_file', 'cos_basis')]), + (rename_acompcor, concat, [('components_file', 'acompcor')]), + (crowncompcor, concat, [('components_file', 'crowncompcor')]), + (add_motion_headers, concat, [('out_file', 'motion')]), + (add_rmsd_header, concat, [('out_file', 'rmsd')]), + (add_dvars_header, concat, [('out_file', 'dvars')]), + (add_std_dvars_header, concat, [('out_file', 'std_dvars')]), # Confounds metadata - (tcompcor, tcc_metadata_filter, [("metadata_file", "in_file")]), - (tcc_metadata_filter, tcc_metadata_fmt, [("out_file", "in_file")]), - (rename_acompcor, acc_metadata_filter, [("metadata_file", "in_file")]), - (acc_metadata_filter, acc_metadata_fmt, [("out_file", "in_file")]), - (crowncompcor, crowncc_metadata_fmt, [("metadata_file", "in_file")]), - (tcc_metadata_fmt, mrg_conf_metadata, [("output", "in1")]), - (acc_metadata_fmt, mrg_conf_metadata, [("output", "in2")]), - (crowncc_metadata_fmt, mrg_conf_metadata, [("output", "in3")]), - (mrg_conf_metadata, mrg_conf_metadata2, [("out", "in_dicts")]), + (tcompcor, tcc_metadata_filter, [('metadata_file', 'in_file')]), + (tcc_metadata_filter, tcc_metadata_fmt, [('out_file', 'in_file')]), + (rename_acompcor, acc_metadata_filter, [('metadata_file', 'in_file')]), + (acc_metadata_filter, acc_metadata_fmt, [('out_file', 'in_file')]), + (crowncompcor, crowncc_metadata_fmt, [('metadata_file', 'in_file')]), + (tcc_metadata_fmt, mrg_conf_metadata, [('output', 'in1')]), + (acc_metadata_fmt, mrg_conf_metadata, [('output', 'in2')]), + (crowncc_metadata_fmt, mrg_conf_metadata, [('output', 'in3')]), + (mrg_conf_metadata, mrg_conf_metadata2, [('out', 'in_dicts')]), # Expand the model with derivatives, quadratics, and spikes - (concat, model_expand, [("confounds_file", "confounds_file")]), - (model_expand, spike_regress, [("confounds_file", "confounds_file")]), + (concat, model_expand, [('confounds_file', 'confounds_file')]), + (model_expand, spike_regress, [('confounds_file', 'confounds_file')]), # Set outputs - (spike_regress, outputnode, [("confounds_file", "confounds_file")]), - (mrg_conf_metadata2, outputnode, [("out_dict", "confounds_metadata")]), - (tcompcor, outputnode, [("high_variance_masks", "tcompcor_mask")]), - (acc_msk_bin, outputnode, [("out_file", "acompcor_masks")]), - (inputnode, rois_plot, [("bold", "in_file"), - ("bold_mask", "in_mask")]), - (tcompcor, mrg_compcor, [("high_variance_masks", "in1")]), - (acc_msk_bin, mrg_compcor, [(("out_file", _last), "in2")]), - (subtract_mask, mrg_compcor, [("out_mask", "in3")]), - (mrg_compcor, rois_plot, [("out", "in_rois")]), - (rois_plot, ds_report_bold_rois, [("out_report", "in_file")]), - (tcompcor, mrg_cc_metadata, [("metadata_file", "in1")]), - (acompcor, mrg_cc_metadata, [("metadata_file", "in2")]), - (crowncompcor, mrg_cc_metadata, [("metadata_file", "in3")]), - (mrg_cc_metadata, compcor_plot, [("out", "metadata_files")]), - (compcor_plot, ds_report_compcor, [("out_file", "in_file")]), - (inputnode, conf_corr_plot, [("skip_vols", "ignore_initial_volumes")]), - (concat, conf_corr_plot, [("confounds_file", "confounds_file"), - (("confounds_file", _select_cols), "columns")]), - (conf_corr_plot, ds_report_conf_corr, [("out_file", "in_file")]), + (spike_regress, outputnode, [('confounds_file', 'confounds_file')]), + (mrg_conf_metadata2, outputnode, [('out_dict', 'confounds_metadata')]), + (tcompcor, outputnode, [('high_variance_masks', 'tcompcor_mask')]), + (acc_msk_bin, outputnode, [('out_file', 'acompcor_masks')]), + (inputnode, rois_plot, [('bold', 
'in_file'), + ('bold_mask', 'in_mask')]), + (tcompcor, mrg_compcor, [('high_variance_masks', 'in1')]), + (acc_msk_bin, mrg_compcor, [(('out_file', _last), 'in2')]), + (subtract_mask, mrg_compcor, [('out_mask', 'in3')]), + (mrg_compcor, rois_plot, [('out', 'in_rois')]), + (rois_plot, ds_report_bold_rois, [('out_report', 'in_file')]), + (tcompcor, mrg_cc_metadata, [('metadata_file', 'in1')]), + (acompcor, mrg_cc_metadata, [('metadata_file', 'in2')]), + (crowncompcor, mrg_cc_metadata, [('metadata_file', 'in3')]), + (mrg_cc_metadata, compcor_plot, [('out', 'metadata_files')]), + (compcor_plot, ds_report_compcor, [('out_file', 'in_file')]), + (inputnode, conf_corr_plot, [('skip_vols', 'ignore_initial_volumes')]), + (concat, conf_corr_plot, [('confounds_file', 'confounds_file'), + (('confounds_file', _select_cols), 'columns')]), + (conf_corr_plot, ds_report_conf_corr, [('out_file', 'in_file')]), ]) # fmt: on @@ -602,7 +602,7 @@ def _select_cols(table): def init_carpetplot_wf( - mem_gb: float, metadata: dict, cifti_output: bool, name: str = "bold_carpet_wf" + mem_gb: float, metadata: dict, cifti_output: bool, name: str = 'bold_carpet_wf' ): """ Build a workflow to generate *carpet* plots. @@ -656,50 +656,50 @@ def init_carpetplot_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold", - "bold_mask", - "confounds_file", - "boldref2anat_xfm", - "std2anat_xfm", - "cifti_bold", - "crown_mask", - "acompcor_mask", - "dummy_scans", + 'bold', + 'bold_mask', + 'confounds_file', + 'boldref2anat_xfm', + 'std2anat_xfm', + 'cifti_bold', + 'crown_mask', + 'acompcor_mask', + 'dummy_scans', ] ), - name="inputnode", + name='inputnode', ) - outputnode = pe.Node(niu.IdentityInterface(fields=["out_carpetplot"]), name="outputnode") + outputnode = pe.Node(niu.IdentityInterface(fields=['out_carpetplot']), name='outputnode') # Carpetplot and confounds plot conf_plot = pe.Node( FMRISummary( - tr=metadata["RepetitionTime"], + tr=metadata['RepetitionTime'], confounds_list=[ - ("global_signal", None, "GS"), - ("csf", None, "CSF"), - ("white_matter", None, "WM"), - ("std_dvars", None, "DVARS"), - ("framewise_displacement", "mm", "FD"), + ('global_signal', None, 'GS'), + ('csf', None, 'CSF'), + ('white_matter', None, 'WM'), + ('std_dvars', None, 'DVARS'), + ('framewise_displacement', 'mm', 'FD'), ], ), - name="conf_plot", + name='conf_plot', mem_gb=mem_gb, ) ds_report_bold_conf = pe.Node( DerivativesDataSink( - desc="carpetplot", datatype="figures", extension="svg", dismiss_entities=dismiss_echo() + desc='carpetplot', datatype='figures', extension='svg', dismiss_entities=dismiss_echo() ), - name="ds_report_bold_conf", + name='ds_report_bold_conf', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) - parcels = pe.Node(niu.Function(function=_carpet_parcellation), name="parcels") + parcels = pe.Node(niu.Function(function=_carpet_parcellation), name='parcels') parcels.inputs.nifti = not cifti_output # List transforms - mrg_xfms = pe.Node(niu.Merge(2), name="mrg_xfms") + mrg_xfms = pe.Node(niu.Merge(2), name='mrg_xfms') # Warp segmentation into EPI space resample_parc = pe.Node( @@ -707,42 +707,42 @@ def init_carpetplot_wf( dimension=3, input_image=str( get_template( - "MNI152NLin2009cAsym", + 'MNI152NLin2009cAsym', resolution=1, - desc="carpet", - suffix="dseg", - extension=[".nii", ".nii.gz"], + desc='carpet', + suffix='dseg', + extension=['.nii', '.nii.gz'], ) ), invert_transform_flags=[True, False], - interpolation="MultiLabel", - args="-u int", + interpolation='MultiLabel', + args='-u int', ), - 
name="resample_parc", + name='resample_parc', ) workflow = Workflow(name=name) if cifti_output: - workflow.connect(inputnode, "cifti_bold", conf_plot, "in_cifti") + workflow.connect(inputnode, 'cifti_bold', conf_plot, 'in_cifti') workflow.connect([ (inputnode, mrg_xfms, [ - ("boldref2anat_xfm", "in1"), - ("std2anat_xfm", "in2"), + ('boldref2anat_xfm', 'in1'), + ('std2anat_xfm', 'in2'), ]), - (inputnode, resample_parc, [("bold_mask", "reference_image")]), - (inputnode, parcels, [("crown_mask", "crown_mask")]), - (inputnode, parcels, [("acompcor_mask", "acompcor_mask")]), + (inputnode, resample_parc, [('bold_mask', 'reference_image')]), + (inputnode, parcels, [('crown_mask', 'crown_mask')]), + (inputnode, parcels, [('acompcor_mask', 'acompcor_mask')]), (inputnode, conf_plot, [ - ("bold", "in_nifti"), - ("confounds_file", "confounds_file"), - ("dummy_scans", "drop_trs"), + ('bold', 'in_nifti'), + ('confounds_file', 'confounds_file'), + ('dummy_scans', 'drop_trs'), ]), - (mrg_xfms, resample_parc, [("out", "transforms")]), - (resample_parc, parcels, [("output_image", "segmentation")]), - (parcels, conf_plot, [("out", "in_segm")]), - (conf_plot, ds_report_bold_conf, [("out_file", "in_file")]), - (conf_plot, outputnode, [("out_file", "out_carpetplot")]), + (mrg_xfms, resample_parc, [('out', 'transforms')]), + (resample_parc, parcels, [('output_image', 'segmentation')]), + (parcels, conf_plot, [('out', 'in_segm')]), + (conf_plot, ds_report_bold_conf, [('out_file', 'in_file')]), + (conf_plot, outputnode, [('out_file', 'out_carpetplot')]), ]) # fmt:skip return workflow @@ -758,8 +758,8 @@ def _binary_union(mask1, mask2): mskarr1 = np.asanyarray(img.dataobj, dtype=int) > 0 mskarr2 = np.asanyarray(nb.load(mask2).dataobj, dtype=int) > 0 out = img.__class__(mskarr1 | mskarr2, img.affine, img.header) - out.set_data_dtype("uint8") - out_name = Path("mask_union.nii.gz").absolute() + out.set_data_dtype('uint8') + out_name = Path('mask_union.nii.gz').absolute() out.to_filename(out_name) return str(out_name) @@ -773,7 +773,7 @@ def _carpet_parcellation(segmentation, crown_mask, acompcor_mask, nifti=False): img = nb.load(segmentation) - lut = np.zeros((256,), dtype="uint8") + lut = np.zeros((256,), dtype='uint8') lut[100:201] = 1 if nifti else 0 # Ctx GM lut[30:99] = 2 if nifti else 0 # dGM lut[1:11] = 3 if nifti else 1 # WM+CSF @@ -784,9 +784,9 @@ def _carpet_parcellation(segmentation, crown_mask, acompcor_mask, nifti=False): # Separate deep from shallow WM+CSF seg[np.bool_(nb.load(acompcor_mask).dataobj)] = 4 if nifti else 1 - outimg = img.__class__(seg.astype("uint8"), img.affine, img.header) - outimg.set_data_dtype("uint8") - out_file = Path("segments.nii.gz").absolute() + outimg = img.__class__(seg.astype('uint8'), img.affine, img.header) + outimg.set_data_dtype('uint8') + out_file = Path('segments.nii.gz').absolute() outimg.to_filename(out_file) return str(out_file) diff --git a/fmriprep/workflows/bold/fit.py b/fmriprep/workflows/bold/fit.py index f81285e62..3aece325a 100644 --- a/fmriprep/workflows/bold/fit.py +++ b/fmriprep/workflows/bold/fit.py @@ -59,10 +59,10 @@ def get_sbrefs( - bold_files: ty.List[str], - entity_overrides: ty.Dict[str, ty.Any], + bold_files: list[str], + entity_overrides: dict[str, ty.Any], layout: bids.BIDSLayout, -) -> ty.List[str]: +) -> list[str]: """Find single-band reference(s) associated with BOLD file(s) Parameters @@ -81,23 +81,23 @@ def get_sbrefs( sorted by EchoTime """ entities = extract_entities(bold_files) - entities.pop("echo", None) - 
entities.update(suffix="sbref", extension=[".nii", ".nii.gz"]) + entities.pop('echo', None) + entities.update(suffix='sbref', extension=['.nii', '.nii.gz']) entities.update(entity_overrides) return sorted( - layout.get(return_type="file", **entities), - key=lambda fname: layout.get_metadata(fname).get("EchoTime"), + layout.get(return_type='file', **entities), + key=lambda fname: layout.get_metadata(fname).get('EchoTime'), ) def init_bold_fit_wf( *, - bold_series: ty.List[str], + bold_series: list[str], precomputed: dict = {}, fieldmap_id: ty.Optional[str] = None, omp_nthreads: int = 1, - name: str = "bold_fit_wf", + name: str = 'bold_fit_wf', ) -> pe.Workflow: """ This workflow controls the minimal estimation steps for functional preprocessing. @@ -216,121 +216,121 @@ def init_bold_fit_wf( ) basename = os.path.basename(bold_file) - sbref_msg = f"No single-band-reference found for {basename}." - if sbref_files and "sbref" in config.workflow.ignore: - sbref_msg = f"Single-band reference file(s) found for {basename} and ignored." + sbref_msg = f'No single-band-reference found for {basename}.' + if sbref_files and 'sbref' in config.workflow.ignore: + sbref_msg = f'Single-band reference file(s) found for {basename} and ignored.' sbref_files = [] elif sbref_files: - sbref_msg = "Using single-band reference file(s) {}.".format( - ",".join([os.path.basename(sbf) for sbf in sbref_files]) + sbref_msg = 'Using single-band reference file(s) {}.'.format( + ','.join([os.path.basename(sbf) for sbf in sbref_files]) ) config.loggers.workflow.info(sbref_msg) # Get metadata from BOLD file(s) entities = extract_entities(bold_series) metadata = layout.get_metadata(bold_file) - orientation = "".join(nb.aff2axcodes(nb.load(bold_file).affine)) + orientation = ''.join(nb.aff2axcodes(nb.load(bold_file).affine)) bold_tlen, mem_gb = estimate_bold_mem_usage(bold_file) # Boolean used to update workflow self-descriptions multiecho = len(bold_series) > 1 - have_hmcref = "hmc_boldref" in precomputed - have_coregref = "coreg_boldref" in precomputed + have_hmcref = 'hmc_boldref' in precomputed + have_coregref = 'coreg_boldref' in precomputed # Can contain # 1) boldref2fmap # 2) boldref2anat # 3) hmc - transforms = precomputed.get("transforms", {}) - hmc_xforms = transforms.get("hmc") - boldref2fmap_xform = transforms.get("boldref2fmap") - boldref2anat_xform = transforms.get("boldref2anat") + transforms = precomputed.get('transforms', {}) + hmc_xforms = transforms.get('hmc') + boldref2fmap_xform = transforms.get('boldref2fmap') + boldref2anat_xform = transforms.get('boldref2anat') workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold_file", + 'bold_file', # Fieldmap registration - "fmap", - "fmap_ref", - "fmap_coeff", - "fmap_mask", - "fmap_id", - "sdc_method", + 'fmap', + 'fmap_ref', + 'fmap_coeff', + 'fmap_mask', + 'fmap_id', + 'sdc_method', # Anatomical coregistration - "t1w_preproc", - "t1w_mask", - "t1w_dseg", - "subjects_dir", - "subject_id", - "fsnative2t1w_xfm", + 't1w_preproc', + 't1w_mask', + 't1w_dseg', + 'subjects_dir', + 'subject_id', + 'fsnative2t1w_xfm', ], ), - name="inputnode", + name='inputnode', ) inputnode.inputs.bold_file = bold_series outputnode = pe.Node( niu.IdentityInterface( fields=[ - "dummy_scans", - "hmc_boldref", - "coreg_boldref", - "bold_mask", - "motion_xfm", - "boldref2anat_xfm", - "boldref2fmap_xfm", - "movpar_file", - "rmsd_file", + 'dummy_scans', + 'hmc_boldref', + 'coreg_boldref', + 'bold_mask', + 'motion_xfm', + 'boldref2anat_xfm', + 
'boldref2fmap_xfm', + 'movpar_file', + 'rmsd_file', ], ), - name="outputnode", + name='outputnode', ) # If all derivatives exist, inputnode could go unconnected, so add explicitly workflow.add_nodes([inputnode]) hmcref_buffer = pe.Node( - niu.IdentityInterface(fields=["boldref", "bold_file", "dummy_scans"]), - name="hmcref_buffer", + niu.IdentityInterface(fields=['boldref', 'bold_file', 'dummy_scans']), + name='hmcref_buffer', ) - fmapref_buffer = pe.Node(niu.Function(function=_select_ref), name="fmapref_buffer") + fmapref_buffer = pe.Node(niu.Function(function=_select_ref), name='fmapref_buffer') hmc_buffer = pe.Node( - niu.IdentityInterface(fields=["hmc_xforms", "movpar_file", "rmsd_file"]), name="hmc_buffer" + niu.IdentityInterface(fields=['hmc_xforms', 'movpar_file', 'rmsd_file']), name='hmc_buffer' ) fmapreg_buffer = pe.Node( - niu.IdentityInterface(fields=["boldref2fmap_xfm"]), name="fmapreg_buffer" + niu.IdentityInterface(fields=['boldref2fmap_xfm']), name='fmapreg_buffer' ) regref_buffer = pe.Node( - niu.IdentityInterface(fields=["boldref", "boldmask"]), name="regref_buffer" + niu.IdentityInterface(fields=['boldref', 'boldmask']), name='regref_buffer' ) summary = pe.Node( FunctionalSummary( - distortion_correction="None", # Can override with connection - registration=("FSL", "FreeSurfer")[config.workflow.run_reconall], + distortion_correction='None', # Can override with connection + registration=('FSL', 'FreeSurfer')[config.workflow.run_reconall], registration_dof=config.workflow.bold2t1w_dof, registration_init=config.workflow.bold2t1w_init, - pe_direction=metadata.get("PhaseEncodingDirection"), - echo_idx=entities.get("echo", []), - tr=metadata["RepetitionTime"], + pe_direction=metadata.get('PhaseEncodingDirection'), + echo_idx=entities.get('echo', []), + tr=metadata['RepetitionTime'], orientation=orientation, ), - name="summary", + name='summary', mem_gb=config.DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) summary.inputs.dummy_scans = config.workflow.dummy_scans - if config.workflow.level == "full": + if config.workflow.level == 'full': # Hack. More pain than it's worth to connect this up at a higher level. # We can consider separating out fit and transform summaries, # or connect a bunch a bunch of summary parameters to outputnodes # to make available to the base workflow. 
summary.inputs.slice_timing = ( - bool(metadata.get("SliceTiming")) and "slicetiming" not in config.workflow.ignore + bool(metadata.get('SliceTiming')) and 'slicetiming' not in config.workflow.ignore ) func_fit_reports_wf = init_func_fit_reports_wf( @@ -343,42 +343,42 @@ def init_bold_fit_wf( # fmt:off workflow.connect([ (hmcref_buffer, outputnode, [ - ("boldref", "hmc_boldref"), - ("dummy_scans", "dummy_scans"), + ('boldref', 'hmc_boldref'), + ('dummy_scans', 'dummy_scans'), ]), (regref_buffer, outputnode, [ - ("boldref", "coreg_boldref"), - ("boldmask", "bold_mask"), + ('boldref', 'coreg_boldref'), + ('boldmask', 'bold_mask'), ]), - (fmapreg_buffer, outputnode, [("boldref2fmap_xfm", "boldref2fmap_xfm")]), + (fmapreg_buffer, outputnode, [('boldref2fmap_xfm', 'boldref2fmap_xfm')]), (hmc_buffer, outputnode, [ - ("hmc_xforms", "motion_xfm"), - ("movpar_file", "movpar_file"), - ("rmsd_file", "rmsd_file"), + ('hmc_xforms', 'motion_xfm'), + ('movpar_file', 'movpar_file'), + ('rmsd_file', 'rmsd_file'), ]), (inputnode, func_fit_reports_wf, [ - ("bold_file", "inputnode.source_file"), - ("t1w_preproc", "inputnode.t1w_preproc"), + ('bold_file', 'inputnode.source_file'), + ('t1w_preproc', 'inputnode.t1w_preproc'), # May not need all of these - ("t1w_mask", "inputnode.t1w_mask"), - ("t1w_dseg", "inputnode.t1w_dseg"), - ("subjects_dir", "inputnode.subjects_dir"), - ("subject_id", "inputnode.subject_id"), + ('t1w_mask', 'inputnode.t1w_mask'), + ('t1w_dseg', 'inputnode.t1w_dseg'), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), ]), (outputnode, func_fit_reports_wf, [ - ("coreg_boldref", "inputnode.coreg_boldref"), - ("bold_mask", "inputnode.bold_mask"), - ("boldref2anat_xfm", "inputnode.boldref2anat_xfm"), + ('coreg_boldref', 'inputnode.coreg_boldref'), + ('bold_mask', 'inputnode.bold_mask'), + ('boldref2anat_xfm', 'inputnode.boldref2anat_xfm'), ]), - (summary, func_fit_reports_wf, [("out_report", "inputnode.summary_report")]), + (summary, func_fit_reports_wf, [('out_report', 'inputnode.summary_report')]), ]) # fmt:on # Stage 1: Generate motion correction boldref if not have_hmcref: - config.loggers.workflow.info("Stage 1: Adding HMC boldref workflow") + config.loggers.workflow.info('Stage 1: Adding HMC boldref workflow') hmc_boldref_wf = init_raw_boldref_wf( - name="hmc_boldref_wf", + name='hmc_boldref_wf', bold_file=bold_file, multiecho=multiecho, ) @@ -395,37 +395,37 @@ def init_bold_fit_wf( # fmt:off workflow.connect([ (hmc_boldref_wf, hmcref_buffer, [ - ("outputnode.bold_file", "bold_file"), - ("outputnode.boldref", "boldref"), - ("outputnode.skip_vols", "dummy_scans"), + ('outputnode.bold_file', 'bold_file'), + ('outputnode.boldref', 'boldref'), + ('outputnode.skip_vols', 'dummy_scans'), ]), - (hmcref_buffer, ds_hmc_boldref_wf, [("boldref", "inputnode.boldref")]), - (hmc_boldref_wf, summary, [("outputnode.algo_dummy_scans", "algo_dummy_scans")]), + (hmcref_buffer, ds_hmc_boldref_wf, [('boldref', 'inputnode.boldref')]), + (hmc_boldref_wf, summary, [('outputnode.algo_dummy_scans', 'algo_dummy_scans')]), (hmc_boldref_wf, func_fit_reports_wf, [ - ("outputnode.validation_report", "inputnode.validation_report"), + ('outputnode.validation_report', 'inputnode.validation_report'), ]), ]) # fmt:on else: - config.loggers.workflow.info("Found HMC boldref - skipping Stage 1") + config.loggers.workflow.info('Found HMC boldref - skipping Stage 1') - validate_bold = pe.Node(ValidateImage(), name="validate_bold") + validate_bold = pe.Node(ValidateImage(), 
name='validate_bold') validate_bold.inputs.in_file = bold_file - hmcref_buffer.inputs.boldref = precomputed["hmc_boldref"] + hmcref_buffer.inputs.boldref = precomputed['hmc_boldref'] # fmt:off workflow.connect([ - (validate_bold, hmcref_buffer, [("out_file", "bold_file")]), - (validate_bold, func_fit_reports_wf, [("out_report", "inputnode.validation_report")]), + (validate_bold, hmcref_buffer, [('out_file', 'bold_file')]), + (validate_bold, func_fit_reports_wf, [('out_report', 'inputnode.validation_report')]), ]) # fmt:on # Stage 2: Estimate head motion if not hmc_xforms: - config.loggers.workflow.info("Stage 2: Adding motion correction workflow") + config.loggers.workflow.info('Stage 2: Adding motion correction workflow') bold_hmc_wf = init_bold_hmc_wf( - name="bold_hmc_wf", mem_gb=mem_gb["filesize"], omp_nthreads=omp_nthreads + name='bold_hmc_wf', mem_gb=mem_gb['filesize'], omp_nthreads=omp_nthreads ) ds_hmc_wf = init_ds_hmc_wf( @@ -437,25 +437,25 @@ def init_bold_fit_wf( # fmt:off workflow.connect([ (hmcref_buffer, bold_hmc_wf, [ - ("boldref", "inputnode.raw_ref_image"), - ("bold_file", "inputnode.bold_file"), + ('boldref', 'inputnode.raw_ref_image'), + ('bold_file', 'inputnode.bold_file'), ]), - (bold_hmc_wf, ds_hmc_wf, [("outputnode.xforms", "inputnode.xforms")]), + (bold_hmc_wf, ds_hmc_wf, [('outputnode.xforms', 'inputnode.xforms')]), (bold_hmc_wf, hmc_buffer, [ - ("outputnode.xforms", "hmc_xforms"), - ("outputnode.movpar_file", "movpar_file"), - ("outputnode.rmsd_file", "rmsd_file"), + ('outputnode.xforms', 'hmc_xforms'), + ('outputnode.movpar_file', 'movpar_file'), + ('outputnode.rmsd_file', 'rmsd_file'), ]), ]) # fmt:on else: - config.loggers.workflow.info("Found motion correction transforms - skipping Stage 2") + config.loggers.workflow.info('Found motion correction transforms - skipping Stage 2') hmc_buffer.inputs.hmc_xforms = hmc_xforms # Stage 3: Create coregistration reference # Fieldmap correction only happens during fit if this stage is needed if not have_coregref: - config.loggers.workflow.info("Stage 3: Adding coregistration boldref workflow") + config.loggers.workflow.info('Stage 3: Adding coregistration boldref workflow') # Select initial boldref, enhance contrast, and generate mask fmapref_buffer.inputs.sbref_files = sbref_files @@ -470,40 +470,40 @@ def init_bold_fit_wf( # fmt:off workflow.connect([ - (hmcref_buffer, fmapref_buffer, [("boldref", "boldref_files")]), - (fmapref_buffer, enhance_boldref_wf, [("out", "inputnode.in_file")]), - (fmapref_buffer, ds_coreg_boldref_wf, [("out", "inputnode.source_files")]), - (ds_coreg_boldref_wf, regref_buffer, [("outputnode.boldref", "boldref")]), - (fmapref_buffer, func_fit_reports_wf, [("out", "inputnode.sdc_boldref")]), + (hmcref_buffer, fmapref_buffer, [('boldref', 'boldref_files')]), + (fmapref_buffer, enhance_boldref_wf, [('out', 'inputnode.in_file')]), + (fmapref_buffer, ds_coreg_boldref_wf, [('out', 'inputnode.source_files')]), + (ds_coreg_boldref_wf, regref_buffer, [('outputnode.boldref', 'boldref')]), + (fmapref_buffer, func_fit_reports_wf, [('out', 'inputnode.sdc_boldref')]), ]) # fmt:on if fieldmap_id: fmap_select = pe.Node( KeySelect( - fields=["fmap_ref", "fmap_coeff", "fmap_mask", "sdc_method"], + fields=['fmap_ref', 'fmap_coeff', 'fmap_mask', 'sdc_method'], key=fieldmap_id, ), - name="fmap_select", + name='fmap_select', run_without_submitting=True, ) if not boldref2fmap_xform: fmapreg_wf = init_coeff2epi_wf( - debug="fieldmaps" in config.execution.debug, + debug='fieldmaps' in config.execution.debug, 
omp_nthreads=config.nipype.omp_nthreads, sloppy=config.execution.sloppy, - name="fmapreg_wf", + name='fmapreg_wf', ) - itk_mat2txt = pe.Node(ConcatenateXFMs(out_fmt="itk"), name="itk_mat2txt") + itk_mat2txt = pe.Node(ConcatenateXFMs(out_fmt='itk'), name='itk_mat2txt') ds_fmapreg_wf = init_ds_registration_wf( bids_root=layout.root, output_dir=config.execution.fmriprep_dir, - source="boldref", + source='boldref', dest=fieldmap_id.replace('_', ''), - name="ds_fmapreg_wf", + name='ds_fmapreg_wf', ) ds_fmapreg_wf.inputs.inputnode.source_files = [bold_file] @@ -514,8 +514,8 @@ def init_bold_fit_wf( ('outputnode.mask_file', 'inputnode.target_mask'), ]), (fmap_select, fmapreg_wf, [ - ("fmap_ref", "inputnode.fmap_ref"), - ("fmap_mask", "inputnode.fmap_mask"), + ('fmap_ref', 'inputnode.fmap_ref'), + ('fmap_mask', 'inputnode.fmap_mask'), ]), (fmapreg_wf, itk_mat2txt, [('outputnode.target2fmap_xfm', 'in_xfms')]), (itk_mat2txt, ds_fmapreg_wf, [('out_xfm', 'inputnode.xform')]), @@ -527,7 +527,7 @@ def init_bold_fit_wf( unwarp_wf = init_unwarp_wf( free_mem=config.environment.free_mem, - debug="fieldmaps" in config.execution.debug, + debug='fieldmaps' in config.execution.debug, omp_nthreads=config.nipype.omp_nthreads, ) unwarp_wf.inputs.inputnode.metadata = layout.get_metadata(bold_file) @@ -535,21 +535,21 @@ def init_bold_fit_wf( # fmt:off workflow.connect([ (inputnode, fmap_select, [ - ("fmap_ref", "fmap_ref"), - ("fmap_coeff", "fmap_coeff"), - ("fmap_mask", "fmap_mask"), - ("sdc_method", "sdc_method"), - ("fmap_id", "keys"), + ('fmap_ref', 'fmap_ref'), + ('fmap_coeff', 'fmap_coeff'), + ('fmap_mask', 'fmap_mask'), + ('sdc_method', 'sdc_method'), + ('fmap_id', 'keys'), ]), (fmap_select, unwarp_wf, [ - ("fmap_coeff", "inputnode.fmap_coeff"), + ('fmap_coeff', 'inputnode.fmap_coeff'), ]), (fmapreg_buffer, unwarp_wf, [ # This looks backwards, but unwarp_wf describes transforms in # terms of points while we (and init_coeff2epi_wf) describe them # in terms of images. Mapping fieldmap coordinates into boldref # coordinates maps the boldref image onto the fieldmap image. 
- ("boldref2fmap_xfm", "inputnode.fmap2data_xfm"), + ('boldref2fmap_xfm', 'inputnode.fmap2data_xfm'), ]), (enhance_boldref_wf, unwarp_wf, [ ('outputnode.bias_corrected_file', 'inputnode.distorted'), @@ -560,12 +560,12 @@ def init_bold_fit_wf( (unwarp_wf, regref_buffer, [ ('outputnode.corrected_mask', 'boldmask'), ]), - (fmap_select, func_fit_reports_wf, [("fmap_ref", "inputnode.fmap_ref")]), - (fmap_select, summary, [("sdc_method", "distortion_correction")]), + (fmap_select, func_fit_reports_wf, [('fmap_ref', 'inputnode.fmap_ref')]), + (fmap_select, summary, [('sdc_method', 'distortion_correction')]), (fmapreg_buffer, func_fit_reports_wf, [ - ("boldref2fmap_xfm", "inputnode.boldref2fmap_xfm"), + ('boldref2fmap_xfm', 'inputnode.boldref2fmap_xfm'), ]), - (unwarp_wf, func_fit_reports_wf, [("outputnode.fieldmap", "inputnode.fieldmap")]), + (unwarp_wf, func_fit_reports_wf, [('outputnode.fieldmap', 'inputnode.fieldmap')]), ]) # fmt:on else: @@ -580,8 +580,8 @@ def init_bold_fit_wf( ]) # fmt:on else: - config.loggers.workflow.info("Found coregistration reference - skipping Stage 3") - regref_buffer.inputs.boldref = precomputed["coreg_boldref"] + config.loggers.workflow.info('Found coregistration reference - skipping Stage 3') + regref_buffer.inputs.boldref = precomputed['coreg_boldref'] if not boldref2anat_xform: # calculate BOLD registration to T1w @@ -591,35 +591,35 @@ def init_bold_fit_wf( use_bbr=config.workflow.use_bbr, freesurfer=config.workflow.run_reconall, omp_nthreads=omp_nthreads, - mem_gb=mem_gb["resampled"], + mem_gb=mem_gb['resampled'], sloppy=config.execution.sloppy, ) ds_boldreg_wf = init_ds_registration_wf( bids_root=layout.root, output_dir=config.execution.fmriprep_dir, - source="boldref", - dest="T1w", - name="ds_boldreg_wf", + source='boldref', + dest='T1w', + name='ds_boldreg_wf', ) # fmt:off workflow.connect([ (inputnode, bold_reg_wf, [ - ("t1w_preproc", "inputnode.t1w_preproc"), - ("t1w_mask", "inputnode.t1w_mask"), - ("t1w_dseg", "inputnode.t1w_dseg"), + ('t1w_preproc', 'inputnode.t1w_preproc'), + ('t1w_mask', 'inputnode.t1w_mask'), + ('t1w_dseg', 'inputnode.t1w_dseg'), # Undefined if --fs-no-reconall, but this is safe - ("subjects_dir", "inputnode.subjects_dir"), - ("subject_id", "inputnode.subject_id"), - ("fsnative2t1w_xfm", "inputnode.fsnative2t1w_xfm"), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'), ]), - (regref_buffer, bold_reg_wf, [("boldref", "inputnode.ref_bold_brain")]), + (regref_buffer, bold_reg_wf, [('boldref', 'inputnode.ref_bold_brain')]), # Incomplete sources - (regref_buffer, ds_boldreg_wf, [("boldref", "inputnode.source_files")]), - (bold_reg_wf, ds_boldreg_wf, [("outputnode.itk_bold_to_t1", "inputnode.xform")]), - (ds_boldreg_wf, outputnode, [("outputnode.xform", "boldref2anat_xfm")]), - (bold_reg_wf, summary, [("outputnode.fallback", "fallback")]), + (regref_buffer, ds_boldreg_wf, [('boldref', 'inputnode.source_files')]), + (bold_reg_wf, ds_boldreg_wf, [('outputnode.itk_bold_to_t1', 'inputnode.xform')]), + (ds_boldreg_wf, outputnode, [('outputnode.xform', 'boldref2anat_xfm')]), + (bold_reg_wf, summary, [('outputnode.fallback', 'fallback')]), ]) # fmt:on else: @@ -630,10 +630,10 @@ def init_bold_fit_wf( def init_bold_native_wf( *, - bold_series: ty.List[str], + bold_series: list[str], fieldmap_id: ty.Optional[str] = None, omp_nthreads: int = 1, - name: str = "bold_native_wf", + name: str = 'bold_native_wf', ) -> pe.Workflow: r""" Minimal resampling workflow. 
@@ -721,7 +721,7 @@ def init_bold_native_wf( # Shortest echo first all_metadata = [layout.get_metadata(bold_file) for bold_file in bold_series] - echo_times = [md.get("EchoTime") for md in all_metadata] + echo_times = [md.get('EchoTime') for md in all_metadata] multiecho = len(bold_series) > 1 bold_file = bold_series[0] @@ -732,16 +732,17 @@ def init_bold_native_wf( if multiecho: shapes = [nb.load(echo).shape for echo in bold_series] if len(set(shapes)) != 1: - diagnostic = "\n".join( - f"{os.path.basename(echo)}: {shape}" for echo, shape in zip(bold_series, shapes) + diagnostic = '\n'.join( + f'{os.path.basename(echo)}: {shape}' + for echo, shape in zip(bold_series, shapes, strict=False) ) - raise RuntimeError(f"Multi-echo images found with mismatching shapes\n{diagnostic}") + raise RuntimeError(f'Multi-echo images found with mismatching shapes\n{diagnostic}') if len(shapes) == 2: raise RuntimeError( - "Multi-echo processing requires at least three different echos (found two)." + 'Multi-echo processing requires at least three different echos (found two).' ) - run_stc = bool(metadata.get("SliceTiming")) and "slicetiming" not in config.workflow.ignore + run_stc = bool(metadata.get('SliceTiming')) and 'slicetiming' not in config.workflow.ignore workflow = pe.Workflow(name=name) @@ -749,15 +750,15 @@ def init_bold_native_wf( niu.IdentityInterface( fields=[ # BOLD fit - "boldref", - "bold_mask", - "motion_xfm", - "boldref2fmap_xfm", - "dummy_scans", + 'boldref', + 'bold_mask', + 'motion_xfm', + 'boldref2fmap_xfm', + 'dummy_scans', # Fieldmap fit - "fmap_ref", - "fmap_coeff", - "fmap_id", + 'fmap_ref', + 'fmap_coeff', + 'fmap_id', ], ), name='inputnode', @@ -766,145 +767,145 @@ def init_bold_native_wf( outputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold_minimal", - "bold_native", - "metadata", + 'bold_minimal', + 'bold_native', + 'metadata', # Transforms - "motion_xfm", + 'motion_xfm', # Multiecho outputs - "bold_echos", # Individual corrected echos - "t2star_map", # T2* map + 'bold_echos', # Individual corrected echos + 't2star_map', # T2* map ], # fmt:skip ), - name="outputnode", + name='outputnode', ) outputnode.inputs.metadata = metadata boldbuffer = pe.Node( - niu.IdentityInterface(fields=["bold_file", "ro_time", "pe_dir"]), name="boldbuffer" + niu.IdentityInterface(fields=['bold_file', 'ro_time', 'pe_dir']), name='boldbuffer' ) # Track echo index - this allows us to treat multi- and single-echo workflows # almost identically - echo_index = pe.Node(niu.IdentityInterface(fields=["echoidx"]), name="echo_index") + echo_index = pe.Node(niu.IdentityInterface(fields=['echoidx']), name='echo_index') if multiecho: - echo_index.iterables = [("echoidx", range(len(bold_series)))] + echo_index.iterables = [('echoidx', range(len(bold_series)))] else: echo_index.inputs.echoidx = 0 # BOLD source: track original BOLD file(s) - bold_source = pe.Node(niu.Select(inlist=bold_series), name="bold_source") - validate_bold = pe.Node(ValidateImage(), name="validate_bold") + bold_source = pe.Node(niu.Select(inlist=bold_series), name='bold_source') + validate_bold = pe.Node(ValidateImage(), name='validate_bold') workflow.connect([ - (echo_index, bold_source, [("echoidx", "index")]), - (bold_source, validate_bold, [("out", "in_file")]), + (echo_index, bold_source, [('echoidx', 'index')]), + (bold_source, validate_bold, [('out', 'in_file')]), ]) # fmt:skip # Slice-timing correction if run_stc: bold_stc_wf = init_bold_stc_wf(metadata=metadata, mem_gb=mem_gb) workflow.connect([ - (inputnode, bold_stc_wf, 
[("dummy_scans", "inputnode.skip_vols")]), - (validate_bold, bold_stc_wf, [("out_file", "inputnode.bold_file")]), - (bold_stc_wf, boldbuffer, [("outputnode.stc_file", "bold_file")]), + (inputnode, bold_stc_wf, [('dummy_scans', 'inputnode.skip_vols')]), + (validate_bold, bold_stc_wf, [('out_file', 'inputnode.bold_file')]), + (bold_stc_wf, boldbuffer, [('outputnode.stc_file', 'bold_file')]), ]) # fmt:skip else: - workflow.connect([(validate_bold, boldbuffer, [("out_file", "bold_file")])]) + workflow.connect([(validate_bold, boldbuffer, [('out_file', 'bold_file')])]) # Prepare fieldmap metadata if fieldmap_id: fmap_select = pe.Node( - KeySelect(fields=["fmap_ref", "fmap_coeff"], key=fieldmap_id), - name="fmap_select", + KeySelect(fields=['fmap_ref', 'fmap_coeff'], key=fieldmap_id), + name='fmap_select', run_without_submitting=True, ) distortion_params = pe.Node( DistortionParameters(metadata=metadata, in_file=bold_file), - name="distortion_params", + name='distortion_params', run_without_submitting=True, ) workflow.connect([ (inputnode, fmap_select, [ - ("fmap_ref", "fmap_ref"), - ("fmap_coeff", "fmap_coeff"), - ("fmap_id", "keys"), + ('fmap_ref', 'fmap_ref'), + ('fmap_coeff', 'fmap_coeff'), + ('fmap_id', 'keys'), ]), (distortion_params, boldbuffer, [ - ("readout_time", "ro_time"), - ("pe_direction", "pe_dir"), + ('readout_time', 'ro_time'), + ('pe_direction', 'pe_dir'), ]), ]) # fmt:skip # Resample to boldref boldref_bold = pe.Node( - ResampleSeries(jacobian="fmap-jacobian" not in config.workflow.ignore), - name="boldref_bold", + ResampleSeries(jacobian='fmap-jacobian' not in config.workflow.ignore), + name='boldref_bold', n_procs=omp_nthreads, - mem_gb=mem_gb["resampled"], + mem_gb=mem_gb['resampled'], ) workflow.connect([ (inputnode, boldref_bold, [ - ("boldref", "ref_file"), - ("motion_xfm", "transforms"), + ('boldref', 'ref_file'), + ('motion_xfm', 'transforms'), ]), (boldbuffer, boldref_bold, [ - ("bold_file", "in_file"), - ("ro_time", "ro_time"), - ("pe_dir", "pe_dir"), + ('bold_file', 'in_file'), + ('ro_time', 'ro_time'), + ('pe_dir', 'pe_dir'), ]), ]) # fmt:skip if fieldmap_id: - boldref_fmap = pe.Node(ReconstructFieldmap(inverse=[True]), name="boldref_fmap", mem_gb=1) + boldref_fmap = pe.Node(ReconstructFieldmap(inverse=[True]), name='boldref_fmap', mem_gb=1) workflow.connect([ (inputnode, boldref_fmap, [ - ("boldref", "target_ref_file"), - ("boldref2fmap_xfm", "transforms"), + ('boldref', 'target_ref_file'), + ('boldref2fmap_xfm', 'transforms'), ]), (fmap_select, boldref_fmap, [ - ("fmap_coeff", "in_coeffs"), - ("fmap_ref", "fmap_ref_file"), + ('fmap_coeff', 'in_coeffs'), + ('fmap_ref', 'fmap_ref_file'), ]), - (boldref_fmap, boldref_bold, [("out_file", "fieldmap")]), + (boldref_fmap, boldref_bold, [('out_file', 'fieldmap')]), ]) # fmt:skip if multiecho: join_echos = pe.JoinNode( - niu.IdentityInterface(fields=["bold_files"]), - joinsource="echo_index", - joinfield=["bold_files"], - name="join_echos", + niu.IdentityInterface(fields=['bold_files']), + joinsource='echo_index', + joinfield=['bold_files'], + name='join_echos', run_without_submitting=True, ) # create optimal combination, adaptive T2* map bold_t2s_wf = init_bold_t2s_wf( echo_times=echo_times, - mem_gb=mem_gb["filesize"], + mem_gb=mem_gb['filesize'], omp_nthreads=config.nipype.omp_nthreads, - name="bold_t2smap_wf", + name='bold_t2smap_wf', ) # Do NOT set motion_xfm on outputnode # This prevents downstream resamplers from double-dipping workflow.connect([ - (inputnode, bold_t2s_wf, [("bold_mask", "inputnode.bold_mask")]), - 
(boldref_bold, join_echos, [("out_file", "bold_files")]), - (join_echos, bold_t2s_wf, [("bold_files", "inputnode.bold_file")]), - (join_echos, outputnode, [("bold_files", "bold_echos")]), + (inputnode, bold_t2s_wf, [('bold_mask', 'inputnode.bold_mask')]), + (boldref_bold, join_echos, [('out_file', 'bold_files')]), + (join_echos, bold_t2s_wf, [('bold_files', 'inputnode.bold_file')]), + (join_echos, outputnode, [('bold_files', 'bold_echos')]), (bold_t2s_wf, outputnode, [ - ("outputnode.bold", "bold_minimal"), - ("outputnode.bold", "bold_native"), - ("outputnode.t2star_map", "t2star_map"), + ('outputnode.bold', 'bold_minimal'), + ('outputnode.bold', 'bold_native'), + ('outputnode.t2star_map', 't2star_map'), ]), ]) # fmt:skip else: workflow.connect([ - (inputnode, outputnode, [("motion_xfm", "motion_xfm")]), - (boldbuffer, outputnode, [("bold_file", "bold_minimal")]), - (boldref_bold, outputnode, [("out_file", "bold_native")]), + (inputnode, outputnode, [('motion_xfm', 'motion_xfm')]), + (boldbuffer, outputnode, [('bold_file', 'bold_minimal')]), + (boldref_bold, outputnode, [('out_file', 'bold_native')]), ]) # fmt:skip return workflow diff --git a/fmriprep/workflows/bold/hmc.py b/fmriprep/workflows/bold/hmc.py index 01be7940b..9fd4d6c20 100644 --- a/fmriprep/workflows/bold/hmc.py +++ b/fmriprep/workflows/bold/hmc.py @@ -89,9 +89,7 @@ def init_bold_hmc_wf(mem_gb: float, omp_nthreads: int, name: str = 'bold_hmc_wf' (transformation matrices, and six corresponding rotation and translation parameters) are estimated before any spatiotemporal filtering using `mcflirt` [FSL {fsl_ver}, @mcflirt]. -""".format( - fsl_ver=fsl.Info().version() or '' - ) +""".format(fsl_ver=fsl.Info().version() or '') inputnode = pe.Node( niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']), name='inputnode' @@ -110,7 +108,7 @@ def init_bold_hmc_wf(mem_gb: float, omp_nthreads: int, name: str = 'bold_hmc_wf' fsl2itk = pe.Node(MCFLIRT2ITK(), name='fsl2itk', mem_gb=0.05, n_procs=omp_nthreads) normalize_motion = pe.Node( - NormalizeMotionParams(format='FSL'), name="normalize_motion", mem_gb=DEFAULT_MEMORY_MIN_GB + NormalizeMotionParams(format='FSL'), name='normalize_motion', mem_gb=DEFAULT_MEMORY_MIN_GB ) def _pick_rel(rms_files): diff --git a/fmriprep/workflows/bold/outputs.py b/fmriprep/workflows/bold/outputs.py index 01acf2c21..094484156 100644 --- a/fmriprep/workflows/bold/outputs.py +++ b/fmriprep/workflows/bold/outputs.py @@ -23,8 +23,6 @@ """Writing out derivative files.""" from __future__ import annotations -import typing as ty - import numpy as np from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe @@ -104,38 +102,38 @@ def prepare_timing_parameters(metadata: dict): timing_parameters = { key: metadata[key] for key in ( - "RepetitionTime", - "VolumeTiming", - "DelayTime", - "AcquisitionDuration", - "SliceTiming", + 'RepetitionTime', + 'VolumeTiming', + 'DelayTime', + 'AcquisitionDuration', + 'SliceTiming', ) if key in metadata } # Treat SliceTiming of [] or length 1 as equivalent to missing and remove it in any case - slice_timing = timing_parameters.pop("SliceTiming", []) + slice_timing = timing_parameters.pop('SliceTiming', []) run_stc = len(slice_timing) > 1 and 'slicetiming' not in config.workflow.ignore - timing_parameters["SliceTimingCorrected"] = run_stc + timing_parameters['SliceTimingCorrected'] = run_stc if len(slice_timing) > 1: st = sorted(slice_timing) TA = st[-1] + (st[1] - st[0]) # Final slice onset + slice duration # For constant TR paradigms, use DelayTime - if 
"RepetitionTime" in timing_parameters: - TR = timing_parameters["RepetitionTime"] + if 'RepetitionTime' in timing_parameters: + TR = timing_parameters['RepetitionTime'] if not np.isclose(TR, TA) and TA < TR: - timing_parameters["DelayTime"] = TR - TA + timing_parameters['DelayTime'] = TR - TA # For variable TR paradigms, use AcquisitionDuration - elif "VolumeTiming" in timing_parameters: - timing_parameters["AcquisitionDuration"] = TA + elif 'VolumeTiming' in timing_parameters: + timing_parameters['AcquisitionDuration'] = TA if run_stc: first, last = st[0], st[-1] frac = config.workflow.slice_time_ref tzero = np.round(first + frac * (last - first), 3) - timing_parameters["StartTime"] = tzero + timing_parameters['StartTime'] = tzero return timing_parameters @@ -145,7 +143,7 @@ def init_func_fit_reports_wf( sdc_correction: bool, freesurfer: bool, output_dir: str, - name="func_fit_reports_wf", + name='func_fit_reports_wf', ) -> pe.Workflow: """ Set up a battery of datasinks to store reports in the right location. @@ -193,34 +191,34 @@ def init_func_fit_reports_wf( workflow = pe.Workflow(name=name) inputfields = [ - "source_file", - "sdc_boldref", - "coreg_boldref", - "bold_mask", - "boldref2anat_xfm", - "boldref2fmap_xfm", - "t1w_preproc", - "t1w_mask", - "t1w_dseg", - "fieldmap", - "fmap_ref", + 'source_file', + 'sdc_boldref', + 'coreg_boldref', + 'bold_mask', + 'boldref2anat_xfm', + 'boldref2fmap_xfm', + 't1w_preproc', + 't1w_mask', + 't1w_dseg', + 'fieldmap', + 'fmap_ref', # May be missing - "subject_id", - "subjects_dir", + 'subject_id', + 'subjects_dir', # Report snippets - "summary_report", - "validation_report", + 'summary_report', + 'validation_report', ] - inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name="inputnode") + inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name='inputnode') ds_summary = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="summary", - datatype="figures", + desc='summary', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_report_summary", + name='ds_report_summary', run_without_submitting=True, mem_gb=config.DEFAULT_MEMORY_MIN_GB, ) @@ -228,11 +226,11 @@ def init_func_fit_reports_wf( ds_validation = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="validation", - datatype="figures", + desc='validation', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_report_validation", + name='ds_report_validation', run_without_submitting=True, mem_gb=config.DEFAULT_MEMORY_MIN_GB, ) @@ -244,15 +242,15 @@ def init_func_fit_reports_wf( default_value=0, float=True, invert_transform_flags=[True], - interpolation="LanczosWindowedSinc", + interpolation='LanczosWindowedSinc', ), - name="t1w_boldref", + name='t1w_boldref', mem_gb=1, ) t1w_wm = pe.Node( niu.Function(function=dseg_label), - name="t1w_wm", + name='t1w_wm', mem_gb=DEFAULT_MEMORY_MIN_GB, ) t1w_wm.inputs.label = 2 # BIDS default is WM=2 @@ -262,9 +260,9 @@ def init_func_fit_reports_wf( dimension=3, default_value=0, invert_transform_flags=[True], - interpolation="NearestNeighbor", + interpolation='NearestNeighbor', ), - name="boldref_wm", + name='boldref_wm', mem_gb=1, ) @@ -311,54 +309,54 @@ def init_func_fit_reports_wf( default_value=0, float=True, invert_transform_flags=[True], - interpolation="LanczosWindowedSinc", + interpolation='LanczosWindowedSinc', ), - name="fmapref_boldref", + name='fmapref_boldref', mem_gb=1, ) # SDC1 sdcreg_report = pe.Node( FieldmapReportlet( - reference_label="BOLD reference", - 
moving_label="Fieldmap reference", - show="both", + reference_label='BOLD reference', + moving_label='Fieldmap reference', + show='both', ), - name="sdecreg_report", + name='sdecreg_report', mem_gb=0.1, ) ds_sdcreg_report = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="fmapCoreg", - suffix="bold", - datatype="figures", + desc='fmapCoreg', + suffix='bold', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_sdcreg_report", + name='ds_sdcreg_report', ) # SDC2 sdc_report = pe.Node( SimpleBeforeAfter( - before_label="Distorted", - after_label="Corrected", + before_label='Distorted', + after_label='Corrected', dismiss_affine=True, ), - name="sdc_report", + name='sdc_report', mem_gb=0.1, ) ds_sdc_report = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="sdc", - suffix="bold", - datatype="figures", + desc='sdc', + suffix='bold', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_sdc_report", + name='ds_sdc_report', ) # fmt:off @@ -391,23 +389,23 @@ def init_func_fit_reports_wf( epi_t1_report = pe.Node( SimpleBeforeAfter( - before_label="T1w", - after_label="EPI", + before_label='T1w', + after_label='EPI', dismiss_affine=True, ), - name="epi_t1_report", + name='epi_t1_report', mem_gb=0.1, ) ds_epi_t1_report = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="coreg", - suffix="bold", - datatype="figures", + desc='coreg', + suffix='bold', + datatype='figures', dismiss_entities=dismiss_echo(), ), - name="ds_epi_t1_report", + name='ds_epi_t1_report', ) # fmt:off @@ -428,28 +426,28 @@ def init_ds_boldref_wf( bids_root, output_dir, desc: str, - name="ds_boldref_wf", + name='ds_boldref_wf', ) -> pe.Workflow: workflow = pe.Workflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(fields=["source_files", "boldref"]), - name="inputnode", + niu.IdentityInterface(fields=['source_files', 'boldref']), + name='inputnode', ) - outputnode = pe.Node(niu.IdentityInterface(fields=["boldref"]), name="outputnode") + outputnode = pe.Node(niu.IdentityInterface(fields=['boldref']), name='outputnode') - raw_sources = pe.Node(niu.Function(function=_bids_relative), name="raw_sources") + raw_sources = pe.Node(niu.Function(function=_bids_relative), name='raw_sources') raw_sources.inputs.bids_root = bids_root ds_boldref = pe.Node( DerivativesDataSink( base_directory=output_dir, desc=desc, - suffix="boldref", + suffix='boldref', compress=True, dismiss_entities=dismiss_echo(), ), - name="ds_boldref", + name='ds_boldref', run_without_submitting=True, ) @@ -477,12 +475,12 @@ def init_ds_registration_wf( workflow = pe.Workflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(fields=["source_files", "xform"]), - name="inputnode", + niu.IdentityInterface(fields=['source_files', 'xform']), + name='inputnode', ) - outputnode = pe.Node(niu.IdentityInterface(fields=["xform"]), name="outputnode") + outputnode = pe.Node(niu.IdentityInterface(fields=['xform']), name='outputnode') - raw_sources = pe.Node(niu.Function(function=_bids_relative), name="raw_sources") + raw_sources = pe.Node(niu.Function(function=_bids_relative), name='raw_sources') raw_sources.inputs.bids_root = bids_root ds_xform = pe.Node( @@ -491,7 +489,7 @@ def init_ds_registration_wf( mode='image', suffix='xfm', extension='.txt', - dismiss_entities=dismiss_echo(["part"]), + dismiss_entities=dismiss_echo(['part']), **{'from': source, 'to': dest}, ), name='ds_xform', @@ -516,30 +514,30 @@ def init_ds_hmc_wf( *, bids_root, output_dir, - name="ds_hmc_wf", + name='ds_hmc_wf', ) -> 
pe.Workflow: workflow = pe.Workflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(fields=["source_files", "xforms"]), - name="inputnode", + niu.IdentityInterface(fields=['source_files', 'xforms']), + name='inputnode', ) - outputnode = pe.Node(niu.IdentityInterface(fields=["xforms"]), name="outputnode") + outputnode = pe.Node(niu.IdentityInterface(fields=['xforms']), name='outputnode') - raw_sources = pe.Node(niu.Function(function=_bids_relative), name="raw_sources") + raw_sources = pe.Node(niu.Function(function=_bids_relative), name='raw_sources') raw_sources.inputs.bids_root = bids_root ds_xforms = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="hmc", - suffix="xfm", - extension=".txt", + desc='hmc', + suffix='xfm', + extension='.txt', compress=True, dismiss_entities=dismiss_echo(), - **{"from": "orig", "to": "boldref"}, + **{'from': 'orig', 'to': 'boldref'}, ), - name="ds_xforms", + name='ds_xforms', run_without_submitting=True, ) @@ -563,8 +561,8 @@ def init_ds_bold_native_wf( multiecho: bool, bold_output: bool, echo_output: bool, - all_metadata: ty.List[dict], - name="ds_bold_native_wf", + all_metadata: list[dict], + name='ds_bold_native_wf', ) -> pe.Workflow: metadata = all_metadata[0] timing_parameters = prepare_timing_parameters(metadata) @@ -671,7 +669,7 @@ def init_ds_bold_native_wf( run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) - ds_bold_echos.inputs.meta_dict = [{"EchoTime": md["EchoTime"]} for md in all_metadata] + ds_bold_echos.inputs.meta_dict = [{'EchoTime': md['EchoTime']} for md in all_metadata] workflow.connect([ (inputnode, ds_bold_echos, [ ('source_files', 'source_file'), @@ -687,8 +685,8 @@ def init_ds_volumes_wf( bids_root: str, output_dir: str, multiecho: bool, - metadata: ty.List[dict], - name="ds_volumes_wf", + metadata: list[dict], + name='ds_volumes_wf', ) -> pe.Workflow: timing_parameters = prepare_timing_parameters(metadata) @@ -753,11 +751,11 @@ def init_ds_volumes_wf( dimension=3, default_value=0, float=True, - interpolation="LanczosWindowedSinc", + interpolation='LanczosWindowedSinc', ), - name="resample_ref", + name='resample_ref', ) - resample_mask = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="resample_mask") + resample_mask = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='resample_mask') resamplers = [resample_ref, resample_mask] workflow.connect([ @@ -801,9 +799,9 @@ def init_ds_volumes_wf( dimension=3, default_value=0, float=True, - interpolation="LanczosWindowedSinc", + interpolation='LanczosWindowedSinc', ), - name="resample_t2star", + name='resample_t2star', ) ds_t2star = pe.Node( DerivativesDataSink( @@ -838,8 +836,8 @@ def init_ds_volumes_wf( ]) for datasink in datasinks ] + [ - (resampler, datasink, [("output_image", "in_file")]) - for resampler, datasink in zip(resamplers, datasinks) + (resampler, datasink, [('output_image', 'in_file')]) + for resampler, datasink in zip(resamplers, datasinks, strict=False) ] ) # fmt:skip @@ -905,7 +903,7 @@ def init_bold_preproc_report_wf( DerivativesDataSink( base_directory=reportlets_dir, desc='preproc', - datatype="figures", + datatype='figures', dismiss_entities=dismiss_echo(), ), name='ds_report_bold', diff --git a/fmriprep/workflows/bold/reference.py b/fmriprep/workflows/bold/reference.py index 0b4aeb87f..5e8873e0d 100644 --- a/fmriprep/workflows/bold/reference.py +++ b/fmriprep/workflows/bold/reference.py @@ -32,7 +32,7 @@ def init_raw_boldref_wf( bold_file=None, multiecho=False, - name="raw_boldref_wf", + name='raw_boldref_wf', ): """ 
Build a workflow that generates reference BOLD images for a series. @@ -90,20 +90,20 @@ def init_raw_boldref_wf( """ inputnode = pe.Node( - niu.IdentityInterface(fields=["bold_file", "dummy_scans"]), - name="inputnode", + niu.IdentityInterface(fields=['bold_file', 'dummy_scans']), + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( fields=[ - "bold_file", - "boldref", - "skip_vols", - "algo_dummy_scans", - "validation_report", + 'bold_file', + 'boldref', + 'skip_vols', + 'algo_dummy_scans', + 'validation_report', ] ), - name="outputnode", + name='outputnode', ) # Simplify manually setting input image @@ -112,35 +112,35 @@ def init_raw_boldref_wf( val_bold = pe.Node( ValidateImage(), - name="val_bold", + name='val_bold', mem_gb=DEFAULT_MEMORY_MIN_GB, ) - get_dummy = pe.Node(NonsteadyStatesDetector(), name="get_dummy") - gen_avg = pe.Node(RobustAverage(), name="gen_avg", mem_gb=1) + get_dummy = pe.Node(NonsteadyStatesDetector(), name='get_dummy') + gen_avg = pe.Node(RobustAverage(), name='gen_avg', mem_gb=1) calc_dummy_scans = pe.Node( - niu.Function(function=pass_dummy_scans, output_names=["skip_vols_num"]), - name="calc_dummy_scans", + niu.Function(function=pass_dummy_scans, output_names=['skip_vols_num']), + name='calc_dummy_scans', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) # fmt: off workflow.connect([ - (inputnode, val_bold, [("bold_file", "in_file")]), - (inputnode, get_dummy, [("bold_file", "in_file")]), - (inputnode, calc_dummy_scans, [("dummy_scans", "dummy_scans")]), - (val_bold, gen_avg, [("out_file", "in_file")]), - (get_dummy, gen_avg, [("t_mask", "t_mask")]), - (get_dummy, calc_dummy_scans, [("n_dummy", "algo_dummy_scans")]), + (inputnode, val_bold, [('bold_file', 'in_file')]), + (inputnode, get_dummy, [('bold_file', 'in_file')]), + (inputnode, calc_dummy_scans, [('dummy_scans', 'dummy_scans')]), + (val_bold, gen_avg, [('out_file', 'in_file')]), + (get_dummy, gen_avg, [('t_mask', 't_mask')]), + (get_dummy, calc_dummy_scans, [('n_dummy', 'algo_dummy_scans')]), (val_bold, outputnode, [ - ("out_file", "bold_file"), - ("out_report", "validation_report"), + ('out_file', 'bold_file'), + ('out_report', 'validation_report'), ]), - (calc_dummy_scans, outputnode, [("skip_vols_num", "skip_vols")]), - (gen_avg, outputnode, [("out_file", "boldref")]), - (get_dummy, outputnode, [("n_dummy", "algo_dummy_scans")]), + (calc_dummy_scans, outputnode, [('skip_vols_num', 'skip_vols')]), + (gen_avg, outputnode, [('out_file', 'boldref')]), + (get_dummy, outputnode, [('n_dummy', 'algo_dummy_scans')]), ]) # fmt: on diff --git a/fmriprep/workflows/bold/registration.py b/fmriprep/workflows/bold/registration.py index bde2423f2..409631602 100644 --- a/fmriprep/workflows/bold/registration.py +++ b/fmriprep/workflows/bold/registration.py @@ -38,7 +38,6 @@ from nipype.pipeline import engine as pe from ... 
import config, data -from ...interfaces import DerivativesDataSink DEFAULT_MEMORY_MIN_GB = config.DEFAULT_MEMORY_MIN_GB LOGGER = config.loggers.workflow @@ -268,7 +267,6 @@ def init_bbreg_wf( """ from nipype.interfaces.freesurfer import BBRegister, MRICoreg from niworkflows.engine.workflows import LiterateWorkflow as Workflow - from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert from niworkflows.interfaces.nitransforms import ConcatenateXFMs workflow = Workflow(name=name) @@ -302,16 +300,16 @@ def init_bbreg_wf( name='outputnode', ) - if bold2t1w_init not in ("register", "header"): - raise ValueError(f"Unknown BOLD-T1w initialization option: {bold2t1w_init}") + if bold2t1w_init not in ('register', 'header'): + raise ValueError(f'Unknown BOLD-T1w initialization option: {bold2t1w_init}') # For now make BBR unconditional - in the future, we can fall back to identity, # but adding the flexibility without testing seems a bit dangerous - if bold2t1w_init == "header": + if bold2t1w_init == 'header': if use_bbr is False: - raise ValueError("Cannot disable BBR and use header registration") + raise ValueError('Cannot disable BBR and use header registration') if use_bbr is None: - LOGGER.warning("Initializing BBR with header; affine fallback disabled") + LOGGER.warning('Initializing BBR with header; affine fallback disabled') use_bbr = True # Define both nodes, but only connect conditionally @@ -331,8 +329,8 @@ def init_bbreg_wf( name='bbregister', mem_gb=12, ) - if bold2t1w_init == "header": - bbregister.inputs.init = "header" + if bold2t1w_init == 'header': + bbregister.inputs.init = 'header' transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms') # In cases where Merge(2) only has `in1` or `in2` defined @@ -354,7 +352,7 @@ def init_bbreg_wf( ]) # fmt:skip # Do not initialize with header, use mri_coreg - if bold2t1w_init == "register": + if bold2t1w_init == 'register': workflow.connect([ (inputnode, mri_coreg, [('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), @@ -513,11 +511,11 @@ def init_fsl_bbr_wf( wm_mask = pe.Node(niu.Function(function=_dseg_label), name='wm_mask') wm_mask.inputs.label = 2 # BIDS default is WM=2 - if bold2t1w_init not in ("register", "header"): - raise ValueError(f"Unknown BOLD-T1w initialization option: {bold2t1w_init}") + if bold2t1w_init not in ('register', 'header'): + raise ValueError(f'Unknown BOLD-T1w initialization option: {bold2t1w_init}') - if bold2t1w_init == "header": - raise NotImplementedError("Header-based registration initialization not supported for FSL") + if bold2t1w_init == 'header': + raise NotImplementedError('Header-based registration initialization not supported for FSL') # Mask T1w_preproc with T1w_mask to make T1w_brain mask_t1w_brain = pe.Node(ApplyMask(), name='mask_t1w_brain') @@ -577,7 +575,7 @@ def init_fsl_bbr_wf( return workflow flt_bbr = pe.Node( - fsl.FLIRT(cost_func='bbr', dof=bold2t1w_dof, args="-basescale 1"), + fsl.FLIRT(cost_func='bbr', dof=bold2t1w_dof, args='-basescale 1'), name='flt_bbr', ) @@ -586,7 +584,7 @@ def init_fsl_bbr_wf( flt_bbr.inputs.schedule = schedule else: # Should mostly be hit while building docs - LOGGER.warning("FSLDIR unset - using packaged BBR schedule") + LOGGER.warning('FSLDIR unset - using packaged BBR schedule') flt_bbr.inputs.schedule = data.load('flirtsch/bbr.sch') # fmt:off workflow.connect([ @@ -598,14 +596,14 @@ def init_fsl_bbr_wf( if sloppy is True: downsample = pe.Node( niu.Function( - function=_conditional_downsampling, 
output_names=["out_file", "out_mask"] + function=_conditional_downsampling, output_names=['out_file', 'out_mask'] ), name='downsample', ) # fmt:off workflow.connect([ - (mask_t1w_brain, downsample, [("out_file", "in_file")]), - (wm_mask, downsample, [("out", "in_mask")]), + (mask_t1w_brain, downsample, [('out_file', 'in_file')]), + (wm_mask, downsample, [('out', 'in_mask')]), (downsample, flt_bbr, [('out_file', 'reference'), ('out_mask', 'wm_seg')]), ]) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 771f84037..b5f13ee0b 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -51,11 +51,11 @@ def init_bold_surf_wf( *, mem_gb: float, - surface_spaces: ty.List[str], + surface_spaces: list[str], medial_surface_nan: bool, metadata: dict, output_dir: str, - name: str = "bold_surf_wf", + name: str = 'bold_surf_wf', ): """ Sample functional images to FreeSurfer surfaces. @@ -121,67 +121,65 @@ def init_bold_surf_wf( The BOLD time-series were resampled onto the following surfaces (FreeSurfer reconstruction nomenclature): {out_spaces}. -""".format( - out_spaces=", ".join(["*%s*" % s for s in surface_spaces]) - ) +""".format(out_spaces=', '.join(['*%s*' % s for s in surface_spaces])) inputnode = pe.Node( niu.IdentityInterface( fields=[ - "source_file", - "bold_t1w", - "subject_id", - "subjects_dir", - "fsnative2t1w_xfm", + 'source_file', + 'bold_t1w', + 'subject_id', + 'subjects_dir', + 'fsnative2t1w_xfm', ] ), - name="inputnode", + name='inputnode', ) - itersource = pe.Node(niu.IdentityInterface(fields=["target"]), name="itersource") - itersource.iterables = [("target", surface_spaces)] + itersource = pe.Node(niu.IdentityInterface(fields=['target']), name='itersource') + itersource.iterables = [('target', surface_spaces)] - get_fsnative = pe.Node(FreeSurferSource(), name="get_fsnative", run_without_submitting=True) + get_fsnative = pe.Node(FreeSurferSource(), name='get_fsnative', run_without_submitting=True) def select_target(subject_id, space): """Get the target subject ID, given a source subject ID and a target space.""" - return subject_id if space == "fsnative" else space + return subject_id if space == 'fsnative' else space targets = pe.Node( niu.Function(function=select_target), - name="targets", + name='targets', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) itk2lta = pe.Node( - ConcatenateXFMs(out_fmt="fs", inverse=True), name="itk2lta", run_without_submitting=True + ConcatenateXFMs(out_fmt='fs', inverse=True), name='itk2lta', run_without_submitting=True ) sampler = pe.MapNode( fs.SampleToSurface( - interp_method="trilinear", - out_type="gii", + interp_method='trilinear', + out_type='gii', override_reg_subj=True, - sampling_method="average", + sampling_method='average', sampling_range=(0, 1, 0.2), - sampling_units="frac", + sampling_units='frac', ), - iterfield=["hemi"], - name="sampler", + iterfield=['hemi'], + name='sampler', mem_gb=mem_gb * 3, ) - sampler.inputs.hemi = ["lh", "rh"] + sampler.inputs.hemi = ['lh', 'rh'] update_metadata = pe.MapNode( GiftiSetAnatomicalStructure(), - iterfield=["in_file"], - name="update_metadata", + iterfield=['in_file'], + name='update_metadata', mem_gb=DEFAULT_MEMORY_MIN_GB, ) ds_bold_surfs = pe.MapNode( DerivativesDataSink( base_directory=output_dir, - extension=".func.gii", + extension='.func.gii', dismiss_entities=dismiss_echo(), TaskName=metadata.get('TaskName'), **timing_parameters, @@ -191,52 +189,52 @@ def select_target(subject_id, space): 
run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) - ds_bold_surfs.inputs.hemi = ["L", "R"] + ds_bold_surfs.inputs.hemi = ['L', 'R'] workflow.connect([ (inputnode, get_fsnative, [ - ("subject_id", "subject_id"), - ("subjects_dir", "subjects_dir") + ('subject_id', 'subject_id'), + ('subjects_dir', 'subjects_dir') ]), - (inputnode, targets, [("subject_id", "subject_id")]), + (inputnode, targets, [('subject_id', 'subject_id')]), (inputnode, itk2lta, [ - ("bold_t1w", "moving"), - ("fsnative2t1w_xfm", "in_xfms"), + ('bold_t1w', 'moving'), + ('fsnative2t1w_xfm', 'in_xfms'), ]), - (get_fsnative, itk2lta, [("T1", "reference")]), + (get_fsnative, itk2lta, [('T1', 'reference')]), (inputnode, sampler, [ - ("subjects_dir", "subjects_dir"), - ("subject_id", "subject_id"), - ("bold_t1w", "source_file"), + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id'), + ('bold_t1w', 'source_file'), ]), - (itersource, targets, [("target", "space")]), - (itk2lta, sampler, [("out_inv", "reg_file")]), - (targets, sampler, [("out", "target_subject")]), - (inputnode, ds_bold_surfs, [("source_file", "source_file")]), - (itersource, ds_bold_surfs, [("target", "space")]), - (update_metadata, ds_bold_surfs, [("out_file", "in_file")]), + (itersource, targets, [('target', 'space')]), + (itk2lta, sampler, [('out_inv', 'reg_file')]), + (targets, sampler, [('out', 'target_subject')]), + (inputnode, ds_bold_surfs, [('source_file', 'source_file')]), + (itersource, ds_bold_surfs, [('target', 'space')]), + (update_metadata, ds_bold_surfs, [('out_file', 'in_file')]), ]) # fmt:skip # Refine if medial vertices should be NaNs medial_nans = pe.MapNode( - MedialNaNs(), iterfield=["in_file"], name="medial_nans", mem_gb=DEFAULT_MEMORY_MIN_GB + MedialNaNs(), iterfield=['in_file'], name='medial_nans', mem_gb=DEFAULT_MEMORY_MIN_GB ) if medial_surface_nan: # fmt: off workflow.connect([ - (inputnode, medial_nans, [("subjects_dir", "subjects_dir")]), - (sampler, medial_nans, [("out_file", "in_file")]), - (medial_nans, update_metadata, [("out_file", "in_file")]), + (inputnode, medial_nans, [('subjects_dir', 'subjects_dir')]), + (sampler, medial_nans, [('out_file', 'in_file')]), + (medial_nans, update_metadata, [('out_file', 'in_file')]), ]) # fmt: on else: - workflow.connect([(sampler, update_metadata, [("out_file", "in_file")])]) + workflow.connect([(sampler, update_metadata, [('out_file', 'in_file')])]) return workflow -def init_goodvoxels_bold_mask_wf(mem_gb: float, name: str = "goodvoxels_bold_mask_wf"): +def init_goodvoxels_bold_mask_wf(mem_gb: float, name: str = 'goodvoxels_bold_mask_wf'): """Calculate a mask of a BOLD series excluding high variance voxels. 
Workflow Graph @@ -273,103 +271,103 @@ def init_goodvoxels_bold_mask_wf(mem_gb: float, name: str = "goodvoxels_bold_mas inputnode = pe.Node( niu.IdentityInterface( fields=[ - "anat_ribbon", - "bold_file", + 'anat_ribbon', + 'bold_file', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( fields=[ - "goodvoxels_mask", - "goodvoxels_ribbon", + 'goodvoxels_mask', + 'goodvoxels_ribbon', ] ), - name="outputnode", + name='outputnode', ) ribbon_boldsrc_xfm = pe.Node( ApplyTransforms(interpolation='MultiLabel', transforms='identity'), - name="ribbon_boldsrc_xfm", + name='ribbon_boldsrc_xfm', mem_gb=mem_gb, ) stdev_volume = pe.Node( fsl.maths.StdImage(dimension='T'), - name="stdev_volume", + name='stdev_volume', mem_gb=DEFAULT_MEMORY_MIN_GB, ) mean_volume = pe.Node( fsl.maths.MeanImage(dimension='T'), - name="mean_volume", + name='mean_volume', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_volume = pe.Node( fsl.maths.BinaryMaths(operation='div'), - name="cov_volume", + name='cov_volume', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_ribbon = pe.Node( fsl.ApplyMask(), - name="cov_ribbon", + name='cov_ribbon', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_ribbon_mean = pe.Node( fsl.ImageStats(op_string='-M'), - name="cov_ribbon_mean", + name='cov_ribbon_mean', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_ribbon_std = pe.Node( fsl.ImageStats(op_string='-S'), - name="cov_ribbon_std", + name='cov_ribbon_std', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_ribbon_norm = pe.Node( fsl.maths.BinaryMaths(operation='div'), - name="cov_ribbon_norm", + name='cov_ribbon_norm', mem_gb=DEFAULT_MEMORY_MIN_GB, ) smooth_norm = pe.Node( - fsl.maths.MathsCommand(args="-bin -s 5"), - name="smooth_norm", + fsl.maths.MathsCommand(args='-bin -s 5'), + name='smooth_norm', mem_gb=DEFAULT_MEMORY_MIN_GB, ) merge_smooth_norm = pe.Node( niu.Merge(1), - name="merge_smooth_norm", + name='merge_smooth_norm', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) cov_ribbon_norm_smooth = pe.Node( fsl.maths.MultiImageMaths(op_string='-s 5 -div %s -dilD'), - name="cov_ribbon_norm_smooth", + name='cov_ribbon_norm_smooth', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_norm = pe.Node( fsl.maths.BinaryMaths(operation='div'), - name="cov_norm", + name='cov_norm', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_norm_modulate = pe.Node( fsl.maths.BinaryMaths(operation='div'), - name="cov_norm_modulate", + name='cov_norm_modulate', mem_gb=DEFAULT_MEMORY_MIN_GB, ) cov_norm_modulate_ribbon = pe.Node( fsl.ApplyMask(), - name="cov_norm_modulate_ribbon", + name='cov_norm_modulate_ribbon', mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -378,9 +376,9 @@ def _calc_upper_thr(in_stats): upper_thr_val = pe.Node( Function( - input_names=["in_stats"], output_names=["upper_thresh"], function=_calc_upper_thr + input_names=['in_stats'], output_names=['upper_thresh'], function=_calc_upper_thr ), - name="upper_thr_val", + name='upper_thr_val', mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -389,53 +387,53 @@ def _calc_lower_thr(in_stats): lower_thr_val = pe.Node( Function( - input_names=["in_stats"], output_names=["lower_thresh"], function=_calc_lower_thr + input_names=['in_stats'], output_names=['lower_thresh'], function=_calc_lower_thr ), - name="lower_thr_val", + name='lower_thr_val', mem_gb=DEFAULT_MEMORY_MIN_GB, ) mod_ribbon_mean = pe.Node( fsl.ImageStats(op_string='-M'), - name="mod_ribbon_mean", + name='mod_ribbon_mean', mem_gb=DEFAULT_MEMORY_MIN_GB, ) mod_ribbon_std = pe.Node( fsl.ImageStats(op_string='-S'), - name="mod_ribbon_std", + name='mod_ribbon_std', mem_gb=DEFAULT_MEMORY_MIN_GB, ) 
merge_mod_ribbon_stats = pe.Node( niu.Merge(2), - name="merge_mod_ribbon_stats", + name='merge_mod_ribbon_stats', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) bin_mean_volume = pe.Node( - fsl.maths.UnaryMaths(operation="bin"), - name="bin_mean_volume", + fsl.maths.UnaryMaths(operation='bin'), + name='bin_mean_volume', mem_gb=DEFAULT_MEMORY_MIN_GB, ) merge_goodvoxels_operands = pe.Node( niu.Merge(2), - name="merge_goodvoxels_operands", + name='merge_goodvoxels_operands', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) goodvoxels_thr = pe.Node( fsl.maths.Threshold(), - name="goodvoxels_thr", + name='goodvoxels_thr', mem_gb=mem_gb, ) goodvoxels_mask = pe.Node( fsl.maths.MultiImageMaths(op_string='-bin -sub %s -mul -1'), - name="goodvoxels_mask", + name='goodvoxels_mask', mem_gb=mem_gb, ) @@ -443,40 +441,40 @@ def _calc_lower_thr(in_stats): # in bold timeseries, based on modulated normalized covariance workflow.connect( [ - (inputnode, ribbon_boldsrc_xfm, [("anat_ribbon", "input_image")]), - (inputnode, stdev_volume, [("bold_file", "in_file")]), - (inputnode, mean_volume, [("bold_file", "in_file")]), - (mean_volume, ribbon_boldsrc_xfm, [("out_file", "reference_image")]), - (stdev_volume, cov_volume, [("out_file", "in_file")]), - (mean_volume, cov_volume, [("out_file", "operand_file")]), - (cov_volume, cov_ribbon, [("out_file", "in_file")]), - (ribbon_boldsrc_xfm, cov_ribbon, [("output_image", "mask_file")]), - (cov_ribbon, cov_ribbon_mean, [("out_file", "in_file")]), - (cov_ribbon, cov_ribbon_std, [("out_file", "in_file")]), - (cov_ribbon, cov_ribbon_norm, [("out_file", "in_file")]), - (cov_ribbon_mean, cov_ribbon_norm, [("out_stat", "operand_value")]), - (cov_ribbon_norm, smooth_norm, [("out_file", "in_file")]), - (smooth_norm, merge_smooth_norm, [("out_file", "in1")]), - (cov_ribbon_norm, cov_ribbon_norm_smooth, [("out_file", "in_file")]), - (merge_smooth_norm, cov_ribbon_norm_smooth, [("out", "operand_files")]), - (cov_ribbon_mean, cov_norm, [("out_stat", "operand_value")]), - (cov_volume, cov_norm, [("out_file", "in_file")]), - (cov_norm, cov_norm_modulate, [("out_file", "in_file")]), - (cov_ribbon_norm_smooth, cov_norm_modulate, [("out_file", "operand_file")]), - (cov_norm_modulate, cov_norm_modulate_ribbon, [("out_file", "in_file")]), - (ribbon_boldsrc_xfm, cov_norm_modulate_ribbon, [("output_image", "mask_file")]), - (cov_norm_modulate_ribbon, mod_ribbon_mean, [("out_file", "in_file")]), - (cov_norm_modulate_ribbon, mod_ribbon_std, [("out_file", "in_file")]), - (mod_ribbon_mean, merge_mod_ribbon_stats, [("out_stat", "in1")]), - (mod_ribbon_std, merge_mod_ribbon_stats, [("out_stat", "in2")]), - (merge_mod_ribbon_stats, upper_thr_val, [("out", "in_stats")]), - (merge_mod_ribbon_stats, lower_thr_val, [("out", "in_stats")]), - (mean_volume, bin_mean_volume, [("out_file", "in_file")]), - (upper_thr_val, goodvoxels_thr, [("upper_thresh", "thresh")]), - (cov_norm_modulate, goodvoxels_thr, [("out_file", "in_file")]), - (bin_mean_volume, merge_goodvoxels_operands, [("out_file", "in1")]), - (goodvoxels_thr, goodvoxels_mask, [("out_file", "in_file")]), - (merge_goodvoxels_operands, goodvoxels_mask, [("out", "operand_files")]), + (inputnode, ribbon_boldsrc_xfm, [('anat_ribbon', 'input_image')]), + (inputnode, stdev_volume, [('bold_file', 'in_file')]), + (inputnode, mean_volume, [('bold_file', 'in_file')]), + (mean_volume, ribbon_boldsrc_xfm, [('out_file', 'reference_image')]), + (stdev_volume, cov_volume, [('out_file', 'in_file')]), + (mean_volume, cov_volume, 
[('out_file', 'operand_file')]), + (cov_volume, cov_ribbon, [('out_file', 'in_file')]), + (ribbon_boldsrc_xfm, cov_ribbon, [('output_image', 'mask_file')]), + (cov_ribbon, cov_ribbon_mean, [('out_file', 'in_file')]), + (cov_ribbon, cov_ribbon_std, [('out_file', 'in_file')]), + (cov_ribbon, cov_ribbon_norm, [('out_file', 'in_file')]), + (cov_ribbon_mean, cov_ribbon_norm, [('out_stat', 'operand_value')]), + (cov_ribbon_norm, smooth_norm, [('out_file', 'in_file')]), + (smooth_norm, merge_smooth_norm, [('out_file', 'in1')]), + (cov_ribbon_norm, cov_ribbon_norm_smooth, [('out_file', 'in_file')]), + (merge_smooth_norm, cov_ribbon_norm_smooth, [('out', 'operand_files')]), + (cov_ribbon_mean, cov_norm, [('out_stat', 'operand_value')]), + (cov_volume, cov_norm, [('out_file', 'in_file')]), + (cov_norm, cov_norm_modulate, [('out_file', 'in_file')]), + (cov_ribbon_norm_smooth, cov_norm_modulate, [('out_file', 'operand_file')]), + (cov_norm_modulate, cov_norm_modulate_ribbon, [('out_file', 'in_file')]), + (ribbon_boldsrc_xfm, cov_norm_modulate_ribbon, [('output_image', 'mask_file')]), + (cov_norm_modulate_ribbon, mod_ribbon_mean, [('out_file', 'in_file')]), + (cov_norm_modulate_ribbon, mod_ribbon_std, [('out_file', 'in_file')]), + (mod_ribbon_mean, merge_mod_ribbon_stats, [('out_stat', 'in1')]), + (mod_ribbon_std, merge_mod_ribbon_stats, [('out_stat', 'in2')]), + (merge_mod_ribbon_stats, upper_thr_val, [('out', 'in_stats')]), + (merge_mod_ribbon_stats, lower_thr_val, [('out', 'in_stats')]), + (mean_volume, bin_mean_volume, [('out_file', 'in_file')]), + (upper_thr_val, goodvoxels_thr, [('upper_thresh', 'thresh')]), + (cov_norm_modulate, goodvoxels_thr, [('out_file', 'in_file')]), + (bin_mean_volume, merge_goodvoxels_operands, [('out_file', 'in1')]), + (goodvoxels_thr, goodvoxels_mask, [('out_file', 'in_file')]), + (merge_goodvoxels_operands, goodvoxels_mask, [('out', 'operand_files')]), ] ) @@ -484,17 +482,17 @@ def _calc_lower_thr(in_stats): fsl.ApplyMask(), name_source=['in_file'], keep_extension=True, - name="goodvoxels_ribbon_mask", + name='goodvoxels_ribbon_mask', mem_gb=DEFAULT_MEMORY_MIN_GB, ) # apply goodvoxels ribbon mask to bold workflow.connect( [ - (goodvoxels_mask, goodvoxels_ribbon_mask, [("out_file", "in_file")]), - (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [("output_image", "mask_file")]), - (goodvoxels_mask, outputnode, [("out_file", "goodvoxels_mask")]), - (goodvoxels_ribbon_mask, outputnode, [("out_file", "goodvoxels_ribbon")]), + (goodvoxels_mask, goodvoxels_ribbon_mask, [('out_file', 'in_file')]), + (ribbon_boldsrc_xfm, goodvoxels_ribbon_mask, [('output_image', 'mask_file')]), + (goodvoxels_mask, outputnode, [('out_file', 'goodvoxels_mask')]), + (goodvoxels_ribbon_mask, outputnode, [('out_file', 'goodvoxels_ribbon')]), ] ) @@ -505,7 +503,7 @@ def init_bold_fsLR_resampling_wf( grayord_density: ty.Literal['91k', '170k'], omp_nthreads: int, mem_gb: float, - name: str = "bold_fsLR_resampling_wf", + name: str = 'bold_fsLR_resampling_wf', ): """Resample BOLD time series to fsLR surface. 
@@ -569,7 +567,7 @@ def init_bold_fsLR_resampling_wf( from fmriprep.interfaces.workbench import VolumeToSurfaceMapping - fslr_density = "32k" if grayord_density == "91k" else "59k" + fslr_density = '32k' if grayord_density == '91k' else '59k' workflow = Workflow(name=name) @@ -615,18 +613,18 @@ def init_bold_fsLR_resampling_wf( select_surfaces = pe.Node( KeySelect( fields=[ - "white", - "pial", - "midthickness", + 'white', + 'pial', + 'midthickness', 'midthickness_fsLR', - "sphere_reg_fsLR", - "template_sphere", + 'sphere_reg_fsLR', + 'template_sphere', 'cortex_mask', - "template_roi", + 'template_roi', ], - keys=["L", "R"], + keys=['L', 'R'], ), - name="select_surfaces", + name='select_surfaces', run_without_submitting=True, ) select_surfaces.inputs.template_sphere = [ @@ -648,26 +646,26 @@ def init_bold_fsLR_resampling_wf( # RibbonVolumeToSurfaceMapping.sh # Line 85 thru ... volume_to_surface = pe.Node( - VolumeToSurfaceMapping(method="ribbon-constrained"), - name="volume_to_surface", + VolumeToSurfaceMapping(method='ribbon-constrained'), + name='volume_to_surface', mem_gb=mem_gb * 3, n_procs=omp_nthreads, ) metric_dilate = pe.Node( MetricDilate(distance=10, nearest=True), - name="metric_dilate", + name='metric_dilate', mem_gb=1, n_procs=omp_nthreads, ) - mask_native = pe.Node(MetricMask(), name="mask_native") + mask_native = pe.Node(MetricMask(), name='mask_native') resample_to_fsLR = pe.Node( MetricResample(method='ADAP_BARY_AREA', area_surfs=True), - name="resample_to_fsLR", + name='resample_to_fsLR', mem_gb=1, n_procs=omp_nthreads, ) # ... line 89 - mask_fsLR = pe.Node(MetricMask(), name="mask_fsLR") + mask_fsLR = pe.Node(MetricMask(), name='mask_fsLR') workflow.connect([ (inputnode, select_surfaces, [ @@ -716,7 +714,7 @@ def init_bold_grayords_wf( grayord_density: ty.Literal['91k', '170k'], mem_gb: float, repetition_time: float, - name: str = "bold_grayords_wf", + name: str = 'bold_grayords_wf', ): """ Sample Grayordinates files onto the fsLR atlas. 
@@ -765,7 +763,7 @@ def init_bold_grayords_wf( workflow = Workflow(name=name) - mni_density = "2" if grayord_density == "91k" else "1" + mni_density = '2' if grayord_density == '91k' else '1' workflow.__desc__ = f"""\ *Grayordinates* files [@hcppipelines] containing {grayord_density} samples were also @@ -774,13 +772,13 @@ def init_bold_grayords_wf( """ inputnode = pe.Node( - niu.IdentityInterface(fields=["bold_std", "bold_fsLR"]), - name="inputnode", + niu.IdentityInterface(fields=['bold_std', 'bold_fsLR']), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=["cifti_bold", "cifti_metadata"]), - name="outputnode", + niu.IdentityInterface(fields=['cifti_bold', 'cifti_metadata']), + name='outputnode', ) gen_cifti = pe.Node( @@ -788,18 +786,18 @@ def init_bold_grayords_wf( TR=repetition_time, grayordinates=grayord_density, ), - name="gen_cifti", + name='gen_cifti', mem_gb=mem_gb, ) workflow.connect([ (inputnode, gen_cifti, [ - ("bold_fsLR", "surface_bolds"), - ("bold_std", "bold_file"), + ('bold_fsLR', 'surface_bolds'), + ('bold_std', 'bold_file'), ]), (gen_cifti, outputnode, [ - ("out_file", "cifti_bold"), - ("out_metadata", "cifti_metadata"), + ('out_file', 'cifti_bold'), + ('out_metadata', 'cifti_metadata'), ]), ]) # fmt:skip return workflow diff --git a/fmriprep/workflows/bold/stc.py b/fmriprep/workflows/bold/stc.py index 39573d8cf..0a820c184 100644 --- a/fmriprep/workflows/bold/stc.py +++ b/fmriprep/workflows/bold/stc.py @@ -47,7 +47,7 @@ def _pre_run_hook(self, runtime): ntsteps = nb.load(self.inputs.in_file).shape[3] if ntsteps - ignore < 5: raise RuntimeError( - f"Insufficient length of BOLD data ({ntsteps} time points) after " + f'Insufficient length of BOLD data ({ntsteps} time points) after ' f"discarding {ignore} nonsteady-state (or 'dummy') time points." ) return runtime @@ -99,7 +99,7 @@ def init_bold_stc_wf( from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.header import CopyXForm - slice_times = metadata["SliceTiming"] + slice_times = metadata['SliceTiming'] first, last = min(slice_times), max(slice_times) frac = config.workflow.slice_time_ref tzero = np.round(first + frac * (last - first), 3) diff --git a/fmriprep/workflows/bold/t2s.py b/fmriprep/workflows/bold/t2s.py index 8d5df918a..313593a79 100644 --- a/fmriprep/workflows/bold/t2s.py +++ b/fmriprep/workflows/bold/t2s.py @@ -88,14 +88,14 @@ def init_bold_t2s_wf( from niworkflows.interfaces.morphology import BinaryDilation workflow = Workflow(name=name) - if config.workflow.me_t2s_fit_method == "curvefit": + if config.workflow.me_t2s_fit_method == 'curvefit': fit_str = ( - "nonlinear regression. " - "The T2/S0 estimates from a log-linear regression fit " - "were used for initial values" + 'nonlinear regression. 
' + 'The T2/S0 estimates from a log-linear regression fit ' + 'were used for initial values' ) else: - fit_str = "log-linear regression" + fit_str = 'log-linear regression' workflow.__desc__ = f"""\ A T2 map was estimated from the preprocessed EPI echoes, by voxel-wise fitting @@ -184,25 +184,25 @@ def init_t2s_reporting_wf(name: str = 't2s_reporting_wf'): ) label_tfm = pe.Node( - ApplyTransforms(interpolation="MultiLabel", invert_transform_flags=[True]), - name="label_tfm", + ApplyTransforms(interpolation='MultiLabel', invert_transform_flags=[True]), + name='label_tfm', ) - gm_mask = pe.Node(Label2Mask(label_val=1), name="gm_mask") + gm_mask = pe.Node(Label2Mask(label_val=1), name='gm_mask') - clip_t2star = pe.Node(Clip(maximum=0.1), name="clip_t2star") + clip_t2star = pe.Node(Clip(maximum=0.1), name='clip_t2star') t2s_hist = pe.Node( - LabeledHistogram(mapping={1: "Gray matter"}, xlabel='T2* (s)'), name='t2s_hist' + LabeledHistogram(mapping={1: 'Gray matter'}, xlabel='T2* (s)'), name='t2s_hist' ) t2s_comparison = pe.Node( SimpleBeforeAfter( - before_label="BOLD Reference", - after_label="T2* Map", + before_label='BOLD Reference', + after_label='T2* Map', dismiss_affine=True, ), - name="t2s_comparison", + name='t2s_comparison', mem_gb=0.1, ) workflow.connect([ diff --git a/fmriprep/workflows/bold/tests/test_base.py b/fmriprep/workflows/bold/tests/test_base.py index 93035c640..696660598 100644 --- a/fmriprep/workflows/bold/tests/test_base.py +++ b/fmriprep/workflows/bold/tests/test_base.py @@ -12,29 +12,29 @@ from ..base import init_bold_wf -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(scope='module', autouse=True) def _quiet_logger(): import logging - logger = logging.getLogger("nipype.workflow") + logger = logging.getLogger('nipype.workflow') old_level = logger.getEffectiveLevel() logger.setLevel(logging.ERROR) yield logger.setLevel(old_level) -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def bids_root(tmp_path_factory): - base = tmp_path_factory.mktemp("boldbase") - bids_dir = base / "bids" + base = tmp_path_factory.mktemp('boldbase') + bids_dir = base / 'bids' generate_bids_skeleton(bids_dir, BASE_LAYOUT) - yield bids_dir + return bids_dir -@pytest.mark.parametrize("task", ["rest", "nback"]) -@pytest.mark.parametrize("fieldmap_id", ["phasediff", None]) -@pytest.mark.parametrize("freesurfer", [False, True]) -@pytest.mark.parametrize("level", ["minimal", "resampling", "full"]) +@pytest.mark.parametrize('task', ['rest', 'nback']) +@pytest.mark.parametrize('fieldmap_id', ['phasediff', None]) +@pytest.mark.parametrize('freesurfer', [False, True]) +@pytest.mark.parametrize('level', ['minimal', 'resampling', 'full']) def test_bold_wf( bids_root: Path, tmp_path: Path, diff --git a/fmriprep/workflows/bold/tests/test_fit.py b/fmriprep/workflows/bold/tests/test_fit.py index 382c48e7a..f7ae3a97f 100644 --- a/fmriprep/workflows/bold/tests/test_fit.py +++ b/fmriprep/workflows/bold/tests/test_fit.py @@ -12,23 +12,23 @@ from ..fit import init_bold_fit_wf, init_bold_native_wf -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(scope='module', autouse=True) def _quiet_logger(): import logging - logger = logging.getLogger("nipype.workflow") + logger = logging.getLogger('nipype.workflow') old_level = logger.getEffectiveLevel() logger.setLevel(logging.ERROR) yield logger.setLevel(old_level) -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def bids_root(tmp_path_factory): - base = tmp_path_factory.mktemp("boldfit") - bids_dir = base / "bids" 
+ base = tmp_path_factory.mktemp('boldfit') + bids_dir = base / 'bids' generate_bids_skeleton(bids_dir, BASE_LAYOUT) - yield bids_dir + return bids_dir def _make_params( @@ -47,8 +47,8 @@ def _make_params( ) -@pytest.mark.parametrize("task", ["rest", "nback"]) -@pytest.mark.parametrize("fieldmap_id", ["phasediff", None]) +@pytest.mark.parametrize('task', ['rest', 'nback']) +@pytest.mark.parametrize('fieldmap_id', ['phasediff', None]) @pytest.mark.parametrize( ( 'have_hmcref', @@ -129,9 +129,9 @@ def test_bold_fit_precomputes( generate_expanded_graph(flatgraph) -@pytest.mark.parametrize("task", ["rest", "nback"]) -@pytest.mark.parametrize("fieldmap_id", ["phasediff", None]) -@pytest.mark.parametrize("run_stc", [True, False]) +@pytest.mark.parametrize('task', ['rest', 'nback']) +@pytest.mark.parametrize('fieldmap_id', ['phasediff', None]) +@pytest.mark.parametrize('run_stc', [True, False]) def test_bold_native_precomputes( bids_root: Path, tmp_path: Path, diff --git a/fmriprep/workflows/tests/__init__.py b/fmriprep/workflows/tests/__init__.py index c246ccd13..e6c33399e 100644 --- a/fmriprep/workflows/tests/__init__.py +++ b/fmriprep/workflows/tests/__init__.py @@ -66,4 +66,4 @@ def mock_config(bids_dir=None): shutil.rmtree(config.execution.fmriprep_dir) if not _old_fs: - del os.environ["FREESURFER_HOME"] + del os.environ['FREESURFER_HOME'] diff --git a/fmriprep/workflows/tests/test_base.py b/fmriprep/workflows/tests/test_base.py index 027609459..4672b2de9 100644 --- a/fmriprep/workflows/tests/test_base.py +++ b/fmriprep/workflows/tests/test_base.py @@ -16,68 +16,68 @@ from ..tests import mock_config BASE_LAYOUT = { - "01": { - "anat": [ - {"run": 1, "suffix": "T1w"}, - {"run": 2, "suffix": "T1w"}, - {"suffix": "T2w"}, + '01': { + 'anat': [ + {'run': 1, 'suffix': 'T1w'}, + {'run': 2, 'suffix': 'T1w'}, + {'suffix': 'T2w'}, ], - "func": [ + 'func': [ *( { - "task": "rest", - "run": i, - "suffix": suffix, - "metadata": { - "RepetitionTime": 2.0, - "PhaseEncodingDirection": "j", - "TotalReadoutTime": 0.6, - "EchoTime": 0.03, - "SliceTiming": [0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8], + 'task': 'rest', + 'run': i, + 'suffix': suffix, + 'metadata': { + 'RepetitionTime': 2.0, + 'PhaseEncodingDirection': 'j', + 'TotalReadoutTime': 0.6, + 'EchoTime': 0.03, + 'SliceTiming': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8], }, } - for suffix in ("bold", "sbref") + for suffix in ('bold', 'sbref') for i in range(1, 3) ), *( { - "task": "nback", - "echo": i, - "suffix": "bold", - "metadata": { - "RepetitionTime": 2.0, - "PhaseEncodingDirection": "j", - "TotalReadoutTime": 0.6, - "EchoTime": 0.015 * i, - "SliceTiming": [0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8], + 'task': 'nback', + 'echo': i, + 'suffix': 'bold', + 'metadata': { + 'RepetitionTime': 2.0, + 'PhaseEncodingDirection': 'j', + 'TotalReadoutTime': 0.6, + 'EchoTime': 0.015 * i, + 'SliceTiming': [0.0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8], }, } for i in range(1, 4) ), ], - "fmap": [ - {"suffix": "phasediff", "metadata": {"EchoTime1": 0.005, "EchoTime2": 0.007}}, - {"suffix": "magnitude1", "metadata": {"EchoTime": 0.005}}, + 'fmap': [ + {'suffix': 'phasediff', 'metadata': {'EchoTime1': 0.005, 'EchoTime2': 0.007}}, + {'suffix': 'magnitude1', 'metadata': {'EchoTime': 0.005}}, { - "suffix": "epi", - "direction": "PA", - "metadata": {"PhaseEncodingDirection": "j", "TotalReadoutTime": 0.6}, + 'suffix': 'epi', + 'direction': 'PA', + 'metadata': {'PhaseEncodingDirection': 'j', 'TotalReadoutTime': 0.6}, }, { - "suffix": "epi", - 
"direction": "AP", - "metadata": {"PhaseEncodingDirection": "j-", "TotalReadoutTime": 0.6}, + 'suffix': 'epi', + 'direction': 'AP', + 'metadata': {'PhaseEncodingDirection': 'j-', 'TotalReadoutTime': 0.6}, }, ], }, } -@pytest.fixture(scope="module", autouse=True) +@pytest.fixture(scope='module', autouse=True) def _quiet_logger(): import logging - logger = logging.getLogger("nipype.workflow") + logger = logging.getLogger('nipype.workflow') old_level = logger.getEffectiveLevel() logger.setLevel(logging.ERROR) yield @@ -90,10 +90,10 @@ def _reset_sdcflows_registry(): clear_registry() -@pytest.fixture(scope="module") +@pytest.fixture(scope='module') def bids_root(tmp_path_factory): - base = tmp_path_factory.mktemp("base") - bids_dir = base / "bids" + base = tmp_path_factory.mktemp('base') + bids_dir = base / 'bids' generate_bids_skeleton(bids_dir, BASE_LAYOUT) img = nb.Nifti1Image(np.zeros((10, 10, 10, 10)), np.eye(4)) @@ -101,11 +101,11 @@ def bids_root(tmp_path_factory): for bold_path in bids_dir.glob('sub-01/*/*.nii.gz'): img.to_filename(bold_path) - yield bids_dir + return bids_dir def _make_params( - bold2t1w_init: str = "register", + bold2t1w_init: str = 'register', use_bbr: bool | None = None, dummy_scans: int | None = None, me_output_echos: bool = False, @@ -113,7 +113,7 @@ def _make_params( project_goodvoxels: bool = False, cifti_output: bool | str = False, run_msmsulc: bool = True, - skull_strip_t1w: str = "auto", + skull_strip_t1w: str = 'auto', use_syn_sdc: str | bool = False, force_syn: bool = False, freesurfer: bool = True, @@ -142,31 +142,31 @@ def _make_params( ) -@pytest.mark.parametrize("level", ["minimal", "resampling", "full"]) -@pytest.mark.parametrize("anat_only", [False, True]) +@pytest.mark.parametrize('level', ['minimal', 'resampling', 'full']) +@pytest.mark.parametrize('anat_only', [False, True]) @pytest.mark.parametrize( ( - "bold2t1w_init", - "use_bbr", - "dummy_scans", - "me_output_echos", - "medial_surface_nan", - "project_goodvoxels", - "cifti_output", - "run_msmsulc", - "skull_strip_t1w", - "use_syn_sdc", - "force_syn", - "freesurfer", - "ignore", - "bids_filters", + 'bold2t1w_init', + 'use_bbr', + 'dummy_scans', + 'me_output_echos', + 'medial_surface_nan', + 'project_goodvoxels', + 'cifti_output', + 'run_msmsulc', + 'skull_strip_t1w', + 'use_syn_sdc', + 'force_syn', + 'freesurfer', + 'ignore', + 'bids_filters', ), [ _make_params(), - _make_params(bold2t1w_init="header"), + _make_params(bold2t1w_init='header'), _make_params(use_bbr=True), _make_params(use_bbr=False), - _make_params(bold2t1w_init="header", use_bbr=True), + _make_params(bold2t1w_init='header', use_bbr=True), # Currently disabled # _make_params(bold2t1w_init="header", use_bbr=False), _make_params(dummy_scans=2), @@ -231,7 +231,7 @@ def test_init_fmriprep_wf( def test_get_estimator_none(tmp_path): - bids_dir = tmp_path / "bids" + bids_dir = tmp_path / 'bids' # No IntendedFors/B0Fields generate_bids_skeleton(bids_dir, BASE_LAYOUT) @@ -245,7 +245,7 @@ def test_get_estimator_none(tmp_path): def test_get_estimator_b0field_and_intendedfor(tmp_path): - bids_dir = tmp_path / "bids" + bids_dir = tmp_path / 'bids' # Set B0FieldSource for run 1 spec = deepcopy(BASE_LAYOUT) @@ -269,7 +269,7 @@ def test_get_estimator_b0field_and_intendedfor(tmp_path): def test_get_estimator_overlapping_specs(tmp_path): - bids_dir = tmp_path / "bids" + bids_dir = tmp_path / 'bids' # Set B0FieldSource for both runs spec = deepcopy(BASE_LAYOUT) @@ -298,7 +298,7 @@ def test_get_estimator_overlapping_specs(tmp_path): def 
test_get_estimator_multiple_b0fields(tmp_path): - bids_dir = tmp_path / "bids" + bids_dir = tmp_path / 'bids' # Set B0FieldSource for both runs spec = deepcopy(BASE_LAYOUT) diff --git a/scripts/fetch_templates.py b/scripts/fetch_templates.py index b57e03aa6..c9b134cbc 100755 --- a/scripts/fetch_templates.py +++ b/scripts/fetch_templates.py @@ -23,13 +23,13 @@ def fetch_MNI2009(): tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-02_desc-fMRIPrep_boldref.nii.gz tpl-MNI152NLin2009cAsym/tpl-MNI152NLin2009cAsym_res-01_label-brain_probseg.nii.gz """ - template = "MNI152NLin2009cAsym" + template = 'MNI152NLin2009cAsym' - tf.get(template, resolution=(1, 2), desc=None, suffix="T1w") - tf.get(template, resolution=(1, 2), desc="brain", suffix="mask") - tf.get(template, resolution=1, atlas=None, desc="carpet", suffix="dseg") - tf.get(template, resolution=2, desc="fMRIPrep", suffix="boldref") - tf.get(template, resolution=1, label="brain", suffix="probseg") + tf.get(template, resolution=(1, 2), desc=None, suffix='T1w') + tf.get(template, resolution=(1, 2), desc='brain', suffix='mask') + tf.get(template, resolution=1, atlas=None, desc='carpet', suffix='dseg') + tf.get(template, resolution=2, desc='fMRIPrep', suffix='boldref') + tf.get(template, resolution=1, label='brain', suffix='probseg') def fetch_MNI6(): @@ -42,12 +42,12 @@ def fetch_MNI6(): tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_desc-brain_mask.nii.gz tpl-MNI152NLin6Asym/tpl-MNI152NLin6Asym_res-02_atlas-HCP_dseg.nii.gz """ - template = "MNI152NLin6Asym" + template = 'MNI152NLin6Asym' - tf.get(template, resolution=(1, 2), desc=None, suffix="T1w") - tf.get(template, resolution=(1, 2), desc="brain", suffix="mask") + tf.get(template, resolution=(1, 2), desc=None, suffix='T1w') + tf.get(template, resolution=(1, 2), desc='brain', suffix='mask') # CIFTI - tf.get(template, resolution=2, atlas="HCP", suffix="dseg") + tf.get(template, resolution=2, atlas='HCP', suffix='dseg') def fetch_OASIS(): @@ -61,14 +61,14 @@ def fetch_OASIS(): tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_desc-brain_mask.nii.gz tpl-OASIS30ANTs/tpl-OASIS30ANTs_res-01_desc-BrainCerebellumExtraction_mask.nii.gz """ - template = "OASIS30ANTs" + template = 'OASIS30ANTs' - tf.get(template, resolution=1, desc=None, label=None, suffix="T1w") - tf.get(template, resolution=1, label="WM", suffix="probseg") - tf.get(template, resolution=1, label="BS", suffix="probseg") - tf.get(template, resolution=1, label="brain", suffix="probseg") - tf.get(template, resolution=1, label="brain", suffix="mask") - tf.get(template, resolution=1, desc="BrainCerebellumExtraction", suffix="mask") + tf.get(template, resolution=1, desc=None, label=None, suffix='T1w') + tf.get(template, resolution=1, label='WM', suffix='probseg') + tf.get(template, resolution=1, label='BS', suffix='probseg') + tf.get(template, resolution=1, label='brain', suffix='probseg') + tf.get(template, resolution=1, label='brain', suffix='mask') + tf.get(template, resolution=1, desc='BrainCerebellumExtraction', suffix='mask') def fetch_fsaverage(): @@ -84,11 +84,11 @@ def fetch_fsaverage(): tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_sulc.shape.gii tpl-fsaverage/tpl-sfaverage_hemi-R_den-164k_sulc.shape.gii """ - template = "fsaverage" + template = 'fsaverage' - tf.get(template, density="164k", desc="std", suffix="sphere") - tf.get(template, density="164k", suffix="midthickness") - tf.get(template, density="164k", suffix="sulc") + tf.get(template, density='164k', desc='std', suffix='sphere') + tf.get(template, density='164k', 
suffix='midthickness') + tf.get(template, density='164k', suffix='sulc') def fetch_fsLR(): @@ -104,7 +104,7 @@ def fetch_fsLR(): tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-L_den-32k_sphere.surf.gii tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii """ - tf.get("fsLR", density="32k") + tf.get('fsLR', density='32k') def fetch_all(): @@ -115,21 +115,21 @@ def fetch_all(): fetch_fsLR() -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser( - description="Helper script for pre-caching required templates to run fMRIPrep", + description='Helper script for pre-caching required templates to run fMRIPrep', ) parser.add_argument( - "--tf-dir", + '--tf-dir', type=os.path.abspath, - help="Directory to save templates in. If not provided, templates will be saved to" - " `${HOME}/.cache/templateflow`.", + help='Directory to save templates in. If not provided, templates will be saved to' + ' `${HOME}/.cache/templateflow`.', ) opts = parser.parse_args() # set envvar (if necessary) prior to templateflow import if opts.tf_dir is not None: - os.environ["TEMPLATEFLOW_HOME"] = opts.tf_dir + os.environ['TEMPLATEFLOW_HOME'] = opts.tf_dir import templateflow.api as tf diff --git a/scripts/generate_reference_mask.py b/scripts/generate_reference_mask.py index 9bb3d9cf3..9d2bc828d 100755 --- a/scripts/generate_reference_mask.py +++ b/scripts/generate_reference_mask.py @@ -1,13 +1,16 @@ #!/usr/bin/env python import sys -from nipype.pipeline import engine as pe + from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe from niworkflows.func.util import init_bold_reference_wf def sink_mask_file(in_file, orig_file, out_dir): import os - from nipype.utils.filemanip import fname_presuffix, copyfile + + from nipype.utils.filemanip import copyfile, fname_presuffix + os.makedirs(out_dir, exist_ok=True) out_file = fname_presuffix(orig_file, suffix='_mask', newpath=out_dir) copyfile(in_file, out_file, copy=True, use_hardlink=True) @@ -15,18 +18,18 @@ def sink_mask_file(in_file, orig_file, out_dir): def init_main_wf(bold_file, out_dir, base_dir=None, name='main_wf'): - wf = init_bold_reference_wf(omp_nthreads=4, - name=name) + wf = init_bold_reference_wf(omp_nthreads=4, name=name) wf.base_dir = base_dir wf.inputs.inputnode.bold_file = bold_file - sink = pe.Node(niu.Function(function=sink_mask_file), - name='sink') + sink = pe.Node(niu.Function(function=sink_mask_file), name='sink') sink.inputs.out_dir = out_dir sink.inputs.orig_file = bold_file - wf.connect([ - (wf.get_node('outputnode'), sink, [('bold_mask', 'in_file')]), - ]) + wf.connect( + [ + (wf.get_node('outputnode'), sink, [('bold_mask', 'in_file')]), + ] + ) return wf
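One pattern worth noting across these hunks is the addition of `strict=False` to `zip()` calls (for example when pairing resamplers with datasinks in `init_ds_volumes_wf`). Below is a minimal sketch of the semantics being made explicit, assuming Python 3.10+ where `zip()` accepts the `strict` keyword (PEP 618); the list contents are hypothetical stand-ins for the node lists used in the workflows:

    # Sketch only: illustrates the zip(strict=...) behaviour the refactor spells out.
    resamplers = ['resample_ref', 'resample_mask']
    datasinks = ['ds_ref', 'ds_mask', 'ds_extra']   # hypothetical; deliberately one longer

    # strict=False (the option chosen in the diff) keeps the historical behaviour:
    # iteration stops silently at the shorter input.
    print(list(zip(resamplers, datasinks, strict=False)))
    # -> [('resample_ref', 'ds_ref'), ('resample_mask', 'ds_mask')]

    # strict=True would instead raise ValueError when the lengths differ,
    # which can surface wiring bugs that silent truncation would hide.
    try:
        list(zip(resamplers, datasinks, strict=True))
    except ValueError as err:
        print(f'length mismatch: {err}')

Passing strict=False preserves the pre-3.10 behaviour exactly, so these hunks stay a formatting-only change and do not alter how the workflows are wired.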