diff --git a/docs/conf.py b/docs/conf.py
index 8a2617f6..6d778bcb 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -11,12 +11,13 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
 from packaging.version import Version
-from templateflow import __version__, __copyright__, __packagename__
+
+from templateflow import __copyright__, __packagename__, __version__
 
 # -- Project information -----------------------------------------------------
 project = __packagename__
 copyright = __copyright__
-author = "The NiPreps Developers"
+author = 'The NiPreps Developers'
 
 # The full version, including alpha/beta/rc tags
 release = __version__
@@ -29,31 +30,31 @@
 # -- General configuration ---------------------------------------------------
 
 extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.coverage",
-    "sphinx.ext.doctest",
-    "sphinx.ext.githubpages",
-    "sphinx.ext.ifconfig",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.mathjax",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.viewcode",
-    "sphinxcontrib.apidoc",
-    "nbsphinx",
-    "sphinx_click",
+    'sphinx.ext.autodoc',
+    'sphinx.ext.coverage',
+    'sphinx.ext.doctest',
+    'sphinx.ext.githubpages',
+    'sphinx.ext.ifconfig',
+    'sphinx.ext.intersphinx',
+    'sphinx.ext.mathjax',
+    'sphinx.ext.napoleon',
+    'sphinx.ext.viewcode',
+    'sphinxcontrib.apidoc',
+    'nbsphinx',
+    'sphinx_click',
 ]
 
 autodoc_mock_imports = [
-    "matplotlib",
-    "nilearn",
-    "nipy",
-    "nitime",
-    "numpy",
-    "pandas",
-    "seaborn",
-    "skimage",
-    "svgutils",
-    "transforms3d",
+    'matplotlib',
+    'nilearn',
+    'nipy',
+    'nitime',
+    'numpy',
+    'pandas',
+    'seaborn',
+    'skimage',
+    'svgutils',
+    'transforms3d',
 ]
 
 autodoc_autoreload = True
@@ -68,16 +69,16 @@
 # ]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
+templates_path = ['_templates']
 
 # The suffix(es) of source filenames.
 # You can specify multiple suffix as a list of string:
 #
 # source_suffix = ['.rst', '.md']
-source_suffix = ".rst"
+source_suffix = '.rst'
 
 # The master toctree document.
-master_doc = "index"
+master_doc = 'index'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -90,11 +91,11 @@
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path.
 exclude_patterns = [
-    "_build",
-    "Thumbs.db",
-    ".DS_Store",
-    "api/modules.rst",
-    "api/templateflow.rst",
+    '_build',
+    'Thumbs.db',
+    '.DS_Store',
+    'api/modules.rst',
+    'api/templateflow.rst',
 ]
 
 # The name of the Pygments (syntax highlighting) style to use.
@@ -106,7 +107,7 @@
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
 #
-html_theme = "sphinx_rtd_theme"
+html_theme = 'sphinx_rtd_theme'
 
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -117,7 +118,7 @@
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
+html_static_path = ['_static']
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -133,7 +134,7 @@
 # -- Options for HTMLHelp output ---------------------------------------------
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = "templateflowdoc"
+htmlhelp_basename = 'templateflowdoc'
 
 
 # -- Options for LaTeX output ------------------------------------------------
@@ -159,10 +160,10 @@
 latex_documents = [
     (
         master_doc,
-        "templateflow.tex",
-        "TemplateFlow Documentation",
-        "The TemplateFlow Developers",
-        "manual",
+        'templateflow.tex',
+        'TemplateFlow Documentation',
+        'The TemplateFlow Developers',
+        'manual',
     ),
 ]
 
@@ -171,7 +172,7 @@
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, "templateflow", "TemplateFlow Documentation", [author], 1)]
+man_pages = [(master_doc, 'templateflow', 'TemplateFlow Documentation', [author], 1)]
 
 
 # -- Options for Texinfo output ----------------------------------------------
@@ -182,12 +183,12 @@
 texinfo_documents = [
     (
         master_doc,
-        "templateflow",
-        "TemplateFlow Documentation",
+        'templateflow',
+        'TemplateFlow Documentation',
         author,
-        "TemplateFlow",
-        "One line description of project.",
-        "Miscellaneous",
+        'TemplateFlow',
+        'One line description of project.',
+        'Miscellaneous',
     ),
 ]
 
@@ -207,38 +208,38 @@
 # epub_uid = ''
 
 # A list of files that should not be packed into the epub file.
-epub_exclude_files = ["search.html"]
+epub_exclude_files = ['search.html']
 
 
 # -- Extension configuration -------------------------------------------------
 
-apidoc_module_dir = "../templateflow"
-apidoc_output_dir = "api"
+apidoc_module_dir = '../templateflow'
+apidoc_output_dir = 'api'
 apidoc_excluded_paths = [
-    "conftest.py",
-    "*/tests/*",
-    "tests/*",
-    "data/*",
+    'conftest.py',
+    '*/tests/*',
+    'tests/*',
+    'data/*',
 ]
 apidoc_separate_modules = True
-apidoc_extra_args = ["--module-first", "-d 1", "-T"]
+apidoc_extra_args = ['--module-first', '-d 1', '-T']
 
 # -- Options for intersphinx extension ---------------------------------------
 
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {
-    "bids": ("https://bids-standard.github.io/pybids/", None),
-    "matplotlib": ("https://matplotlib.org/", None),
-    "nibabel": ("https://nipy.org/nibabel/", None),
-    "nipype": ("https://nipype.readthedocs.io/en/latest/", None),
-    "numpy": ("https://numpy.org/doc/stable/", None),
-    "pandas": ("http://pandas.pydata.org/pandas-docs/dev", None),
-    "python": ("https://docs.python.org/3/", None),
-    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
+    'bids': ('https://bids-standard.github.io/pybids/', None),
+    'matplotlib': ('https://matplotlib.org/', None),
+    'nibabel': ('https://nipy.org/nibabel/', None),
+    'nipype': ('https://nipype.readthedocs.io/en/latest/', None),
+    'numpy': ('https://numpy.org/doc/stable/', None),
+    'pandas': ('http://pandas.pydata.org/pandas-docs/dev', None),
+    'python': ('https://docs.python.org/3/', None),
+    'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
 }
 
 # -- Options for versioning extension ----------------------------------------
-smv_branch_whitelist = r"^master$"
-smv_tag_whitelist = r"^\d+\.\d+\.\d+(?!rc|dev).*$"
+smv_branch_whitelist = r'^master$'
+smv_tag_whitelist = r'^\d+\.\d+\.\d+(?!rc|dev).*$'
 smv_released_pattern = r'^tags/.*$'
 smv_rebuild_tags = False
diff --git a/docs/tools/LICENSE.txt b/docs/tools/LICENSE.txt
deleted file mode 100644
index 9e1d415a..00000000
--- a/docs/tools/LICENSE.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-These files were obtained from
-
-https://www.mail-archive.com/sphinx-dev@googlegroups.com/msg02472.html
-
-and were released under a BSD/MIT license by Fernando Perez, Matthew Brett and
-the PyMVPA folks. Further cleanups by the scikit-image crew.
-
diff --git a/docs/tools/apigen.py b/docs/tools/apigen.py
deleted file mode 100644
index 716fd488..00000000
--- a/docs/tools/apigen.py
+++ /dev/null
@@ -1,523 +0,0 @@
-"""
-Attempt to generate templates for module reference with Sphinx
-
-To include extension modules, first identify them as valid in the
-``_uri2path`` method, then handle them in the ``_parse_module_with_import``
-script.
-
-Notes
------
-This parsing is based on import and introspection of modules.
-Previously functions and classes were found by parsing the text of .py files.
-
-Extension modules should be discovered and included as well.
-
-This is a modified version of a script originally shipped with the PyMVPA
-project, then adapted for use first in NIPY and then in skimage. PyMVPA
-is an MIT-licensed project.
-"""
-
-# Stdlib imports
-import os
-import re
-from inspect import getmodule
-
-from types import BuiltinFunctionType, FunctionType
-
-# suppress print statements (warnings for empty files)
-DEBUG = True
-
-
-class ApiDocWriter(object):
-    """ Class for automatic detection and parsing of API docs
-    to Sphinx-parsable reST format"""
-
-    # only separating first two levels
-    rst_section_levels = ["*", "=", "-", "~", "^"]
-
-    def __init__(
-        self,
-        package_name,
-        rst_extension=".txt",
-        package_skip_patterns=None,
-        module_skip_patterns=None,
-        other_defines=True,
-    ):
-        """ Initialize package for parsing
-
-        Parameters
-        ----------
-        package_name : string
-            Name of the top-level package. *package_name* must be the
-            name of an importable package
-        rst_extension : string, optional
-            Extension for reST files, default '.rst'
-        package_skip_patterns : None or sequence of {strings, regexps}
-            Sequence of strings giving URIs of packages to be excluded
-            Operates on the package path, starting at (including) the
-            first dot in the package path, after *package_name* - so,
-            if *package_name* is ``sphinx``, then ``sphinx.util`` will
-            result in ``.util`` being passed for searching by these
-            regexps. If is None, gives default. Default is:
-            ['\.tests$']
-        module_skip_patterns : None or sequence
-            Sequence of strings giving URIs of modules to be excluded
-            Operates on the module name including preceding URI path,
-            back to the first dot after *package_name*. For example
-            ``sphinx.util.console`` results in the string to search of
-            ``.util.console``
-            If is None, gives default. Default is:
-            ['\.setup$', '\._']
-        other_defines : {True, False}, optional
-            Whether to include classes and functions that are imported in a
-            particular module but not defined there.
-        """
-        if package_skip_patterns is None:
-            package_skip_patterns = ["\\.tests$"]
-        if module_skip_patterns is None:
-            module_skip_patterns = ["\\.setup$", "\\._"]
-        self.package_name = package_name
-        self.rst_extension = rst_extension
-        self.package_skip_patterns = package_skip_patterns
-        self.module_skip_patterns = module_skip_patterns
-        self.other_defines = other_defines
-
-    def get_package_name(self):
-        return self._package_name
-
-    def set_package_name(self, package_name):
-        """ Set package_name
-
-        >>> docwriter = ApiDocWriter('sphinx')
-        >>> import sphinx
-        >>> docwriter.root_path == sphinx.__path__[0]
-        True
-        >>> docwriter.package_name = 'docutils'
-        >>> import docutils
-        >>> docwriter.root_path == docutils.__path__[0]
-        True
-        """
-        # It's also possible to imagine caching the module parsing here
-        self._package_name = package_name
-        root_module = self._import(package_name)
-        self.root_path = root_module.__path__[-1]
-        self.written_modules = None
-
-    package_name = property(
-        get_package_name, set_package_name, None, "get/set package_name"
-    )
-
-    def _import(self, name):
-        """ Import namespace package """
-        mod = __import__(name)
-        components = name.split(".")
-        for comp in components[1:]:
-            mod = getattr(mod, comp)
-        return mod
-
-    def _get_object_name(self, line):
-        """ Get second token in line
-        >>> docwriter = ApiDocWriter('sphinx')
-        >>> docwriter._get_object_name(" def func(): ")
-        'func'
-        >>> docwriter._get_object_name(" class Klass(object): ")
-        'Klass'
-        >>> docwriter._get_object_name(" class Klass: ")
-        'Klass'
-        """
-        name = line.split()[1].split("(")[0].strip()
-        # in case we have classes which are not derived from object
-        # ie. old style classes
-        return name.rstrip(":")
-
-    def _uri2path(self, uri):
-        """ Convert uri to absolute filepath
-
-        Parameters
-        ----------
-        uri : string
-            URI of python module to return path for
-
-        Returns
-        -------
-        path : None or string
-            Returns None if there is no valid path for this URI
-            Otherwise returns absolute file system path for URI
-
-        Examples
-        --------
-        >>> docwriter = ApiDocWriter('sphinx')
-        >>> import sphinx
-        >>> modpath = sphinx.__path__[0]
-        >>> res = docwriter._uri2path('sphinx.builder')
-        >>> res == os.path.join(modpath, 'builder.py')
-        True
-        >>> res = docwriter._uri2path('sphinx')
-        >>> res == os.path.join(modpath, '__init__.py')
-        True
-        >>> docwriter._uri2path('sphinx.does_not_exist')
-
-        """
-        if uri == self.package_name:
-            return os.path.join(self.root_path, "__init__.py")
-        path = uri.replace(self.package_name + ".", "")
-        path = path.replace(".", os.path.sep)
-        path = os.path.join(self.root_path, path)
-        # XXX maybe check for extensions as well?
-        if os.path.exists(path + ".py"): # file
-            path += ".py"
-        elif os.path.exists(os.path.join(path, "__init__.py")):
-            path = os.path.join(path, "__init__.py")
-        else:
-            return None
-        return path
-
-    def _path2uri(self, dirpath):
-        """ Convert directory path to uri """
-        package_dir = self.package_name.replace(".", os.path.sep)
-        relpath = dirpath.replace(self.root_path, package_dir)
-        if relpath.startswith(os.path.sep):
-            relpath = relpath[1:]
-        return relpath.replace(os.path.sep, ".")
-
-    def _parse_module(self, uri):
-        """ Parse module defined in *uri* """
-        filename = self._uri2path(uri)
-        if filename is None:
-            print(filename, "erk")
-            # nothing that we could handle here.
-            return ([], [])
-
-        f = open(filename, "rt")
-        functions, classes = self._parse_lines(f)
-        f.close()
-        return functions, classes
-
-    def _parse_module_with_import(self, uri):
-        """Look for functions and classes in an importable module.
-
-        Parameters
-        ----------
-        uri : str
-            The name of the module to be parsed. This module needs to be
-            importable.
-
-        Returns
-        -------
-        functions : list of str
-            A list of (public) function names in the module.
-        classes : list of str
-            A list of (public) class names in the module.
-        """
-        mod = __import__(uri, fromlist=[uri])
-        # find all public objects in the module.
-        obj_strs = [obj for obj in dir(mod) if not obj.startswith("_")]
-        functions = []
-        classes = []
-        for obj_str in obj_strs:
-            # find the actual object from its string representation
-            if obj_str not in mod.__dict__:
-                continue
-            obj = mod.__dict__[obj_str]
-            # Check if function / class defined in module
-            if not self.other_defines and not getmodule(obj) == mod:
-                continue
-            # figure out if obj is a function or class
-            if (
-                hasattr(obj, "func_name")
-                or isinstance(obj, BuiltinFunctionType)
-                or isinstance(obj, FunctionType)
-            ):
-                functions.append(obj_str)
-            else:
-                try:
-                    issubclass(obj, object)
-                    classes.append(obj_str)
-                except TypeError:
-                    # not a function or class
-                    pass
-        return functions, classes
-
-    def _parse_lines(self, linesource):
-        """ Parse lines of text for functions and classes """
-        functions = []
-        classes = []
-        for line in linesource:
-            if line.startswith("def ") and line.count("("):
-                # exclude private stuff
-                name = self._get_object_name(line)
-                if not name.startswith("_"):
-                    functions.append(name)
-            elif line.startswith("class "):
-                # exclude private stuff
-                name = self._get_object_name(line)
-                if not name.startswith("_"):
-                    classes.append(name)
-            else:
-                pass
-        functions.sort()
-        classes.sort()
-        return functions, classes
-
-    def generate_api_doc(self, uri):
-        """Make autodoc documentation template string for a module
-
-        Parameters
-        ----------
-        uri : string
-            python location of module - e.g 'sphinx.builder'
-
-        Returns
-        -------
-        head : string
-            Module name, table of contents.
-        body : string
-            Function and class docstrings.
-        """
-        # get the names of all classes and functions
-        functions, classes = self._parse_module_with_import(uri)
-        if not len(functions) and not len(classes) and DEBUG:
-            print("WARNING: Empty -", uri) # dbg
-
-        # Make a shorter version of the uri that omits the package name for
-        # titles
-        uri_short = re.sub(r"^%s\." % self.package_name, "", uri)
-
-        head = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n"
-        body = ""
-
-        # Set the chapter title to read 'module' for all modules except for the
-        # main packages
-        if "." in uri_short:
-            title = "Module: :mod:`" + uri_short + "`"
-            head += title + "\n" + self.rst_section_levels[2] * len(title)
-        else:
-            title = ":mod:`" + uri_short + "`"
-            head += title + "\n" + self.rst_section_levels[1] * len(title)
-
-        head += "\n.. automodule:: " + uri + "\n"
-        head += "\n.. currentmodule:: " + uri + "\n"
-        body += "\n.. currentmodule:: " + uri + "\n\n"
-        for c in classes:
-            body += (
-                "\n:class:`"
-                + c
-                + "`\n"
-                + self.rst_section_levels[3] * (len(c) + 9)
-                + "\n\n"
-            )
-            body += "\n.. autoclass:: " + c + "\n"
-            # must NOT exclude from index to keep cross-refs working
-            body += (
-                " :members:\n"
-                " :undoc-members:\n"
-                " :show-inheritance:\n"
-                "\n"
-                " .. automethod:: __init__\n\n"
-            )
-        head += ".. autosummary::\n\n"
-        for f in classes + functions:
-            head += " " + f + "\n"
-        head += "\n"
-
-        for f in functions:
-            # must NOT exclude from index to keep cross-refs working
-            body += f + "\n"
-            body += self.rst_section_levels[3] * len(f) + "\n"
-            body += "\n.. autofunction:: " + f + "\n\n"
-
-        return head, body
-
-    def _survives_exclude(self, matchstr, match_type):
-        """ Returns True if *matchstr* does not match patterns
-
-        ``self.package_name`` removed from front of string if present
-
-        Examples
-        --------
-        >>> dw = ApiDocWriter('sphinx')
-        >>> dw._survives_exclude('sphinx.okpkg', 'package')
-        True
-        >>> dw.package_skip_patterns.append('^\\.badpkg$')
-        >>> dw._survives_exclude('sphinx.badpkg', 'package')
-        False
-        >>> dw._survives_exclude('sphinx.badpkg', 'module')
-        True
-        >>> dw._survives_exclude('sphinx.badmod', 'module')
-        True
-        >>> dw.module_skip_patterns.append('^\\.badmod$')
-        >>> dw._survives_exclude('sphinx.badmod', 'module')
-        False
-        """
-        if match_type == "module":
-            patterns = self.module_skip_patterns
-        elif match_type == "package":
-            patterns = self.package_skip_patterns
-        else:
-            raise ValueError('Cannot interpret match type "%s"' % match_type)
-        # Match to URI without package name
-        L = len(self.package_name)
-        if matchstr[:L] == self.package_name:
-            matchstr = matchstr[L:]
-        for pat in patterns:
-            try:
-                pat.search
-            except AttributeError:
-                pat = re.compile(pat)
-            if pat.search(matchstr):
-                return False
-
-        return True
-
-    def discover_modules(self):
-        """ Return module sequence discovered from ``self.package_name``
-
-
-        Parameters
-        ----------
-        None
-
-        Returns
-        -------
-        mods : sequence
-            Sequence of module names within ``self.package_name``
-
-        Examples
-        --------
-        >>> dw = ApiDocWriter('sphinx')
-        >>> mods = dw.discover_modules()
-        >>> 'sphinx.util' in mods
-        True
-        >>> dw.package_skip_patterns.append('\.util$')
-        >>> 'sphinx.util' in dw.discover_modules()
-        False
-        >>>
-        """
-        modules = [self.package_name]
-        # raw directory parsing
-        for dirpath, dirnames, filenames in os.walk(self.root_path):
-            # Check directory names for packages
-            root_uri = self._path2uri(os.path.join(self.root_path, dirpath))
-
-            # Normally, we'd only iterate over dirnames, but since
-            # dipy does not import a whole bunch of modules we'll
-            # include those here as well (the *.py filenames).
-            filenames = [
-                f[:-3]
-                for f in filenames
-                if f.endswith(".py") and not f.startswith("__init__")
-            ]
-            for filename in filenames:
-                package_uri = "/".join((dirpath, filename))
-
-            for subpkg_name in dirnames + filenames:
-                package_uri = ".".join((root_uri, subpkg_name))
-                package_path = self._uri2path(package_uri)
-                if package_path and self._survives_exclude(package_uri, "package"):
-                    modules.append(package_uri)
-
-        return sorted(modules)
-
-    def write_modules_api(self, modules, outdir):
-        # upper-level modules
-        main_module = modules[0].split(".")[0]
-        ulms = [
-            ".".join(m.split(".")[:2]) if m.count(".") >= 1 else m.split(".")[0]
-            for m in modules
-        ]
-
-        from collections import OrderedDict
-
-        module_by_ulm = OrderedDict()
-
-        for v, k in zip(modules, ulms):
-            if k in module_by_ulm:
-                module_by_ulm[k].append(v)
-            else:
-                module_by_ulm[k] = [v]
-
-        written_modules = []
-
-        for ulm, mods in module_by_ulm.items():
-            print("Generating docs for %s:" % ulm)
-            document_head = []
-            document_body = []
-
-            for m in mods:
-                print(" -> " + m)
-                head, body = self.generate_api_doc(m)
-
-                document_head.append(head)
-                document_body.append(body)
-
-            out_module = ulm + self.rst_extension
-            outfile = os.path.join(outdir, out_module)
-            fileobj = open(outfile, "wt")
-
-            fileobj.writelines(document_head + document_body)
-            fileobj.close()
-            written_modules.append(out_module)
-
-        self.written_modules = written_modules
-
-    def write_api_docs(self, outdir):
-        """Generate API reST files.
-
-        Parameters
-        ----------
-        outdir : string
-            Directory name in which to store files
-            We create automatic filenames for each module
-
-        Returns
-        -------
-        None
-
-        Notes
-        -----
-        Sets self.written_modules to list of written modules
-        """
-        if not os.path.exists(outdir):
-            os.mkdir(outdir)
-        # compose list of modules
-        modules = self.discover_modules()
-        self.write_modules_api(modules, outdir)
-
-    def write_index(self, outdir, froot="gen", relative_to=None):
-        """Make a reST API index file from written files
-
-        Parameters
-        ----------
-        path : string
-            Filename to write index to
-        outdir : string
-            Directory to which to write generated index file
-        froot : string, optional
-            root (filename without extension) of filename to write to
-            Defaults to 'gen'. We add ``self.rst_extension``.
-        relative_to : string
-            path to which written filenames are relative. This
-            component of the written file path will be removed from
-            outdir, in the generated index. Default is None, meaning,
-            leave path as it is.
-        """
-        if self.written_modules is None:
-            raise ValueError("No modules written")
-        # Get full filename path
-        path = os.path.join(outdir, froot + self.rst_extension)
-        # Path written into index is relative to rootpath
-        if relative_to is not None:
-            relpath = (outdir + os.path.sep).replace(relative_to + os.path.sep, "")
-        else:
-            relpath = outdir
-        idx = open(path, "wt")
-        w = idx.write
-        w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n")
-
-        title = "API Reference"
-        w(title + "\n")
-        w("=" * len(title) + "\n\n")
-        w(".. toctree::\n\n")
-        for f in self.written_modules:
-            w(" %s\n" % os.path.join(relpath, f))
-        idx.close()
diff --git a/docs/tools/buildmodref.py b/docs/tools/buildmodref.py
deleted file mode 100755
index 769c696b..00000000
--- a/docs/tools/buildmodref.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-"""Script to auto-generate API docs.
-"""
-from __future__ import print_function, division
-
-# stdlib imports
-import sys
-import re
-
-# local imports
-from apigen import ApiDocWriter
-
-# version comparison
-from distutils.version import LooseVersion as V
-
-# *****************************************************************************
-
-
-def abort(error):
-    print("*WARNING* API documentation not generated: %s" % error)
-    exit()
-
-
-def writeapi(package, outdir, source_version, other_defines=True):
-    # Check that the package is available. If not, the API documentation is not
-    # (re)generated and existing API documentation sources will be used.
-
-    try:
-        __import__(package)
-    except ImportError:
-        abort("Can not import " + package)
-
-    module = sys.modules[package]
-
-    # Check that the source version is equal to the installed
-    # version. If the versions mismatch the API documentation sources
-    # are not (re)generated. This avoids automatic generation of documentation
-    # for older or newer versions if such versions are installed on the system.
-
-    installed_version = V(module.__version__)
-    if source_version != installed_version:
-        abort("Installed version does not match source version")
-
-    docwriter = ApiDocWriter(package, rst_extension=".rst", other_defines=other_defines)
-
-    docwriter.package_skip_patterns += [
-        r"\.%s$" % package,
-        r".*test.*$",
-        r"\.version.*$",
-    ]
-    docwriter.write_api_docs(outdir)
-    docwriter.write_index(outdir, "index", relative_to=outdir)
-    print("%d files written" % len(docwriter.written_modules))
-
-
-if __name__ == "__main__":
-    package = sys.argv[1]
-    outdir = sys.argv[2]
-    try:
-        other_defines = sys.argv[3]
-    except IndexError:
-        other_defines = True
-    else:
-        other_defines = other_defines in ("True", "true", "1")
-
-    writeapi(package, outdir, other_defines=other_defines)
diff --git a/templateflow/__init__.py b/templateflow/__init__.py
index 2b5464fe..2bbe5e68 100644
--- a/templateflow/__init__.py
+++ b/templateflow/__init__.py
@@ -22,9 +22,10 @@
 #
 """TemplateFlow is the Zone of Templates."""
 from datetime import datetime as _dt
+from datetime import timezone as _tz
 
 __packagename__ = 'templateflow'
-__copyright__ = f'{_dt.now().year} The NiPreps Developers'
+__copyright__ = f'{_dt.now(tz=_tz.utc).year} The NiPreps Developers'
 try:
     from ._version import __version__
 except ModuleNotFoundError:
diff --git a/templateflow/_loader.py b/templateflow/_loader.py
index ba20a2cb..123a17cd 100644
--- a/templateflow/_loader.py
+++ b/templateflow/_loader.py
@@ -32,7 +32,6 @@
 from functools import cached_property
 from pathlib import Path
 from types import ModuleType
-from typing import Union
 
 try:
     from functools import cache
@@ -124,7 +123,7 @@ class Loader:
     .. automethod:: cached
     """
 
-    def __init__(self, anchor: Union[str, ModuleType]):
+    def __init__(self, anchor: str | ModuleType):
         self._anchor = anchor
         self.files = files(anchor)
         self.exit_stack = ExitStack()
@@ -177,7 +176,7 @@ def as_path(self, *segments) -> AbstractContextManager[Path]:
         """
         return as_file(self.files.joinpath(*segments))
 
-    @cache
+    @cache  # noqa: B019
     def cached(self, *segments) -> Path:
         """Ensure data is available as a :class:`~pathlib.Path`.
 
diff --git a/templateflow/conf/tests/test_conf.py b/templateflow/conf/tests/test_conf.py
index 26d11cc6..ab87c185 100644
--- a/templateflow/conf/tests/test_conf.py
+++ b/templateflow/conf/tests/test_conf.py
@@ -124,7 +124,7 @@ def test_layout_errors(monkeypatch):
     def myfunc():
         return 'okay'
 
-    def mock_import(name, globals=None, locals=None, fromlist=tuple(), level=0):
+    def mock_import(name, globals=None, locals=None, fromlist=(), level=0):  # noqa: A002
         if name == 'bids':
             raise ModuleNotFoundError
         return oldimport(name, globals=globals, locals=locals, fromlist=fromlist, level=level)
diff --git a/templateflow/conf/tests/test_s3.py b/templateflow/conf/tests/test_s3.py
index 66dfb21f..0971f662 100644
--- a/templateflow/conf/tests/test_s3.py
+++ b/templateflow/conf/tests/test_s3.py
@@ -39,7 +39,7 @@ def test_get_skel_file(monkeypatch):
     assert Path(new_skel).stat().st_size > 0
 
     latest_md5 = (
-        requests.get(s3.TF_SKEL_URL(release='master', ext='md5', allow_redirects=True))
+        requests.get(s3.TF_SKEL_URL(release='master', ext='md5', allow_redirects=True), timeout=10)
         .content.decode()
         .split()[0]
     )
diff --git a/templateflow/tests/test_api.py b/templateflow/tests/test_api.py
index 40023c8a..fc773319 100644
--- a/templateflow/tests/test_api.py
+++ b/templateflow/tests/test_api.py
@@ -46,15 +46,13 @@ def _parse_bibtex(self):
 
         try:
             self.etype = re.search(r'@(\w+)', self.text).group(1)
-        except AttributeError:
-            raise TypeError(f'Invalid bibtex: {self.text}')
+        except AttributeError as err:
+            raise TypeError(f'Invalid bibtex: {self.text}') from err
         try:
             self.citekey = re.search(r'@[^{]*{([^,\s]+)', self.text).group(1)
-        except AttributeError:
-            raise TypeError(f'Invalid bibtex: {self.text}')
-        self.pairs = {
-            key: val for key, val in re.findall(r'(\w+)=(\{[^{}]+\})', self.text)
-        }
+        except AttributeError as err:
+            raise TypeError(f'Invalid bibtex: {self.text}') from err
+        self.pairs = dict(re.findall(r'(\w+)=(\{[^{}]+\})', self.text))
 
     def get(self, val):
         return self.pairs.get(val)
@@ -177,7 +175,7 @@ def assert_same(self, other):
 
 
 @pytest.mark.parametrize(
-    'template,urls,fbib,lbib',
+    ('template', 'urls', 'fbib', 'lbib'),
     [
         ('MNI152NLin2009cAsym', mni2009_urls, mni2009_fbib, mni2009_lbib),
         ('fsLR', fslr_urls, fslr_fbib, fslr_lbib),
@@ -207,8 +205,7 @@ def test_citations(tmp_path, template, urls, fbib, lbib):
 
         assert len(bibs) == 1
     else:
-        # no citations currently
-        assert False
+        pytest.fail('no citations currently')
 
 
 def test_pybids_magic_get():
@@ -224,4 +221,4 @@ def test_pybids_magic_get():
     # Existing layout.get_* should not be bubbled to the layout
     # (that means, raise an AttributeError instead of a BIDSEntityError)
     with pytest.raises(AttributeError):
-        api.get_fieldmap
+        _ = api.get_fieldmap