diff --git a/pre-commit/notebook_markdown_format.py b/pre-commit/notebook_markdown_format.py
index e5ef46530..5a82c12a5 100644
--- a/pre-commit/notebook_markdown_format.py
+++ b/pre-commit/notebook_markdown_format.py
@@ -5,12 +5,12 @@
 import copy
 import json
 from pathlib import Path
-from typing import Any, Dict, List
+from typing import Any
 
 import mdformat
 
 
-def format_notebook(notebook: Dict[str, Any]) -> Dict[str, Any]:
+def format_notebook(notebook: dict[str, Any]) -> dict[str, Any]:
     """Format a notebook in MyST style.
 
     Args:
@@ -36,7 +36,7 @@ def format_notebook(notebook: Dict[str, Any]) -> Dict[str, Any]:
     return notebook
 
 
-def main(files: List[Path]) -> None:
+def main(files: list[Path]) -> None:
     """Check markdown cells in notebooks for common mistakes.
 
     Args:
@@ -54,13 +54,14 @@ def main(files: List[Path]) -> None:
         changed = any(
             cell != formatted_cell
             for cell, formatted_cell in zip(
-                notebook["cells"], formatted_notebook["cells"]
+                notebook["cells"],
+                formatted_notebook["cells"],
             )
         )
         if not changed:
             continue
         print("Formatting notebook", path)
-        with open(path, "w") as fh:
+        with Path.open(path, "w") as fh:
             json.dump(formatted_notebook, fh, indent=1, ensure_ascii=False)
             fh.write("\n")
 
@@ -68,7 +69,10 @@ def main(files: List[Path]) -> None:
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Lint notebook markdown files.")
     parser.add_argument(
-        "files", nargs="*", help="Notebook markdown files to lint.", type=Path
+        "files",
+        nargs="*",
+        help="Notebook markdown files to lint.",
+        type=Path,
     )
     args = parser.parse_args()
     main(sorted(args.files))
diff --git a/pre-commit/notebook_urls.py b/pre-commit/notebook_urls.py
index 24aa06b6c..8c91d9f33 100644
--- a/pre-commit/notebook_urls.py
+++ b/pre-commit/notebook_urls.py
@@ -1,4 +1,6 @@
 """Simple check to ensure each code cell in a notebook is valid Python."""
+from __future__ import annotations
+
 import argparse
 import json
 import re
@@ -6,7 +8,6 @@
 import sys
 from dataclasses import dataclass
 from pathlib import Path
-from typing import List, Set, Tuple
 
 
 def git_branch_name() -> str:
@@ -18,7 +19,7 @@ def git_branch_name() -> str:
     )
 
 
-def git_branch_modified_paths(from_ref: str, to_ref: str) -> Set[Path]:
+def git_branch_modified_paths(from_ref: str, to_ref: str) -> set[Path]:
     """Get a set of file paths modified on this branch vs develop."""
     from_to = f"{from_ref}...{to_ref}"
     return {
@@ -29,7 +30,7 @@ def git_branch_modified_paths(from_ref: str, to_ref: str) -> Set[Path]:
                 "diff",
                 "--name-only",
                 from_to,
-            ]
+            ],
         )
         .decode()
         .strip()
@@ -37,12 +38,12 @@ def git_branch_modified_paths(from_ref: str, to_ref: str) -> Set[Path]:
     }
 
 
-def git_previous_commit_modified_paths() -> Set[Path]:
+def git_previous_commit_modified_paths() -> set[Path]:
    """Get a set of file paths modified in the previous commit."""
    return {
        Path(p)
        for p in subprocess.check_output(
-            ["/usr/bin/git", "diff", "--name-only", "HEAD~"]
+            ["/usr/bin/git", "diff", "--name-only", "HEAD~"],
        )
        .decode()
        .strip()
@@ -72,7 +73,7 @@ class PatternReplacement:
 MAIN_BRANCHES = ("master", "main")
 
 
-def main(files: List[Path], from_ref: str, to_ref: str) -> bool:
+def main(files: list[Path], from_ref: str, to_ref: str) -> bool:
     """Check that URLs in the notebook are relative to the current branch.
 
     Args:
@@ -150,7 +151,7 @@ def main(files: List[Path], from_ref: str, to_ref: str) -> bool:
         # Write the file if it has changed
         if changed:
             print(f"Updating {path}")
-            with open(path, "w", encoding="utf-8") as fh:
+            with Path.open(path, "w", encoding="utf-8") as fh:
                 json.dump(notebook, fh, indent=1, ensure_ascii=False)
                 fh.write("\n")
         else:
@@ -159,8 +160,10 @@ def main(files: List[Path], from_ref: str, to_ref: str) -> bool:
 
 
 def check_notebook(
-    path: Path, to_ref: str, replacements: List[PatternReplacement]
-) -> Tuple[bool, dict]:
+    path: Path,
+    to_ref: str,
+    replacements: list[PatternReplacement],
+) -> tuple[bool, dict]:
     """Check the notebook for URL replacements.
 
     Args:
@@ -183,7 +186,7 @@ def check_notebook(
         return changed, None
     # Load the notebook
-    with open(path, encoding="utf-8") as fh:
+    with Path.open(path, encoding="utf-8") as fh:
         notebook = json.load(fh)
     # Check each cell
     for cell_num, cell in enumerate(notebook["cells"]):
@@ -197,7 +200,7 @@ def check_notebook(
     return changed, notebook
 
 
-def replace_line(line: str, to_ref: str, replacements: List[PatternReplacement]) -> str:
+def replace_line(line: str, to_ref: str, replacements: list[PatternReplacement]) -> str:
     """Perform pattern replacements in the line.
 
     Args:
@@ -230,7 +233,11 @@ def replace_line(line: str, to_ref: str, replacements: List[PatternReplacement])
         default=list(Path.cwd().rglob("*.ipynb")),
     )
     parser.add_argument(
-        "-f", "--from-ref", help="Reference to diff from", type=str, default="develop"
+        "-f",
+        "--from-ref",
+        help="Reference to diff from",
+        type=str,
+        default="develop",
     )
     parser.add_argument(
         "-t",
diff --git a/setup.py b/setup.py
index 8c1a688f5..e9fd89b5d 100644
--- a/setup.py
+++ b/setup.py
@@ -6,10 +6,10 @@
 
 from setuptools import find_packages, setup
 
-with open("README.md") as readme_file:
+with Path("README.md").open() as readme_file:
     readme = readme_file.read()
 
-with open("HISTORY.md") as history_file:
+with Path("HISTORY.md").open() as history_file:
     history = history_file.read()
 
 install_requires = [
diff --git a/tests/conftest.py b/tests/conftest.py
index 9fa5b2951..2fcf4d5c6 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -19,6 +19,7 @@
 
 
 def pytest_configure(config):
+    """Perform initial configuration for TIAToolbox tests."""
     logger.info(
         "🏁 Starting tests. TIAToolbox Version: %s. CI: %s",
         tiatoolbox.__version__,
@@ -79,6 +80,7 @@ def __remote_sample(key: str) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def sample_ndpi(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for ndpi images.
+
     Download ndpi image for pytest.
 
     """
@@ -99,6 +101,7 @@ def sample_ndpi2(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def sample_svs(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for svs images.
+
     Download svs image for pytest.
 
     """
@@ -108,6 +111,7 @@ def sample_svs(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def sample_ome_tiff(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for ome-tiff (brightfield pyramid) images.
+
     Download ome-tiff image for pytest.
 
     """
@@ -117,6 +121,7 @@ def sample_ome_tiff(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def sample_jp2(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for JP2 images.
+
     Download jp2 image for pytest.
 
     """
@@ -183,6 +188,7 @@ def sample_svs_ndpi_wsis(sample_ndpi2, sample_svs, tmpdir_factory):
 @pytest.fixture(scope="session")
 def source_image(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for source image.
+
     Download stain normalization source image for pytest.
 
     """
@@ -192,6 +198,7 @@ def source_image(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def norm_macenko(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for norm_macenko image.
+
     Download norm_macenko image for pytest.
 
     """
@@ -201,6 +208,7 @@ def norm_macenko(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def norm_reinhard(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for norm_reinhard image.
+
     Download norm_reinhard image for pytest.
 
     """
@@ -210,6 +218,7 @@ def norm_reinhard(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def norm_ruifrok(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for norm_ruifrok image.
+
     Download norm_ruifrok image for pytest.
 
     """
@@ -219,6 +228,7 @@ def norm_ruifrok(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def norm_vahadane(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for norm_vahadane image.
+
     Download norm_vahadane image for pytest.
 
     """
@@ -256,6 +266,7 @@ def sample_visual_fields(
 @pytest.fixture(scope="session")
 def patch_extr_vf_image(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for a visual field image.
+
     Download TCGA-HE-7130-01Z-00-DX1 image for pytest.
 
     """
@@ -265,6 +276,7 @@ def patch_extr_vf_image(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_csv(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction csv.
+
     Download sample patch extraction csv for pytest.
 
     """
@@ -274,6 +286,7 @@ def patch_extr_csv(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_json(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction json.
+
     Download sample patch extraction json for pytest.
 
     """
@@ -283,6 +296,7 @@ def patch_extr_json(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_npy(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction npy.
+
     Download sample patch extraction npy for pytest.
 
     """
@@ -292,6 +306,7 @@ def patch_extr_npy(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_csv_noheader(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction noheader csv.
+
     Download sample patch extraction noheader csv for pytest.
 
     """
@@ -301,6 +316,7 @@ def patch_extr_csv_noheader(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_2col_json(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction 2col json.
+
     Download sample patch extraction 2col json for pytest.
 
     """
@@ -310,6 +326,7 @@ def patch_extr_2col_json(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_2col_npy(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction 2col npy.
+
     Download sample patch extraction 2col npy for pytest.
 
     """
@@ -319,6 +336,7 @@ def patch_extr_2col_npy(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_jp2_csv(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction jp2 csv.
+
     Download sample patch extraction jp2 csv for pytest.
 
     """
@@ -328,6 +346,7 @@ def patch_extr_jp2_csv(remote_sample) -> pathlib.Path:
 @pytest.fixture(scope="session")
 def patch_extr_jp2_read(remote_sample) -> pathlib.Path:
     """Sample pytest fixture for sample patch extraction jp2 read npy.
+ Download sample patch extraction jp2 read npy for pytest. """ @@ -337,6 +356,7 @@ def patch_extr_jp2_read(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def patch_extr_npy_read(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch extraction read npy. + Download sample patch extraction read npy for pytest. """ @@ -346,6 +366,7 @@ def patch_extr_npy_read(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def patch_extr_svs_csv(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch extraction svs csv. + Download sample patch extraction svs csv for pytest. """ @@ -355,6 +376,7 @@ def patch_extr_svs_csv(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def patch_extr_svs_header(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch extraction svs_header csv. + Download sample patch extraction svs_header csv for pytest. """ @@ -364,6 +386,7 @@ def patch_extr_svs_header(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def patch_extr_svs_npy_read(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch extraction svs_read npy. + Download sample patch extraction svs_read npy for pytest. """ @@ -373,6 +396,7 @@ def patch_extr_svs_npy_read(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def sample_patch1(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch 1. + Download sample patch 1 (Kather100K) for pytest. """ @@ -382,6 +406,7 @@ def sample_patch1(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def sample_patch2(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch 2. + Download sample patch 2 (Kather100K) for pytest. """ @@ -391,6 +416,7 @@ def sample_patch2(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def sample_patch3(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch 3. + Download sample patch 3 (PCam) for pytest. """ @@ -400,6 +426,7 @@ def sample_patch3(remote_sample) -> pathlib.Path: @pytest.fixture(scope="session") def sample_patch4(remote_sample) -> pathlib.Path: """Sample pytest fixture for sample patch 4. + Download sample patch 4 (PCam) for pytest. """ @@ -424,6 +451,7 @@ def dir_sample_patches(sample_patch1, sample_patch2, tmpdir_factory): @pytest.fixture(scope="session") def sample_wsi_dict(remote_sample): """Sample pytest fixture for torch wsi dataset. + Download svs image for pytest. """ diff --git a/tests/models/test_abc.py b/tests/models/test_abc.py index 1f3944db7..7dfe8a7a4 100644 --- a/tests/models/test_abc.py +++ b/tests/models/test_abc.py @@ -15,7 +15,7 @@ def test_get_pretrained_model(): """Test for downloading and creating pretrained models.""" pretrained_info = rcParam["pretrained_model_info"] - for pretrained_name in pretrained_info.keys(): + for pretrained_name in pretrained_info: get_pretrained_model(pretrained_name, overwrite=True) diff --git a/tests/models/test_arch_mapde.py b/tests/models/test_arch_mapde.py index 875cf4b85..e66073f9a 100644 --- a/tests/models/test_arch_mapde.py +++ b/tests/models/test_arch_mapde.py @@ -25,7 +25,7 @@ def _load_mapde(name): def test_functionality(remote_sample): """Functionality test for MapDe. - Tests the functionality of MapDe model for inference at the patch level. + Test the functionality of MapDe model for inference at the patch level. 
""" sample_wsi = str(remote_sample("wsi1_2k_2k_svs")) @@ -33,7 +33,10 @@ def test_functionality(remote_sample): # * test fast mode (architecture used in PanNuke paper) patch = reader.read_bounds( - (0, 0, 252, 252), resolution=0.50, units="mpp", coord_space="resolution" + (0, 0, 252, 252), + resolution=0.50, + units="mpp", + coord_space="resolution", ) model = _load_mapde(name="mapde-conic") diff --git a/tests/models/test_arch_micronet.py b/tests/models/test_arch_micronet.py index c24f07c8a..0dbe4458e 100644 --- a/tests/models/test_arch_micronet.py +++ b/tests/models/test_arch_micronet.py @@ -15,15 +15,19 @@ ON_GPU = toolbox_env.has_gpu() -def test_functionality(remote_sample, tmp_path): +def test_functionality( + remote_sample, +): """Functionality test.""" - tmp_path = str(tmp_path) - sample_wsi = str(remote_sample("wsi1_2k_2k_svs")) + sample_wsi = remote_sample("wsi1_2k_2k_svs") reader = WSIReader.open(sample_wsi) # * test fast mode (architecture used in PanNuke paper) patch = reader.read_bounds( - (0, 0, 252, 252), resolution=0.25, units="mpp", coord_space="resolution" + (0, 0, 252, 252), + resolution=0.25, + units="mpp", + coord_space="resolution", ) model = MicroNet() @@ -31,6 +35,7 @@ def test_functionality(remote_sample, tmp_path): batch = torch.from_numpy(patch)[None] weights_path = fetch_pretrained_weights("micronet-consep") map_location = select_device(ON_GPU) + model = model.to(map_location) pretrained = torch.load(weights_path, map_location=map_location) model.load_state_dict(pretrained) output = model.infer_batch(model, batch, on_gpu=ON_GPU) @@ -49,7 +54,7 @@ def test_value_error(): reason="Local test on machine with GPU.", ) def test_micronet_output(remote_sample, tmp_path): - """Tests the output of MicroNet.""" + """Test the output of MicroNet.""" svs_1_small = pathlib.Path(remote_sample("svs-1-small")) micronet_output = pathlib.Path(remote_sample("micronet-output")) pretrained_model = "micronet-consep" diff --git a/tests/models/test_arch_nuclick.py b/tests/models/test_arch_nuclick.py index a0f0ea724..e3d03df7c 100644 --- a/tests/models/test_arch_nuclick.py +++ b/tests/models/test_arch_nuclick.py @@ -15,7 +15,7 @@ def test_functional_nuclick(remote_sample, tmp_path, caplog): - """Tests for NuClick.""" + """Test for NuClick.""" # convert to pathlib Path to prevent wsireader complaint tile_path = pathlib.Path(remote_sample("patch-extraction-vf")) img = imread(tile_path) @@ -39,7 +39,8 @@ def test_functional_nuclick(remote_sample, tmp_path, caplog): patch = np.float32(patch) / 255.0 patch = np.moveaxis(patch, -1, 0) batch = np.concatenate( - (patch, inclusion_map[np.newaxis, ...], exclusion_map[np.newaxis, ...]), axis=0 + (patch, inclusion_map[np.newaxis, ...], exclusion_map[np.newaxis, ...]), + axis=0, ) batch = torch.from_numpy(batch[np.newaxis, ...]) @@ -49,7 +50,9 @@ def test_functional_nuclick(remote_sample, tmp_path, caplog): model.load_state_dict(pretrained) output = model.infer_batch(model, batch, on_gpu=ON_GPU) postproc_masks = model.postproc( - output, do_reconstruction=True, nuc_points=inclusion_map[np.newaxis, ...] + output, + do_reconstruction=True, + nuc_points=inclusion_map[np.newaxis, ...], ) gt_path = pathlib.Path(remote_sample("nuclick-output")) @@ -66,6 +69,8 @@ def test_functional_nuclick(remote_sample, tmp_path, caplog): inclusion_map = np.zeros((128, 128)) inclusion_map[0, 0] = 1 _ = model.postproc( - output, do_reconstruction=True, nuc_points=inclusion_map[np.newaxis, ...] 
+        output,
+        do_reconstruction=True,
+        nuc_points=inclusion_map[np.newaxis, ...],
     )
     assert "Nuclei reconstruction was not done" in caplog.text
diff --git a/tests/models/test_arch_sccnn.py b/tests/models/test_arch_sccnn.py
index 6f320b11f..8f7173de1 100644
--- a/tests/models/test_arch_sccnn.py
+++ b/tests/models/test_arch_sccnn.py
@@ -22,7 +22,7 @@ def _load_sccnn(name):
 
 def test_functionality(remote_sample):
     """Functionality test for SCCNN.
 
-    Tests the functionality of SCCNN model for inference at the patch level.
+    Test the functionality of SCCNN model for inference at the patch level.
 
     """
     sample_wsi = str(remote_sample("wsi1_2k_2k_svs"))
@@ -30,7 +30,10 @@ def test_functionality(remote_sample):
 
     # * test fast mode (architecture used in PanNuke paper)
     patch = reader.read_bounds(
-        (30, 30, 61, 61), resolution=0.25, units="mpp", coord_space="resolution"
+        (30, 30, 61, 61),
+        resolution=0.25,
+        units="mpp",
+        coord_space="resolution",
     )
     batch = torch.from_numpy(patch)[None]
     model = _load_sccnn(name="sccnn-crchisto")
diff --git a/tests/models/test_arch_unet.py b/tests/models/test_arch_unet.py
index 044c9d6e8..2e2c33473 100644
--- a/tests/models/test_arch_unet.py
+++ b/tests/models/test_arch_unet.py
@@ -16,7 +16,7 @@
 
 def test_functional_unet(remote_sample, tmp_path):
-    """Tests for unet."""
+    """Test for unet."""
     # convert to pathlib Path to prevent wsireader complaint
     mini_wsi_svs = pathlib.Path(remote_sample("wsi2_4k_4k_svs"))
 
@@ -38,10 +38,9 @@ def test_functional_unet(remote_sample, tmp_path):
     read_kwargs = {"resolution": 2.0, "units": "mpp", "coord_space": "resolution"}
     batch = np.array(
         [
-            # noqa
             reader.read_bounds([0, 0, 1024, 1024], **read_kwargs),
             reader.read_bounds([1024, 1024, 2048, 2048], **read_kwargs),
-        ]
+        ],
     )
 
     batch = torch.from_numpy(batch)
diff --git a/tests/models/test_arch_utils.py b/tests/models/test_arch_utils.py
index 2e197eb94..2793dc64d 100644
--- a/tests/models/test_arch_utils.py
+++ b/tests/models/test_arch_utils.py
@@ -1,4 +1,4 @@
-"""Unit test package for architecture utilities"""
+"""Unit test package for architecture utilities."""
 import numpy as np
 import pytest
 
@@ -24,7 +24,7 @@ def test_all():
             [1, 1, 2, 2],
             [3, 3, 4, 4],
             [3, 3, 4, 4],
-        ]
+        ],
     )
     assert np.sum(_output - output) == 0
 
diff --git a/tests/test_docs.py b/tests/test_docs.py
index 32b2b61d7..faa725f05 100644
--- a/tests/test_docs.py
+++ b/tests/test_docs.py
@@ -1,10 +1,12 @@
+"""Test docstring examples and imports are valid."""
+from __future__ import annotations
+
 import ast
 import doctest
 import importlib
 import os
 import sys
 from pathlib import Path
-from typing import List, Optional, Union
 
 import pytest
 
@@ -15,6 +17,7 @@ def source_files(root_path):
     ignore = {"__pycache__"}
 
     def generator():
+        """Generate paths to files."""
         for root, dirs, files in os.walk(root_path):
             files = [f for f in files if f.endswith(".py") and f[0] != "."]
             dirs[:] = [d for d in dirs if d not in ignore and d[0] != "."]
@@ -78,7 +81,7 @@ def check_imports(source_tree: ast.AST, doc: doctest.DocTest, rel_path: Path) ->
         source = "\n".join(eg.source.strip() for eg in doc.examples)
         try:
             spec = importlib.util.find_spec(name)
-        except ModuleNotFoundError as e:
+        except ModuleNotFoundError as e:  # noqa: PERF203
             raise_source_exception(
                 source,
                 rel_path,
@@ -102,8 +105,8 @@ def raise_source_exception(
     rel_path: Path,
     source_lineno: int,
     file_lineno: int,
-    source_offset: Optional[int] = None,
-    exception: Optional[Exception] = None,
+    source_offset: int | None = None,
+    exception: Exception | None = None,
 ) -> None:
     """Raise an exception with the source code and line number highlighted.
@@ -135,18 +138,20 @@ def raise_source_exception(
     source_lines.insert(source_lineno, f"{' '*(source_offset+3)}^ {message}")
     annotated_source = "\n".join(source_lines)
     exception = type(exception) if exception else SyntaxError
+    msg = f"{rel_path}:{file_lineno}: {message}\n{annotated_source}"
     raise exception(
-        f"{rel_path}:{file_lineno}: {message}\n{annotated_source}"
+        msg,
    ) from None
 
 
-def import_node_names(import_node: Union[ast.Import, ast.ImportFrom]) -> List[str]:
+def import_node_names(import_node: ast.Import | ast.ImportFrom) -> list[str]:
     """Get the names being imported by import nodes."""
     if isinstance(import_node, ast.ImportFrom):
         return [import_node.module]
     if isinstance(import_node, ast.Import):
         return [name.name for name in import_node.names]
-    raise TypeError("Unknown node type")
+    msg = "Unknown node type"
+    raise TypeError(msg)
 
 
 def check_ast(doc, rel_path) -> ast.AST:
diff --git a/tests/test_magic.py b/tests/test_magic.py
index 5a8fb8f23..526d467f3 100644
--- a/tests/test_magic.py
+++ b/tests/test_magic.py
@@ -1,4 +1,4 @@
-"""Tests for detecting magic numbers and signatures in files."""
+"""Test for detecting magic numbers and signatures in files."""
 import sqlite3
 import zipfile
 from io import BytesIO
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
index 5250280b7..85278cc77 100644
--- a/tests/test_metrics.py
+++ b/tests/test_metrics.py
@@ -1,4 +1,4 @@
-"""Tests for metrics package in the toolbox."""
+"""Test for metrics package in the toolbox."""
 import numpy as np
 import pytest
 
@@ -55,7 +55,7 @@ def test_dice():
 
 def test_dice_shape_mismatch_error():
-    """Tests if the shape of inputs does not match."""
+    """Test if the shape of inputs does not match."""
     gt_mask = np.random.randint(2, size=(256, 256, 1))
     pred_mask = np.random.randint(2, size=(256, 256, 3))
     with pytest.raises(ValueError, match=r".*Shape mismatch between the two masks.*"):
diff --git a/tests/test_save_tiles.py b/tests/test_save_tiles.py
index 79084bcce..e6267f350 100644
--- a/tests/test_save_tiles.py
+++ b/tests/test_save_tiles.py
@@ -1,6 +1,5 @@
-"""Tests for code related to saving image tiles."""
+"""Test for code related to saving image tiles."""
 
-import os
 import pathlib
 
 from click.testing import CliRunner
@@ -26,7 +25,7 @@ def test_command_line_save_tiles(sample_svs_ndpi_wsis, tmp_path):
             "--tile-objective-value",
             "5",
             "--output-path",
-            os.path.join(tmp_path, "all_tiles"),
+            str(tmp_path / "all_tiles"),
         ],
     )
 
diff --git a/tests/test_scale.py b/tests/test_scale.py
index fcc12e417..c71c88f09 100644
--- a/tests/test_scale.py
+++ b/tests/test_scale.py
@@ -1,4 +1,4 @@
-"""Tests for scaling methods."""
+"""Test for scaling methods."""
 import numpy as np
 import pytest
 
@@ -12,7 +12,7 @@ def test_platt_scaler():
     logit = np.random.rand(sample_size)  # binary class
     label = np.concatenate(
-        [np.full(int(0.9 * sample_size), -1), np.full(int(0.1 * sample_size), 1)]
+        [np.full(int(0.9 * sample_size), -1), np.full(int(0.1 * sample_size), 1)],
     )
     scaler = PlattScaling(max_iter=1)
     scaler._fixer_a = 0.0
diff --git a/tests/test_slide_info.py b/tests/test_slide_info.py
index 7039ae8fc..43d25fd0b 100644
--- a/tests/test_slide_info.py
+++ b/tests/test_slide_info.py
@@ -1,4 +1,4 @@
-"""Tests for code related to obtaining slide information."""
+"""Test for code related to obtaining slide information."""
 
 import pathlib
 
@@ -120,7 +120,9 @@ def test_command_line_slide_info_output_none_mode_save(sample_svs):
 
     assert slide_info_result.exit_code == 0
     assert pathlib.Path(
-        sample_svs.parent, "meta-data", "CMU-1-Small-Region.yaml"
+        sample_svs.parent,
+        "meta-data",
+        "CMU-1-Small-Region.yaml",
     ).exists()
 
diff --git a/tests/test_slide_thumbnail.py b/tests/test_slide_thumbnail.py
index 7e6be77a6..7aba3085d 100644
--- a/tests/test_slide_thumbnail.py
+++ b/tests/test_slide_thumbnail.py
@@ -1,4 +1,4 @@
-"""Tests for code related to obtaining slide thumbnails."""
+"""Test for code related to obtaining slide thumbnails."""
 
 import os
 import pathlib
@@ -92,7 +92,10 @@ def test_command_line_jp2_slide_thumbnail_mode_show(sample_jp2, tmp_path):
     runner = CliRunner()
 
     command_line_slide_thumbnail(
-        runner, sample=sample_jp2, tmp_path=tmp_path, mode="show"
+        runner,
+        sample=sample_jp2,
+        tmp_path=tmp_path,
+        mode="show",
     )
 
diff --git a/tests/test_stainaugment.py b/tests/test_stainaugment.py
index 0c9f79698..d1ee4b77f 100644
--- a/tests/test_stainaugment.py
+++ b/tests/test_stainaugment.py
@@ -1,4 +1,4 @@
-"""Tests for stain augmentation code."""
+"""Test for stain augmentation code."""
 
 import pathlib
 
@@ -25,7 +25,10 @@ def test_stainaugment(source_image, norm_vahadane):
     # 1. Testing without stain matrix.
     # Test with macenko stain extractor
     augmentor = StainAugmentor(
-        method="macenko", sigma1=3.0, sigma2=3.0, augment_background=True
+        method="macenko",
+        sigma1=3.0,
+        sigma2=3.0,
+        augment_background=True,
     )
     augmentor.fit(source_img)
     source_img_aug = augmentor.augment()
@@ -64,7 +67,7 @@ def test_stainaugment(source_image, norm_vahadane):
                 sigma1=0.0,
                 sigma2=0.0,
                 always_apply=True,
-            )
+            ),
         ],
         p=1,
     )
diff --git a/tests/test_stainnorm.py b/tests/test_stainnorm.py
index c2253c627..1025914de 100644
--- a/tests/test_stainnorm.py
+++ b/tests/test_stainnorm.py
@@ -1,4 +1,4 @@
-"""Tests for stain normalization code."""
+"""Test for stain normalization code."""
 
 import pathlib
 
@@ -17,7 +17,8 @@ def test_stain_extract():
     """Test stain extraction class."""
     stain_matrix = np.array([0.65, 0.70, 0.29])
     with pytest.raises(
-        ValueError, match=r"Stain matrix must have shape \(2, 3\) or \(3, 3\)."
+        ValueError,
+        match=r"Stain matrix must have shape \(2, 3\) or \(3, 3\).",
     ):
         _ = stainextract.CustomExtractor(stain_matrix)
 
diff --git a/tests/test_tiffreader.py b/tests/test_tiffreader.py
index ac85f3a19..5daf41b07 100644
--- a/tests/test_tiffreader.py
+++ b/tests/test_tiffreader.py
@@ -1,3 +1,4 @@
+"""Test TIFFWSIReader."""
 import pytest
 from defusedxml import ElementTree
 
diff --git a/tests/test_tissuemask.py b/tests/test_tissuemask.py
index e6ed18d18..35f01a74c 100644
--- a/tests/test_tissuemask.py
+++ b/tests/test_tissuemask.py
@@ -1,4 +1,4 @@
-"""Tests for code related to tissue mask generation."""
+"""Test for code related to tissue mask generation."""
 
 import os
 import pathlib
@@ -172,7 +172,8 @@ def test_transform_fit_otsu_wrong_shape():
 
 def test_transform_morphological_conflicting_args():
     """Test giving conflicting arguments to morphological masker."""
     with pytest.raises(
-        ValueError, match="Only one of mpp, power, kernel_size can be given."
+        ValueError,
+        match="Only one of mpp, power, kernel_size can be given.",
     ):
         tissuemask.MorphologicalMasker(mpp=32, power=1.25)
 
diff --git a/tests/test_wsimeta.py b/tests/test_wsimeta.py
index fe7234f97..d40471af9 100644
--- a/tests/test_wsimeta.py
+++ b/tests/test_wsimeta.py
@@ -1,4 +1,4 @@
-"""Tests for obtaining whole-slide image metadata."""
+"""Test for obtaining whole-slide image metadata."""
 
 import numpy as np
 import pytest
diff --git a/tiatoolbox/cli/__init__.py b/tiatoolbox/cli/__init__.py
index 88ed1576e..dd5c4ad99 100644
--- a/tiatoolbox/cli/__init__.py
+++ b/tiatoolbox/cli/__init__.py
@@ -1,6 +1,6 @@
 """Console script for tiatoolbox."""
-import platform
 import sys
+from platform import platform, python_version
 
 import click
 
@@ -20,7 +20,7 @@
 
 def version_msg():
     """Return a string with tiatoolbox package version and python version."""
-    return f"tiatoolbox {__version__} (Python {platform.python_version()}) on {platform.platform()}."
+    return f"tiatoolbox {__version__} (Python {python_version()}) on {platform()}."
 
 
 @tiatoolbox_cli.group(context_settings={"help_option_names": ["-h", "--help"]})
diff --git a/tiatoolbox/cli/nucleus_instance_segment.py b/tiatoolbox/cli/nucleus_instance_segment.py
index 4909d4dd6..f851aa761 100644
--- a/tiatoolbox/cli/nucleus_instance_segment.py
+++ b/tiatoolbox/cli/nucleus_instance_segment.py
@@ -29,7 +29,7 @@
     default="nucleus_instance_segmentation",
 )
 @cli_file_type(
-    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs"
+    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs",
 )
 @cli_mode(
     usage_help="Type of input file to process.",
@@ -74,7 +74,9 @@ def nucleus_instance_segment(
     )
 
     ioconfig = prepare_ioconfig_seg(
-        IOSegmentorConfig, pretrained_weights, yaml_config_path
+        IOSegmentorConfig,
+        pretrained_weights,
+        yaml_config_path,
     )
 
     predictor = NucleusInstanceSegmentor(
diff --git a/tiatoolbox/cli/patch_predictor.py b/tiatoolbox/cli/patch_predictor.py
index e1f48ff21..dce9ccd2a 100644
--- a/tiatoolbox/cli/patch_predictor.py
+++ b/tiatoolbox/cli/patch_predictor.py
@@ -30,7 +30,7 @@
     default="patch_prediction",
 )
 @cli_file_type(
-    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs"
+    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs",
 )
 @cli_mode(
     usage_help="Type of input file to process.",
diff --git a/tiatoolbox/cli/read_bounds.py b/tiatoolbox/cli/read_bounds.py
index 136df58ed..9c6b8f18e 100644
--- a/tiatoolbox/cli/read_bounds.py
+++ b/tiatoolbox/cli/read_bounds.py
@@ -17,11 +17,11 @@
 @cli_img_input(usage_help="Path to WSI file.")
 @cli_output_path(
     usage_help="Path to output file in save mode. "
-    "default=img_input_dir/../im_region.jpg"
+    "default=img_input_dir/../im_region.jpg",
 )
 @cli_region(
     usage_help="Image region in the whole slide image to read from. "
-    "default=0 0 2000 2000"
+    "default=0 0 2000 2000",
 )
 @cli_resolution()
 @cli_units()
diff --git a/tiatoolbox/cli/save_tiles.py b/tiatoolbox/cli/save_tiles.py
index 180bc0eb8..02b4b5fd3 100644
--- a/tiatoolbox/cli/save_tiles.py
+++ b/tiatoolbox/cli/save_tiles.py
@@ -18,7 +18,8 @@
 @tiatoolbox_cli.command()
 @cli_img_input()
 @cli_output_path(
-    usage_help="Path to output directory to save the output.", default="tiles"
+    usage_help="Path to output directory to save the output.",
+    default="tiles",
 )
 @cli_file_type()
 @cli_tile_objective()
@@ -38,7 +39,11 @@ def save_tiles(
     from tiatoolbox.wsicore.wsireader import WSIReader
 
     files_all, output_path = prepare_file_dir_cli(
-        img_input, output_path, file_types, "save", "tiles"
+        img_input,
+        output_path,
+        file_types,
+        "save",
+        "tiles",
     )
     if verbose:
         logger.setLevel(logging.DEBUG)
diff --git a/tiatoolbox/cli/semantic_segment.py b/tiatoolbox/cli/semantic_segment.py
index 03f7a11ae..399b84cc7 100644
--- a/tiatoolbox/cli/semantic_segment.py
+++ b/tiatoolbox/cli/semantic_segment.py
@@ -27,7 +27,7 @@
     default="semantic_segmentation",
 )
 @cli_file_type(
-    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs"
+    default="*.png, *.jpg, *.jpeg, *.tif, *.tiff, *.svs, *.ndpi, *.jp2, *.mrxs",
 )
 @cli_mode(
     usage_help="Type of input file to process.",
@@ -68,7 +68,9 @@ def semantic_segment(
     )
 
     ioconfig = prepare_ioconfig_seg(
-        IOSegmentorConfig, pretrained_weights, yaml_config_path
+        IOSegmentorConfig,
+        pretrained_weights,
+        yaml_config_path,
     )
 
     predictor = SemanticSegmentor(
diff --git a/tiatoolbox/cli/slide_info.py b/tiatoolbox/cli/slide_info.py
index b1098681d..ae9eac1ba 100644
--- a/tiatoolbox/cli/slide_info.py
+++ b/tiatoolbox/cli/slide_info.py
@@ -18,7 +18,7 @@
 @cli_img_input()
 @cli_output_path(
     usage_help="Path to output directory to save the output. "
-    "default=img_input/../meta-data"
+    "default=img_input/../meta-data",
 )
 @cli_file_type(default="*.ndpi, *.svs, *.mrxs, *.jp2")
 @cli_mode(default="show")
@@ -28,7 +28,11 @@ def slide_info(img_input, output_path, file_types, mode, verbose):
     from tiatoolbox import utils, wsicore
 
     files_all, output_path = prepare_file_dir_cli(
-        img_input, output_path, file_types, mode, "meta-data"
+        img_input,
+        output_path,
+        file_types,
+        mode,
+        "meta-data",
    )
 
    for curr_file in files_all:
@@ -44,7 +48,8 @@ def slide_info(img_input, output_path, file_types, mode, verbose):
         if mode == "save":
             out_path = pathlib.Path(
-                output_path, wsi.info.file_path.with_suffix(".yaml").name
+                output_path,
+                wsi.info.file_path.with_suffix(".yaml").name,
             )
             utils.save_yaml(
                 wsi.info.as_dict(),
diff --git a/tiatoolbox/cli/slide_thumbnail.py b/tiatoolbox/cli/slide_thumbnail.py
index 2d1e6a253..4f5c290ad 100644
--- a/tiatoolbox/cli/slide_thumbnail.py
+++ b/tiatoolbox/cli/slide_thumbnail.py
@@ -15,7 +15,7 @@
 @cli_img_input()
 @cli_output_path(
     usage_help="Path to output directory to save the output. "
-    "default=img_input/../slide-thumbnail"
+    "default=img_input/../slide-thumbnail",
 )
 @cli_file_type(default="*.ndpi, *.svs, *.mrxs, *.jp2")
 @cli_mode(default="save")
@@ -34,7 +34,11 @@ def slide_thumbnail(img_input, output_path, file_types, mode):
     from tiatoolbox.wsicore.wsireader import WSIReader
 
     files_all, output_path = prepare_file_dir_cli(
-        img_input, output_path, file_types, mode, "slide-thumbnail"
+        img_input,
+        output_path,
+        file_types,
+        mode,
+        "slide-thumbnail",
     )
 
     for curr_file in files_all:
diff --git a/tiatoolbox/data/pretrained_model.yaml b/tiatoolbox/data/pretrained_model.yaml
index 2243cebb0..9eb539efc 100644
--- a/tiatoolbox/data/pretrained_model.yaml
+++ b/tiatoolbox/data/pretrained_model.yaml
@@ -765,7 +765,6 @@ micronet-consep:
       - {"units": "mpp", "resolution": 0.25}
       output_resolutions:
       - {"units": "mpp", "resolution": 0.25}
-      margin: 128
       tile_shape: [2048, 2048]
      patch_input_shape: [252, 252]
      patch_output_shape: [252, 252]
@@ -777,66 +776,90 @@ mapde-crchisto:
   architecture:
     class: mapde.MapDe
     kwargs:
-      input_resolutions:
-      - { "units": "mpp", "resolution": 0.25 }
       num_input_channels: 3
       min_distance: 4
       threshold_abs: 250
       num_classes: 1
+  ioconfig:
+    class: semantic_segmentor.IOSegmentorConfig
+    kwargs:
+      input_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
+      output_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
       tile_shape: [ 2048, 2048 ]
       patch_input_shape: [ 252, 252 ]
+      patch_output_shape: [ 252, 252 ]
       stride_shape: [ 150, 150 ]
+      save_resolution: { 'units': 'mpp', 'resolution': 0.5 }
 
 mapde-conic:
   url: https://tiatoolbox.dcs.warwick.ac.uk/models/detection/mapde-conic.pth
   architecture:
     class: mapde.MapDe
     kwargs:
-      input_resolutions:
-      - { "units": "mpp", "resolution": 0.25 }
       num_input_channels: 3
       min_distance: 3
       threshold_abs: 205
       num_classes: 1
+  ioconfig:
+    class: semantic_segmentor.IOSegmentorConfig
+    kwargs:
+      input_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
+      output_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
       tile_shape: [ 2048, 2048 ]
       patch_input_shape: [ 252, 252 ]
+      patch_output_shape: [ 252, 252 ]
       stride_shape: [ 150, 150 ]
+      save_resolution: { 'units': 'mpp', 'resolution': 0.5 }
 
 sccnn-crchisto:
   url: https://tiatoolbox.dcs.warwick.ac.uk/models/detection/sccnn-crchisto.pth
   architecture:
     class: sccnn.SCCNN
     kwargs:
-      input_resolutions:
-      - { "units": "mpp", "resolution": 0.25 }
       num_input_channels: 3
-      out_height: 13
-      out_width: 13
       radius: 12
       min_distance: 6
       threshold_abs: 0.20
+      patch_output_shape: [ 13, 13 ]
+  ioconfig:
+    class: semantic_segmentor.IOSegmentorConfig
+    kwargs:
+      input_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
+      output_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
       tile_shape: [ 2048, 2048 ]
       patch_input_shape: [ 31, 31 ]
       patch_output_shape: [ 13, 13 ]
       stride_shape: [ 8, 8 ]
+      save_resolution: { 'units': 'mpp', 'resolution': 0.5 }
 
 sccnn-conic:
   url: https://tiatoolbox.dcs.warwick.ac.uk/models/detection/sccnn-conic.pth
   architecture:
     class: sccnn.SCCNN
     kwargs:
-      input_resolutions:
-      - { "units": "mpp", "resolution": 0.25 }
       num_input_channels: 3
-      out_height: 13
-      out_width: 13
       radius: 12
       min_distance: 5
       threshold_abs: 0.05
+      patch_output_shape: [ 13, 13 ]
+  ioconfig:
+    class: semantic_segmentor.IOSegmentorConfig
+    kwargs:
+      input_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
+      output_resolutions:
+      - { "units": "mpp", "resolution": 0.5 }
       tile_shape: [ 2048, 2048 ]
       patch_input_shape: [ 31, 31 ]
       patch_output_shape: [ 13, 13 ]
       stride_shape: [ 8, 8 ]
+      save_resolution: { 'units': 'mpp', 'resolution': 0.5 }
 
 nuclick_original-pannuke:
   url: https://tiatoolbox.dcs.warwick.ac.uk/models/seg/nuclick_original-pannuke.pth
diff --git a/tiatoolbox/models/models_abc.py b/tiatoolbox/models/models_abc.py
index 627b5bc34..9b52f393f 100644
--- a/tiatoolbox/models/models_abc.py
+++ b/tiatoolbox/models/models_abc.py
@@ -1,7 +1,7 @@
-"""Defines Abstract Base Class for Models defined in tiatoolbox."""
+"""Define Abstract Base Class for Models defined in tiatoolbox."""
 from abc import ABC, abstractmethod
 
-import torch.nn as nn
+from torch import nn
 
 
 class IOConfigABC(ABC):
@@ -15,24 +15,26 @@ class IOConfigABC(ABC):
     @property
     @abstractmethod
     def input_resolutions(self):
+        """Abstract method to update input_resolution."""
         raise NotImplementedError
 
     @property
     @abstractmethod
     def output_resolutions(self):
+        """Abstract method to update output_resolutions."""
         raise NotImplementedError
 
 
 class ModelABC(ABC, nn.Module):
     """Abstract base class for models used in tiatoolbox."""
 
-    def __init__(self):
+    def __init__(self) -> None:
+        """Initialize Abstract class ModelABC."""
         super().__init__()
         self._postproc = self.postproc
         self._preproc = self.preproc
 
     @abstractmethod
-    # noqa
     # This is generic abc, else pylint will complain
     def forward(self, *args, **kwargs):
         """Torch method, this contains logic for using layers defined in init."""
@@ -89,7 +91,8 @@ def preproc_func(self, func):
 
         """
         if func is not None and not callable(func):
-            raise ValueError(f"{func} is not callable!")
+            msg = f"{func} is not callable!"
+            raise ValueError(msg)
 
         if func is None:
             self._preproc = self.preproc
@@ -119,7 +122,8 @@ def postproc_func(self, func):
 
         """
         if func is not None and not callable(func):
-            raise ValueError(f"{func} is not callable!")
+            msg = f"{func} is not callable!"
+            raise ValueError(msg)
 
         if func is None:
             self._postproc = self.postproc
diff --git a/tiatoolbox/typing.py b/tiatoolbox/typing.py
index 7b1fd1d52..10f523670 100644
--- a/tiatoolbox/typing.py
+++ b/tiatoolbox/typing.py
@@ -1,3 +1,4 @@
+"""Define Variable types for TIAToolbox."""
 from numbers import Number
 from typing import Literal, Tuple, Union
 
diff --git a/tiatoolbox/utils/metrics.py b/tiatoolbox/utils/metrics.py
index d3bf7e38f..41a81d17c 100644
--- a/tiatoolbox/utils/metrics.py
+++ b/tiatoolbox/utils/metrics.py
@@ -69,7 +69,9 @@ def f1_detection(true, pred, radius):
 
 def dice(gt_mask, pred_mask):
-    r"""This function computes `Sørensen–Dice coefficient
+    r"""Compute the Sørensen-Dice coefficient.
+
+    This function computes `Sørensen-Dice coefficient
     `_,
     between the two masks.
 
@@ -88,7 +90,8 @@ def dice(gt_mask, pred_mask):
 
     """
     if gt_mask.shape != pred_mask.shape:
-        raise ValueError(f'{"Shape mismatch between the two masks."}')
+        msg = f"{'Shape mismatch between the two masks.'}"
+        raise ValueError(msg)
 
     gt_mask = gt_mask.astype(np.bool_)
     pred_mask = pred_mask.astype(np.bool_)
diff --git a/tiatoolbox/wsicore/__init__.py b/tiatoolbox/wsicore/__init__.py
index 9f107b935..bffda4c8f 100644
--- a/tiatoolbox/wsicore/__init__.py
+++ b/tiatoolbox/wsicore/__init__.py
@@ -1,4 +1,4 @@
-"""Package to read whole slide images"""
+"""Package to read whole slide images."""
 from tiatoolbox.wsicore import metadata, wsimeta, wsireader
 
 from .wsimeta import WSIMeta
diff --git a/tiatoolbox/wsicore/metadata/__init__.py b/tiatoolbox/wsicore/metadata/__init__.py
index 4f1283382..7430fc0c9 100644
--- a/tiatoolbox/wsicore/metadata/__init__.py
+++ b/tiatoolbox/wsicore/metadata/__init__.py
@@ -1 +1,2 @@
+"""Initialize wsicore.metadata."""
 from tiatoolbox.wsicore.metadata import ngff
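
For context on the pretrained_model.yaml change above: the updated entries move the segmentor I/O settings (input/output resolutions, tile, patch and stride shapes, save resolution) out of the architecture kwargs and into a nested ioconfig block. A minimal sketch of how such an entry could be consumed, assuming PyYAML is available and that IOSegmentorConfig accepts these keys as keyword arguments; the file path and entry name below are illustrative only, not part of this diff:

import yaml

from tiatoolbox.models.engine.semantic_segmentor import IOSegmentorConfig

# Load the pretrained-model registry (path shown is illustrative).
with open("tiatoolbox/data/pretrained_model.yaml") as fh:
    pretrained_info = yaml.safe_load(fh)

# Build the I/O configuration for one of the entries updated above.
entry = pretrained_info["mapde-conic"]
io_kwargs = entry["ioconfig"]["kwargs"]  # resolutions, tile/patch/stride shapes, save_resolution
ioconfig = IOSegmentorConfig(**io_kwargs)  # assumes these keys map directly to constructor kwargs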