Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

🚨 Fix Linter Error in tools, utils and pre-commit #649

Merged
merged 6 commits into from
Jul 22, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 17 additions & 9 deletions benchmarks/annotation_store_alloc.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@
```

"""
from __future__ import annotations

import argparse
import copy
Expand All @@ -98,15 +99,14 @@
import subprocess
import sys
import warnings
from numbers import Number
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Generator, Tuple
from typing import TYPE_CHECKING, Generator

sys.path.append("../")

try:
import memray # noqa: E402
import memray
except ImportError:

class memray: # noqa: N801 No CapWords convention
Expand All @@ -122,6 +122,7 @@ class Tracker:
"""Dummy Tracker context manager."""

def __init__(self, *args, **kwargs):
"""Initialize :class:`Tracker`."""
warnings.warn("Memray not installed, skipping tracking.", stacklevel=2)

def __enter__(self):
Expand All @@ -144,13 +145,16 @@ def __exit__(self, *args):
SQLiteStore,
)

if TYPE_CHECKING: # pragma: no cover
from numbers import Number


def cell_polygon(
xy: Tuple[Number, Number],
xy: tuple[Number, Number],
n_points: int = 20,
radius: Number = 8,
noise: Number = 0.01,
eccentricity: Tuple[Number, Number] = (1, 3),
eccentricity: tuple[Number, Number] = (1, 3),
repeat_first: bool = True,
direction: str = "CCW",
seed: int = 0,
Expand Down Expand Up @@ -194,8 +198,9 @@ def cell_polygon(
boundary_coords = np.stack([x, y], axis=1).astype(int).tolist()

# Copy first coordinate to the end if required
boundary_coords_0 = [boundary_coords[0]]
if repeat_first:
boundary_coords = boundary_coords + [boundary_coords[0]]
boundary_coords = boundary_coords + boundary_coords_0

# Swap direction
if direction.strip().lower() == "cw":
Expand All @@ -218,7 +223,8 @@ def cell_polygon(


def cell_grid(
size: Tuple[int, int] = (10, 10), spacing: Number = 25
size: tuple[int, int] = (10, 10),
spacing: Number = 25,
) -> Generator[Polygon, None, None]:
"""Generate a grid of cell boundaries."""
return (
Expand All @@ -236,7 +242,7 @@ def cell_grid(
def main(
store: str,
in_memory: bool,
size: Tuple[int, int],
size: tuple[int, int],
) -> None:
"""Run the benchmark.

Expand All @@ -254,7 +260,9 @@ def main(
tracker_filepath.unlink()

with NamedTemporaryFile(mode="w+") as temp_file, memray.Tracker(
tracker_filepath, native_traces=True, follow_fork=True
tracker_filepath,
native_traces=True,
follow_fork=True,
):
io = ":memory:" if in_memory else temp_file # Backing (memory/disk)
print(f"Storing {size[0] * size[1]} cells")
Expand Down
37 changes: 21 additions & 16 deletions pre-commit/missing_imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,15 @@
status.

"""
from __future__ import annotations

import argparse
import ast
import importlib
import os
import sys
import tokenize
from pathlib import Path
from typing import Dict, List, Tuple, Union

from requirements_consistency import parse_requirements

Expand Down Expand Up @@ -42,7 +43,7 @@
)


def find_source_files(base_dir: Path) -> List[Path]:
def find_source_files(base_dir: Path) -> list[Path]:
"""Recursively find all source files in the given directory.

Args:
Expand All @@ -63,7 +64,7 @@ def find_source_files(base_dir: Path) -> List[Path]:
return source_files


def find_imports(py_source_path: Path) -> List[str]:
def find_imports(py_source_path: Path) -> list[str]:
"""Find all imports in the given Python source file.

Args:
Expand All @@ -76,8 +77,8 @@ def find_imports(py_source_path: Path) -> List[str]:
the file.

"""
with open( # This file could be any python file anywhere, skipcq
py_source_path, "r"
with Path.open( # This file could be any python file anywhere, skipcq
py_source_path,
) as fh:
source = fh.read()
tree = ast.parse(source)
Expand Down Expand Up @@ -110,7 +111,7 @@ def std_spec(fullname: str) -> str:
return "site-packages" not in origin.parts and "dist-packages" not in origin.parts


def stems(node: Union[ast.Import, ast.ImportFrom]) -> List[Tuple[str, str]]:
def stems(node: ast.Import | ast.ImportFrom) -> list[tuple[str, str]]:
"""Return the stem of each alias in the given import node.

Args:
Expand All @@ -126,15 +127,16 @@ def stems(node: Union[ast.Import, ast.ImportFrom]) -> List[Tuple[str, str]]:
return [(alias.name, alias.name.split(".")[0]) for alias in node.names]
if isinstance(node, ast.ImportFrom):
return [(node.module, node.module.split(".")[0])]
msg = f"Unexpected node type: {type(node)}. Should be ast.Import or ast.ImportFrom."
raise TypeError(
f"Unexpected node type: {type(node)}. Should be ast.Import or ast.ImportFrom."
msg,
)


def main():
"""Main entry point."""
parser = argparse.ArgumentParser(
description="Static analysis of requirements files and import statements."
description="Static analysis of requirements files and import statements.",
)
parser.add_argument(
"files",
Expand All @@ -160,16 +162,18 @@ def main():


def find_bad_imports(
root: Path, source_files: List[Path], requirements_path: Path
) -> List[Tuple[Union[ast.Import, ast.ImportFrom], ast.alias]]:
root: Path,
source_files: list[Path],
requirements_path: Path,
) -> list[tuple[ast.Import | ast.ImportFrom, ast.alias]]:
"""Find bad imports in the given requirements file.

Args:
root (pathlib.Path):
root (Path):
Root directory of the project.
source_root (pathlib.Path):
source_files (list(Path)):
List of source files to check for bad imports.
requirements_path (pathlib.Path):
requirements_path (Path):
Path to the requirements file.

Returns:
Expand All @@ -187,7 +191,7 @@ def find_bad_imports(
for path in source_files:
file_import_nodes = find_imports(path)
# Mapping of import alias names and stems to nodes
stem_to_node_alias: Dict[Tuple[ast.alias, str], ast.Import] = {
stem_to_node_alias: dict[tuple[ast.alias, str], ast.Import] = {
stem: (node, alias)
for node in file_import_nodes
for alias, stem in stems(node)
Expand All @@ -208,7 +212,7 @@ def find_bad_imports(
print(
f"{path.relative_to(root)}:{node.lineno}:"
f" Import not in {requirements_path.name}:"
f" {stem}" + (f" ({alias})" if alias != stem else "")
f" {stem}" + (f" ({alias})" if alias != stem else ""),
)
return result

Expand All @@ -227,7 +231,8 @@ def find_comments(path, line_num: int):
List of comments on the line.

"""
with open(path, "rb") as fh: # This file could be any python file anywhere, skipcq
with Path.open(path, "rb") as fh:
# This file could be any python file anywhere.
tokens = tokenize.tokenize(fh.readline)
return [
t.string
Expand Down
11 changes: 6 additions & 5 deletions pre-commit/notebook_check_ast.py
Original file line number Diff line number Diff line change
@@ -1,29 +1,30 @@
"""Simple check to ensure each code cell in a notebook is valid Python."""
from __future__ import annotations

import argparse
import ast
import json
import sys
from pathlib import Path
from typing import List


def main(files: List[Path]) -> bool:
def main(files: list[Path]) -> bool:
"""Check each file in the list of files for valid Python."""
passed = True
for path in files:
with open(path, encoding="utf-8") as fh:
with Path.open(path, encoding="utf-8") as fh:
notebook = json.load(fh)
for n, cell in enumerate(notebook["cells"]):
if cell["cell_type"] != "code":
continue
source = "".join([x for x in cell["source"] if x[0] not in r"#%!"])
try:
ast.parse(source)
except SyntaxError as e:
except SyntaxError as e: # noqa: PERF203
passed = False
print(f"{path.name}: {e.msg} (cell {n}, line {e.lineno})")
break
return passed # noqa: R504
return passed


if __name__ == "__main__":
Expand Down
Loading
Loading