Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Apply various ruff groups #39

Draft
wants to merge 18 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ repos:
args: [--fix=lf]
- id: check-case-conflict
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.9.3 # must match requirements-tests.txt
rev: v0.9.6 # must match requirements-tests.txt
hooks:
- id: ruff
name: Run ruff on stubs, tests and scripts
Expand Down
4 changes: 2 additions & 2 deletions lib/ts_utils/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,7 @@ def read_metadata(distribution: str) -> StubMetadata:
f"Invalid upstream_repository for {distribution!r}: "
"URLs for GitHub repositories always have two parts in their paths"
)
assert num_url_path_parts == 2, bad_github_url_msg
assert num_url_path_parts == 2, bad_github_url_msg # noqa: PLR2004 # astral-sh/ruff#10009

obsolete_since: object = data.get("obsolete_since")
assert isinstance(obsolete_since, (str, type(None)))
Expand Down Expand Up @@ -330,7 +330,7 @@ class PackageDependencies(NamedTuple):

@cache
def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]:
return {read_metadata(dir.name).stub_distribution: dir.name for dir in STUBS_PATH.iterdir()}
return {read_metadata(directory.name).stub_distribution: directory.name for directory in STUBS_PATH.iterdir()}


@cache
Expand Down
3 changes: 1 addition & 2 deletions lib/ts_utils/paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,5 +35,4 @@ def test_cases_path(distribution_name: str) -> Path:
def allowlists_path(distribution_name: str) -> Path:
if distribution_name == "stdlib":
return tests_path("stdlib") / "stubtest_allowlists"
else:
return tests_path(distribution_name)
return tests_path(distribution_name)
9 changes: 4 additions & 5 deletions lib/ts_utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from packaging.requirements import Requirement

try:
from termcolor import colored as colored # pyright: ignore[reportAssignmentType]
from termcolor import colored as colored # pyright: ignore[reportAssignmentType] # noqa: PLC0414
except ImportError:

def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: ignore[misc] # noqa: ARG001
Expand Down Expand Up @@ -119,8 +119,8 @@ def parse_stdlib_versions_file() -> SupportedVersionsDict:
result: dict[str, tuple[VersionTuple, VersionTuple]] = {}
with VERSIONS_PATH.open(encoding="UTF-8") as f:
for line in f:
line = strip_comments(line)
if line == "":
line = strip_comments(line) # noqa: PLW2901
if not line:
continue
m = VERSION_LINE_RE.match(line)
assert m, f"invalid VERSIONS line: {line}"
Expand Down Expand Up @@ -193,8 +193,7 @@ def allowlists(distribution_name: str) -> list[str]:

if distribution_name == "stdlib":
return ["common.txt", platform_allowlist, version_allowlist, combined_allowlist, local_version_allowlist]
else:
return ["stubtest_allowlist.txt", platform_allowlist]
return ["stubtest_allowlist.txt", platform_allowlist]


# ====================================================================
Expand Down
90 changes: 78 additions & 12 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ force-exclude = ".*_pb2.pyi"
line-length = 130
# Oldest supported Python version
target-version = "py38"
fix = true
preview = true # Help catch typing-related lint issues early
# fix = true
exclude = [
# virtual environment
".env",
Expand All @@ -37,15 +38,31 @@ exclude = ["**/test_cases/**/*.py"]
# tell ruff not to flag these as e.g. "unused noqa comments"
external = ["F821", "NQA", "Y"]
select = [
# "PTH", # TODO !
# "TD", # TODO !
"A", # flake8-builtins
"ARG", # flake8-unused-arguments
"ASYNC", # flake8-async
"B", # flake8-bugbear
"BLE", # flake8-blind-except
"C4", # flake8-comprehensions
"D", # pydocstyle
"DOC", # pydoclint
"DTZ", # flake8-datetimez
"EXE", # flake8-executable
"FA", # flake8-future-annotations
"FBT", # flake8-boolean-trap
"FLY", # flynt
"I", # isort
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PL", # Pylint
"RSE", # flake8-raise
"RUF", # Ruff-specific and unused-noqa
"S", # flake8-bandit
"SLOT", # flake8-slots
"TRY", # tryceratops
"UP", # pyupgrade
"YTT", # flake8-2020
Expand All @@ -55,11 +72,6 @@ select = [
"W", # pycodestyle Warning
# Only include flake8-annotations rules that are autofixable. Otherwise leave this to mypy+pyright
"ANN2",
# Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations
"TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting.
"TC005", # Found empty type-checking block
# "TC008", # TODO: Enable when out of preview
"TC010", # Invalid string member in `X | Y`-style union type
# Most refurb rules are in preview and can be opinionated,
# consider them individually as they come out of preview (last check: 0.8.4)
"FURB105", # Unnecessary empty string passed to `print`
Expand Down Expand Up @@ -94,11 +106,34 @@ select = [
# "PYI061", # TODO: Enable when out of preview
"PYI062", # Duplicate literal member `{}`
"PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final
# flake8-simplify, excluding rules that can reduce performance or readability due to long line formatting
"SIM101", # Multiple `isinstance` calls for `{name}`, merge into a single call
"SIM103", # Return the condition `{condition}` directly
"SIM107", # Don't use return in `try-except` and `finally`
"SIM109", # Use `{replacement}` instead of multiple equality comparisons
"SIM112", # Use capitalized environment variable `{expected}` instead of `{actual}`
"SIM113", # Use `enumerate()` for index variable `{index}` in `for` loop
"SIM114", # Combine `if` branches using logical `or` operator
"SIM115", # Use a context manager for opening files
"SIM118", # Use key `{operator}` dict instead of key `{operator} dict.keys()`
"SIM2", # flake8-simplify conditional ordering rules
"SIM300", # Yoda condition detected
"SIM401", # Use `{contents}` instead of an if block
"SIM910", # Use `{expected}` instead of `{actual}` (dict-get-with-none-default)
"SIM911", # Use `{expected}` instead of `{actual}` (zip-dict-keys-and-values)
# Don't include TC rules that create a TYPE_CHECKING block or stringifies annotations
"TC004", # Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting.
"TC005", # Found empty type-checking block
# "TC008", # TODO: Enable when out of preview
"TC010", # Invalid string member in `X | Y`-style union type
]
extend-safe-fixes = [
"UP036", # Remove unnecessary `sys.version_info` blocks
]
ignore = [
# TODO
"ASYNC221", # I don't know how to improve subprocess.check_call calls to satisfy this
"RUF036", # None not at the end of the type annotation. # Request for autofix: astral-sh/ruff#15136
###
# Rules that can conflict with the formatter (Black)
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
Expand All @@ -112,18 +147,40 @@ ignore = [
###
# We're not a library, no need to document everything
"D1", # Missing docstring in ...
# We want D211: No blank lines allowed before class docstring
"D203", # 1 blank line required before class docstring
# Doesn't support split "summary line"
"D205", # 1 blank line required between summary line and description
# Used for direct, non-subclass type comparison, for example: `type(val) is str`
# see https://github.com/astral-sh/ruff/issues/6465
"E721", # Do not compare types, use `isinstance()`
# Mostly from scripts and tests, it's ok to have messages passed directly to exceptions
# We want D212: Multi-line docstring summary should start at the first line
"D213", # Multi-line docstring summary should start at the second line
"D401", # First line of docstring should be in imperative mood
# Return/yield type is enough documentation for us
"DOC201", # return is not documented in docstring
"DOC402", # yield is not documented in docstring
# We're not a public library; users are contributors who already read the code directly, so clear error messages are sufficient
"DOC501", # Raised exception missing from docstring
# Prefer explicit, but allow implicit multiline
# (hence lint.flake8-implicit-str-concat.allow-multiline isn't set to false)
"ISC003", # Explicitly concatenated string should be implicitly concatenated
# Python 3.11 introduced "zero cost" exception handling, our tests & scripts run on modern Python versions
"PERF203", # try-except within a loop incurs performance overhead
"PLR09", # Too many ...
# Typeshed tests and scripts are never run in optimized mode
"S101", # Use of assert detected
# We use subprocess a lot in scripts and tests
"S404", # subprocess module is possibly insecure
# Prone to false positives astral-sh/ruff#4045
"S603", # subprocess call: check for execution of untrusted input
# Full paths would make cross-environment compatibility a nightmare
"S607", # Starting a process with a partial executable path
"TRY003", # Avoid specifying long messages outside the exception class
# Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
###
# False-positives, but already checked by type-checkers
###
# Configuring namespace-packages = ["scripts/sync_protobuf"] doesn't work ?
"PLC2701", # Private name import {name} from external module {module}
# Ruff doesn't support multi-file analysis yet: https://github.com/astral-sh/ruff/issues/5295
"RUF013", # PEP 484 prohibits implicit `Optional`
]
Expand All @@ -133,11 +190,20 @@ ignore = [
# A lot of stubs are incomplete on purpose, and that's configured through pyright
# Some ANN204 (special method) are autofixable in stubs, but not all.
"ANN2", # Missing return type annotation for ...
# Rules that are out of the control of stub authors:
"A001", # builtin-variable-shadowing
"A002", # builtin-argument-shadowing
"A004", # builtin-import-shadowing
"F403", # `from . import *` used; unable to detect undefined names
"PIE796", # Enum contains duplicate value
"PLC2701", # Private name import from external module # https://github.com/astral-sh/ruff/issues/15294 and https://github.com/astral-sh/ruff/issues/15295
"S105", # Possible hardcoded password assigned
"S106", # Possible hardcoded password assigned to argument
"S107", # Possible hardcoded password assigned to function default
"S3", # Use of insecure ...
# Most pep8-naming rules don't apply for third-party stubs like typeshed.
# N811 to N814 could apply, but we often use them to disambiguate a name whilst making it look like a more common one
"N8",
# Rules that are out of the control of stub authors:
"F403", # `from . import *` used; unable to detect undefined names
# Stubs can sometimes re-export entire modules.
# Issues with using a star-imported name will be caught by type-checkers.
"F405", # may be undefined, or defined from star imports
Expand Down
2 changes: 1 addition & 1 deletion requirements-tests.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ packaging==24.2
pathspec>=0.11.1
pre-commit
# Required by create_baseline_stubs.py. Must match .pre-commit-config.yaml.
ruff==0.9.3
ruff==0.9.6
stubdefaulter==0.1.0
termcolor>=2.3
tomli==2.2.1
Expand Down
25 changes: 12 additions & 13 deletions scripts/create_baseline_stubs.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import subprocess
import sys
import urllib.parse
from http import HTTPStatus
from importlib.metadata import distribution

import aiohttp
Expand Down Expand Up @@ -45,34 +46,33 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None:

Return (normalized project name, installed version) if successful.
"""
r = subprocess.run(["pip", "freeze"], capture_output=True, text=True, check=True)
return search_pip_freeze_output(project, r.stdout)
return search_pip_freeze_output(project, subprocess.check_output(["pip", "freeze"], text=True))


def run_stubgen(package: str, output: str) -> None:
print(f"Running stubgen: stubgen -o {output} -p {package}")
subprocess.run(["stubgen", "-o", output, "-p", package, "--export-less"], check=True)
subprocess.check_call(["stubgen", "-o", output, "-p", package, "--export-less"])


def run_stubdefaulter(stub_dir: str) -> None:
print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}")
subprocess.run(["stubdefaulter", "--packages", stub_dir])
subprocess.run(["stubdefaulter", "--packages", stub_dir], check=False)


def run_black(stub_dir: str) -> None:
print(f"Running Black: black {stub_dir}")
subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")])
subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")], check=False)


def run_ruff(stub_dir: str) -> None:
print(f"Running Ruff: ruff check {stub_dir} --fix-only")
subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"])
subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"], check=False)


async def get_project_urls_from_pypi(project: str, session: aiohttp.ClientSession) -> dict[str, str]:
pypi_root = f"https://pypi.org/pypi/{urllib.parse.quote(project)}"
async with session.get(f"{pypi_root}/json") as response:
if response.status != 200:
if response.status != HTTPStatus.OK:
return {}
j: dict[str, dict[str, dict[str, str]]]
j = await response.json()
Expand All @@ -90,24 +90,23 @@ async def get_upstream_repo_url(project: str) -> str | None:

# Order the project URLs so that we put the ones
# that are most likely to point to the source code first
urls_to_check: list[str] = []
url_names_probably_pointing_to_source = ("Source", "Repository", "Homepage")
for url_name in url_names_probably_pointing_to_source:
if url := project_urls.get(url_name):
urls_to_check.append(url)
urls_to_check: list[str] = [
url for url in (project_urls.get(url_name) for url_name in url_names_probably_pointing_to_source) if url
]
urls_to_check.extend(
url for url_name, url in project_urls.items() if url_name not in url_names_probably_pointing_to_source
)

for url in urls_to_check:
# Remove `www.`; replace `http://` with `https://`
url = re.sub(r"^(https?://)?(www\.)?", "https://", url)
url = re.sub(r"^(https?://)?(www\.)?", "https://", url) # noqa: PLW2901
netloc = urllib.parse.urlparse(url).netloc
if netloc in {"gitlab.com", "github.com", "bitbucket.org", "foss.heptapod.net"}:
# truncate to https://site.com/user/repo
upstream_repo_url = "/".join(url.split("/")[:5])
async with session.get(upstream_repo_url) as response:
if response.status == 200:
if response.status == HTTPStatus.OK:
return upstream_repo_url
return None

Expand Down
Loading
Loading