Commit

Replace flake8, isort and pydocstyle with ruff (#541)
* feat: replace flake8 with ruff

* feat: remove isort, replace with ruff

* feat: replace pydocstyle with ruff

* fix: adjust pre-commit hook

* fix: makefile

* run make lint with ruff

* fix: linting

* feat: remove black, rearrange ruff commands

* fix: revert line length

* fix: always use docstring as well when checking with ruff

* fix: adjust docstyle for make command

* fix: typos and leftovers

* fix: update noqa: D102 and default rules for docstring lints

* fix: update docs

* fix: explanations above line items
gruberb authored Jun 25, 2024
1 parent a13775c commit b485be4
Showing 98 changed files with 426 additions and 1,069 deletions.
12 changes: 0 additions & 12 deletions .flake8

This file was deleted.

27 changes: 9 additions & 18 deletions .pre-commit-config.yaml
@@ -11,14 +11,16 @@ repos:
rev: 24.4.2
hooks:
- id: black
- repo: "https://github.com/pycqa/flake8"
rev: 7.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.10
hooks:
- id: flake8
- repo: "https://github.com/pycqa/isort"
rev: 5.13.2
hooks:
- id: isort
# Run the linter
- id: ruff
types_or: [python, pyi]
args: [--fix, --exit-non-zero-on-fix, --config=pyproject.toml]
# Run the formatter
- id: ruff-format
args: [--config=pyproject.toml]
- repo: "https://github.com/pycqa/bandit"
rev: 1.7.8
hooks:
@@ -29,17 +31,6 @@ repos:
args:
- "--skip"
- "B101,B104,B311"
- repo: "https://github.com/pycqa/pydocstyle"
rev: 6.3.0
hooks:
- id: pydocstyle
args:
- "--convention"
- pep257
- "--add-select"
- "D212"
- "--add-ignore"
- "D105,D107,D203,D205,D400"
- repo: "https://github.com/pre-commit/mirrors-mypy"
rev: v1.10.0
hooks:
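Both the new ruff hooks above and the make targets below point ruff at pyproject.toml via --config, so the settings that used to live in the deleted .flake8 file and in the removed isort and pydocstyle hooks presumably move into a [tool.ruff] section there. That section is not part of the hunks shown on this page, so the following is only a sketch, assuming the pep257 convention and the D-rule ignores carry over unchanged from the deleted pydocstyle hook:

[tool.ruff.lint]
# "E"/"F" stand in for the former flake8 checks, "I" for isort's import
# ordering, and "D" for pydocstyle's docstring rules.
select = ["E", "F", "I", "D"]
# Carried over from the removed pydocstyle arguments (assumption).
ignore = ["D105", "D107", "D203", "D205", "D400"]

[tool.ruff.lint.pydocstyle]
convention = "pep257"

The repository's actual rule selection may differ; the commit message entry about updating the "default rules for docstring lints" suggests the docstring set was tuned beyond a one-to-one port.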
28 changes: 12 additions & 16 deletions Makefile
@@ -20,37 +20,33 @@ $(INSTALL_STAMP): pyproject.toml poetry.lock
$(POETRY) install
touch $(INSTALL_STAMP)

.PHONY: isort
isort: $(INSTALL_STAMP) ## Run isort
$(POETRY) run isort --check-only $(APP_AND_TEST_DIRS)
.PHONY: ruff-lint
ruff-lint: $(INSTALL_STAMP) ## Run ruff linting
$(POETRY) run ruff check $(APP_AND_TEST_DIRS)

.PHONY: black
black: $(INSTALL_STAMP) ## Run black
$(POETRY) run black --quiet --diff --check merino $(APP_AND_TEST_DIRS)
.PHONY: ruff-fmt
ruff-fmt: $(INSTALL_STAMP) ## Run ruff format checker
$(POETRY) run ruff format --check $(APP_AND_TEST_DIRS)

.PHONY: flake8
flake8: $(INSTALL_STAMP) ## Run flake8
$(POETRY) run flake8 $(APP_AND_TEST_DIRS)
.PHONY: ruff-format
ruff-format: $(INSTALL_STAMP) ## Run ruff format
$(POETRY) run ruff format $(APP_AND_TEST_DIRS)

.PHONY: bandit
bandit: $(INSTALL_STAMP) ## Run bandit
$(POETRY) run bandit --quiet -r $(APP_AND_TEST_DIRS) -c "pyproject.toml"

.PHONY: pydocstyle
pydocstyle: $(INSTALL_STAMP) ## Run pydocstyle
$(POETRY) run pydocstyle $(APP_AND_TEST_DIRS) --config="pyproject.toml"

.PHONY: mypy
mypy: $(INSTALL_STAMP) ## Run mypy
$(POETRY) run mypy $(APP_AND_TEST_DIRS) --config-file="pyproject.toml"

.PHONY: lint
lint: $(INSTALL_STAMP) isort black flake8 bandit pydocstyle mypy ## Run various linters
lint: $(INSTALL_STAMP) ruff-lint ruff-fmt bandit mypy ## Run various linters

.PHONY: format
format: $(INSTALL_STAMP) ## Sort imports and reformat code
$(POETRY) run isort $(APP_AND_TEST_DIRS)
$(POETRY) run black $(APP_AND_TEST_DIRS)
$(POETRY) run ruff check --fix $(APP_AND_TEST_DIRS)
$(POETRY) run ruff format $(APP_AND_TEST_DIRS)

.PHONY: dev
dev: $(INSTALL_STAMP) ## Run merino locally and reload automatically
16 changes: 8 additions & 8 deletions docs/dev/index.md
@@ -32,21 +32,21 @@ $ make help
# Just like `poetry install`
$ make install

# Run isort
$ make isort
# Run linter
$ make ruff-lint

# Run format checker
$ make ruff-fmt

# Run formatter
$ make ruff-format

# Run black
$ make black

# Run flake8
$ make flake8

# Run bandit
$ make bandit

# Run pydocstyle
$ make pydocstyle

# Run mypy
$ make mypy

4 changes: 2 additions & 2 deletions merino/cache/none.py
@@ -10,12 +10,12 @@ class NoCacheAdapter: # pragma: no cover
async def get(self, key: str) -> bytes | None: # noqa: D102
return None

async def set(
async def set( # noqa: D102
self,
key: str,
value: bytes | str,
ttl: timedelta | None = None,
) -> None: # noqa: D102
) -> None:
pass

async def close(self) -> None: # noqa: D102
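The # noqa: D102 relocation above is not cosmetic: ruff only honors a noqa comment on the line where it reports the diagnostic, and D102 (missing docstring in a public method) is reported on the def line, so a suppression left on the closing ) -> None: line of the signature would no longer apply. The same fix appears in merino/cron.py further down. Had the suppressions kept multiplying, the equivalent could also be expressed once in the shared configuration — a hypothetical alternative for illustration, not what this commit does:

[tool.ruff.lint.per-file-ignores]
# Hypothetical: silence the missing-docstring rule for whole files instead of
# repeating "# noqa: D102" on every method of the no-op adapter.
"merino/cache/none.py" = ["D102"]
"merino/cron.py" = ["D102"]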
16 changes: 4 additions & 12 deletions merino/cache/redis.py
@@ -28,9 +28,7 @@ async def get(self, key: str) -> bytes | None:
try:
return await self.redis.get(key)
except RedisError as exc:
raise CacheAdapterError(
f"Failed to get `{repr(key)}` with error: `{exc}`"
) from exc
raise CacheAdapterError(f"Failed to get `{repr(key)}` with error: `{exc}`") from exc

async def set(
self,
@@ -45,13 +43,9 @@ async def set(
- `CacheAdapterError` if Redis returns an error.
"""
try:
await self.redis.set(
key, value, ex=ttl.days * 86400 + ttl.seconds if ttl else None
)
await self.redis.set(key, value, ex=ttl.days * 86400 + ttl.seconds if ttl else None)
except RedisError as exc:
raise CacheAdapterError(
f"Failed to set `{repr(key)}` with error: `{exc}`"
) from exc
raise CacheAdapterError(f"Failed to set `{repr(key)}` with error: `{exc}`") from exc

async def close(self) -> None:
"""Close the Redis connection."""
@@ -85,8 +79,6 @@ async def run_script(self, sid: str, keys: list[str], args: list[str]) -> Any:
try:
res = await self.scripts[sid](keys, args)
except RedisError as exc:
raise CacheAdapterError(
f"Failed to run script {id} with error: `{exc}`"
) from exc
raise CacheAdapterError(f"Failed to run script {id} with error: `{exc}`") from exc

return res
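The joins in this file, and in the files that follow, are what ruff format produces once a previously wrapped call fits within the configured line length; several of the new single-line statements run past Black's default of 88 characters, and the commit log also carries a "revert line length" entry. In ruff that limit is a single top-level setting — shown here as a sketch, with the exact number being an assumption since the pyproject.toml hunk is not on this page:

[tool.ruff]
# Assumed value: anything high enough (around 99) to let the joined calls
# above fit on one line; the project's real setting lives in pyproject.toml.
line-length = 99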
16 changes: 4 additions & 12 deletions merino/config.py
@@ -15,9 +15,7 @@
Validator("metrics.dev_logger", is_type_of=bool),
Validator("metrics.host", is_type_of=str),
Validator("metrics.port", gte=0, is_type_of=int),
Validator(
"accuweather.url_location_key_placeholder", is_type_of=str, must_exist=True
),
Validator("accuweather.url_location_key_placeholder", is_type_of=str, must_exist=True),
Validator(
"accuweather.url_param_partner_code",
is_type_of=str,
@@ -36,22 +34,16 @@
),
# Set the upper bound of query timeout to 5 seconds as we don't want Merino
# to wait for responses from Accuweather indefinitely.
Validator(
"providers.accuweather.query_timeout_sec", is_type_of=float, gte=0, lte=5.0
),
Validator("providers.accuweather.query_timeout_sec", is_type_of=float, gte=0, lte=5.0),
Validator("providers.accuweather.type", is_type_of=str, must_exist=True),
Validator("providers.accuweather.cache", is_in=["redis", "none"]),
Validator(
"providers.accuweather.cache_ttls.current_condition_ttl_sec",
is_type_of=int,
gte=0,
),
Validator(
"providers.accuweather.cache_ttls.forecast_ttl_sec", is_type_of=int, gte=0
),
Validator(
"providers.accuweather.cached_ttls.location_key_ttl_sec", is_type_of=int, gte=0
),
Validator("providers.accuweather.cache_ttls.forecast_ttl_sec", is_type_of=int, gte=0),
Validator("providers.accuweather.cached_ttls.location_key_ttl_sec", is_type_of=int, gte=0),
Validator("providers.adm.backend", is_in=["remote-settings", "test"]),
Validator("providers.adm.cron_interval_sec", gt=0),
Validator("providers.adm.enabled_by_default", is_type_of=bool),
13 changes: 4 additions & 9 deletions merino/cron.py
@@ -15,16 +13,14 @@
class Condition(Protocol):
"""Check whether the cron task should run."""

def __call__(self) -> bool: # pragma: no cover
# noqa: D102
def __call__(self) -> bool: # pragma: no cover # noqa: D102
...


class Task(Protocol):
"""Task for the cron job."""

async def __call__(self) -> None: # pragma: no cover
# noqa: D102
async def __call__(self) -> None: # pragma: no cover # noqa: D102
...


@@ -36,16 +34,13 @@ class Job:
condition: Condition
task: Task

def __init__(
self, *, name: str, interval: float, condition: Condition, task: Task
) -> None:
def __init__(self, *, name: str, interval: float, condition: Condition, task: Task) -> None:
self.name = name
self.interval = interval
self.condition = condition
self.task = task

async def __call__(self) -> None:
# noqa: D102
async def __call__(self) -> None: # noqa: D102
last_tick: float = time.time()

while True:
@@ -17,9 +17,7 @@ async def fetch(self) -> list[CorpusItem]:
return [
CorpusItem(
scheduledCorpusItemId="50f86ebe-3f25-41d8-bd84-53ead7bdc76e",
url=HttpUrl(
"https://www.themarginalian.org/2024/05/28/passenger-pigeon/"
),
url=HttpUrl("https://www.themarginalian.org/2024/05/28/passenger-pigeon/"),
title="Thunder, Bells, and Silence: the Eclipse That Went Extinct",
excerpt="What was it like for Martha, the endling of her species, to die alone at "
"the Cincinnati Zoo that late-summer day in 1914, all the other "
16 changes: 4 additions & 12 deletions merino/featureflags.py
@@ -49,9 +49,7 @@ class FeatureFlag(BaseModel):
# Load the dynaconf configuration and parse it into Pydantic models once and
# then use it as the default value for `flags` in `FeatureFlags`.
# See https://docs.pydantic.dev/latest/usage/type_adapter/#parsing-data-into-a-specified-type
_DYNACONF_FLAGS = TypeAdapter(FeatureFlagsConfigurations).validate_python(
_dynaconf_loader()
)
_DYNACONF_FLAGS = TypeAdapter(FeatureFlagsConfigurations).validate_python(_dynaconf_loader())


@decorator
@@ -74,9 +72,7 @@ def record_decision(
decision = wrapped_method(flag_name, *remaining_args, **kwargs)

instance.decisions[flag_name] = decision
logger.info(
f"Record feature flag decision for {flag_name}", extra={flag_name: decision}
)
logger.info(f"Record feature flag decision for {flag_name}", extra={flag_name: decision})

return decision

@@ -161,9 +157,7 @@ def is_enabled(self, flag_name: str, bucket_for: str | bytes | None = None) -> bool:
logger.exception(err)
return False

def _get_bucketing_id(
self, scheme: BucketingScheme, bucket_for: str | bytes | None
) -> bytes:
def _get_bucketing_id(self, scheme: BucketingScheme, bucket_for: str | bytes | None) -> bytes:
"""Return a bytearray that can then be used to check against the
enabled percent for inclusion into the feature.
@@ -197,9 +191,7 @@ def _get_bucketing_id(
case BucketingScheme.session:
session_id = session_id_context.get()
if session_id is None:
raise ValueError(
"Expected a session_id but none exist in this context"
)
raise ValueError("Expected a session_id but none exist in this context")
return self._get_digest(session_id)

@staticmethod
4 changes: 1 addition & 3 deletions merino/jobs/amo_rs_uploader/__init__.py
@@ -146,8 +146,6 @@ async def _upload(

# Add keywords. Sort them to make it easier to compare keywords from
# one dataset to the next.
suggestion["keywords"] = sorted(
[kw.lower() for kw in ADDON_KEYWORDS[addon]]
)
suggestion["keywords"] = sorted([kw.lower() for kw in ADDON_KEYWORDS[addon]])

uploader.add_suggestion(suggestion)
8 changes: 2 additions & 6 deletions merino/jobs/csv_rs_uploader/yelp.py
@@ -65,15 +65,11 @@ def csv_to_suggestions(cls, csv_reader) -> list[BaseSuggestion]:

location_modifier = row[FIELD_LOCATION_MODIFIERS]
if location_modifier:
location_signs.append(
LocationSign(keyword=location_modifier, needLocation=False)
)
location_signs.append(LocationSign(keyword=location_modifier, needLocation=False))

location_sign = row[FIELD_LOCATION_SIGNS]
if location_sign:
location_signs.append(
LocationSign(keyword=location_sign, needLocation=True)
)
location_signs.append(LocationSign(keyword=location_sign, needLocation=True))

yelp_modifier = row[FIELD_YELP_MODIFIERS]
if yelp_modifier:
12 changes: 3 additions & 9 deletions merino/jobs/navigational_suggestions/__init__.py
@@ -119,9 +119,7 @@ def prepare_domain_metadata(
logger.info("domain data download complete")

# extract domain metadata of top domains
domain_metadata_extractor = DomainMetadataExtractor(
blocked_domains=TOP_PICKS_BLOCKLIST
)
domain_metadata_extractor = DomainMetadataExtractor(blocked_domains=TOP_PICKS_BLOCKLIST)
domain_metadata: list[dict[str, Optional[str]]] = (
domain_metadata_extractor.get_domain_metadata(domain_data, min_favicon_width)
)
@@ -152,9 +150,7 @@ def prepare_domain_metadata(
if old_top_picks is None:
old_top_picks = {}

domain_diff = DomainDiff(
latest_domain_data=top_picks, old_domain_data=old_top_picks
)
domain_diff = DomainDiff(latest_domain_data=top_picks, old_domain_data=old_top_picks)
(
unchanged,
added_domains,
@@ -165,9 +161,7 @@
)

# Upload new domain file to replace old now that data is acquired for compare.
top_pick_blob = domain_metadata_uploader.upload_top_picks(
json.dumps(top_picks, indent=4)
)
top_pick_blob = domain_metadata_uploader.upload_top_picks(json.dumps(top_picks, indent=4))
diff: dict = domain_diff.create_diff(
file_name=top_pick_blob.name,
unchanged=unchanged,