diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 3f85e7b01..000000000
--- a/.flake8
+++ /dev/null
@@ -1,12 +0,0 @@
-[flake8]
-exclude =
-    .git,
-    __pycache__,
-extend-ignore =
-    # E203 whitespace before ':'
-    # https://github.com/psf/black/issues/315
-    E203
-per-file-ignores =
-    # imported but not used
-    __init__.py:F401
-max-line-length = 99
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0958dfc57..530458419 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,14 +11,16 @@ repos:
     rev: 24.4.2
     hooks:
       - id: black
-  - repo: "https://github.com/pycqa/flake8"
-    rev: 7.0.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.10
     hooks:
-      - id: flake8
-  - repo: "https://github.com/pycqa/isort"
-    rev: 5.13.2
-    hooks:
-      - id: isort
+      # Run the linter
+      - id: ruff
+        types_or: [python, pyi]
+        args: [--fix, --exit-non-zero-on-fix, --config=pyproject.toml]
+      # Run the formatter
+      - id: ruff-format
+        args: [--config=pyproject.toml]
   - repo: "https://github.com/pycqa/bandit"
     rev: 1.7.8
     hooks:
@@ -29,17 +31,6 @@ repos:
         args:
           - "--skip"
           - "B101,B104,B311"
-  - repo: "https://github.com/pycqa/pydocstyle"
-    rev: 6.3.0
-    hooks:
-      - id: pydocstyle
-        args:
-          - "--convention"
-          - pep257
-          - "--add-select"
-          - "D212"
-          - "--add-ignore"
-          - "D105,D107,D203,D205,D400"
   - repo: "https://github.com/pre-commit/mirrors-mypy"
     rev: v1.10.0
     hooks:
diff --git a/Makefile b/Makefile
index 9ab007ec1..ced25585e 100644
--- a/Makefile
+++ b/Makefile
@@ -20,37 +20,33 @@ $(INSTALL_STAMP): pyproject.toml poetry.lock
 	$(POETRY) install
 	touch $(INSTALL_STAMP)
 
-.PHONY: isort
-isort: $(INSTALL_STAMP)  ## Run isort
-	$(POETRY) run isort --check-only $(APP_AND_TEST_DIRS)
+.PHONY: ruff-lint
+ruff-lint: $(INSTALL_STAMP)  ## Run ruff linting
+	$(POETRY) run ruff check $(APP_AND_TEST_DIRS)
 
-.PHONY: black
-black: $(INSTALL_STAMP)  ## Run black
-	$(POETRY) run black --quiet --diff --check merino $(APP_AND_TEST_DIRS)
+.PHONY: ruff-fmt
+ruff-fmt: $(INSTALL_STAMP)  ## Run ruff format checker
+	$(POETRY) run ruff format --check $(APP_AND_TEST_DIRS)
 
-.PHONY: flake8
-flake8: $(INSTALL_STAMP)  ## Run flake8
-	$(POETRY) run flake8 $(APP_AND_TEST_DIRS)
+.PHONY: ruff-format
+ruff-format: $(INSTALL_STAMP)  ## Run ruff format
+	$(POETRY) run ruff format $(APP_AND_TEST_DIRS)
 
 .PHONY: bandit
 bandit: $(INSTALL_STAMP)  ## Run bandit
 	$(POETRY) run bandit --quiet -r $(APP_AND_TEST_DIRS) -c "pyproject.toml"
 
-.PHONY: pydocstyle
-pydocstyle: $(INSTALL_STAMP)  ## Run pydocstyle
-	$(POETRY) run pydocstyle $(APP_AND_TEST_DIRS) --config="pyproject.toml"
-
 .PHONY: mypy
 mypy: $(INSTALL_STAMP)  ## Run mypy
 	$(POETRY) run mypy $(APP_AND_TEST_DIRS) --config-file="pyproject.toml"
 
 .PHONY: lint
-lint: $(INSTALL_STAMP) isort black flake8 bandit pydocstyle mypy  ## Run various linters
+lint: $(INSTALL_STAMP) ruff-lint ruff-fmt bandit mypy  ## Run various linters
 
 .PHONY: format
 format: $(INSTALL_STAMP)  ## Sort imports and reformat code
-	$(POETRY) run isort $(APP_AND_TEST_DIRS)
-	$(POETRY) run black $(APP_AND_TEST_DIRS)
+	$(POETRY) run ruff check --fix $(APP_AND_TEST_DIRS)
+	$(POETRY) run ruff format $(APP_AND_TEST_DIRS)
 
 .PHONY: dev
 dev: $(INSTALL_STAMP)  ## Run merino locally and reload automatically
diff --git a/docs/dev/index.md b/docs/dev/index.md
index 3207940a8..2c61ca403 100644
--- a/docs/dev/index.md
+++ b/docs/dev/index.md
@@ -32,21 +32,21 @@ $ make help
 # Just like `poetry install`
 $ make install
 
-# Run isort
-$ make isort
+# Run linter
+$ make ruff-lint
+
+# Run format checker
+$ make ruff-fmt
+
+# Run formatter
+$ make ruff-format
 
 # Run black
 $ make black
 
-# Run flake8
-$ make flake8
-
 # Run bandit
 $ make bandit
 
-# Run pydocstyle
-$ make pydocstyle
-
 # Run mypy
 $ make mypy
diff --git a/merino/cache/none.py b/merino/cache/none.py
index 5885e1e2c..91c4c2e1a 100644
--- a/merino/cache/none.py
+++ b/merino/cache/none.py
@@ -10,12 +10,12 @@ class NoCacheAdapter:  # pragma: no cover
     async def get(self, key: str) -> bytes | None:  # noqa: D102
         return None
 
-    async def set(
+    async def set(  # noqa: D102
         self,
         key: str,
         value: bytes | str,
         ttl: timedelta | None = None,
-    ) -> None:  # noqa: D102
+    ) -> None:
         pass
 
     async def close(self) -> None:  # noqa: D102
diff --git a/merino/cache/redis.py b/merino/cache/redis.py
index 3ae116ea1..4cbbc9386 100644
--- a/merino/cache/redis.py
+++ b/merino/cache/redis.py
@@ -28,9 +28,7 @@ async def get(self, key: str) -> bytes | None:
         try:
             return await self.redis.get(key)
         except RedisError as exc:
-            raise CacheAdapterError(
-                f"Failed to get `{repr(key)}` with error: `{exc}`"
-            ) from exc
+            raise CacheAdapterError(f"Failed to get `{repr(key)}` with error: `{exc}`") from exc
 
     async def set(
         self,
@@ -45,13 +43,9 @@ async def set(
         - `CacheAdapterError` if Redis returns an error.
         """
         try:
-            await self.redis.set(
-                key, value, ex=ttl.days * 86400 + ttl.seconds if ttl else None
-            )
+            await self.redis.set(key, value, ex=ttl.days * 86400 + ttl.seconds if ttl else None)
         except RedisError as exc:
-            raise CacheAdapterError(
-                f"Failed to set `{repr(key)}` with error: `{exc}`"
-            ) from exc
+            raise CacheAdapterError(f"Failed to set `{repr(key)}` with error: `{exc}`") from exc
 
     async def close(self) -> None:
         """Close the Redis connection."""
@@ -85,8 +79,6 @@ async def run_script(self, sid: str, keys: list[str], args: list[str]) -> Any:
         try:
             res = await self.scripts[sid](keys, args)
         except RedisError as exc:
-            raise CacheAdapterError(
-                f"Failed to run script {id} with error: `{exc}`"
-            ) from exc
+            raise CacheAdapterError(f"Failed to run script {id} with error: `{exc}`") from exc
 
         return res
diff --git a/merino/config.py b/merino/config.py
index fc59a903d..6f02ae64b 100644
--- a/merino/config.py
+++ b/merino/config.py
@@ -15,9 +15,7 @@
     Validator("metrics.dev_logger", is_type_of=bool),
     Validator("metrics.host", is_type_of=str),
     Validator("metrics.port", gte=0, is_type_of=int),
-    Validator(
-        "accuweather.url_location_key_placeholder", is_type_of=str, must_exist=True
-    ),
+    Validator("accuweather.url_location_key_placeholder", is_type_of=str, must_exist=True),
     Validator(
         "accuweather.url_param_partner_code",
         is_type_of=str,
@@ -36,9 +34,7 @@
     ),
     # Set the upper bound of query timeout to 5 seconds as we don't want Merino
     # to wait for responses from Accuweather indefinitely.
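As an aside on the `ex=ttl.days * 86400 + ttl.seconds` expression in `RedisAdapter.set` above, here is a standalone sketch of that arithmetic, using only the standard library (names are illustrative, not Merino code): the `timedelta` is reduced to whole seconds for Redis's `ex` argument, and sub-second precision is dropped.

```python
from datetime import timedelta

# Mirrors the `ex=ttl.days * 86400 + ttl.seconds` conversion in the diff above.
ttl = timedelta(days=1, hours=2, minutes=3)
ex = ttl.days * 86400 + ttl.seconds
assert ex == 93780  # 86400 + 2 * 3600 + 3 * 60
```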
- Validator( - "providers.accuweather.query_timeout_sec", is_type_of=float, gte=0, lte=5.0 - ), + Validator("providers.accuweather.query_timeout_sec", is_type_of=float, gte=0, lte=5.0), Validator("providers.accuweather.type", is_type_of=str, must_exist=True), Validator("providers.accuweather.cache", is_in=["redis", "none"]), Validator( @@ -46,12 +42,8 @@ is_type_of=int, gte=0, ), - Validator( - "providers.accuweather.cache_ttls.forecast_ttl_sec", is_type_of=int, gte=0 - ), - Validator( - "providers.accuweather.cached_ttls.location_key_ttl_sec", is_type_of=int, gte=0 - ), + Validator("providers.accuweather.cache_ttls.forecast_ttl_sec", is_type_of=int, gte=0), + Validator("providers.accuweather.cached_ttls.location_key_ttl_sec", is_type_of=int, gte=0), Validator("providers.adm.backend", is_in=["remote-settings", "test"]), Validator("providers.adm.cron_interval_sec", gt=0), Validator("providers.adm.enabled_by_default", is_type_of=bool), diff --git a/merino/cron.py b/merino/cron.py index bed18d0d4..9829082b4 100644 --- a/merino/cron.py +++ b/merino/cron.py @@ -15,16 +15,14 @@ class Condition(Protocol): """Check whether the cron task should run.""" - def __call__(self) -> bool: # pragma: no cover - # noqa: D102 + def __call__(self) -> bool: # pragma: no cover # noqa: D102 ... class Task(Protocol): """Task for the cron job.""" - async def __call__(self) -> None: # pragma: no cover - # noqa: D102 + async def __call__(self) -> None: # pragma: no cover # noqa: D102 ... @@ -36,16 +34,13 @@ class Job: condition: Condition task: Task - def __init__( - self, *, name: str, interval: float, condition: Condition, task: Task - ) -> None: + def __init__(self, *, name: str, interval: float, condition: Condition, task: Task) -> None: self.name = name self.interval = interval self.condition = condition self.task = task - async def __call__(self) -> None: - # noqa: D102 + async def __call__(self) -> None: # noqa: D102 last_tick: float = time.time() while True: diff --git a/merino/curated_recommendations/corpus_backends/fake_backends.py b/merino/curated_recommendations/corpus_backends/fake_backends.py index c6a055844..8f21d3f8d 100644 --- a/merino/curated_recommendations/corpus_backends/fake_backends.py +++ b/merino/curated_recommendations/corpus_backends/fake_backends.py @@ -17,9 +17,7 @@ async def fetch(self) -> list[CorpusItem]: return [ CorpusItem( scheduledCorpusItemId="50f86ebe-3f25-41d8-bd84-53ead7bdc76e", - url=HttpUrl( - "https://www.themarginalian.org/2024/05/28/passenger-pigeon/" - ), + url=HttpUrl("https://www.themarginalian.org/2024/05/28/passenger-pigeon/"), title="Thunder, Bells, and Silence: the Eclipse That Went Extinct", excerpt="What was it like for Martha, the endling of her species, to die alone at " "the Cincinnati Zoo that late-summer day in 1914, all the other " diff --git a/merino/featureflags.py b/merino/featureflags.py index 8b9396e92..1f7c7d8c2 100644 --- a/merino/featureflags.py +++ b/merino/featureflags.py @@ -49,9 +49,7 @@ class FeatureFlag(BaseModel): # Load the dynaconf configuration and parse it into Pydantic models once and # then use it as the default value for `flags` in `FeatureFlags`. 
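The `Condition` and `Task` protocols in `merino/cron.py` above rely on structural typing: any callable with a matching signature satisfies them, no inheritance needed. A minimal sketch, with `should_run` and `resync` as hypothetical names rather than Merino code:

```python
from typing import Protocol

class Condition(Protocol):
    def __call__(self) -> bool: ...

class Task(Protocol):
    async def __call__(self) -> None: ...

def should_run() -> bool:  # satisfies Condition structurally
    return True

async def resync() -> None:  # satisfies Task structurally
    ...

cond: Condition = should_run  # type-checks without subclassing
task: Task = resync
```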
# See https://docs.pydantic.dev/latest/usage/type_adapter/#parsing-data-into-a-specified-type -_DYNACONF_FLAGS = TypeAdapter(FeatureFlagsConfigurations).validate_python( - _dynaconf_loader() -) +_DYNACONF_FLAGS = TypeAdapter(FeatureFlagsConfigurations).validate_python(_dynaconf_loader()) @decorator @@ -74,9 +72,7 @@ def record_decision( decision = wrapped_method(flag_name, *remaining_args, **kwargs) instance.decisions[flag_name] = decision - logger.info( - f"Record feature flag decision for {flag_name}", extra={flag_name: decision} - ) + logger.info(f"Record feature flag decision for {flag_name}", extra={flag_name: decision}) return decision @@ -161,9 +157,7 @@ def is_enabled(self, flag_name: str, bucket_for: str | bytes | None = None) -> b logger.exception(err) return False - def _get_bucketing_id( - self, scheme: BucketingScheme, bucket_for: str | bytes | None - ) -> bytes: + def _get_bucketing_id(self, scheme: BucketingScheme, bucket_for: str | bytes | None) -> bytes: """Return a bytearray that can then be used to check against the enabled percent for inclusion into the feature. @@ -197,9 +191,7 @@ def _get_bucketing_id( case BucketingScheme.session: session_id = session_id_context.get() if session_id is None: - raise ValueError( - "Expected a session_id but none exist in this context" - ) + raise ValueError("Expected a session_id but none exist in this context") return self._get_digest(session_id) @staticmethod diff --git a/merino/jobs/amo_rs_uploader/__init__.py b/merino/jobs/amo_rs_uploader/__init__.py index 05bf411bd..ec10cfbfb 100644 --- a/merino/jobs/amo_rs_uploader/__init__.py +++ b/merino/jobs/amo_rs_uploader/__init__.py @@ -146,8 +146,6 @@ async def _upload( # Add keywords. Sort them to make it easier to compare keywords from # one dataset to the next. 
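For the `TypeAdapter` call in `merino/featureflags.py` above, a hedged, self-contained sketch of how pydantic v2 parses a raw mapping into typed flag models; the `enabled` field is an assumption for illustration, the real model lives in `featureflags.py`:

```python
from pydantic import BaseModel, TypeAdapter

class FeatureFlag(BaseModel):
    enabled: float = 0.0  # assumed field, stand-in for the real FeatureFlag model

raw = {"new-ranker": {"enabled": 0.5}}  # shaped like the dynaconf-loaded config
flags = TypeAdapter(dict[str, FeatureFlag]).validate_python(raw)
assert flags["new-ranker"].enabled == 0.5
```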
- suggestion["keywords"] = sorted( - [kw.lower() for kw in ADDON_KEYWORDS[addon]] - ) + suggestion["keywords"] = sorted([kw.lower() for kw in ADDON_KEYWORDS[addon]]) uploader.add_suggestion(suggestion) diff --git a/merino/jobs/csv_rs_uploader/yelp.py b/merino/jobs/csv_rs_uploader/yelp.py index 968c25c5e..d286926cf 100644 --- a/merino/jobs/csv_rs_uploader/yelp.py +++ b/merino/jobs/csv_rs_uploader/yelp.py @@ -65,15 +65,11 @@ def csv_to_suggestions(cls, csv_reader) -> list[BaseSuggestion]: location_modifier = row[FIELD_LOCATION_MODIFIERS] if location_modifier: - location_signs.append( - LocationSign(keyword=location_modifier, needLocation=False) - ) + location_signs.append(LocationSign(keyword=location_modifier, needLocation=False)) location_sign = row[FIELD_LOCATION_SIGNS] if location_sign: - location_signs.append( - LocationSign(keyword=location_sign, needLocation=True) - ) + location_signs.append(LocationSign(keyword=location_sign, needLocation=True)) yelp_modifier = row[FIELD_YELP_MODIFIERS] if yelp_modifier: diff --git a/merino/jobs/navigational_suggestions/__init__.py b/merino/jobs/navigational_suggestions/__init__.py index 289f122e0..c9c170607 100644 --- a/merino/jobs/navigational_suggestions/__init__.py +++ b/merino/jobs/navigational_suggestions/__init__.py @@ -119,9 +119,7 @@ def prepare_domain_metadata( logger.info("domain data download complete") # extract domain metadata of top domains - domain_metadata_extractor = DomainMetadataExtractor( - blocked_domains=TOP_PICKS_BLOCKLIST - ) + domain_metadata_extractor = DomainMetadataExtractor(blocked_domains=TOP_PICKS_BLOCKLIST) domain_metadata: list[dict[str, Optional[str]]] = ( domain_metadata_extractor.get_domain_metadata(domain_data, min_favicon_width) ) @@ -152,9 +150,7 @@ def prepare_domain_metadata( if old_top_picks is None: old_top_picks = {} - domain_diff = DomainDiff( - latest_domain_data=top_picks, old_domain_data=old_top_picks - ) + domain_diff = DomainDiff(latest_domain_data=top_picks, old_domain_data=old_top_picks) ( unchanged, added_domains, @@ -165,9 +161,7 @@ def prepare_domain_metadata( ) # Upload new domain file to replace old now that data is acquired for compare. 
- top_pick_blob = domain_metadata_uploader.upload_top_picks( - json.dumps(top_picks, indent=4) - ) + top_pick_blob = domain_metadata_uploader.upload_top_picks(json.dumps(top_picks, indent=4)) diff: dict = domain_diff.create_diff( file_name=top_pick_blob.name, unchanged=unchanged, diff --git a/merino/jobs/navigational_suggestions/domain_metadata_extractor.py b/merino/jobs/navigational_suggestions/domain_metadata_extractor.py index 0afaaa5e5..12b5359af 100644 --- a/merino/jobs/navigational_suggestions/domain_metadata_extractor.py +++ b/merino/jobs/navigational_suggestions/domain_metadata_extractor.py @@ -36,9 +36,7 @@ class Scraper: 'link[rel="SHORTCUT ICON"], link[rel="fluid-icon"], link[rel="mask-icon"],' 'link[rel="apple-touch-startup-image"]' ) - META_SELECTOR: str = ( - "meta[name=apple-touch-icon], meta[name=msapplication-TileImage]" - ) + META_SELECTOR: str = "meta[name=apple-touch-icon], meta[name=msapplication-TileImage]" MANIFEST_SELECTOR: str = 'link[rel="manifest"]' browser: RoboBrowser @@ -46,9 +44,7 @@ class Scraper: def __init__(self) -> None: session: requests.Session = requests.Session() session.headers.update(REQUEST_HEADERS) - self.browser = RoboBrowser( - session=session, parser="html.parser", allow_redirects=True - ) + self.browser = RoboBrowser(session=session, parser="html.parser", allow_redirects=True) def open(self, url: str) -> Optional[str]: """Open the given url for scraping. @@ -96,9 +92,7 @@ def scrape_favicons_from_manifest(self, manifest_url: str) -> list[dict[str, Any if response: result = response.json().get("icons") except Exception as e: - logger.info( - f"Exception: {e} while parsing icons from manifest {manifest_url}" - ) + logger.info(f"Exception: {e} while parsing icons from manifest {manifest_url}") return result def get_default_favicon(self, url: str) -> Optional[str]: @@ -114,9 +108,7 @@ def get_default_favicon(self, url: str) -> Optional[str]: response: Optional[requests.Response] = requests_get(default_favicon_url) return response.url if response else None except Exception as e: - logger.info( - f"Exception: {e} while getting default favicon {default_favicon_url}" - ) + logger.info(f"Exception: {e} while getting default favicon {default_favicon_url}") return None def scrape_title(self) -> Optional[str]: @@ -209,9 +201,7 @@ def _extract_favicons(self, scraped_url: str) -> list[dict[str, Any]]: favicon_url = favicon["href"] if favicon_url.startswith("data:"): continue - if not favicon_url.startswith("http") and not favicon_url.startswith( - "//" - ): + if not favicon_url.startswith("http") and not favicon_url.startswith("//"): favicon["href"] = urljoin(scraped_url, favicon_url) favicons.append(favicon) @@ -219,9 +209,7 @@ def _extract_favicons(self, scraped_url: str) -> list[dict[str, Any]]: favicon_url = favicon["content"] if favicon_url.startswith("data:"): continue - if not favicon_url.startswith("http") and not favicon_url.startswith( - "//" - ): + if not favicon_url.startswith("http") and not favicon_url.startswith("//"): favicon["href"] = urljoin(scraped_url, favicon_url) else: favicon["href"] = favicon_url @@ -234,9 +222,7 @@ def _extract_favicons(self, scraped_url: str) -> list[dict[str, Any]]: self.scraper.scrape_favicons_from_manifest(manifest_absolute_url) ) for scraped_favicon in scraped_favicons: - favicon_url = urljoin( - manifest_absolute_url, scraped_favicon.get("src") - ) + favicon_url = urljoin(manifest_absolute_url, scraped_favicon.get("src")) favicons.append({"href": favicon_url}) # Include the default "favicon.ico" if it exists 
in domain root @@ -309,9 +295,7 @@ def _extract_title(self) -> Optional[str]: title = ( title if title - and not [ - t for t in self.INVALID_TITLES if t.casefold() in title.casefold() - ] + and not [t for t in self.INVALID_TITLES if t.casefold() in title.casefold()] else None ) return title diff --git a/merino/jobs/navigational_suggestions/domain_metadata_uploader.py b/merino/jobs/navigational_suggestions/domain_metadata_uploader.py index e2f4a6c2c..c4d6b4e55 100644 --- a/merino/jobs/navigational_suggestions/domain_metadata_uploader.py +++ b/merino/jobs/navigational_suggestions/domain_metadata_uploader.py @@ -70,9 +70,7 @@ def upload_favicons(self, src_favicons: list[str]) -> list[str]: dst_favicons: list = [] for src_favicon in src_favicons: dst_favicon_public_url: str = "" - favicon_image: Image | None = self.favicon_downloader.download_favicon( - src_favicon - ) + favicon_image: Image | None = self.favicon_downloader.download_favicon(src_favicon) if favicon_image: try: dst_favicon_name = self.destination_favicon_name(favicon_image) diff --git a/merino/jobs/navigational_suggestions/utils.py b/merino/jobs/navigational_suggestions/utils.py index 3e717991f..b44dd2f5b 100644 --- a/merino/jobs/navigational_suggestions/utils.py +++ b/merino/jobs/navigational_suggestions/utils.py @@ -77,7 +77,7 @@ def requests_get(url: str) -> requests.Response | None: def update_top_picks_with_firefox_favicons( - top_picks: dict[str, list[dict[str, str]]] + top_picks: dict[str, list[dict[str, str]]], ) -> None: """Update top picks with high resolution favicons that are internally packaged in firefox for some of the selected domains for which favicon scraping didn't return anything diff --git a/merino/jobs/relevancy_uploader/__init__.py b/merino/jobs/relevancy_uploader/__init__.py index b3edcba07..07869b5ea 100644 --- a/merino/jobs/relevancy_uploader/__init__.py +++ b/merino/jobs/relevancy_uploader/__init__.py @@ -85,9 +85,7 @@ class RelevancyData: """Class to relate to conforming data to remote settings structure.""" @classmethod - def csv_to_relevancy_data( - cls, csv_reader - ) -> defaultdict[Category, list[dict[str, str]]]: + def csv_to_relevancy_data(cls, csv_reader) -> defaultdict[Category, list[dict[str, str]]]: """Read CSV file and extract required data for relevancy in the structure [ { "domain" : } @@ -102,9 +100,7 @@ def csv_to_relevancy_data( category, Category.Inconclusive ) md5_hash = md5(row["domain"].encode(), usedforsecurity=False).digest() - data[category_mapped].append( - {"domain": base64.b64encode(md5_hash).decode()} - ) + data[category_mapped].append({"domain": base64.b64encode(md5_hash).decode()}) return data diff --git a/merino/jobs/utils/chunked_rs_uploader.py b/merino/jobs/utils/chunked_rs_uploader.py index b8019078f..84ccf8aba 100644 --- a/merino/jobs/utils/chunked_rs_uploader.py +++ b/merino/jobs/utils/chunked_rs_uploader.py @@ -149,9 +149,7 @@ def _finish_current_chunk(self) -> None: """ if self.current_chunk.size: self._upload_chunk(self.current_chunk) - self.current_chunk = Chunk( - self.current_chunk.start_index + self.current_chunk.size - ) + self.current_chunk = Chunk(self.current_chunk.start_index + self.current_chunk.size) def _upload_chunk(self, chunk: Chunk) -> None: """Create a record and attachment for a chunk.""" diff --git a/merino/jobs/wikipedia_indexer/__init__.py b/merino/jobs/wikipedia_indexer/__init__.py index acb061f9d..c169cbb67 100644 --- a/merino/jobs/wikipedia_indexer/__init__.py +++ b/merino/jobs/wikipedia_indexer/__init__.py @@ -32,9 +32,7 @@ ) 
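The relevancy uploader above hashes each domain before upload. A runnable reduction of that transformation, copied from the two lines in the diff (the wrapper function name is hypothetical):

```python
import base64
from hashlib import md5

def encode_domain(domain: str) -> str:
    # MD5 flagged as non-security use, then base64-encoded, as in the diff.
    digest = md5(domain.encode(), usedforsecurity=False).digest()
    return base64.b64encode(digest).decode()

print(encode_domain("example.com"))  # a 24-character base64 string
```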
-version_option = typer.Option( - job_settings.index_version, "--version", help="Version of the index" -) +version_option = typer.Option(job_settings.index_version, "--version", help="Version of the index") indexer_cmd = typer.Typer( diff --git a/merino/jobs/wikipedia_indexer/filemanager.py b/merino/jobs/wikipedia_indexer/filemanager.py index e210c0491..5e0396d67 100644 --- a/merino/jobs/wikipedia_indexer/filemanager.py +++ b/merino/jobs/wikipedia_indexer/filemanager.py @@ -48,9 +48,7 @@ class FileManager: client: Client def __init__(self, gcs_bucket: str, gcs_project: str, export_base_url: str) -> None: - self.file_pattern = re.compile( - r"(?:.*/|^)enwiki-(\d+)-cirrussearch-content.json.gz" - ) + self.file_pattern = re.compile(r"(?:.*/|^)enwiki-(\d+)-cirrussearch-content.json.gz") self.client = Client(gcs_project) self.base_url = export_base_url if "/" in gcs_bucket: diff --git a/merino/jobs/wikipedia_indexer/indexer.py b/merino/jobs/wikipedia_indexer/indexer.py index 492ac6745..5ec809636 100644 --- a/merino/jobs/wikipedia_indexer/indexer.py +++ b/merino/jobs/wikipedia_indexer/indexer.py @@ -62,9 +62,7 @@ def index_from_export(self, total_docs: int, elasticsearch_alias: str): if self._create_index(index_name): logger.info("Start indexing", extra={"index": index_name}) - reporter = ProgressReporter( - logger, "Indexing", latest.name, index_name, total_docs - ) + reporter = ProgressReporter(logger, "Indexing", latest.name, index_name, total_docs) indexed = 0 blocked = 0 gcs_stream = self.file_manager.stream_from_gcs(latest) @@ -112,12 +110,8 @@ def _should_filter(self, doc: Dict[str, Any]) -> bool: """ categories: set[str] = set(doc.get("category", [])) title: str = doc.get("title", "") - should_filter_category: bool = not self.category_blocklist.isdisjoint( - categories - ) - should_filter_title: bool = ( - title.lower() in self.title_blocklist if title != "" else True - ) + should_filter_category: bool = not self.category_blocklist.isdisjoint(categories) + should_filter_title: bool = title.lower() in self.title_blocklist if title != "" else True return should_filter_category or should_filter_title def _enqueue(self, index_name: str, tpl: tuple[Mapping[str, Any], ...]): @@ -179,9 +173,7 @@ def _flip_alias_to_latest(self, current_index: str, alias: str): alias = alias.format(version=self.index_version) # fetch previous index using alias so we know what to delete - actions: list[Mapping[str, Any]] = [ - {"add": {"index": current_index, "alias": alias}} - ] + actions: list[Mapping[str, Any]] = [{"add": {"index": current_index, "alias": alias}}] indices_to_close = [] if self.es_client.indices.exists_alias(name=alias): diff --git a/merino/jobs/wikipedia_indexer/suggestion.py b/merino/jobs/wikipedia_indexer/suggestion.py index 8c22c9cb6..ae0b6e5e7 100644 --- a/merino/jobs/wikipedia_indexer/suggestion.py +++ b/merino/jobs/wikipedia_indexer/suggestion.py @@ -37,9 +37,7 @@ class Scorer: def __init__(self, max_docs: int) -> None: self.max_docs = max_docs - self.incoming_links_norm = int( - float(self.max_docs) * self.INCOMING_LINKS_MAX_DOCS_FACTOR - ) + self.incoming_links_norm = int(float(self.max_docs) * self.INCOMING_LINKS_MAX_DOCS_FACTOR) def score(self, doc: Dict) -> float: """Calculate suggestion scores from the incoming doc""" diff --git a/merino/jobs/wikipedia_indexer/utils.py b/merino/jobs/wikipedia_indexer/utils.py index fe57031d1..d1a8745a4 100644 --- a/merino/jobs/wikipedia_indexer/utils.py +++ b/merino/jobs/wikipedia_indexer/utils.py @@ -18,9 +18,7 @@ class ProgressReporter: total: int 
progress: int - def __init__( - self, logger: Logger, action: str, source: str, destination: str, total: int - ): + def __init__(self, logger: Logger, action: str, source: str, destination: str, total: int): self.logger = logger self.action = action self.source = source diff --git a/merino/metrics.py b/merino/metrics.py index e585aafc8..7cb1cb439 100644 --- a/merino/metrics.py +++ b/merino/metrics.py @@ -55,9 +55,7 @@ def feature_flags_as_tags(feature_flags: FeatureFlags) -> MetricTags: @decorator -def add_feature_flags( - wrapped_method: Callable, instance: C, args: tuple, kwargs: dict -) -> Any: +def add_feature_flags(wrapped_method: Callable, instance: C, args: tuple, kwargs: dict) -> Any: """Add feature flag decisions as tags when recording metrics.""" # Tags added manually to the metrics client call tags = kwargs.pop("tags", {}) @@ -89,9 +87,7 @@ def method_proxy( ) -> Callable[Concatenate[C, P], R]: """Return a method that proxies to correct method of the StatsD client.""" - def client_method( - instance: C, *method_args: P.args, **method_kwargs: P.kwargs - ) -> R: + def client_method(instance: C, *method_args: P.args, **method_kwargs: P.kwargs) -> R: """Look up the correct method of the StatsD client call it.""" # Keep track of all calls made to the metrics client call: MetricCall = { @@ -122,9 +118,7 @@ class Client(metaclass=ClientMeta): feature_flags: FeatureFlags calls: MetricCalls - def __init__( - self, statsd_client: aiodogstatsd.Client, feature_flags: FeatureFlags - ) -> None: + def __init__(self, statsd_client: aiodogstatsd.Client, feature_flags: FeatureFlags) -> None: """Initialize the client instance.""" self.statsd_client = statsd_client self.feature_flags = feature_flags @@ -133,9 +127,7 @@ def __init__( def __getattr__(self, attr_name: str): """Raise an exception when an unsupported attribute is requested.""" logger.warning(f"{attr_name} is not a supported method: {SUPPORTED_METHODS}") - raise AttributeError( - f"attribute '{attr_name}' is not supported by metrics.Client class" - ) + raise AttributeError(f"attribute '{attr_name}' is not supported by metrics.Client class") @cache diff --git a/merino/middleware/geolocation.py b/merino/middleware/geolocation.py index 345bc039f..58b948cea 100644 --- a/merino/middleware/geolocation.py +++ b/merino/middleware/geolocation.py @@ -54,9 +54,7 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: request = Request(scope=scope) record = None - ip_address = CLIENT_IP_OVERRIDE or ( - request.client.host or "" if request.client else "" - ) + ip_address = CLIENT_IP_OVERRIDE or (request.client.host or "" if request.client else "") try: record = reader.city(ip_address) except ValueError: diff --git a/merino/middleware/logging.py b/merino/middleware/logging.py index b834ccb79..052e26933 100644 --- a/merino/middleware/logging.py +++ b/merino/middleware/logging.py @@ -45,16 +45,15 @@ async def send_wrapper(message: Message) -> None: # https://mozilla-hub.atlassian.net/browse/DISCO-2489 if ( PATTERN.match(request.url.path) - and request.query_params.get("providers", "").strip().lower() - != "accuweather" + and request.query_params.get("providers", "").strip().lower() != "accuweather" ): suggest_log_data: SuggestLogDataModel = create_suggest_log_data( request, message, dt ) suggest_request_logger.info("", extra=suggest_log_data.model_dump()) else: - request_log_data: RequestSummaryLogDataModel = ( - create_request_summary_log_data(request, message, dt) + request_log_data: RequestSummaryLogDataModel = 
create_request_summary_log_data( + request, message, dt ) logger.info("", extra=request_log_data.model_dump()) diff --git a/merino/middleware/metrics.py b/merino/middleware/metrics.py index f718dd048..0e830569f 100644 --- a/merino/middleware/metrics.py +++ b/merino/middleware/metrics.py @@ -59,9 +59,7 @@ async def send_wrapper(message: Message) -> None: # don't track NOT_FOUND statuses by path. # Instead we will track those within a general `response.status_codes` metric. if status_code != HTTPStatus.NOT_FOUND.value: - metric_name = self._build_metric_name( - request.method, request.url.path - ) + metric_name = self._build_metric_name(request.method, request.url.path) client.timing( f"{metric_name}.timing", value=duration, @@ -88,6 +86,4 @@ async def send_wrapper(message: Message) -> None: @cache def _build_metric_name(self, method: str, path: str) -> str: - return "{}.{}".format( - method, path.lower().lstrip("/").replace("/", ".") - ).lower() + return "{}.{}".format(method, path.lower().lstrip("/").replace("/", ".")).lower() diff --git a/merino/providers/__init__.py b/merino/providers/__init__.py index 3187a6310..36cec2bce 100644 --- a/merino/providers/__init__.py +++ b/merino/providers/__init__.py @@ -23,9 +23,7 @@ async def init_providers() -> None: start = timer() # register providers - providers.update( - load_providers(disabled_providers_list=settings.runtime.disabled_providers) - ) + providers.update(load_providers(disabled_providers_list=settings.runtime.disabled_providers)) # initialize providers and record time init_metric = "providers.initialize" @@ -36,9 +34,7 @@ async def init_providers() -> None: for provider_name, p in providers.items() ] await asyncio.gather(*wrapped_tasks) - default_providers.extend( - [p for p in providers.values() if p.enabled_by_default] - ) + default_providers.extend([p for p in providers.values() if p.enabled_by_default]) logger.info( "Provider initialization completed", extra={"providers": [*providers.keys()], "elapsed": timer() - start}, diff --git a/merino/providers/adm/backends/remotesettings.py b/merino/providers/adm/backends/remotesettings.py index 7495632dd..30a169a05 100644 --- a/merino/providers/adm/backends/remotesettings.py +++ b/merino/providers/adm/backends/remotesettings.py @@ -42,9 +42,7 @@ class RemoteSettingsBackend: kinto_http_client: kinto_http.AsyncClient - def __init__( - self, server: str | None, collection: str | None, bucket: str | None - ) -> None: + def __init__(self, server: str | None, collection: str | None, bucket: str | None) -> None: """Init the Remote Settings backend and create a new client. Args: @@ -103,9 +101,7 @@ async def fetch(self) -> SuggestionContent: icon_record = self.filter_records(record_type="icon", records=records) for icon in icon_record: id = icon["id"].replace("icon-", "") - icons[id] = urljoin( - base=attachment_host, url=icon["attachment"]["location"] - ) + icons[id] = urljoin(base=attachment_host, url=icon["attachment"]["location"]) return SuggestionContent( suggestions=suggestions, @@ -192,9 +188,7 @@ async def get_attachment(self, url: str) -> list[KintoSuggestion]: Raises: RemoteSettingsError: Failed request to Remote Settings. 
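A worked example of `_build_metric_name` from the metrics middleware above; the function body is verbatim, only the surrounding class and `@cache` decorator are stripped:

```python
def build_metric_name(method: str, path: str) -> str:
    return "{}.{}".format(method, path.lower().lstrip("/").replace("/", ".")).lower()

assert build_metric_name("GET", "/api/v1/suggest") == "get.api.v1.suggest"
```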
""" - async with create_http_client( - connect_timeout=RS_CONNECT_TIMEOUT - ) as httpx_client: + async with create_http_client(connect_timeout=RS_CONNECT_TIMEOUT) as httpx_client: try: response: httpx.Response = await httpx_client.get(url) response.raise_for_status() diff --git a/merino/providers/adm/provider.py b/merino/providers/adm/provider.py index 88620c1f8..a2eb0b1bd 100644 --- a/merino/providers/adm/provider.py +++ b/merino/providers/adm/provider.py @@ -148,9 +148,7 @@ async def query(self, srequest: SuggestionRequest) -> list[BaseSuggestion]: "provider": self.name, "advertiser": res.get("advertiser"), "is_sponsored": is_sponsored, - "icon": self.suggestion_content.icons.get( - res.get("icon", MISSING_ICON_ID) - ), + "icon": self.suggestion_content.icons.get(res.get("icon", MISSING_ICON_ID)), "score": self.score, } return [ diff --git a/merino/providers/amo/addons_data.py b/merino/providers/amo/addons_data.py index dfbf2ea11..6b5691a1f 100644 --- a/merino/providers/amo/addons_data.py +++ b/merino/providers/amo/addons_data.py @@ -43,8 +43,7 @@ class SupportedAddon(enum.StrEnum): SupportedAddon.PRIVATE_RELAY: { "name": "Firefox Relay", "description": ( - "Email masking to protect your inbox and identity " - "from hackers and junk mail." + "Email masking to protect your inbox and identity " "from hackers and junk mail." ), "url": "https://addons.mozilla.org/en-US/firefox/addon/private-relay/", "guid": "private-relay@firefox.com", @@ -61,8 +60,7 @@ class SupportedAddon(enum.StrEnum): SupportedAddon.DARKREADER: { "name": "Dark Reader", "description": ( - "Get night mode for the entire internet. " - "Adjust colors and reduce eye strain." + "Get night mode for the entire internet. " "Adjust colors and reduce eye strain." ), "url": "https://addons.mozilla.org/en-US/firefox/addon/darkreader/", "guid": "addon@darkreader.org", @@ -79,8 +77,7 @@ class SupportedAddon(enum.StrEnum): SupportedAddon.UBLOCK_ORIGIN: { "name": "uBlock Origin", "description": ( - "Block ads and enjoy a faster internet with " - "this efficient content blocker." + "Block ads and enjoy a faster internet with " "this efficient content blocker." ), "url": "https://addons.mozilla.org/en-US/firefox/addon/ublock-origin/", "guid": "uBlock0@raymondhill.net", diff --git a/merino/providers/amo/backends/dynamic.py b/merino/providers/amo/backends/dynamic.py index 2866c6391..ee2195660 100644 --- a/merino/providers/amo/backends/dynamic.py +++ b/merino/providers/amo/backends/dynamic.py @@ -65,9 +65,7 @@ async def _fetch_addon( "number_of_ratings": json_res["ratings"]["count"], } except httpx.HTTPError as e: - logger.error( - f"Addons API could not find key: {addon_key}, {e}, {e.__class__}" - ) + logger.error(f"Addons API could not find key: {addon_key}, {e}, {e.__class__}") except (KeyError, JSONDecodeError): logger.error( "Problem with Addons API formatting. " @@ -91,9 +89,7 @@ async def fetch_and_cache_addons_info(self) -> None: ): for addon_key in SupportedAddon: tasks.append( - group.create_task( - self._fetch_addon(client, addon_key), name=addon_key - ) + group.create_task(self._fetch_addon(client, addon_key), name=addon_key) ) # Update in place without clearing out the map so that fetch failures @@ -112,9 +108,7 @@ async def get_addon(self, addon_key: SupportedAddon) -> Addon: try: icon_and_rating: dict[str, str] = self.dynamic_data[addon_key] except KeyError: - raise DynamicAmoBackendException( - "Missing Addon in execution. Skip returning Addon." - ) + raise DynamicAmoBackendException("Missing Addon in execution. 
Skip returning Addon.") return Addon( name=static_info["name"], diff --git a/merino/providers/base.py b/merino/providers/base.py index 5e7bb8bd5..cb36c02b5 100644 --- a/merino/providers/base.py +++ b/merino/providers/base.py @@ -126,9 +126,7 @@ async def shutdown(self) -> None: # pragma: no cover return @abstractmethod - async def query( - self, srequest: SuggestionRequest - ) -> list[BaseSuggestion]: # pragma: no cover + async def query(self, srequest: SuggestionRequest) -> list[BaseSuggestion]: # pragma: no cover """Query against this provider. Args: diff --git a/merino/providers/manager.py b/merino/providers/manager.py index 32c98a5d0..f74b23648 100644 --- a/merino/providers/manager.py +++ b/merino/providers/manager.py @@ -67,9 +67,7 @@ def _create_provider(provider_id: str, setting: Settings) -> BaseProvider: ), cached_forecast_ttl_sec=setting.cache_ttls.forecast_ttl_sec, metrics_client=get_metrics_client(), - http_client=create_http_client( - base_url=settings.accuweather.url_base - ), + http_client=create_http_client(base_url=settings.accuweather.url_base), url_param_api_key=settings.accuweather.url_param_api_key, url_cities_path=settings.accuweather.url_cities_path, url_cities_param_query=settings.accuweather.url_cities_param_query, @@ -96,9 +94,7 @@ def _create_provider(provider_id: str, setting: Settings) -> BaseProvider: case ProviderType.AMO: return AmoProvider( backend=( - DynamicAmoBackend( - api_url=settings.amo.dynamic.api_url - ) # type: ignore [arg-type] + DynamicAmoBackend(api_url=settings.amo.dynamic.api_url) # type: ignore [arg-type] if setting.backend == "dynamic" else StaticAmoBackend() ), diff --git a/merino/providers/top_picks/backends/top_picks.py b/merino/providers/top_picks/backends/top_picks.py index f3bd097b1..078bfe6de 100644 --- a/merino/providers/top_picks/backends/top_picks.py +++ b/merino/providers/top_picks/backends/top_picks.py @@ -167,9 +167,7 @@ def maybe_build_indices(self) -> tuple[Enum, TopPicksData | None]: gcs_bucket_path=settings.providers.top_picks.gcs_bucket, ) client = remote_filemanager.create_gcs_client() - get_file_result_code, remote_domains = remote_filemanager.get_file( - client - ) + get_file_result_code, remote_domains = remote_filemanager.get_file(client) match GetFileResultCode(get_file_result_code): case GetFileResultCode.SUCCESS: diff --git a/merino/providers/top_picks/provider.py b/merino/providers/top_picks/provider.py index 1b85edefe..108c6cabb 100644 --- a/merino/providers/top_picks/provider.py +++ b/merino/providers/top_picks/provider.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Top Pick Navigational Queries Provider""" + import asyncio import logging import time @@ -88,10 +89,7 @@ async def initialize(self) -> None: # Run a cron job that will periodically check whether to update domain file. # Only runs when domain source set to `remote`. 
- if ( - settings.providers.top_picks.domain_data_source - == DomainDataSource.REMOTE.value - ): + if settings.providers.top_picks.domain_data_source == DomainDataSource.REMOTE.value: cron_job = cron.Job( name="resync_domain_file", interval=self.cron_interval_sec, @@ -154,17 +152,11 @@ async def query(self, srequest: SuggestionRequest) -> list[BaseSuggestion]: ): ids = self.top_picks_data.short_domain_index.get(query) case qlen if ( - self.top_picks_data.query_char_limit - <= qlen - <= self.top_picks_data.query_max + self.top_picks_data.query_char_limit <= qlen <= self.top_picks_data.query_max ): ids = self.top_picks_data.primary_index.get( query ) or self.top_picks_data.secondary_index.get(query) case _: ids = None - return ( - [Suggestion(**self.top_picks_data.results[ids[0]], score=self.score)] - if ids - else [] - ) + return [Suggestion(**self.top_picks_data.results[ids[0]], score=self.score)] if ids else [] diff --git a/merino/providers/weather/backends/accuweather.py b/merino/providers/weather/backends/accuweather.py index bba2e7bdf..9f0293eee 100644 --- a/merino/providers/weather/backends/accuweather.py +++ b/merino/providers/weather/backends/accuweather.py @@ -289,9 +289,7 @@ async def get_request( response.raise_for_status() if (response_dict := process_api_response(response.json())) is None: - self.metrics_client.increment( - f"accuweather.request.{request_type}.processor.error" - ) + self.metrics_client.increment(f"accuweather.request.{request_type}.processor.error") return None response_expiry: str = response.headers.get("Expires") @@ -303,9 +301,7 @@ async def get_request( response_dict["cached_request_ttl"] = cached_request_ttl except (CacheAdapterError, ValueError) as exc: logger.error(f"Error with storing Accuweather to cache: {exc}") - error_type = ( - "set_error" if isinstance(exc, CacheAdapterError) else "ttl_date_error" - ) + error_type = "set_error" if isinstance(exc, CacheAdapterError) else "ttl_date_error" self.metrics_client.increment(f"accuweather.cache.store.{error_type}") raise AccuweatherError( "Something went wrong with storing to cache. Did not update cache." @@ -399,9 +395,7 @@ def parse_cached_data(self, cached_data: list[bytes | None]) -> WeatherData: if location_cached is not None: location = AccuweatherLocation.model_validate_json(location_cached) if current_cached is not None: - current_conditions = CurrentConditions.model_validate_json( - current_cached - ) + current_conditions = CurrentConditions.model_validate_json(current_cached) if forecast_cached is not None: forecast = Forecast.model_validate_json(forecast_cached) if ttl_cached is not None: @@ -432,9 +426,7 @@ async def get_weather_report( return await self.get_weather_report_with_geolocation(geolocation) - async def get_weather_report_with_location_key( - self, location_key - ) -> WeatherReport | None: + async def get_weather_report_with_location_key(self, location_key) -> WeatherReport | None: """Get weather information from AccuWeather. Firstly, it will look up the Redis cache for the current condition, @@ -460,9 +452,9 @@ async def get_weather_report_with_location_key( # The order matters below. # See `LUA_SCRIPT_CACHE_BULK_FETCH_VIA_LOCATION` for details. 
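The Top Picks `query` method reshuffled above dispatches on query length with a `match` statement. A self-contained reduction with placeholder limits standing in for `TopPicksData` values; the guard on the short-domain arm is an assumption, since that condition sits outside the diff context:

```python
QUERY_CHAR_LIMIT, QUERY_MAX = 4, 9  # placeholder limits, not Merino's config

def pick_index(query: str) -> str | None:
    match len(query):
        case qlen if 0 < qlen < QUERY_CHAR_LIMIT:  # assumed guard
            return "short_domain_index"
        case qlen if QUERY_CHAR_LIMIT <= qlen <= QUERY_MAX:
            return "primary_index"
        case _:
            return None

assert pick_index("exa") == "short_domain_index"
assert pick_index("example") == "primary_index"
assert pick_index("a" * 20) is None
```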
args=[ - self.cache_key_template( - WeatherDataType.CURRENT_CONDITIONS - ).format(location_key=location_key), + self.cache_key_template(WeatherDataType.CURRENT_CONDITIONS).format( + location_key=location_key + ), self.cache_key_template(WeatherDataType.FORECAST).format( location_key=location_key ), @@ -470,9 +462,7 @@ async def get_weather_report_with_location_key( ) except CacheAdapterError as exc: logger.error(f"Failed to fetch weather report from Redis: {exc}") - self.metrics_client.increment( - "accuweather.cache.fetch-via-location-key.error" - ) + self.metrics_client.increment("accuweather.cache.fetch-via-location-key.error") return None self.emit_cache_fetch_metrics(cached_data, skip_location_key=True) @@ -567,9 +557,7 @@ async def as_awaitable(val: Any) -> Any: if location is None: if country and city and region: if ( - location := await self.get_location_by_geolocation( - country, city, region - ) + location := await self.get_location_by_geolocation(country, city, region) ) is None: return None else: @@ -591,9 +579,7 @@ async def as_awaitable(val: Any) -> Any: self.get_forecast(location.key) if forecast is None else as_awaitable( - ForecastWithTTL( - forecast=forecast, ttl=self.cached_forecast_ttl_sec - ) + ForecastWithTTL(forecast=forecast, ttl=self.cached_forecast_ttl_sec) ) ) except ExceptionGroup as e: @@ -642,9 +628,7 @@ async def get_location_by_geolocation( return AccuweatherLocation(**response) if response else None - async def get_current_conditions( - self, location_key: str - ) -> CurrentConditionsWithTTL | None: + async def get_current_conditions(self, location_key: str) -> CurrentConditionsWithTTL | None: """Return current conditions data for a specific location or None if current conditions data is not found. @@ -741,9 +725,7 @@ async def get_location_completion( response: Response = await self.http_client.get(url_path, params=params) response.raise_for_status() - processed_location_completions = process_location_completion_response( - response.json() - ) + processed_location_completions = process_location_completion_response(response.json()) location_completions = [ LocationCompletion(**item) for item in processed_location_completions diff --git a/merino/providers/weather/provider.py b/merino/providers/weather/provider.py index e74aa5312..f902b56bb 100644 --- a/merino/providers/weather/provider.py +++ b/merino/providers/weather/provider.py @@ -114,9 +114,7 @@ def build_suggestion( city_name=data.city_name, current_conditions=data.current_conditions, forecast=data.forecast, - custom_details=CustomDetails( - weather=WeatherDetails(weather_report_ttl=data.ttl) - ), + custom_details=CustomDetails(weather=WeatherDetails(weather_report_ttl=data.ttl)), ) else: return LocationCompletionSuggestion( diff --git a/merino/providers/wikipedia/backends/elastic.py b/merino/providers/wikipedia/backends/elastic.py index 578864ac4..df8494ba5 100644 --- a/merino/providers/wikipedia/backends/elastic.py +++ b/merino/providers/wikipedia/backends/elastic.py @@ -77,10 +77,7 @@ async def search(self, q: str) -> list[dict[str, Any]]: raise BackendError(f"Failed to search from Elasticsearch: {e}") from e if "suggest" in res: - return [ - self.build_article(q, doc) - for doc in res["suggest"][SUGGEST_ID][0]["options"] - ] + return [self.build_article(q, doc) for doc in res["suggest"][SUGGEST_ID][0]["options"]] else: return [] diff --git a/merino/utils/log_data_creators.py b/merino/utils/log_data_creators.py index e79d1e634..8c7861ca4 100644 --- a/merino/utils/log_data_creators.py +++ 
b/merino/utils/log_data_creators.py @@ -94,9 +94,7 @@ def create_suggest_log_data( rid=Headers(scope=message)["X-Request-ID"], session_id=request.query_params.get("sid"), sequence_no=( - int(seq) - if (seq := request.query_params.get("seq", "")) and seq.isdecimal() - else None + int(seq) if (seq := request.query_params.get("seq", "")) and seq.isdecimal() else None ), client_variants=request.query_params.get("client_variants", ""), requested_providers=request.query_params.get("providers", ""), diff --git a/merino/web/api_v1.py b/merino/web/api_v1.py index c0149a39f..d6b8b3d67 100644 --- a/merino/web/api_v1.py +++ b/merino/web/api_v1.py @@ -69,12 +69,8 @@ async def suggest( request: Request, q: str = Query(max_length=QUERY_CHARACTER_MAX), providers: str | None = None, - client_variants: str | None = Query( - default=None, max_length=CLIENT_VARIANT_CHARACTER_MAX - ), - sources: tuple[dict[str, BaseProvider], list[BaseProvider]] = Depends( - get_providers - ), + client_variants: str | None = Query(default=None, max_length=CLIENT_VARIANT_CHARACTER_MAX), + sources: tuple[dict[str, BaseProvider], list[BaseProvider]] = Depends(get_providers), request_type: Annotated[str | None, Query(pattern="^(location|weather)$")] = None, ) -> JSONResponse: """Query Merino for suggestions. @@ -184,9 +180,7 @@ async def suggest( geolocation=request.scope[ScopeKey.GEOLOCATION], request_type=request_type, ) - task = metrics_client.timeit_task( - p.query(srequest), f"providers.{p.name}.query" - ) + task = metrics_client.timeit_task(p.query(srequest), f"providers.{p.name}.query") # `timeit_task()` doesn't support task naming, need to set the task name manually task.set_name(p.name) lookups.append(task) @@ -290,9 +284,7 @@ def get_ttl_for_cache_control_header_for_suggestions( response_model=list[ProviderResponse], ) async def providers( - sources: tuple[dict[str, BaseProvider], list[BaseProvider]] = Depends( - get_providers - ), + sources: tuple[dict[str, BaseProvider], list[BaseProvider]] = Depends(get_providers), ) -> JSONResponse: """Query Merino for suggestion providers. diff --git a/merino/web/dockerflow.py b/merino/web/dockerflow.py index 84d3edd02..53cb6a635 100644 --- a/merino/web/dockerflow.py +++ b/merino/web/dockerflow.py @@ -33,17 +33,13 @@ async def version() -> Version: return app_version -@router.get( - "/__heartbeat__", tags=["__heartbeat__"], summary="Dockerflow: __heartbeat__" -) +@router.get("/__heartbeat__", tags=["__heartbeat__"], summary="Dockerflow: __heartbeat__") async def heartbeat() -> Response: """Dockerflow: Query service heartbeat. It returns an empty string in the response.""" return Response(content="") -@router.get( - "/__lbheartbeat__", tags=["__lbheartbeat__"], summary="Dockerflow: __lbheartbeat__" -) +@router.get("/__lbheartbeat__", tags=["__lbheartbeat__"], summary="Dockerflow: __lbheartbeat__") async def lbheartbeat() -> Response: """Dockerflow: Query service heartbeat for load balancer. It returns an empty string in the response. diff --git a/poetry.lock b/poetry.lock index 0ab3727e4..204ff9cf2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiodogstatsd" @@ -247,50 +247,6 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "black" -version = "24.4.2" -description = "The uncompromising code formatter." 
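The `suggest` endpoint in `merino/web/api_v1.py` above leans on FastAPI's `Query` validation and `Depends` injection. A minimal, self-contained sketch under assumed names (not Merino's real app, constants, or provider types):

```python
from fastapi import Depends, FastAPI, Query

app = FastAPI()

def get_providers() -> dict[str, str]:
    return {"adm": "adm-provider"}  # stand-in for Merino's provider registry

@app.get("/api/v1/suggest")
async def suggest(
    q: str = Query(max_length=500),  # max length is a placeholder constant
    providers: dict[str, str] = Depends(get_providers),
) -> dict:
    return {"query": q, "providers": list(providers)}
```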
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = 
["uvloop (>=0.15.2)"] - [[package]] name = "cachetools" version = "5.3.3" @@ -799,22 +755,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] -[[package]] -name = "flake8" -version = "7.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.2.0,<3.3.0" - [[package]] name = "freezegun" version = "1.5.1" @@ -1382,20 +1322,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - [[package]] name = "jinja2" version = "3.1.4" @@ -1579,17 +1505,6 @@ files = [ {file = "maxminddb-2.6.1.tar.gz", hash = "sha256:6c5d591f625e03b0a34df0c7ff81580676397b8335e13ece130c6e39e4a3afb9"}, ] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "multidict" version = "6.0.5" @@ -1768,17 +1683,6 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - [[package]] name = "pbr" version = "6.0.0" @@ -2015,17 +1919,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.7.0" -[[package]] -name = "pycodestyle" -version = "2.11.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, -] - [[package]] name = "pycparser" version = "2.22" @@ -2147,34 +2040,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pydocstyle" -version = "6.3.0" -description = "Python docstring style checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, - {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, -] - -[package.dependencies] -snowballstemmer = ">=2.2.0" - -[package.extras] -toml = ["tomli (>=1.2.3)"] - -[[package]] -name = "pyflakes" -version = "3.2.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, -] - [[package]] name = "pygments" version = "2.18.0" @@ -2517,6 +2382,32 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "ruff" +version = "0.4.10" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.4.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c2c4d0859305ac5a16310eec40e4e9a9dec5dcdfbe92697acd99624e8638dac"}, + {file = "ruff-0.4.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a79489607d1495685cdd911a323a35871abfb7a95d4f98fc6f85e799227ac46e"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1dd1681dfa90a41b8376a61af05cc4dc5ff32c8f14f5fe20dba9ff5deb80cd6"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c75c53bb79d71310dc79fb69eb4902fba804a81f374bc86a9b117a8d077a1784"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18238c80ee3d9100d3535d8eb15a59c4a0753b45cc55f8bf38f38d6a597b9739"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d8f71885bce242da344989cae08e263de29752f094233f932d4f5cfb4ef36a81"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:330421543bd3222cdfec481e8ff3460e8702ed1e58b494cf9d9e4bf90db52b9d"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e9b6fb3a37b772628415b00c4fc892f97954275394ed611056a4b8a2631365e"}, + {file = "ruff-0.4.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f54c481b39a762d48f64d97351048e842861c6662d63ec599f67d515cb417f6"}, + {file = "ruff-0.4.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:67fe086b433b965c22de0b4259ddfe6fa541c95bf418499bedb9ad5fb8d1c631"}, + {file = "ruff-0.4.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:acfaaab59543382085f9eb51f8e87bac26bf96b164839955f244d07125a982ef"}, + {file = "ruff-0.4.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3cea07079962b2941244191569cf3a05541477286f5cafea638cd3aa94b56815"}, + {file = "ruff-0.4.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:338a64ef0748f8c3a80d7f05785930f7965d71ca260904a9321d13be24b79695"}, + {file = "ruff-0.4.10-py3-none-win32.whl", hash = "sha256:ffe3cd2f89cb54561c62e5fa20e8f182c0a444934bf430515a4b422f1ab7b7ca"}, + {file = "ruff-0.4.10-py3-none-win_amd64.whl", hash = "sha256:67f67cef43c55ffc8cc59e8e0b97e9e60b4837c8f21e8ab5ffd5d66e196e25f7"}, + {file = "ruff-0.4.10-py3-none-win_arm64.whl", hash = "sha256:dd1fcee327c20addac7916ca4e2653fbbf2e8388d8a6477ce5b4e986b68ae6c0"}, + {file = "ruff-0.4.10.tar.gz", hash = "sha256:3aa4f2bc388a30d346c56524f7cacca85945ba124945fe489952aadb6b5cd804"}, +] + [[package]] name = "scalene" version = "1.5.41" @@ -2644,17 +2535,6 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - [[package]] name = "soupsieve" version = "2.5" @@ -2742,24 +2622,21 @@ weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] [[package]] name = "typer" -version = "0.9.4" +version = "0.11.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, - {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, + {file = "typer-0.11.1-py3-none-any.whl", hash = "sha256:4ce7b2a60b8543816ca97d5ec016026cbe95d1a7a931083b988c1d3682548fe7"}, + {file = "typer-0.11.1.tar.gz", hash = "sha256:f5ae987b97ebbbd59182f8e84407bbc925bc636867fa007bce87a7a71ac81d5c"}, ] [package.dependencies] -click = ">=7.1.1,<9.0.0" +click = ">=8.0.0" typing-extensions = ">=3.7.4.3" [package.extras] all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "types-cffi" @@ -3425,4 +3302,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "d3b62aa459bae039850cceac08c2986deaa787b9795768d116dc4bb8d580c09d" +content-hash = "17887a7779cfe8368058fb691382729481745ba59199e089c644c8b8be99a7a6" diff --git a/pyproject.toml b/pyproject.toml index c5d05e26f..6355e3ea0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,9 +6,28 @@ relative_files = true [tool.coverage.report] show_missing = true -[tool.isort] -profile = "black" -skip_gitignore = true +[tool.ruff] +line-length = 99 +exclude = ["build", "dist", "__pycache__", ".git"] +lint.select = ["D", "E4", "E7", "E9", "F"] +# Error Code Ref: https://www.pydocstyle.org/en/stable/error_codes.html +# D212 Multi-line docstring summary should start at the first line +lint.extend-select = ["D212"] +lint.ignore = [ + # D105 Docstrings for magic methods + "D105", + # D107 Docstrings for __init__ + "D107", + # D203 as it conflicts with D211 https://github.com/PyCQA/pydocstyle/issues/141 + "D203", + # D205 1 blank line required between summary line and description, awkward spacing + "D205", + # D400 First line should end with a period, doesn't work when sentence spans 2 lines + "D400" +] +lint.extend-ignore = ["E203"] +lint.per-file-ignores = {"__init__.py" = ["F401"]} +lint.pydocstyle = { convention = "pep257" } [tool.bandit] # skips asserts @@ -31,19 +50,6 @@ warn_return_any = true warn_unused_ignores = true warn_unreachable = true -[tool.pydocstyle] -match = ".*\\.py" -convention = "pep257" -# Error Code Ref: https://www.pydocstyle.org/en/stable/error_codes.html -# D212 Multi-line docstring summary should start at the first line -add-select = ["D212"] -# D105 Docstrings for magic methods -# D107 Docstrings for __init__ -# D203 as it conflicts with D211 https://github.com/PyCQA/pydocstyle/issues/141 -# D205 1 blank line required between summary line and description, awkward spacing -# D400 First line should end with a period, doesn't work when sentence spans 2 lines -add-ignore = ["D105","D107","D203", "D205", "D400"] - [tool.pytest.ini_options] testpaths = ["tests"] addopts = "-v -r s" @@ -81,16 +87,12 @@ google-cloud-storage = "^2.16.0" 
 google-cloud-bigquery = "^3.9.0"
 Pillow = "^10.2.0"
 robobro = "^0.5.3"
-typer = "^0.9.0"
+typer = "^0.11.0"
 redis = "^4.5.4"
 types-python-dateutil = "^2.8.19.13"
 pydantic = "^2.1.0"
 
 [tool.poetry.group.dev.dependencies]
-black = "^24.4.0"
-flake8 = "^7.0.0"
-isort = "^5.10.1"
-pydocstyle = "^6.1.1"
 mypy = "^1.5"
 pre-commit = "^2.20.0"
 bandit = {extras = ["toml"], version = "^1.7.5"}
@@ -105,6 +107,7 @@ pytest-mock = "^3.10.0"
 freezegun = "^1.2.2"
 requests-mock = "^1.10.0"
 requests-toolbelt = "^1.0.0"
+ruff = "^0.4.10"
 types-redis = "^4.5.1.1"
 testcontainers = "^4.0.1"
 types-pillow = "^10.2.0.20240520"
diff --git a/tests/contract/client/tests/conftest.py b/tests/contract/client/tests/conftest.py
index c4f09e8a0..2686f8964 100644
--- a/tests/contract/client/tests/conftest.py
+++ b/tests/contract/client/tests/conftest.py
@@ -51,8 +51,7 @@ def fixture_kinto_attachments(request: Any) -> dict[str, KintoRequestAttachment]
     for data_file in pathlib.Path(kinto_data_dir).glob("*.json"):
         content: bytes = data_file.read_bytes()
         suggestions: list[KintoSuggestion] = [
-            KintoSuggestion(**suggestion)
-            for suggestion in json.loads(data_file.read_text())
+            KintoSuggestion(**suggestion) for suggestion in json.loads(data_file.read_text())
         ]
         kinto_attachments[data_file.name] = KintoRequestAttachment(
             filename=data_file.name,
@@ -99,9 +98,7 @@ def pytest_configure(config: Any) -> None:
     with pathlib.Path(config.option.scenarios_file).open() as f:
         loaded_scenarios = yaml.safe_load(f)
 
-    config.merino_scenarios = [
-        Scenario(**scenario) for scenario in loaded_scenarios["scenarios"]
-    ]
+    config.merino_scenarios = [Scenario(**scenario) for scenario in loaded_scenarios["scenarios"]]
 
 
 def pytest_generate_tests(metafunc: Any) -> None:
diff --git a/tests/contract/client/tests/test_merino.py b/tests/contract/client/tests/test_merino.py
index d05b075e5..ef042a710 100644
--- a/tests/contract/client/tests/test_merino.py
+++ b/tests/contract/client/tests/test_merino.py
@@ -85,21 +85,17 @@ def fixture_merino_step(
     def merino_step(step: Step) -> None:
         if type(step.request) is not MerinoRequest:
             raise TypeError(
-                f"Unsupported request type {type(step.request)} for Merino service "
-                f"step."
+                f"Unsupported request type {type(step.request)} for Merino service step."
             )
 
         if type(step.response) is not Response:
             raise TypeError(
-                f"Unsupported response type {type(step.request)} for Merino service "
-                f"step."
+                f"Unsupported response type {type(step.response)} for Merino service step."
             )
 
         method: str = step.request.method
         url: str = f"{merino_url}{step.request.path}"
 
-        headers: dict[str, str] = {
-            header.name: header.value for header in step.request.headers
-        }
+        headers: dict[str, str] = {header.name: header.value for header in step.request.headers}
 
         response: RequestsResponse = requests.request(method, url, headers=headers)
 
diff --git a/tests/contract/poetry.lock b/tests/contract/poetry.lock
index e53c7eff1..476f8b330 100644
--- a/tests/contract/poetry.lock
+++ b/tests/contract/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -433,24 +433,21 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "typer"
-version = "0.9.0"
+version = "0.11.1"
 description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, - {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, + {file = "typer-0.11.1-py3-none-any.whl", hash = "sha256:4ce7b2a60b8543816ca97d5ec016026cbe95d1a7a931083b988c1d3682548fe7"}, + {file = "typer-0.11.1.tar.gz", hash = "sha256:f5ae987b97ebbbd59182f8e84407bbc925bc636867fa007bce87a7a71ac81d5c"}, ] [package.dependencies] -click = ">=7.1.1,<9.0.0" +click = ">=8.0.0" typing-extensions = ">=3.7.4.3" [package.extras] all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] -dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "typing-extensions" @@ -482,4 +479,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "ca009b7cf5e18c377b7cdd047960b7753fed9a9b8e804855b990086ee039b441" +content-hash = "0b190173b5b1ae8fe7857b6b42efff997722b992f2e36170549344f5eb1b0734" diff --git a/tests/contract/pyproject.toml b/tests/contract/pyproject.toml index 6953f0ac0..f2de5db45 100644 --- a/tests/contract/pyproject.toml +++ b/tests/contract/pyproject.toml @@ -16,7 +16,7 @@ pytest = "^7.4.3" PyYAML = "^6.0.1" [tool.poetry.group.kinto.dependencies] -typer = "^0.9.0" +typer = "^0.11.0" [build-system] diff --git a/tests/integration/api/test_error.py b/tests/integration/api/test_error.py index a182bbf73..49c3a3060 100644 --- a/tests/integration/api/test_error.py +++ b/tests/integration/api/test_error.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Integration tests for the Merino __error__ API endpoint.""" + import logging from datetime import datetime from logging import LogRecord @@ -46,8 +47,7 @@ def test_error_request_log_data( expected_log_data: RequestSummaryLogDataModel = RequestSummaryLogDataModel( agent=( - "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" - " Gecko/20100101 Firefox/103.0" + "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0" ), path="/__error__", method="GET", diff --git a/tests/integration/api/test_heartbeat.py b/tests/integration/api/test_heartbeat.py index 42e9662bc..f77dba310 100644 --- a/tests/integration/api/test_heartbeat.py +++ b/tests/integration/api/test_heartbeat.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Integration tests for the Merino __heartbeat__ and __lbheartbeat__ API endpoints.""" + import logging from datetime import datetime from logging import LogRecord @@ -41,8 +42,7 @@ def test_heartbeat_request_log_data( expected_log_data: RequestSummaryLogDataModel = RequestSummaryLogDataModel( agent=( - "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" - " Gecko/20100101 Firefox/103.0" + "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0" ), path=f"/{endpoint}", method="GET", diff --git a/tests/integration/api/test_version.py b/tests/integration/api/test_version.py index 4400a5e05..5d06a235a 100644 --- a/tests/integration/api/test_version.py +++ b/tests/integration/api/test_version.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Integration tests for the Merino __version__ API endpoint.""" + import logging import pathlib from datetime import datetime @@ -56,8 +57,7 @@ def test_version_request_log_data( expected_log_data: RequestSummaryLogDataModel = RequestSummaryLogDataModel( agent=( - "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" - " Gecko/20100101 Firefox/103.0" + "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0" ), path="/__version__", method="GET", diff --git a/tests/integration/api/v1/curated_recommendations/test_curated_recommendations.py b/tests/integration/api/v1/curated_recommendations/test_curated_recommendations.py index 72c7a4057..db4e041ac 100644 --- a/tests/integration/api/v1/curated_recommendations/test_curated_recommendations.py +++ b/tests/integration/api/v1/curated_recommendations/test_curated_recommendations.py @@ -33,9 +33,7 @@ def test_curated_recommendations_locale(client: TestClient) -> None: result = response.json() assert result["recommendedAt"] == 1326511294000 # 2012-01-14 03:21:34 UTC - actual_recommendation: CuratedRecommendation = CuratedRecommendation( - **result["data"][0] - ) + actual_recommendation: CuratedRecommendation = CuratedRecommendation(**result["data"][0]) assert actual_recommendation == expected_recommendation diff --git a/tests/integration/api/v1/suggest/conftest.py b/tests/integration/api/v1/suggest/conftest.py index 8c3eae103..bfb148ee7 100644 --- a/tests/integration/api/v1/suggest/conftest.py +++ b/tests/integration/api/v1/suggest/conftest.py @@ -95,9 +95,7 @@ def fixture_providers() -> Providers: ) """ # Ensures this test interface takes into account disabled providers. - enabled_providers = { - k: v for k, v in providers.items() if k not in disabled_providers - } + enabled_providers = {k: v for k, v in providers.items() if k not in disabled_providers} setup_providers(enabled_providers) yield await teardown_providers(providers) diff --git a/tests/integration/api/v1/suggest/test_providers.py b/tests/integration/api/v1/suggest/test_providers.py index fac328be2..b166980dc 100644 --- a/tests/integration/api/v1/suggest/test_providers.py +++ b/tests/integration/api/v1/suggest/test_providers.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Integration tests for the Merino v1 providers API endpoint.""" + import logging from datetime import datetime from logging import LogRecord @@ -55,9 +56,7 @@ ], { "sponsored": FakeProviderFactory.sponsored(enabled_by_default=True), - "nonsponsored": FakeProviderFactory.nonsponsored( - enabled_by_default=False - ), + "nonsponsored": FakeProviderFactory.nonsponsored(enabled_by_default=False), "hidden-provider": FakeProviderFactory.hidden(enabled_by_default=True), }, ), @@ -132,8 +131,7 @@ def test_providers_request_log_data( expected_log_data: RequestSummaryLogDataModel = RequestSummaryLogDataModel( agent=( - "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" - " Gecko/20100101 Firefox/103.0" + "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0" ), path="/api/v1/providers", method="GET", diff --git a/tests/integration/api/v1/suggest/test_suggest.py b/tests/integration/api/v1/suggest/test_suggest.py index 426e0fc6d..2d77f7d79 100644 --- a/tests/integration/api/v1/suggest/test_suggest.py +++ b/tests/integration/api/v1/suggest/test_suggest.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Integration tests for the Merino v1 suggest API endpoint.""" + import logging from datetime import datetime @@ -451,9 +452,7 @@ def test_suggest_feature_flags_tags_in_metrics( # TODO: Remove reliance on internal details of aiodogstatsd feature_flag_tags_per_metric = { - call.args[0]: [ - tag for tag in call.args[3].keys() if tag.startswith("feature_flag.") - ] + call.args[0]: [tag for tag in call.args[3].keys() if tag.startswith("feature_flag.")] for call in report.call_args_list } diff --git a/tests/integration/api/v1/suggest/test_suggest_amo.py b/tests/integration/api/v1/suggest/test_suggest_amo.py index e83690cca..7fff399d4 100644 --- a/tests/integration/api/v1/suggest/test_suggest_amo.py +++ b/tests/integration/api/v1/suggest/test_suggest_amo.py @@ -21,23 +21,17 @@ SCENARIOS: dict[str, Scenario] = { "Case-I: Returns Matched Addon": Scenario( - providers={ - "addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS) - }, + providers={"addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS)}, query="night mo", expected_title="Dark Reader", ), "Case-II: No Addon Matches": Scenario( - providers={ - "addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS) - }, + providers={"addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS)}, query="nigh", expected_title=None, ), "Case-III: Case Insensitive Match": Scenario( - providers={ - "addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS) - }, + providers={"addons": Provider(backend=StaticAmoBackend(), keywords=ADDON_KEYWORDS)}, query="NIghT", expected_title="Dark Reader", ), diff --git a/tests/integration/api/v1/suggest/test_suggest_top_picks.py b/tests/integration/api/v1/suggest/test_suggest_top_picks.py index 8f0e56004..500352db1 100644 --- a/tests/integration/api/v1/suggest/test_suggest_top_picks.py +++ b/tests/integration/api/v1/suggest/test_suggest_top_picks.py @@ -5,6 +5,7 @@ """Integration tests for the Merino v1 suggest API endpoint configured with the Top Picks provider. 
""" + from typing import Any import pytest @@ -54,9 +55,7 @@ def fixture_top_picks_parameters() -> dict[str, Any]: @pytest.fixture(name="providers") -def fixture_providers( - backend: TopPicksBackend, top_picks_parameters: dict[str, Any] -) -> Providers: +def fixture_providers(backend: TopPicksBackend, top_picks_parameters: dict[str, Any]) -> Providers: """Define providers for this module which are injected automatically.""" return { "top_picks": Provider(backend=backend, **top_picks_parameters) # type: ignore [arg-type] @@ -114,9 +113,7 @@ def test_top_picks_no_result(client: TestClient, query: str): ("acb", "Abc", "https://abc.test"), ], ) -def test_top_picks_short_domains( - client: TestClient, query: str, title: str, url: str -) -> None: +def test_top_picks_short_domains(client: TestClient, query: str, title: str, url: str) -> None: """Test if Top Picks provider responds with a short domain or similar""" expected_suggestion: list[Suggestion] = [ Suggestion( diff --git a/tests/integration/api/v1/suggest/test_suggest_weather.py b/tests/integration/api/v1/suggest/test_suggest_weather.py index 855d777e0..c6a2c521d 100644 --- a/tests/integration/api/v1/suggest/test_suggest_weather.py +++ b/tests/integration/api/v1/suggest/test_suggest_weather.py @@ -5,6 +5,7 @@ """Integration tests for the Merino v1 suggest API endpoint configured with a weather provider. """ + import logging from datetime import datetime from logging import LogRecord @@ -247,9 +248,7 @@ def test_suggest_weather_backend_error( expected_log_messages: list[dict[str, str]] = [ {"levelname": "WARNING", "message": "Could not generate a weather report"} ] - backend_mock.get_weather_report.side_effect = BackendError( - expected_log_messages[0]["message"] - ) + backend_mock.get_weather_report.side_effect = BackendError(expected_log_messages[0]["message"]) response = client.get("/api/v1/suggest?q=weather") @@ -289,9 +288,7 @@ def test_providers_request_log_data_weather( records: list[LogRecord] = filter_caplog(caplog.records, "request.summary") assert len(records) == 1 - suggest_records: list[LogRecord] = filter_caplog( - caplog.records, "web.suggest.request" - ) + suggest_records: list[LogRecord] = filter_caplog(caplog.records, "web.suggest.request") assert len(suggest_records) == 0 record: LogRecord = records[0] @@ -344,9 +341,9 @@ def test_suggest_with_location_completion( assert response.status_code == 200 result = response.json() - assert expected_suggestion == TypeAdapter( - list[LocationCompletionSuggestion] - ).validate_python(result["suggestions"]) + assert expected_suggestion == TypeAdapter(list[LocationCompletionSuggestion]).validate_python( + result["suggestions"] + ) def test_suggest_with_location_completion_with_empty_search_term( diff --git a/tests/integration/api/v1/suggest/test_suggest_wikipedia.py b/tests/integration/api/v1/suggest/test_suggest_wikipedia.py index ca75a11f5..a78af5689 100644 --- a/tests/integration/api/v1/suggest/test_suggest_wikipedia.py +++ b/tests/integration/api/v1/suggest/test_suggest_wikipedia.py @@ -31,9 +31,7 @@ SCENARIOS: dict[str, Scenario] = { "Case-I: Backend returns": Scenario( providers={ - "wikipedia": Provider( - backend=FakeEchoWikipediaBackend(), title_block_list=BLOCK_LIST - ) + "wikipedia": Provider(backend=FakeEchoWikipediaBackend(), title_block_list=BLOCK_LIST) }, query="foo bar", expected_suggestion_count=1, @@ -53,9 +51,7 @@ ), "Case-III: Block list filter": Scenario( providers={ - "wikipedia": Provider( - backend=FakeEchoWikipediaBackend(), title_block_list=BLOCK_LIST - ) + 
"wikipedia": Provider(backend=FakeEchoWikipediaBackend(), title_block_list=BLOCK_LIST) }, query="unsafe content", expected_suggestion_count=0, diff --git a/tests/integration/api/v1/suggest/test_timeout_handling.py b/tests/integration/api/v1/suggest/test_timeout_handling.py index 79271a46d..bcdaa73fc 100644 --- a/tests/integration/api/v1/suggest/test_timeout_handling.py +++ b/tests/integration/api/v1/suggest/test_timeout_handling.py @@ -5,6 +5,7 @@ """Integration tests for the Merino v1 suggest API endpoint focusing on time out behavior. """ + from collections import namedtuple import aiodogstatsd @@ -35,9 +36,7 @@ # - Timeout metrics recorded in the task runner "Case-I: A-timed-out-provider": Scenario( providers={ - "timedout-sponsored": FakeProviderFactory.timeout_sponsored( - enabled_by_default=True - ), + "timedout-sponsored": FakeProviderFactory.timeout_sponsored(enabled_by_default=True), }, expected_suggestion_count=0, expected_logs_on_task_runner={ @@ -63,9 +62,7 @@ "Case-II: A-non-timed-out-and-a-timed-out-providers": Scenario( providers={ "sponsored": FakeProviderFactory.sponsored(enabled_by_default=True), - "timedout-sponsored": FakeProviderFactory.timeout_sponsored( - enabled_by_default=True - ), + "timedout-sponsored": FakeProviderFactory.timeout_sponsored(enabled_by_default=True), }, expected_suggestion_count=1, expected_logs_on_task_runner={ @@ -119,9 +116,7 @@ "Case-IV: A-non-timed-out-and-a-timed-out-tolerant-and-a-timed-out-providers": Scenario( providers={ "sponsored": FakeProviderFactory.sponsored(enabled_by_default=True), - "timedout-sponsored": FakeProviderFactory.timeout_sponsored( - enabled_by_default=True - ), + "timedout-sponsored": FakeProviderFactory.timeout_sponsored(enabled_by_default=True), "timedout-tolerant-sponsored": FakeProviderFactory.timeout_tolerant_sponsored( enabled_by_default=True ), @@ -178,9 +173,7 @@ def test_timedout_providers( # Check logs for the timed out query(-ies) records = filter_caplog(caplog.records, "merino.utils.task_runner") - assert { - record.__dict__["msg"] for record in records - } == expected_logs_on_task_runner + assert {record.__dict__["msg"] for record in records} == expected_logs_on_task_runner # Check metrics for the timed out query(-ies) assert {call.args[0] for call in report.call_args_list} == expected_metric_keys diff --git a/tests/integration/api/v1/test_unsupported.py b/tests/integration/api/v1/test_unsupported.py index dd0e530c5..4d4c5a131 100644 --- a/tests/integration/api/v1/test_unsupported.py +++ b/tests/integration/api/v1/test_unsupported.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Integration tests for unsupported Merino v1 API endpoints.""" + import logging from datetime import datetime from logging import LogRecord @@ -32,8 +33,7 @@ def test_unsupported_endpoint_request_log_data( expected_log_data: RequestSummaryLogDataModel = RequestSummaryLogDataModel( agent=( - "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" - " Gecko/20100101 Firefox/103.0" + "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0" ), path="/api/v1/unsupported", method="GET", @@ -63,9 +63,7 @@ def test_unsupported_endpoint_request_log_data( assert log_data == expected_log_data -def test_unsupported_endpoint_metrics( - mocker: MockerFixture, client: TestClient -) -> None: +def test_unsupported_endpoint_metrics(mocker: MockerFixture, client: TestClient) -> None: """Test that metrics are recorded for unsupported endpoints (status code 404).""" expected_metric_keys: list[str] = ["response.status_codes.404"] diff --git a/tests/integration/jobs/navigational_suggestions/test_domain_metadata_uploader.py b/tests/integration/jobs/navigational_suggestions/test_domain_metadata_uploader.py index 26216ce7c..6d5215a0f 100644 --- a/tests/integration/jobs/navigational_suggestions/test_domain_metadata_uploader.py +++ b/tests/integration/jobs/navigational_suggestions/test_domain_metadata_uploader.py @@ -55,7 +55,8 @@ def gcs_storage_client(gcs_storage_container) -> Client: after this test suite has finished running """ client: Client = Client( - credentials=AnonymousCredentials(), project="test_gcp_uploader_project" # type: ignore + credentials=AnonymousCredentials(), # type: ignore + project="test_gcp_uploader_project", ) yield client @@ -159,9 +160,7 @@ def test_upload_top_picks(gcs_storage_client, gcs_storage_bucket): ) # create a DomainMetadataUploader instance - domain_metadata_uploader = DomainMetadataUploader( - uploader=gcp_uploader, force_upload=False - ) + domain_metadata_uploader = DomainMetadataUploader(uploader=gcp_uploader, force_upload=False) timestamp = datetime.now().strftime("%Y%m%d%H%M%S") @@ -177,9 +176,7 @@ def test_upload_top_picks(gcs_storage_client, gcs_storage_bucket): assert top_picks_latest_blob is not None -def test_upload_favicons( - gcs_storage_client, gcs_storage_bucket, mock_favicon_downloader -): +def test_upload_favicons(gcs_storage_client, gcs_storage_bucket, mock_favicon_downloader): """Test upload_favicons method of DomainMetaDataUploader. This test uses the mocked version of the favicon downloader. This test also implicitly tests the underlying gcs uploader methods. """ @@ -202,9 +199,7 @@ def test_upload_favicons( # call the upload method with a test top picks json uploaded_favicons = domain_metadata_uploader.upload_favicons(test_favicons) - bucket_with_uploaded_favicons = gcp_uploader.storage_client.get_bucket( - gcs_storage_bucket.name - ) + bucket_with_uploaded_favicons = gcp_uploader.storage_client.get_bucket(gcs_storage_bucket.name) assert uploaded_favicons is not None assert len(uploaded_favicons) == len(test_favicons) @@ -228,18 +223,12 @@ def test_get_latest_file_for_diff(gcs_storage_client, gcs_storage_bucket): ) # create a DomainMetadataUploader instance - domain_metadata_uploader = DomainMetadataUploader( - uploader=gcp_uploader, force_upload=False - ) + domain_metadata_uploader = DomainMetadataUploader(uploader=gcp_uploader, force_upload=False) # upload test_top_picks_1 for the 2024... 
file - gcp_uploader.upload_content( - json.dumps(test_top_picks_1), "20240101120555_top_picks.json" - ) + gcp_uploader.upload_content(json.dumps(test_top_picks_1), "20240101120555_top_picks.json") # upload test_top_picks_2 for the 2023... file - gcp_uploader.upload_content( - json.dumps(test_top_picks_2), "20230101120555_top_picks.json" - ) + gcp_uploader.upload_content(json.dumps(test_top_picks_2), "20230101120555_top_picks.json") # get the latest file latest_file = domain_metadata_uploader.get_latest_file_for_diff() @@ -248,9 +237,7 @@ def test_get_latest_file_for_diff(gcs_storage_client, gcs_storage_bucket): assert latest_file == test_top_picks_1 -def test_get_latest_file_for_diff_when_no_file_is_found( - gcs_storage_client, gcs_storage_bucket -): +def test_get_latest_file_for_diff_when_no_file_is_found(gcs_storage_client, gcs_storage_bucket): """Test get_latest_file_for_diff method of DomainMetaDataUploader. This test also tests implicitly the get_latest_file_for_diff method on the GcsUploader """ @@ -262,9 +249,7 @@ def test_get_latest_file_for_diff_when_no_file_is_found( ) # create a DomainMetadataUploader instance - domain_metadata_uploader = DomainMetadataUploader( - uploader=gcp_uploader, force_upload=False - ) + domain_metadata_uploader = DomainMetadataUploader(uploader=gcp_uploader, force_upload=False) # this should return None since we didn't upload anything to our gcs bucket latest_file = domain_metadata_uploader.get_latest_file_for_diff() diff --git a/tests/integration/jobs/wikipedia_indexer/test_indexer.py b/tests/integration/jobs/wikipedia_indexer/test_indexer.py index a6abaea36..d4529c505 100644 --- a/tests/integration/jobs/wikipedia_indexer/test_indexer.py +++ b/tests/integration/jobs/wikipedia_indexer/test_indexer.py @@ -61,9 +61,7 @@ def test_get_index_name( expected, ): """Test filename to index name parsing.""" - indexer = Indexer( - version, category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer(version, category_blocklist, title_blocklist, file_manager, es_client) index_name = indexer._get_index_name(file_name) assert index_name == expected @@ -93,9 +91,7 @@ def test_create_index( es_client.indices.create.return_value = create_return index_name = "enwiki-123-v1" - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) assert expected_return == indexer._create_index(index_name) assert expected_create_called == es_client.indices.create.called @@ -107,9 +103,7 @@ def test_index_from_export_no_exports_available( """Test that RuntimeError is emitted.""" file_manager.get_latest_gcs.return_value = Blob("", "bucket") es_client.indices.exists.return_value = False - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) with pytest.raises(RuntimeError) as exc_info: indexer.index_from_export(100, "fake_alias") @@ -128,9 +122,7 @@ def test_index_from_export_fail_on_existing_index( ) es_client.indices.exists.return_value = False es_client.indices.create.return_value = {} - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) with pytest.raises(Exception) as exc_info: indexer.index_from_export(100, "fake_alias") @@ -193,9 +185,7 @@ def test_flip_alias( 
es_client.indices.exists_alias.return_value = len(existing_indices) > 0 es_client.indices.get_alias.return_value = existing_indices - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) indexer._flip_alias_to_latest(new_index_name, alias_name) assert es_client.indices.update_aliases.called @@ -242,9 +232,7 @@ def test_index_from_export( ] file_manager.stream_from_gcs.return_value = (input for input in inputs) - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) indexer.index_from_export(1, "enwiki") @@ -310,9 +298,7 @@ def check_bulk_side_effect(operations): ] file_manager.stream_from_gcs.return_value = (input for input in inputs) - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) indexer.index_from_export(1, "enwiki") @@ -378,9 +364,7 @@ def check_bulk_side_effect(operations): ] file_manager.stream_from_gcs.return_value = (input for input in inputs) - indexer = Indexer( - "v1", category_blocklist, title_blocklist, file_manager, es_client - ) + indexer = Indexer("v1", category_blocklist, title_blocklist, file_manager, es_client) indexer.index_from_export(1, "enwiki") diff --git a/tests/integration/test_setup.py b/tests/integration/test_setup.py index ad5f36d4d..9f21650f6 100644 --- a/tests/integration/test_setup.py +++ b/tests/integration/test_setup.py @@ -15,9 +15,7 @@ def test_unknown_providers_should_shutdown_app(mocker: MockerFixture) -> None: """Test Merino should shut down upon an unknown provider.""" - mocker.patch.dict( - settings.providers, values={"unknown-provider": {"type": "unknown-type"}} - ) + mocker.patch.dict(settings.providers, values={"unknown-provider": {"type": "unknown-type"}}) with pytest.raises(InvalidProviderError) as excinfo: # This will run all the FastAPI startup event handlers. 
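
(Editor's aside, not part of the patch.) A pattern worth calling out in the test hunks above: where a string literal was previously wrapped across two physical lines, the reformat joins the lines but leaves the two adjacent literals unmerged, e.g. "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0". To my understanding, ruff's formatter, like black, deliberately preserves implicit string concatenation rather than fusing literals, so these hunks are layout-only. A minimal, self-contained sketch of why the two spellings behave identically:

# Adjacent string literals are concatenated by the Python compiler, so the
# joined-but-unmerged form left by the formatter is the same string at runtime.
# Merging the literals by hand is purely a readability cleanup.
agent_split = (
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0)" " Gecko/20100101 Firefox/103.0"
)
agent_merged = "Mozilla/5.0 (Macintosh; Intel Mac OS X 11.2; rv:85.0) Gecko/20100101 Firefox/103.0"
assert agent_split == agent_merged
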
diff --git a/tests/load/locustfiles/locustfile.py b/tests/load/locustfiles/locustfile.py index 7845029c9..5cd2ffe64 100644 --- a/tests/load/locustfiles/locustfile.py +++ b/tests/load/locustfiles/locustfile.py @@ -56,9 +56,7 @@ # IP RANGE CSV FILES (GZIP) # This test framework uses IP2Location LITE data available from # https://lite.ip2location.com -CANADA_IP_ADDRESS_RANGES_GZIP: str = ( - "tests/load/data/ip2location_canada_ip_address_ranges.gz" -) +CANADA_IP_ADDRESS_RANGES_GZIP: str = "tests/load/data/ip2location_canada_ip_address_ranges.gz" US_IP_ADDRESS_RANGES_GZIP: str = ( "tests/load/data/ip2location_united_states_of_america_ip_address_ranges.gz" ) @@ -74,9 +72,7 @@ MERINO_PROVIDERS__TOP_PICKS__FIREFOX_CHAR_LIMIT: int = int( os.getenv("MERINO_PROVIDERS__TOP_PICKS__FIREFOX_CHAR_LIMIT", 0) ) -MERINO_PROVIDERS__WIKIPEDIA__ES_URL: str | None = os.getenv( - "MERINO_PROVIDERS__WIKIPEDIA__ES_URL" -) +MERINO_PROVIDERS__WIKIPEDIA__ES_URL: str | None = os.getenv("MERINO_PROVIDERS__WIKIPEDIA__ES_URL") MERINO_PROVIDERS__WIKIPEDIA__ES_API_KEY: str | None = os.getenv( "MERINO_PROVIDERS__WIKIPEDIA__ES_API_KEY" ) @@ -143,9 +139,7 @@ def on_locust_test_start(environment, **kwargs) -> None: ip_range_files=[CANADA_IP_ADDRESS_RANGES_GZIP, US_IP_ADDRESS_RANGES_GZIP] ) - logger.info( - f"Download {len(query_data.ip_ranges)} IP ranges for X-Forward-For headers" - ) + logger.info(f"Download {len(query_data.ip_ranges)} IP ranges for X-Forward-For headers") except ( ApiError, ElasticsearchWarning, @@ -159,14 +153,10 @@ def on_locust_test_start(environment, **kwargs) -> None: quit(1) for worker in environment.runner.clients: - environment.runner.send_message( - "store_suggestions", dict(query_data), client_id=worker - ) + environment.runner.send_message("store_suggestions", dict(query_data), client_id=worker) -def get_adm_queries( - server: str | None, collection: str | None, bucket: str | None -) -> QueriesList: +def get_adm_queries(server: str | None, collection: str | None, bucket: str | None) -> QueriesList: """Get query strings for use in testing the AdM Provider. Args: @@ -196,9 +186,7 @@ def get_amo_queries() -> list[str]: Returns: List[str]: List of full query strings to use with the AMO provider """ - return list( - set(map(lambda x: x.lower(), chain.from_iterable(ADDON_KEYWORDS.values()))) - ) + return list(set(map(lambda x: x.lower(), chain.from_iterable(ADDON_KEYWORDS.values())))) def get_top_picks_queries( @@ -242,9 +230,7 @@ def add_queries(index: dict[str, list[int]], queries: dict[int, list[str]]): return list(query_dict.values()) -def get_wikipedia_queries( - url: str | None, api_key: str | None, index: str | None -) -> list[str]: +def get_wikipedia_queries(url: str | None, api_key: str | None, index: str | None) -> list[str]: """Get query strings for use in testing the Wikipedia Provider. 
Args: @@ -479,9 +465,7 @@ def _request_suggestions( # manually control if an HTTP request should be marked as successful or # a failure in Locust's statistics if response.status_code != 200: - response.failure( - f"{response.status_code=}, expected 200, {response.text=}" - ) + response.failure(f"{response.status_code=}, expected 200, {response.text=}") return # Create a pydantic model instance for validating the response content @@ -501,12 +485,8 @@ def _request_version(self) -> Version | None: "Accept-Language": choice(LOCALES), # nosec "User-Agent": choice(DESKTOP_FIREFOX), # nosec } - with self.client.get( - url=VERSION_API, headers=headers, catch_response=True - ) as response: + with self.client.get(url=VERSION_API, headers=headers, catch_response=True) as response: if response.status_code != 200: - response.failure( - f"{response.status_code=}, expected 200, {response.text=}" - ) + response.failure(f"{response.status_code=}, expected 200, {response.text=}") return None return Version(**response.json()) diff --git a/tests/unit/content_handler/test_gcp_uploader.py b/tests/unit/content_handler/test_gcp_uploader.py index 04a50f1df..cfaa81419 100644 --- a/tests/unit/content_handler/test_gcp_uploader.py +++ b/tests/unit/content_handler/test_gcp_uploader.py @@ -13,9 +13,7 @@ @pytest.fixture def mock_gcs_client(mocker, mock_gcs_bucket): """Return a mock GCS Client instance""" - mock_client = mocker.patch( - "merino.content_handler.gcp_uploader.Client" - ).return_value + mock_client = mocker.patch("merino.content_handler.gcp_uploader.Client").return_value mock_client.get_bucket.return_value = mock_gcs_bucket return mock_client @@ -29,9 +27,7 @@ def mock_gcs_blob(mocker): @pytest.fixture def mock_most_recent_gcs_blob(mocker): """Return a mock GCS Blob instance""" - most_recent_blob = mocker.patch( - "merino.content_handler.gcp_uploader.Blob" - ).return_value + most_recent_blob = mocker.patch("merino.content_handler.gcp_uploader.Blob").return_value most_recent_blob.name = "20220101120555_top_picks.json" return most_recent_blob @@ -118,9 +114,7 @@ def test_upload_image_with_https_cdn_host_name( caplog.set_level(INFO) # creating the uploader object with cdn host name containing "https" - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) # force upload is set to FALSE by default result = gcp_uploader.upload_image(test_image, test_destination_name) @@ -149,9 +143,7 @@ def test_get_most_recent_file_with_two_files( # set the mock bucket's blob list to the two mocked blobs mock_gcs_bucket.list_blobs.return_value = [mock_most_recent_gcs_blob, mock_gcs_blob] - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) excluded_file: str = "excluded.json" result = gcp_uploader.get_most_recent_file( @@ -179,9 +171,7 @@ def test_get_most_recent_file_with_excluded_file( # bucket only contains the excluded file mock_gcs_bucket.list_blobs.return_value = [mock_gcs_blob] - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) # call the method with the exclusion argument set to the excluded file result = gcp_uploader.get_most_recent_file( @@ -209,9 +199,7 @@ def test_upload_content_with_forced_upload_false_and_existing_blob( 
mock_gcs_client.bucket.return_value = mock_gcs_bucket mock_gcs_bucket.blob.return_value = mock_gcs_blob - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) content = bytes(255) # call the method @@ -246,14 +234,10 @@ def test_upload_content_with_forced_upload_true_and_existing_blob( mock_gcs_client.bucket.return_value = mock_gcs_bucket mock_gcs_bucket.blob.return_value = mock_gcs_blob - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) content = bytes(255) - result = gcp_uploader.upload_content( - content, test_destination_name, forced_upload=True - ) + result = gcp_uploader.upload_content(content, test_destination_name, forced_upload=True) # capture logger info output log_records: list[LogRecord] = filter_caplog( @@ -264,9 +248,7 @@ def test_upload_content_with_forced_upload_true_and_existing_blob( assert len(log_records) == 1 assert log_records[0].message.startswith(f"Uploading blob: {mock_gcs_blob}") - mock_gcs_blob.upload_from_string.assert_called_once_with( - content, content_type="text/plain" - ) + mock_gcs_blob.upload_from_string.assert_called_once_with(content, content_type="text/plain") mock_gcs_blob.make_public.assert_called_once() @@ -292,9 +274,7 @@ def test_upload_content_with_exception_thrown( # make the blob.make_public() method throw a run time exception mock_gcs_blob.make_public.side_effect = RuntimeError("test-exception") - gcp_uploader = GcsUploader( - mock_gcs_client, test_bucket_name, test_https_cdn_host_name - ) + gcp_uploader = GcsUploader(mock_gcs_client, test_bucket_name, test_https_cdn_host_name) # call the method gcp_uploader.upload_content(content, test_destination_name, forced_upload=True) diff --git a/tests/unit/jobs/amo_rs_uploader/test_amo_rs_uploader.py b/tests/unit/jobs/amo_rs_uploader/test_amo_rs_uploader.py index 0e571a0eb..65d5b6f25 100644 --- a/tests/unit/jobs/amo_rs_uploader/test_amo_rs_uploader.py +++ b/tests/unit/jobs/amo_rs_uploader/test_amo_rs_uploader.py @@ -31,8 +31,7 @@ def mock_addons_data( type(mock_backend).dynamic_data = mocker.PropertyMock( return_value={ f"addon-{a}": { - f"dynamic-key-{a}-{p}": f"dynamic-value-{a}-{p}" - for p in range(data_count) + f"dynamic-key-{a}-{p}": f"dynamic-value-{a}-{p}" for p in range(data_count) } for a in range(addon_count) } @@ -43,8 +42,7 @@ def mock_addons_data( "merino.jobs.amo_rs_uploader.ADDON_DATA", new={ f"addon-{a}": { - f"static-key-{a}-{p}": f"static-value-{a}-{p}" - for p in range(data_count) + f"static-key-{a}-{p}": f"static-value-{a}-{p}" for p in range(data_count) } | {"name": f"name-{a}"} for a in range(addon_count) @@ -55,8 +53,7 @@ def mock_addons_data( mocker.patch( "merino.jobs.amo_rs_uploader.ADDON_KEYWORDS", new={ - f"addon-{a}": [f"kw-{a}-{k}" for k in range(keyword_count)] - for a in range(addon_count) + f"addon-{a}": [f"kw-{a}-{k}" for k in range(keyword_count)] for a in range(addon_count) }, ) @@ -97,9 +94,7 @@ def do_upload_test( mock_chunked_uploader_ctor = mocker.patch( "merino.jobs.amo_rs_uploader.ChunkedRemoteSettingsSuggestionUploader" ) - mock_chunked_uploader = ( - mock_chunked_uploader_ctor.return_value.__enter__.return_value - ) + mock_chunked_uploader = mock_chunked_uploader_ctor.return_value.__enter__.return_value # Do the upload. 
common_kwargs: dict[str, Any] = { @@ -132,9 +127,7 @@ def do_upload_test( else: mock_chunked_uploader.delete_records.assert_not_called() - mock_chunked_uploader.add_suggestion.assert_has_calls( - expected_add_suggestion_calls(mocker) - ) + mock_chunked_uploader.add_suggestion.assert_has_calls(expected_add_suggestion_calls(mocker)) def test_upload_without_deleting(mocker): diff --git a/tests/unit/jobs/csv_rs_uploader/test_csv_rs_uploader.py b/tests/unit/jobs/csv_rs_uploader/test_csv_rs_uploader.py index d0d80db5b..41f06a965 100644 --- a/tests/unit/jobs/csv_rs_uploader/test_csv_rs_uploader.py +++ b/tests/unit/jobs/csv_rs_uploader/test_csv_rs_uploader.py @@ -63,12 +63,8 @@ def expected_primary_suggestions() -> list[dict[str, Any]]: "url": f"http://example.com/pocket-{s}", "title": f"Title {s}", "description": f"Description {s}", - "lowConfidenceKeywords": [ - f"low-{s}-{k}" for k in range(PRIMARY_KEYWORD_COUNT) - ], - "highConfidenceKeywords": [ - f"high-{s}-{k}" for k in range(PRIMARY_KEYWORD_COUNT) - ], + "lowConfidenceKeywords": [f"low-{s}-{k}" for k in range(PRIMARY_KEYWORD_COUNT)], + "highConfidenceKeywords": [f"high-{s}-{k}" for k in range(PRIMARY_KEYWORD_COUNT)], } ) return suggestions diff --git a/tests/unit/jobs/csv_rs_uploader/test_fakespot.py b/tests/unit/jobs/csv_rs_uploader/test_fakespot.py index 0a4a0fd0c..c3d03fbbe 100644 --- a/tests/unit/jobs/csv_rs_uploader/test_fakespot.py +++ b/tests/unit/jobs/csv_rs_uploader/test_fakespot.py @@ -93,9 +93,7 @@ def verify_field_required(mocker): """Verify that a missing field value will raise a MissingFieldError""" def verify(field_name): - row = { - name: value for (name, value) in TEST_CSV_ROW.items() if name != field_name - } + row = {name: value for (name, value) in TEST_CSV_ROW.items() if name != field_name} do_error_test( mocker, model_name=MODEL_NAME, diff --git a/tests/unit/jobs/csv_rs_uploader/utils.py b/tests/unit/jobs/csv_rs_uploader/utils.py index 26912983d..e313b0538 100644 --- a/tests/unit/jobs/csv_rs_uploader/utils.py +++ b/tests/unit/jobs/csv_rs_uploader/utils.py @@ -37,9 +37,7 @@ def _do_csv_test( mock_chunked_uploader_ctor = mocker.patch( "merino.jobs.csv_rs_uploader.ChunkedRemoteSettingsSuggestionUploader" ) - mock_chunked_uploader = ( - mock_chunked_uploader_ctor.return_value.__enter__.return_value - ) + mock_chunked_uploader = mock_chunked_uploader_ctor.return_value.__enter__.return_value # Do the upload. 
common_kwargs: dict[str, Any] = { diff --git a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_diff.py b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_diff.py index 08ad29719..4bfc4cb5e 100644 --- a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_diff.py +++ b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_diff.py @@ -241,8 +241,6 @@ def test_create_diff(json_domain_data_latest, json_domain_data_old) -> None: "total_domains_unchanged": 5, "newly_added_domains": 1, "newly_added_urls": 2, - "new_urls_summary": sorted( - ["https://test.firefox.com", "https://testexample.com"] - ), + "new_urls_summary": sorted(["https://test.firefox.com", "https://testexample.com"]), } assert diff_file == expected_diff diff --git a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_extractor.py b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_extractor.py index 607afa70a..cbc0c0e06 100644 --- a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_extractor.py +++ b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_extractor.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for domain_metadata_extractor.py module.""" + from typing import Any import pytest @@ -104,8 +105,7 @@ { "name": "apple-touch-icon", "content": ( - "https://assets.nflxext.com/en_us/layout/ecweb/" - "netflix-app-icon_152.jpg" + "https://assets.nflxext.com/en_us/layout/ecweb/" "netflix-app-icon_152.jpg" ), } ], @@ -217,9 +217,7 @@ "rel": ["icon"], "sizes": "any", "mask": "", - "href": ( - "//www.baidu.com/img/baidu_85beaf5496f291521eb75ba38eacbd87.svg" - ), + "href": ("//www.baidu.com/img/baidu_85beaf5496f291521eb75ba38eacbd87.svg"), }, ], metas=[], @@ -622,9 +620,7 @@ def test_get_domain_metadata( """Test that DomainMetadataExtractor returns favicons as expected""" scraper_mock: Any = mocker.Mock(spec=Scraper) scraper_mock.scrape_favicon_data.return_value = favicon_data - scraper_mock.scrape_favicons_from_manifest.return_value = ( - scraped_favicons_from_manifest - ) + scraper_mock.scrape_favicons_from_manifest.return_value = scraped_favicons_from_manifest scraper_mock.get_default_favicon.return_value = default_favicon scraper_mock.open.return_value = scraped_url scraper_mock.scrape_title.return_value = scraped_title @@ -650,8 +646,8 @@ def test_get_domain_metadata( favicon_downloader=favicon_downloader_mock, ) - domain_metadata: list[dict[str, str | None]] = ( - metadata_extractor.get_domain_metadata(domains_data, favicon_min_width=32) + domain_metadata: list[dict[str, str | None]] = metadata_extractor.get_domain_metadata( + domains_data, favicon_min_width=32 ) assert domain_metadata == expected_domain_metadata diff --git a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_uploader.py b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_uploader.py index ccda1e838..089e16bba 100644 --- a/tests/unit/jobs/navigational_suggestions/test_domain_metadata_uploader.py +++ b/tests/unit/jobs/navigational_suggestions/test_domain_metadata_uploader.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Unit tests for domain_metadata_uploader.py module.""" + import json from logging import INFO, LogRecord from typing import Any @@ -199,9 +200,7 @@ def fixture_remote_blob_newest(mocker: MockerFixture, json_domain_data) -> Any: @pytest.fixture(name="remote_bucket", autouse=True) -def fixture_remote_bucket( - mocker: MockerFixture, remote_blob, remote_blob_newest -) -> Any: +def fixture_remote_bucket(mocker: MockerFixture, remote_blob, remote_blob_newest) -> Any: """Create a remote bucket mock object for testing.""" remote_bucket = mocker.MagicMock(spec=Bucket) remote_bucket.list_blobs.return_value = [remote_blob, remote_blob_newest] @@ -259,9 +258,7 @@ def test_upload_top_picks( assert result.name == mock_blob.name -def test_upload_favicons_upload_if_not_present( - mock_favicon_downloader, mock_gcs_uploader -) -> None: +def test_upload_favicons_upload_if_not_present(mock_favicon_downloader, mock_gcs_uploader) -> None: """Test that favicons are uploaded only if not already present in GCS when force upload is not set """ @@ -278,9 +275,7 @@ def test_upload_favicons_upload_if_not_present( ) uploaded_favicons = domain_metadata_uploader.upload_favicons(["favicon1.png"]) - destination_favicon_name = domain_metadata_uploader.destination_favicon_name( - dummy_favicon - ) + destination_favicon_name = domain_metadata_uploader.destination_favicon_name(dummy_favicon) assert uploaded_favicons == [UPLOADED_FAVICON_PUBLIC_URL] mock_gcs_uploader.upload_image.assert_called_once_with( @@ -305,9 +300,7 @@ def test_upload_favicons_upload_if_force_upload_set( ) uploaded_favicons = domain_metadata_uploader.upload_favicons(["favicon1.png"]) - destination_favicon_name = domain_metadata_uploader.destination_favicon_name( - dummy_favicon - ) + destination_favicon_name = domain_metadata_uploader.destination_favicon_name(dummy_favicon) assert uploaded_favicons == [UPLOADED_FAVICON_PUBLIC_URL] mock_gcs_uploader.upload_image.assert_called_once_with( @@ -382,9 +375,7 @@ def test_get_latest_file_for_diff( assert len(result["domains"]) == 6 assert len(records) == 1 - assert records[0].message.startswith( - f"Domain file {remote_blob_newest.name} acquired." - ) + assert records[0].message.startswith(f"Domain file {remote_blob_newest.name} acquired.") def test_get_latest_file_for_diff_when_no_file_is_returned_by_the_uploader( diff --git a/tests/unit/jobs/relevancy_uploader/test_chunked_relevancy_rs_uploader.py b/tests/unit/jobs/relevancy_uploader/test_chunked_relevancy_rs_uploader.py index 65709d6a9..231129cd0 100644 --- a/tests/unit/jobs/relevancy_uploader/test_chunked_relevancy_rs_uploader.py +++ b/tests/unit/jobs/relevancy_uploader/test_chunked_relevancy_rs_uploader.py @@ -48,9 +48,7 @@ def __init__( self.size = size self.type = type self.id = id or f"{type}-{start_index}-{size}" - self.url = ( - f"{server}/buckets/{bucket}/collections/{collection}/records/{self.id}" - ) + self.url = f"{server}/buckets/{bucket}/collections/{collection}/records/{self.id}" self.attachment_url = f"{self.url}/attachment" @property @@ -78,9 +76,7 @@ def expected_attachment_request(self) -> dict[str, Any]: """A dict that describes the request that Kinto should receive when the uploader uploads an attachment. 
""" - attachment: list[dict[str, Any]] = [ - {"i": i} for i in range(self.start_index, self.size) - ] + attachment: list[dict[str, Any]] = [{"i": i} for i in range(self.start_index, self.size)] return { "method": "POST", @@ -139,9 +135,7 @@ def check_request(actual, expected: dict[str, Any]) -> None: assert actual_headers == expected["headers"] -def check_upload_requests( - actual_requests: list, expected_records: list[Record] -) -> None: +def check_upload_requests(actual_requests: list, expected_records: list[Record]) -> None: """Assert a list of actual requests matches expected requests given the expected records. Each record should correspond to two requests, one for uploading the record and one for uploading the attachment. diff --git a/tests/unit/jobs/relevancy_uploader/test_relevancy_csv_uploader.py b/tests/unit/jobs/relevancy_uploader/test_relevancy_csv_uploader.py index de9221558..d0d9b0541 100644 --- a/tests/unit/jobs/relevancy_uploader/test_relevancy_csv_uploader.py +++ b/tests/unit/jobs/relevancy_uploader/test_relevancy_csv_uploader.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for __init__.py module.""" + import base64 import pathlib from hashlib import md5 diff --git a/tests/unit/jobs/relevancy_uploader/utils.py b/tests/unit/jobs/relevancy_uploader/utils.py index 019feb5f0..d5bc2ef8f 100644 --- a/tests/unit/jobs/relevancy_uploader/utils.py +++ b/tests/unit/jobs/relevancy_uploader/utils.py @@ -35,9 +35,7 @@ def _do_csv_test( mock_chunked_uploader_ctor = mocker.patch( "merino.jobs.relevancy_uploader.ChunkedRemoteSettingsRelevancyUploader" ) - mock_chunked_uploader = ( - mock_chunked_uploader_ctor.return_value.__enter__.return_value - ) + mock_chunked_uploader = mock_chunked_uploader_ctor.return_value.__enter__.return_value # Do the upload. 
common_kwargs: dict[str, Any] = { @@ -60,7 +58,7 @@ def _do_csv_test( record_type="category_to_domains", total_data_count=len(primary_category_data), category_name="Sports", - category_code=17 + category_code=17, ) mock_chunked_uploader_ctor.assert_any_call( @@ -68,7 +66,7 @@ def _do_csv_test( record_type="category_to_domains", total_data_count=len(secondary_category_data), category_name="News", - category_code=14 + category_code=14, ) mock_chunked_uploader_ctor.assert_any_call( @@ -76,7 +74,7 @@ def _do_csv_test( record_type="category_to_domains", total_data_count=len(inconclusive_category_data), category_name="Inconclusive", - category_code=0 + category_code=0, ) if not keep_existing_records and version == 1: @@ -84,9 +82,7 @@ def _do_csv_test( else: mock_chunked_uploader.delete_records.assert_not_called() - mock_chunked_uploader.add_data.assert_has_calls( - [*map(mocker.call, primary_category_data)] - ) + mock_chunked_uploader.add_data.assert_has_calls([*map(mocker.call, primary_category_data)]) def do_csv_test( diff --git a/tests/unit/jobs/utils/test_chunked_rs_uploader.py b/tests/unit/jobs/utils/test_chunked_rs_uploader.py index 863d9aca6..393dd71c1 100644 --- a/tests/unit/jobs/utils/test_chunked_rs_uploader.py +++ b/tests/unit/jobs/utils/test_chunked_rs_uploader.py @@ -49,9 +49,7 @@ def __init__( self.type = type self.suggestion_score = suggestion_score self.id = id or f"{type}-{start_index}-{size}" - self.url = ( - f"{server}/buckets/{bucket}/collections/{collection}/records/{self.id}" - ) + self.url = f"{server}/buckets/{bucket}/collections/{collection}/records/{self.id}" self.attachment_url = f"{self.url}/attachment" @property @@ -73,9 +71,7 @@ def expected_attachment_request(self) -> dict[str, Any]: """A dict that describes the request that Kinto should receive when the uploader uploads an attachment. """ - attachment: list[dict[str, Any]] = [ - {"i": i} for i in range(self.start_index, self.size) - ] + attachment: list[dict[str, Any]] = [{"i": i} for i in range(self.start_index, self.size)] if self.suggestion_score: for suggestion in attachment: suggestion["score"] = self.suggestion_score @@ -130,9 +126,7 @@ def check_request(actual, expected: dict[str, Any]) -> None: assert actual_headers == expected["headers"] -def check_upload_requests( - actual_requests: list, expected_records: list[Record] -) -> None: +def check_upload_requests(actual_requests: list, expected_records: list[Record]) -> None: """Assert a list of actual requests matches expected requests given the expected records. Each record should correspond to two requests, one for uploading the record and one for uploading the attachment. 
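
(Editor's aside, not part of the patch.) The uploader tests above repeatedly reach through mock_chunked_uploader_ctor.return_value.__enter__.return_value to get at the object a with-statement binds. A minimal, self-contained sketch of that mock chain; the ChunkedUploader name and the add_suggestion call are illustrative stand-ins, not the project's real API:

from unittest import mock

# Patching a class gives a mock constructor; attribute access auto-creates children:
#   ctor.return_value                        -> the instance ChunkedUploader(...) returns
#   ctor.return_value.__enter__.return_value -> what the context manager yields to "as u"
ctor = mock.MagicMock(name="ChunkedUploader")
inside_cm = ctor.return_value.__enter__.return_value

with ctor("http://server.example", "bucket") as uploader:
    uploader.add_suggestion({"i": 0})

assert uploader is inside_cm
inside_cm.add_suggestion.assert_called_once_with({"i": 0})
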
diff --git a/tests/unit/middleware/test_logging.py b/tests/unit/middleware/test_logging.py index 1a1e8f3f1..cefc33484 100644 --- a/tests/unit/middleware/test_logging.py +++ b/tests/unit/middleware/test_logging.py @@ -24,9 +24,7 @@ async def test_logging_invalid_scope_type( """Test that no logging action takes place for an unexpected Scope type.""" caplog.set_level(logging.INFO) scope: Scope = {"type": "not-http"} - logging_middleware: LoggingMiddleware = LoggingMiddleware( - mocker.AsyncMock(spec=ASGIApp) - ) + logging_middleware: LoggingMiddleware = LoggingMiddleware(mocker.AsyncMock(spec=ASGIApp)) await logging_middleware(scope, receive_mock, send_mock) diff --git a/tests/unit/middleware/test_metrics.py b/tests/unit/middleware/test_metrics.py index 5d0768005..9ad4e2d06 100644 --- a/tests/unit/middleware/test_metrics.py +++ b/tests/unit/middleware/test_metrics.py @@ -24,9 +24,7 @@ async def test_metrics_invalid_scope_type( """Test that no logging action takes place for an unexpected Scope type.""" caplog.set_level(logging.INFO) scope: Scope = {"type": "not-http"} - metrics_middleware: MetricsMiddleware = MetricsMiddleware( - mocker.AsyncMock(spec=ASGIApp) - ) + metrics_middleware: MetricsMiddleware = MetricsMiddleware(mocker.AsyncMock(spec=ASGIApp)) await metrics_middleware(scope, receive_mock, send_mock) diff --git a/tests/unit/providers/adm/backends/test_remotesettings.py b/tests/unit/providers/adm/backends/test_remotesettings.py index 542c279a1..41a44bbb2 100644 --- a/tests/unit/providers/adm/backends/test_remotesettings.py +++ b/tests/unit/providers/adm/backends/test_remotesettings.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for the Remote Settings backend module.""" + from typing import Any from urllib.parse import urljoin @@ -226,8 +227,7 @@ def test_init_invalid_remote_settings_parameter_error( values. """ expected_error_value: str = ( - "The Remote Settings 'server', 'collection' or 'bucket' parameters are not " - "specified" + "The Remote Settings 'server', 'collection' or 'bucket' parameters are not " "specified" ) rs_parameters[parameter] = "" @@ -248,9 +248,7 @@ async def test_fetch( ) -> None: """Test that the fetch method returns the proper suggestion content.""" mocker.patch.object(kinto_http.AsyncClient, "get_records", return_value=rs_records) - mocker.patch.object( - kinto_http.AsyncClient, "server_info", return_value=rs_server_info - ) + mocker.patch.object(kinto_http.AsyncClient, "server_info", return_value=rs_server_info) mocker.patch.object(httpx.AsyncClient, "get", return_value=rs_attachment_response) suggestion_content: SuggestionContent = await rs_backend.fetch() @@ -270,12 +268,8 @@ async def test_fetch_no_adm_wikipedia_result( records with Wikipedia defined as advertiser. 
""" mocker.patch.object(kinto_http.AsyncClient, "get_records", return_value=rs_records) - mocker.patch.object( - kinto_http.AsyncClient, "server_info", return_value=rs_server_info - ) - mocker.patch.object( - httpx.AsyncClient, "get", return_value=rs_wiki_attachment_response - ) + mocker.patch.object(kinto_http.AsyncClient, "server_info", return_value=rs_server_info) + mocker.patch.object(httpx.AsyncClient, "get", return_value=rs_wiki_attachment_response) suggestion_content: SuggestionContent = await rs_backend.fetch() @@ -311,9 +305,7 @@ async def test_get_attachment_host( ) -> None: """Test that the method returns the proper attachment host.""" expected_attachment_host: str = "attachment-host/" - mocker.patch.object( - kinto_http.AsyncClient, "server_info", return_value=rs_server_info - ) + mocker.patch.object(kinto_http.AsyncClient, "server_info", return_value=rs_server_info) attachment_host: str = await rs_backend.get_attachment_host() diff --git a/tests/unit/providers/adm/conftest.py b/tests/unit/providers/adm/conftest.py index 544c1905e..41ff379fe 100644 --- a/tests/unit/providers/adm/conftest.py +++ b/tests/unit/providers/adm/conftest.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Module for test configurations for the AdM provider unit test directory.""" + from typing import Any import pytest @@ -54,9 +55,7 @@ def fixture_adm_parameters() -> dict[str, Any]: @pytest.fixture(name="backend_mock") -def fixture_backend_mock( - mocker: MockerFixture, adm_suggestion_content: SuggestionContent -) -> Any: +def fixture_backend_mock(mocker: MockerFixture, adm_suggestion_content: SuggestionContent) -> Any: """Create an AdmBackend mock object for test.""" backend_mock: Any = mocker.AsyncMock(spec=AdmBackend) backend_mock.fetch.return_value = adm_suggestion_content diff --git a/tests/unit/providers/adm/test_provider.py b/tests/unit/providers/adm/test_provider.py index 73109700c..7b258e3fe 100644 --- a/tests/unit/providers/adm/test_provider.py +++ b/tests/unit/providers/adm/test_provider.py @@ -27,9 +27,7 @@ def test_hidden(adm: Provider) -> None: @pytest.mark.asyncio -async def test_initialize( - adm: Provider, adm_suggestion_content: SuggestionContent -) -> None: +async def test_initialize(adm: Provider, adm_suggestion_content: SuggestionContent) -> None: """Test for the initialize() method of the adM provider.""" await adm.initialize() @@ -137,9 +135,7 @@ async def test_query_success( @pytest.mark.asyncio -async def test_query_with_missing_key( - srequest: SuggestionRequestFixture, adm: Provider -) -> None: +async def test_query_with_missing_key(srequest: SuggestionRequestFixture, adm: Provider) -> None: """Test for the query() method of the adM provider with a missing key.""" await adm.initialize() diff --git a/tests/unit/providers/amo/backends/test_dynamic.py b/tests/unit/providers/amo/backends/test_dynamic.py index fc98df31e..674217a36 100644 --- a/tests/unit/providers/amo/backends/test_dynamic.py +++ b/tests/unit/providers/amo/backends/test_dynamic.py @@ -45,9 +45,7 @@ def _patch_addons_api_calls(mocker: MockerFixture) -> None: @pytest.mark.asyncio -async def test_fetch_addons_succeed( - mocker: MockerFixture, dynamic_backend: DynamicAmoBackend -): +async def test_fetch_addons_succeed(mocker: MockerFixture, dynamic_backend: DynamicAmoBackend): """Test that fetch populates the Addons.""" _patch_addons_api_calls(mocker) @@ -63,9 +61,7 @@ async def test_fetch_addons_skipped_api_failure( """Test that fetch fails raises error when Addon requests fails before it 
returns a response. """ - mocker.patch.object( - AsyncClient, "get", side_effect=httpx.TimeoutException("timedout!") - ) + mocker.patch.object(AsyncClient, "get", side_effect=httpx.TimeoutException("timedout!")) await dynamic_backend.fetch_and_cache_addons_info() # Ensure that all the messages are errors due to the timeout. @@ -111,9 +107,7 @@ async def test_fetch_addons_skipped_api_request_failure( assert len(dynamic_backend.dynamic_data) == len(SupportedAddon) - 1 assert len(caplog.messages) == 1 - assert caplog.messages[0].startswith( - "Addons API could not find key: video-downloadhelper" - ) + assert caplog.messages[0].startswith("Addons API could not find key: video-downloadhelper") @pytest.mark.asyncio @@ -163,18 +157,14 @@ async def test_fetch_addons_handled_task_group_exceptions( mocker: MockerFixture, dynamic_backend: DynamicAmoBackend ): """Test that `TaskGroup` exceptions are captured and propagated as `AmoBackendError`.""" - mocker.patch.object( - dynamic_backend, "_fetch_addon", side_effect=Exception("mocked error") - ) + mocker.patch.object(dynamic_backend, "_fetch_addon", side_effect=Exception("mocked error")) with pytest.raises(AmoBackendError): await dynamic_backend.fetch_and_cache_addons_info() @pytest.mark.asyncio -async def test_get_addon_request( - mocker: MockerFixture, dynamic_backend: DynamicAmoBackend -): +async def test_get_addon_request(mocker: MockerFixture, dynamic_backend: DynamicAmoBackend): """Test that we can get the Addons details.""" _patch_addons_api_calls(mocker) await dynamic_backend.fetch_and_cache_addons_info() @@ -197,9 +187,7 @@ async def test_get_addon_request( @pytest.mark.asyncio -async def test_get_addon_key_error( - mocker: MockerFixture, dynamic_backend: DynamicAmoBackend -): +async def test_get_addon_key_error(mocker: MockerFixture, dynamic_backend: DynamicAmoBackend): """Test that we raise the right error for Key Error.""" _patch_addons_api_calls(mocker) await dynamic_backend.fetch_and_cache_addons_info() diff --git a/tests/unit/providers/amo/backends/test_static.py b/tests/unit/providers/amo/backends/test_static.py index 169410ab2..4f39900a1 100644 --- a/tests/unit/providers/amo/backends/test_static.py +++ b/tests/unit/providers/amo/backends/test_static.py @@ -23,9 +23,7 @@ async def test_get_addon_success(static_backend: StaticAmoBackend): """Test that we can get Addon information statically.""" addons = await static_backend.get_addon(SupportedAddon.VIDEO_DOWNLOADER) video_downloader: dict[str, str] = ADDON_DATA[SupportedAddon.VIDEO_DOWNLOADER] - vd_icon_rating: dict[str, Any] = STATIC_RATING_AND_ICONS[ - SupportedAddon.VIDEO_DOWNLOADER - ] + vd_icon_rating: dict[str, Any] = STATIC_RATING_AND_ICONS[SupportedAddon.VIDEO_DOWNLOADER] assert ( Addon( name=video_downloader["name"], diff --git a/tests/unit/providers/amo/test_provider.py b/tests/unit/providers/amo/test_provider.py index bcfb19a87..d124f0e3b 100644 --- a/tests/unit/providers/amo/test_provider.py +++ b/tests/unit/providers/amo/test_provider.py @@ -128,9 +128,7 @@ async def test_query_return_match( req = SuggestionRequest(query="dictionary", geolocation=Location()) expected_info: dict[str, str] = ADDON_DATA[SupportedAddon.LANGAUGE_TOOL] - expected_icon_rating: dict[str, Any] = STATIC_RATING_AND_ICONS[ - SupportedAddon.LANGAUGE_TOOL - ] + expected_icon_rating: dict[str, Any] = STATIC_RATING_AND_ICONS[SupportedAddon.LANGAUGE_TOOL] assert [ AddonSuggestion( title=expected_info["name"], @@ -151,9 +149,7 @@ async def test_query_return_match( @pytest.mark.asyncio -async def 
test_query_error( - caplog: LogCaptureFixture, keywords: dict[SupportedAddon, set[str]] -): +async def test_query_error(caplog: LogCaptureFixture, keywords: dict[SupportedAddon, set[str]]): """Test that provider can handle query error.""" provider = AddonsProvider( backend=AmoErrorBackend(), @@ -203,9 +199,7 @@ async def test_fetch_addon( await addons_provider._fetch_addon_info() assert ( addons_provider.last_fetch_at - == datetime.datetime( - year=2012, month=1, day=14, hour=3, minute=21, second=34 - ).timestamp() + == datetime.datetime(year=2012, month=1, day=14, hour=3, minute=21, second=34).timestamp() ) @@ -217,7 +211,5 @@ def test_should_fetch_false(addons_provider: AddonsProvider): def test_should_fetch_true(addons_provider: AddonsProvider): """Test that provider should fetch is true.""" - addons_provider.last_fetch_at = ( - time.time() - addons_provider.resync_interval_sec - 100 - ) + addons_provider.last_fetch_at = time.time() - addons_provider.resync_interval_sec - 100 assert addons_provider._should_fetch() diff --git a/tests/unit/providers/geolocation/test_provider.py b/tests/unit/providers/geolocation/test_provider.py index 05c664658..8ee6cdb6d 100644 --- a/tests/unit/providers/geolocation/test_provider.py +++ b/tests/unit/providers/geolocation/test_provider.py @@ -4,7 +4,6 @@ """Unit tests for the geolocation provider module.""" - from typing import Any import pytest @@ -78,9 +77,7 @@ async def test_query_geolocation(provider: Provider, geolocation: Location) -> N @pytest.mark.asyncio -async def test_query_geolocation_empty_region( - provider: Provider, empty_region: Location -) -> None: +async def test_query_geolocation_empty_region(provider: Provider, empty_region: Location) -> None: """Test that the query method provides a valid geolocation suggestion.""" expected_suggestions: list[BaseSuggestion] = [ Suggestion( diff --git a/tests/unit/providers/test_init_providers.py b/tests/unit/providers/test_init_providers.py index f7fbdfa04..928dcc0b4 100644 --- a/tests/unit/providers/test_init_providers.py +++ b/tests/unit/providers/test_init_providers.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for the __init__ provider module.""" + import logging import pytest @@ -54,9 +55,7 @@ async def test_init_providers_with_disabled_provider(provider: str) -> None: @pytest.mark.asyncio async def test_init_providers_unknown_provider_type(mocker: MockerFixture) -> None: """Test for the `init_providers` with an unknown provider.""" - mocker.patch.dict( - settings.providers, values={"unknown-provider": {"type": "unknown-type"}} - ) + mocker.patch.dict(settings.providers, values={"unknown-provider": {"type": "unknown-type"}}) with pytest.raises(InvalidProviderError) as excinfo: await init_providers() diff --git a/tests/unit/providers/top_picks/backends/test_filemanager.py b/tests/unit/providers/top_picks/backends/test_filemanager.py index 408e4f92b..068832666 100644 --- a/tests/unit/providers/top_picks/backends/test_filemanager.py +++ b/tests/unit/providers/top_picks/backends/test_filemanager.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Unit tests for the Top Picks backend module.""" + import json import logging import os @@ -94,9 +95,7 @@ def fixture_blob_json() -> str: @pytest.fixture(name="gcs_blob_mock", autouse=True) -def fixture_gcs_blob_mock( - mocker: MockerFixture, expected_timestamp: int, blob_json: str -) -> Any: +def fixture_gcs_blob_mock(mocker: MockerFixture, expected_timestamp: int, blob_json: str) -> Any: """Create a GCS Blob mock object for testing.""" mock_blob = mocker.MagicMock(spec=Blob) mock_blob.name = "20220101120555_top_picks.json" @@ -235,9 +234,7 @@ def test_get_file( mocker.patch( "merino.config.settings.providers.top_picks.domain_data_source" ).return_value = "remote" - get_file_result_code, result = top_picks_remote_filemanager.get_file( - client=gcs_client_mock - ) + get_file_result_code, result = top_picks_remote_filemanager.get_file(client=gcs_client_mock) records: list[LogRecord] = filter_caplog( caplog.records, "merino.providers.top_picks.backends.filemanager" ) @@ -272,9 +269,7 @@ def test_get_file_skip( "merino.config.settings.providers.top_picks.domain_data_source" ).return_value = "remote" - get_file_result_code, result = top_picks_remote_filemanager.get_file( - client=gcs_client_mock - ) + get_file_result_code, result = top_picks_remote_filemanager.get_file(client=gcs_client_mock) assert get_file_result_code is GetFileResultCode.SKIP assert result is None diff --git a/tests/unit/providers/top_picks/backends/test_top_picks.py b/tests/unit/providers/top_picks/backends/test_top_picks.py index 0391e19b5..66ec74473 100644 --- a/tests/unit/providers/top_picks/backends/test_top_picks.py +++ b/tests/unit/providers/top_picks/backends/test_top_picks.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for the Top Picks backend module.""" + import json import logging import os @@ -114,9 +115,7 @@ def fixture_blob_json() -> str: @pytest.fixture(name="gcs_blob_mock", autouse=True) -def fixture_gcs_blob_mock( - mocker: MockerFixture, expected_timestamp: int, blob_json: str -) -> Any: +def fixture_gcs_blob_mock(mocker: MockerFixture, expected_timestamp: int, blob_json: str) -> Any: """Create a GCS Blob mock object for testing.""" mock_blob = mocker.MagicMock(spec=Blob) mock_blob.name = "20220101120555_top_picks.json" @@ -142,7 +141,7 @@ def mock_gcs_client(mocker: MockerFixture, gcs_bucket_mock): def test_init_failure_no_domain_file( - top_picks_backend_parameters: dict[str, Any] + top_picks_backend_parameters: dict[str, Any], ) -> None: """Test exception handling for the __init__() method when no domain file provided.""" top_picks_backend_parameters["top_picks_file_path"] = None @@ -170,17 +169,13 @@ def test_read_domain_list_os_error(top_picks_backend: TopPicksBackend) -> None: top_picks_backend.read_domain_list("./wrongfile.json") -def test_read_domain_list_json_decode_err( - top_picks_backend: TopPicksBackend, mocker -) -> None: +def test_read_domain_list_json_decode_err(top_picks_backend: TopPicksBackend, mocker) -> None: """Test that the read function fails, raising TopPicksError when a JSONDecodeError is captured. 
""" mocker.patch("json.load", side_effect=JSONDecodeError("test", "json", 1)) with pytest.raises(TopPicksError): - top_picks_backend.read_domain_list( - settings.providers.top_picks.top_picks_file_path - ) + top_picks_backend.read_domain_list(settings.providers.top_picks.top_picks_file_path) @pytest.mark.asyncio @@ -250,9 +245,7 @@ def test_maybe_build_indicies_remote( assert get_file_result_code is GetFileResultCode.SUCCESS assert isinstance(result, TopPicksData) assert len(records) == 1 - assert records[0].message.startswith( - "Top Picks Domain Data loaded remotely from GCS." - ) + assert records[0].message.startswith("Top Picks Domain Data loaded remotely from GCS.") def test_maybe_build_indicies_remote_fail( @@ -327,9 +320,7 @@ def test_maybe_build_indicies_error( assert len(records) == 1 -def test_domain_blocklist( - top_picks_backend: TopPicksBackend, domain_blocklist: set[str] -) -> None: +def test_domain_blocklist(top_picks_backend: TopPicksBackend, domain_blocklist: set[str]) -> None: """Test that the blocked domain, while found in the processed domain data is not indexed and therefore not found in any indeces. """ diff --git a/tests/unit/providers/top_picks/conftest.py b/tests/unit/providers/top_picks/conftest.py index b3dda1c15..31403c093 100644 --- a/tests/unit/providers/top_picks/conftest.py +++ b/tests/unit/providers/top_picks/conftest.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Module for test configurations for the Top Picks provider unit test directory.""" + from typing import Any import pytest @@ -49,8 +50,6 @@ def fixture_top_picks_parameters() -> dict[str, Any]: @pytest.fixture(name="top_picks") -def fixture_top_picks( - backend: TopPicksBackend, top_picks_parameters: dict[str, Any] -) -> Provider: +def fixture_top_picks(backend: TopPicksBackend, top_picks_parameters: dict[str, Any]) -> Provider: """Create Top Pick Provider for test.""" return Provider(backend=backend, **top_picks_parameters) # type: ignore [arg-type] diff --git a/tests/unit/providers/weather/backends/test_accuweather.py b/tests/unit/providers/weather/backends/test_accuweather.py index a366cea1a..8f9dc757e 100644 --- a/tests/unit/providers/weather/backends/test_accuweather.py +++ b/tests/unit/providers/weather/backends/test_accuweather.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
"""Unit tests for the AccuWeather backend module.""" + import datetime import hashlib import json @@ -166,9 +167,7 @@ def fixture_location_completion_sample_cities() -> list[dict[str, Any]]: @pytest.fixture(name="accuweather_parameters") -def fixture_accuweather_parameters( - mocker: MockerFixture, statsd_mock: Any -) -> dict[str, Any]: +def fixture_accuweather_parameters(mocker: MockerFixture, statsd_mock: Any) -> dict[str, Any]: """Create an Accuweather object for test.""" return { "api_key": "test", @@ -303,9 +302,7 @@ def fixture_accuweather( @pytest.fixture(name="geolocation") def fixture_geolocation() -> Location: """Create a Location object for test.""" - return Location( - country="US", region="CA", city="San Francisco", dma=807, postal_code="94105" - ) + return Location(country="US", region="CA", city="San Francisco", dma=807, postal_code="94105") @pytest.fixture(name="accuweather_location_response") @@ -564,7 +561,7 @@ def fixture_accuweather_forecast_response() -> dict[str, Any]: @pytest.fixture(name="accuweather_forecast_response_celsius") def fixture_accuweather_forecast_response_celsius( - accuweather_forecast_response: dict[str, Any] + accuweather_forecast_response: dict[str, Any], ) -> bytes: """Return response content for AccuWeather forecast endpoint in celsius.""" accuweather_forecast_response["DailyForecasts"][0]["Temperature"] = { @@ -576,7 +573,7 @@ def fixture_accuweather_forecast_response_celsius( @pytest.fixture(name="accuweather_forecast_response_fahrenheit") def fixture_accuweather_forecast_response_fahrenheit( - accuweather_forecast_response: dict[str, Any] + accuweather_forecast_response: dict[str, Any], ) -> bytes: """Return response content for AccuWeather forecast endpoint in fahrenheit.""" accuweather_forecast_response["DailyForecasts"][0]["Temperature"] = { @@ -736,12 +733,14 @@ def fixture_accuweather_cached_data_misses() -> list[Optional[bytes]]: @pytest.fixture(name="accuweather_parsed_data_misses") -def fixture_accuweather_parsed_data_misses() -> tuple[ - Optional[AccuweatherLocation], - Optional[CurrentConditions], - Optional[Forecast], - Optional[int], -]: +def fixture_accuweather_parsed_data_misses() -> ( + tuple[ + Optional[AccuweatherLocation], + Optional[CurrentConditions], + Optional[Forecast], + Optional[int], + ] +): """Return the partial parsed AccuWeather quartet for a cache hit.""" return (None, None, None, None) @@ -776,15 +775,11 @@ def test_init_url_value_error( mocker: MockerFixture, accuweather_parameters: dict[str, Any], url_value: str ) -> None: """Test that a ValueError is raised if initializing with empty URL values.""" - expected_error_value: str = ( - "One or more AccuWeather API URL parameters are undefined" - ) + expected_error_value: str = "One or more AccuWeather API URL parameters are undefined" accuweather_parameters[url_value] = "" with pytest.raises(ValueError) as accuweather_error: - AccuweatherBackend( - cache=mocker.AsyncMock(spec=RedisAdapter), **accuweather_parameters - ) + AccuweatherBackend(cache=mocker.AsyncMock(spec=RedisAdapter), **accuweather_parameters) assert str(accuweather_error.value) == expected_error_value @@ -821,10 +816,7 @@ async def test_get_weather_report( content=accuweather_current_conditions_response, request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" 
"apikey=test"), ), ), Response( @@ -834,8 +826,7 @@ async def test_get_weather_report( request=Request( method="GET", url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" + "http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test" ), ), ), @@ -874,10 +865,7 @@ async def test_get_weather_report_with_location_key( content=accuweather_current_conditions_response, request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" "apikey=test"), ), ), Response( @@ -887,8 +875,7 @@ async def test_get_weather_report_with_location_key( request=Request( method="GET", url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" + "http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test" ), ), ), @@ -944,9 +931,7 @@ def mock_register_script(script) -> Callable[[list, list], Awaitable[list]]: assert report == expected_weather_report client_mock.get.assert_not_called() - metrics_timeit_called = [ - call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list - ] + metrics_timeit_called = [call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list] assert metrics_timeit_called == ["accuweather.cache.fetch"] metrics_increment_called = [ @@ -999,9 +984,7 @@ def mock_register_script(script) -> Callable[[list, list], Awaitable[list]]: assert report == expected_weather_report_via_location_key client_mock.get.assert_not_called() - metrics_timeit_called = [ - call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list - ] + metrics_timeit_called = [call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list] assert metrics_timeit_called == ["accuweather.cache.fetch-via-location-key"] metrics_increment_called = [ @@ -1045,14 +1028,10 @@ def mock_register_script(script) -> Callable[[list, list], Awaitable[list]]: assert report is None client_mock.get.assert_not_called() - metrics_called = [ - call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list - ] + metrics_called = [call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list] assert metrics_called == ["accuweather.cache.fetch.error"] - records = filter_caplog( - caplog.records, "merino.providers.weather.backends.accuweather" - ) + records = filter_caplog(caplog.records, "merino.providers.weather.backends.accuweather") assert len(caplog.records) == 1 assert records[0].message.startswith("Failed to fetch weather report from Redis:") @@ -1086,14 +1065,10 @@ async def test_get_weather_report_with_partial_cache_hits( ) -> None: """Test that we can get the weather report with partial cache hits.""" cached_current_conditions = ( - request.getfixturevalue(cached_current_fixture) - if cached_current_fixture - else None + request.getfixturevalue(cached_current_fixture) if cached_current_fixture else None ) cached_forecast = ( - request.getfixturevalue(cached_forecast_fixture) - if cached_forecast_fixture - else None + request.getfixturevalue(cached_forecast_fixture) if cached_forecast_fixture else None ) redis_mock = mocker.AsyncMock(spec=Redis) @@ -1130,8 +1105,7 @@ def mock_register_script(script) -> Callable[[list, list], Awaitable[list]]: request=Request( method="GET", url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" + "http://www.accuweather.com/currentconditions/v1/39376.json?" 
"apikey=test" ), ), ) @@ -1196,14 +1170,10 @@ async def test_get_weather_report_via_location_key_with_partial_cache_hits( ) -> None: """Test that we can get the weather report with partial cache hits.""" cached_current_conditions = ( - request.getfixturevalue(cached_current_fixture) - if cached_current_fixture - else None + request.getfixturevalue(cached_current_fixture) if cached_current_fixture else None ) cached_forecast = ( - request.getfixturevalue(cached_forecast_fixture) - if cached_forecast_fixture - else None + request.getfixturevalue(cached_forecast_fixture) if cached_forecast_fixture else None ) redis_mock = mocker.AsyncMock(spec=Redis) @@ -1240,8 +1210,7 @@ def mock_register_script(script) -> Callable[[list, list], Awaitable[list]]: request=Request( method="GET", url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" + "http://www.accuweather.com/currentconditions/v1/39376.json?" "apikey=test" ), ), ) @@ -1338,10 +1307,7 @@ async def test_get_weather_report_failed_current_conditions_query( content=b"[]", request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" "apikey=test"), ), ), Response( @@ -1351,8 +1317,7 @@ async def test_get_weather_report_failed_current_conditions_query( request=Request( method="GET", url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" + "http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test" ), ), ), @@ -1435,10 +1400,7 @@ async def test_get_weather_report_failed_forecast_query( content=accuweather_current_conditions_response, request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" "apikey=test"), ), ), Response( @@ -1448,8 +1410,7 @@ async def test_get_weather_report_failed_forecast_query( request=Request( method="GET", url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" + "http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test" ), ), ), @@ -1510,8 +1471,8 @@ async def test_get_location( ), ) - location: Optional[AccuweatherLocation] = ( - await accuweather.get_location_by_geolocation(country, region, city) + location: Optional[AccuweatherLocation] = await accuweather.get_location_by_geolocation( + country, region, city ) assert location == expected_location @@ -1541,8 +1502,8 @@ async def test_get_location_no_location_returned( ), ) - location: Optional[AccuweatherLocation] = ( - await accuweather.get_location_by_geolocation(country, region, city) + location: Optional[AccuweatherLocation] = await accuweather.get_location_by_geolocation( + country, region, city ) assert location is None @@ -1628,15 +1589,12 @@ async def test_get_current_conditions( content=accuweather_current_conditions_response, request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" 
"apikey=test"), ), ) - conditions: Optional[CurrentConditionsWithTTL] = ( - await accuweather.get_current_conditions(location_key) + conditions: Optional[CurrentConditionsWithTTL] = await accuweather.get_current_conditions( + location_key ) assert conditions == expected_conditions @@ -1657,15 +1615,12 @@ async def test_get_current_conditions_no_current_conditions_returned( content=b"[]", request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/39376.json?" "apikey=test"), ), ) - conditions: Optional[CurrentConditionsWithTTL] = ( - await accuweather.get_current_conditions(location_key) + conditions: Optional[CurrentConditionsWithTTL] = await accuweather.get_current_conditions( + location_key ) assert conditions is None @@ -1693,10 +1648,7 @@ async def test_get_current_conditions_error( ), request=Request( method="GET", - url=( - "https://www.accuweather.com/currentconditions/v1/INVALID.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/currentconditions/v1/INVALID.json?" "apikey=test"), ), ) @@ -1762,10 +1714,7 @@ async def test_get_forecast( content=content, request=Request( method="GET", - url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test"), ), ) @@ -1790,10 +1739,7 @@ async def test_get_forecast_no_forecast_returned( content=b"{}", request=Request( method="GET", - url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/forecasts/v1/daily/1day/39376.json?" "apikey=test"), ), ) @@ -1821,10 +1767,7 @@ async def test_get_forecast_error(accuweather: AccuweatherBackend) -> None: ), request=Request( method="GET", - url=( - "https://www.accuweather.com/forecasts/v1/daily/1day/INVALID.json?" - "apikey=test" - ), + url=("http://www.accuweather.com/forecasts/v1/daily/1day/INVALID.json?" "apikey=test"), ), ) @@ -1861,9 +1804,7 @@ def test_cache_key_for_accuweather_request( ): """Test that the cache key is created properly.""" url = "localhost" - cache_key = accuweather.cache_key_for_accuweather_request( - url, query_params=query_params - ) + cache_key = accuweather.cache_key_for_accuweather_request(url, query_params=query_params) assert cache_key == expected_cache_key @@ -1890,9 +1831,7 @@ async def test_get_request_cache_get_errors( """Test for cache errors/misses. Ensures that the right metrics are called and that the API request is actually made. 
""" - expiry_date = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( - days=2 - ) + expiry_date = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=2) expected_client_response = {"hello": "world", "cached_request_ttl": 0} client_mock: AsyncMock = cast(AsyncMock, accuweather.http_client) @@ -1915,9 +1854,7 @@ async def test_get_request_cache_get_errors( assert expected_client_response == results - timeit_metrics_called = [ - call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list - ] + timeit_metrics_called = [call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list] assert [ f"accuweather.request.{expected_url_type}.get", "accuweather.cache.store", @@ -1939,13 +1876,9 @@ async def test_get_request_cache_store_errors( url = "/forecasts/v1/daily/1day/39376.json" redis_mock.get.side_effect = CacheMissError - redis_mock.set.side_effect = CacheAdapterError( - "Failed to set key with error: MockError" - ) + redis_mock.set.side_effect = CacheAdapterError("Failed to set key with error: MockError") - expiry_date = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta( - days=2 - ) + expiry_date = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=2) expected_client_response = {"hello": "world"} accuweather: AccuweatherBackend = AccuweatherBackend( @@ -1971,17 +1904,13 @@ async def test_get_request_cache_store_errors( cache_ttl_sec=TEST_CACHE_TTL_SEC, ) - timeit_metrics_called = [ - call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list - ] + timeit_metrics_called = [call_arg[0][0] for call_arg in statsd_mock.timeit.call_args_list] assert [ "accuweather.request.forecasts.get", "accuweather.cache.store", ] == timeit_metrics_called - increment_called = [ - call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list - ] + increment_called = [call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list] assert [ "accuweather.cache.store.set_error", ] == increment_called @@ -2074,9 +2003,7 @@ def test_metrics_for_cache_fetch( """Test metrics for cache fetches.""" accuweather.emit_cache_fetch_metrics(cached_data) - metrics_called = [ - call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list - ] + metrics_called = [call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list] assert [f"accuweather.cache.{val}" for val in expected_metrics] == metrics_called @@ -2146,25 +2073,17 @@ def test_parse_cached_data_error( ] ) - assert location == AccuweatherLocation.model_validate_json( - accuweather_cached_location_key - ) + assert location == AccuweatherLocation.model_validate_json(accuweather_cached_location_key) assert current_conditions is None assert forecast is None - metrics_called = [ - call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list - ] + metrics_called = [call_arg[0][0] for call_arg in statsd_mock.increment.call_args_list] assert metrics_called == ["accuweather.cache.data.error"] - records = filter_caplog( - caplog.records, "merino.providers.weather.backends.accuweather" - ) + records = filter_caplog(caplog.records, "merino.providers.weather.backends.accuweather") assert len(caplog.records) == 1 - assert records[0].message.startswith( - "Failed to load weather report data from Redis:" - ) + assert records[0].message.startswith("Failed to load weather report data from Redis:") @pytest.mark.asyncio @@ -2193,9 +2112,9 @@ async def test_get_location_completion( ) ] - location_completions: Optional[list[LocationCompletion]] = ( - await 
accuweather.get_location_completion(geolocation, search_term) - ) + location_completions: Optional[ + list[LocationCompletion] + ] = await accuweather.get_location_completion(geolocation, search_term) assert location_completions == expected_location_completion @@ -2227,9 +2146,9 @@ async def test_get_location_completion_with_empty_search_term( ) ] - location_completions: Optional[list[LocationCompletion]] = ( - await accuweather.get_location_completion(geolocation, search_term) - ) + location_completions: Optional[ + list[LocationCompletion] + ] = await accuweather.get_location_completion(geolocation, search_term) assert location_completions is None @@ -2262,8 +2181,8 @@ async def test_get_location_completion_with_no_geolocation_country_code( ) ] - location_completions: Optional[list[LocationCompletion]] = ( - await accuweather.get_location_completion(geolocation, search_term) - ) + location_completions: Optional[ + list[LocationCompletion] + ] = await accuweather.get_location_completion(geolocation, search_term) assert location_completions == expected_location_completion diff --git a/tests/unit/providers/weather/test_provider.py b/tests/unit/providers/weather/test_provider.py index a259733a1..891379c4e 100644 --- a/tests/unit/providers/weather/test_provider.py +++ b/tests/unit/providers/weather/test_provider.py @@ -110,9 +110,7 @@ async def test_query_weather_report_returned( city_name=report.city_name, current_conditions=report.current_conditions, forecast=report.forecast, - custom_details=CustomDetails( - weather=WeatherDetails(weather_report_ttl=report.ttl) - ), + custom_details=CustomDetails(weather=WeatherDetails(weather_report_ttl=report.ttl)), ) ] backend_mock.get_weather_report.return_value = report @@ -156,9 +154,7 @@ async def test_query_error( expected_log_messages: list[dict[str, str]] = [ {"levelname": "WARNING", "message": "Could not generate a weather report"} ] - backend_mock.get_weather_report.side_effect = BackendError( - expected_log_messages[0]["message"] - ) + backend_mock.get_weather_report.side_effect = BackendError(expected_log_messages[0]["message"]) suggestions: list[BaseSuggestion] = await provider.query( SuggestionRequest(query="", geolocation=geolocation) diff --git a/tests/unit/providers/wikipedia/backends/test_elastic.py b/tests/unit/providers/wikipedia/backends/test_elastic.py index 59c8a227e..d87317ddc 100644 --- a/tests/unit/providers/wikipedia/backends/test_elastic.py +++ b/tests/unit/providers/wikipedia/backends/test_elastic.py @@ -145,9 +145,7 @@ async def test_es_backend_search_exception( es_backend: ElasticBackend, ) -> None: """Test the exception handling in the search method of the ES backend.""" - mocker.patch.object( - AsyncElasticsearch, "search", side_effect=Exception("404 error") - ) + mocker.patch.object(AsyncElasticsearch, "search", side_effect=Exception("404 error")) with pytest.raises(BackendError) as excinfo: await es_backend.search("foo") @@ -156,9 +154,7 @@ async def test_es_backend_search_exception( @pytest.mark.asyncio -async def test_es_backend_shutdown( - mocker: MockerFixture, es_backend: ElasticBackend -) -> None: +async def test_es_backend_shutdown(mocker: MockerFixture, es_backend: ElasticBackend) -> None: """Test the shutdown method of the ES backend.""" spy = mocker.spy(AsyncElasticsearch, "close") diff --git a/tests/unit/test_config_logging.py b/tests/unit/test_config_logging.py index ceaf0c444..82ac4bd19 100644 --- a/tests/unit/test_config_logging.py +++ b/tests/unit/test_config_logging.py @@ -4,7 +4,6 @@ """Unit tests for 
the config_logging.py module.""" - import logging from typing import Any diff --git a/tests/unit/test_config_sentry.py b/tests/unit/test_config_sentry.py index 20da1f87b..2c2e48318 100644 --- a/tests/unit/test_config_sentry.py +++ b/tests/unit/test_config_sentry.py @@ -3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for the config_sentry.py module.""" + import logging import typing @@ -215,40 +216,28 @@ def test_strip_sensitive_data() -> None: assert isinstance(mock_sentry_hint["exc_info"][1], RuntimeError) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"][ - "q" - ] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["q"] == REDACTED_TEXT ) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][1]["vars"][ - "values" - ]["q"] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][1]["vars"]["values"]["q"] == REDACTED_TEXT ) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][2]["vars"][ - "srequest" - ] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][2]["vars"]["srequest"] == REDACTED_TEXT ) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][4]["vars"][ - "q" - ] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][4]["vars"]["q"] == REDACTED_TEXT ) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][4]["vars"][ - "suggest" - ] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][4]["vars"]["suggest"] == REDACTED_TEXT ) assert ( - sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][5]["vars"][ - "body" - ] + sanitized_event["exception"]["values"][0]["stacktrace"]["frames"][5]["vars"]["body"] == REDACTED_TEXT ) diff --git a/tests/unit/utils/test_log_data_creator.py b/tests/unit/utils/test_log_data_creator.py index 223fa1e66..c9d2143d3 100644 --- a/tests/unit/utils/test_log_data_creator.py +++ b/tests/unit/utils/test_log_data_creator.py @@ -65,9 +65,7 @@ def test_create_request_summary_log_data( ) message: Message = {"type": "http.response.start", "status": "200"} - log_data: RequestSummaryLogDataModel = create_request_summary_log_data( - request, message, dt - ) + log_data: RequestSummaryLogDataModel = create_request_summary_log_data(request, message, dt) assert log_data == expected_log_data diff --git a/tests/unit/utils/test_task_runner.py b/tests/unit/utils/test_task_runner.py index 5be25d73f..12084d7a9 100644 --- a/tests/unit/utils/test_task_runner.py +++ b/tests/unit/utils/test_task_runner.py @@ -98,10 +98,7 @@ async def test_gather_tasks_with_timeout( assert len(records) == 2 assert records[0].__dict__["msg"] == "Timeout triggered in the task runner" - assert ( - records[1].__dict__["msg"] - == "Cancelling the task: timedout-task due to timeout" - ) + assert records[1].__dict__["msg"] == "Cancelling the task: timedout-task due to timeout" @pytest.mark.asyncio @@ -179,7 +176,4 @@ async def test_gather_tasks_without_timeout_callback( assert len(records) == 2 assert records[0].__dict__["msg"] == "Timeout triggered in the task runner" - assert ( - records[1].__dict__["msg"] - == "Cancelling the task: timedout-task due to timeout" - ) + assert records[1].__dict__["msg"] == "Cancelling the task: timedout-task due to timeout" diff --git a/tests/unit/utils/test_version.py b/tests/unit/utils/test_version.py index c8d0c5134..35482a838 100644 --- a/tests/unit/utils/test_version.py +++ b/tests/unit/utils/test_version.py @@ 
-3,6 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """Unit tests for the version.py utility module.""" + import json import pathlib