Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

chore: improving django dependency version constraints for allowing django 5.0 #29

Merged
merged 1 commit into from
Apr 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 8 additions & 5 deletions .github/workflows/testing.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,16 @@ jobs:
fail-fast: false
matrix:
python-version: [ "3.8.14" , "3.9.14", "3.10.7", "3.11.2" ]
django-version: [ "3.2", "4.0", "4.1" ]
django-version: [ "3.2", "4.0", "4.1", "5.0" ]
exclude:
# Django v4 dropped 3.7 support
- django-version: 3.2
python-version: 3.11.2
- django-version: 4.0
python-version: 3.7.14
- django-version: 4.1
python-version: 3.7.14
python-version: 3.11.2
- django-version: 5.0
python-version: 3.8.14
- django-version: 5.0
python-version: 3.9.14
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
Expand Down
32 changes: 9 additions & 23 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,9 +1,4 @@
repos:
- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black

- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
Expand All @@ -15,32 +10,22 @@ repos:
- id: check-yaml
- id: check-merge-conflict

- repo: https://github.com/pycqa/flake8
rev: 6.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.3
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
'flake8-comprehensions',
'flake8-print',
'flake8-mutable',
'flake8-use-fstring',
'flake8-simplify',
'flake8-pytest-style',
'flake8-type-checking==2.3.0',
]
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format

- repo: https://github.com/asottile/pyupgrade
rev: v3.8.0
hooks:
- id: pyupgrade
args: [ "--py3-plus", "--py36-plus", "--py37-plus" ]

- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.4.1
hooks:
Expand All @@ -52,6 +37,7 @@ repos:
- types-PyYAML
- drf-yasg
- drf-spectacular
- setuptools

- repo: local
hooks:
Expand Down
9 changes: 7 additions & 2 deletions openapi_tester/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
""" Django OpenAPI Schema Tester """
"""Django OpenAPI Schema Tester"""

from .case_testers import is_camel_case, is_kebab_case, is_pascal_case, is_snake_case
from .clients import OpenAPIClient
from .loaders import BaseSchemaLoader, DrfSpectacularSchemaLoader, DrfYasgSchemaLoader, StaticSchemaLoader
from .loaders import (
BaseSchemaLoader,
DrfSpectacularSchemaLoader,
DrfYasgSchemaLoader,
StaticSchemaLoader,
)
from .schema_tester import SchemaTester

__all__ = [
Expand Down
2 changes: 1 addition & 1 deletion openapi_tester/case_testers.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
""" Case testers - this module includes helper functions to test key casing """
"""Case testers - this module includes helper functions to test key casing"""

from __future__ import annotations

Expand Down
2 changes: 1 addition & 1 deletion openapi_tester/config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
Configuration module for schema section test.
Configuration module for schema section test.
"""

from dataclasses import dataclass
Expand Down
46 changes: 25 additions & 21 deletions openapi_tester/constants.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
""" Constants module """
"""Constants module"""

OPENAPI_PYTHON_MAPPING = {
"boolean": bool.__name__,
Expand All @@ -11,36 +11,40 @@
}

# Validation errors
VALIDATE_FORMAT_ERROR = 'Expected: {article} "{format}" formatted value\n\nReceived: {received}'
VALIDATE_PATTERN_ERROR = 'The string "{data}" does not match the specified pattern: {pattern}'
VALIDATE_FORMAT_ERROR = (
'Expected: {article} "{format}" formatted value\n\nReceived: {received}'
)
VALIDATE_PATTERN_ERROR = (
'The string "{data}" does not match the specified pattern: {pattern}'
)
INVALID_PATTERN_ERROR = "String pattern is not valid regex: {pattern}"
VALIDATE_ENUM_ERROR = "Expected: a member of the enum {enum}\n\nReceived: {received}"
VALIDATE_TYPE_ERROR = 'Expected: {article} "{type}" type value\n\nReceived: {received}'
VALIDATE_MULTIPLE_OF_ERROR = "The value {data} should be a multiple of {multiple}"
VALIDATE_MINIMUM_ERROR = "The value {data} is lower than the specified minimum of {minimum}"
VALIDATE_MAXIMUM_ERROR = "The value {data} exceeds the maximum allowed value of {maximum}"
VALIDATE_MIN_LENGTH_ERROR = 'The length of "{data}" is shorter than the specified minimum length of {min_length}'
VALIDATE_MAX_LENGTH_ERROR = 'The length of "{data}" exceeds the specified maximum length of {max_length}'
VALIDATE_MIN_ARRAY_LENGTH_ERROR = (
"The length of the array {data} is shorter than the specified minimum length of {min_length}"
VALIDATE_MINIMUM_ERROR = (
"The value {data} is lower than the specified minimum of {minimum}"
)
VALIDATE_MAX_ARRAY_LENGTH_ERROR = "The length of the array {data} exceeds the specified maximum length of {max_length}"
VALIDATE_MINIMUM_NUMBER_OF_PROPERTIES_ERROR = (
"The number of properties in {data} is fewer than the specified minimum number of properties of {min_length}"
VALIDATE_MAXIMUM_ERROR = (
"The value {data} exceeds the maximum allowed value of {maximum}"
)
VALIDATE_MAXIMUM_NUMBER_OF_PROPERTIES_ERROR = (
"The number of properties in {data} exceeds the specified maximum number of properties of {max_length}"
VALIDATE_MIN_LENGTH_ERROR = 'The length of "{data}" is shorter than the specified minimum length of {min_length}'
VALIDATE_MAX_LENGTH_ERROR = (
'The length of "{data}" exceeds the specified maximum length of {max_length}'
)
VALIDATE_MIN_ARRAY_LENGTH_ERROR = "The length of the array {data} is shorter than the specified minimum length of {min_length}"
VALIDATE_MAX_ARRAY_LENGTH_ERROR = "The length of the array {data} exceeds the specified maximum length of {max_length}"
VALIDATE_MINIMUM_NUMBER_OF_PROPERTIES_ERROR = "The number of properties in {data} is fewer than the specified minimum number of properties of {min_length}"
VALIDATE_MAXIMUM_NUMBER_OF_PROPERTIES_ERROR = "The number of properties in {data} exceeds the specified maximum number of properties of {max_length}"
VALIDATE_UNIQUE_ITEMS_ERROR = "The array {data} must contain unique items only"
VALIDATE_NONE_ERROR = "Received a null value for a non-nullable schema object"
VALIDATE_MISSING_KEY_ERROR = 'The following property is missing in the {http_message} data: "{missing_key}"'
VALIDATE_EXCESS_KEY_ERROR = (
'The following property was found in the {http_message}, but is missing from the schema definition: "{excess_key}"'
)
VALIDATE_WRITE_ONLY_RESPONSE_KEY_ERROR = (
'The following property was found in the response, but is documented as being "writeOnly": "{write_only_key}"'
VALIDATE_MISSING_KEY_ERROR = (
'The following property is missing in the {http_message} data: "{missing_key}"'
)
VALIDATE_EXCESS_KEY_ERROR = 'The following property was found in the {http_message}, but is missing from the schema definition: "{excess_key}"'
VALIDATE_WRITE_ONLY_RESPONSE_KEY_ERROR = 'The following property was found in the response, but is documented as being "writeOnly": "{write_only_key}"'
VALIDATE_ONE_OF_ERROR = "Expected data to match one and only one of the oneOf schema types; found {matches} matches"
VALIDATE_ANY_OF_ERROR = "Expected data to match one or more of the documented anyOf schema types, but found no matches"
UNDOCUMENTED_SCHEMA_SECTION_ERROR = "Error: Unsuccessfully tried to index the OpenAPI schema by `{key}`. {error_addon}"
UNDOCUMENTED_SCHEMA_SECTION_ERROR = (
"Error: Unsuccessfully tried to index the OpenAPI schema by `{key}`. {error_addon}"
)
INIT_ERROR = "Unable to configure loader"
6 changes: 4 additions & 2 deletions openapi_tester/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
""" Exceptions Module """
"""Exceptions Module"""


class DocumentationError(AssertionError):
Expand All @@ -15,7 +15,9 @@ class CaseError(DocumentationError):
"""

def __init__(self, key: str, case: str, expected: str) -> None:
super().__init__(f"The response key `{key}` is not properly {case}. Expected value: {expected}")
super().__init__(
f"The response key `{key}` is not properly {case}. Expected value: {expected}"
)


class OpenAPISchemaError(Exception):
Expand Down
64 changes: 48 additions & 16 deletions openapi_tester/loaders.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
""" Loaders Module """
"""Loaders Module"""

from __future__ import annotations

Expand All @@ -14,7 +14,11 @@
import yaml
from django.urls import Resolver404, resolve
from django.utils.functional import cached_property
from openapi_spec_validator import OpenAPIV2SpecValidator, OpenAPIV30SpecValidator, OpenAPIV31SpecValidator
from openapi_spec_validator import (
OpenAPIV2SpecValidator,
OpenAPIV30SpecValidator,
OpenAPIV31SpecValidator,
)
from prance.util.resolver import RefResolver
from rest_framework.schemas.generators import BaseSchemaGenerator, EndpointEnumerator
from rest_framework.settings import api_settings
Expand Down Expand Up @@ -95,7 +99,9 @@ def normalize_schema_paths(self, schema: dict) -> dict[str, dict]:
normalized_paths: dict[str, dict] = {}
for key, value in schema["paths"].items():
try:
parameterized_path, _ = self.resolve_path(endpoint_path=key, method=list(value.keys())[0])
parameterized_path, _ = self.resolve_path(
endpoint_path=key, method=list(value.keys())[0]
)
normalized_paths[parameterized_path] = value
except ValueError:
normalized_paths[key] = value
Expand All @@ -115,11 +121,14 @@ def validate_schema(schema: dict):
else:
raise UndocumentedSchemaSectionError(
UNDOCUMENTED_SCHEMA_SECTION_ERROR.format(
key=schema["openapi"], error_addon="Support might need to be added."
key=schema["openapi"],
error_addon="Support might need to be added.",
)
)
else:
raise UndocumentedSchemaSectionError(UNDOCUMENTED_SCHEMA_SECTION_ERROR.format(key=schema["openapi"]))
raise UndocumentedSchemaSectionError(
UNDOCUMENTED_SCHEMA_SECTION_ERROR.format(key=schema["openapi"])
)
else:
validator = OpenAPIV2SpecValidator(schema=schema)
validator.validate()
Expand All @@ -138,9 +147,13 @@ def endpoints(self) -> list[str]:
"""
Returns a list of endpoint paths.
"""
return list({endpoint[0] for endpoint in EndpointEnumerator().get_api_endpoints()})
return list(
{endpoint[0] for endpoint in EndpointEnumerator().get_api_endpoints()}
)

def resolve_path(self, endpoint_path: str, method: str) -> tuple[str, ResolverMatch]:
def resolve_path(
self, endpoint_path: str, method: str
) -> tuple[str, ResolverMatch]:
"""
Resolves a Django path.
"""
Expand Down Expand Up @@ -168,19 +181,25 @@ def resolve_path(self, endpoint_path: str, method: str) -> tuple[str, ResolverMa
message = f"Could not resolve path `{endpoint_path}`."
close_matches = difflib.get_close_matches(endpoint_path, self.endpoints)
if close_matches:
message += "\n\nDid you mean one of these?\n\n- " + "\n- ".join(close_matches)
message += "\n\nDid you mean one of these?\n\n- " + "\n- ".join(
close_matches
)
raise ValueError(message)

@staticmethod
def handle_pk_parameter(resolved_route: ResolverMatch, path: str, method: str) -> tuple[str, ResolverMatch]:
def handle_pk_parameter(
resolved_route: ResolverMatch, path: str, method: str
) -> tuple[str, ResolverMatch]:
"""
Handle the DRF conversion of params called {pk} into a named parameter based on Model field
"""
coerced_path = BaseSchemaGenerator().coerce_path(
path=path, method=method, view=cast("APIView", resolved_route.func)
)
pk_field_name = "".join(
entry.replace("+ ", "") for entry in difflib.Differ().compare(path, coerced_path) if "+ " in entry
entry.replace("+ ", "")
for entry in difflib.Differ().compare(path, coerced_path)
if "+ " in entry
)
resolved_route.kwargs[pk_field_name] = resolved_route.kwargs["pk"]
del resolved_route.kwargs["pk"]
Expand All @@ -197,7 +216,9 @@ def __init__(self, field_key_map: dict[str, str] | None = None) -> None:
from drf_yasg.generators import OpenAPISchemaGenerator
from drf_yasg.openapi import Info

self.schema_generator = OpenAPISchemaGenerator(info=Info(title="", default_version=""))
self.schema_generator = OpenAPISchemaGenerator(
info=Info(title="", default_version="")
)

def load_schema(self) -> dict:
"""
Expand All @@ -206,8 +227,12 @@ def load_schema(self) -> dict:
odict_schema = self.schema_generator.get_schema(None, True)
return cast("dict", loads(dumps(odict_schema.as_odict())))

def resolve_path(self, endpoint_path: str, method: str) -> tuple[str, ResolverMatch]:
de_parameterized_path, resolved_path = super().resolve_path(endpoint_path=endpoint_path, method=method)
def resolve_path(
self, endpoint_path: str, method: str
) -> tuple[str, ResolverMatch]:
de_parameterized_path, resolved_path = super().resolve_path(
endpoint_path=endpoint_path, method=method
)
path_prefix = self.schema_generator.determine_path_prefix(self.endpoints)
trim_length = len(path_prefix) if path_prefix != "/" else 0
return de_parameterized_path[trim_length:], resolved_path
Expand All @@ -230,10 +255,14 @@ def load_schema(self) -> dict:
"""
return cast("dict", loads(dumps(self.schema_generator.get_schema(public=True))))

def resolve_path(self, endpoint_path: str, method: str) -> tuple[str, ResolverMatch]:
def resolve_path(
self, endpoint_path: str, method: str
) -> tuple[str, ResolverMatch]:
from drf_spectacular.settings import spectacular_settings

de_parameterized_path, resolved_path = super().resolve_path(endpoint_path=endpoint_path, method=method)
de_parameterized_path, resolved_path = super().resolve_path(
endpoint_path=endpoint_path, method=method
)
return (
de_parameterized_path[len(spectacular_settings.SCHEMA_PATH_PREFIX or "") :],
resolved_path,
Expand All @@ -260,7 +289,10 @@ def load_schema(self) -> dict[str, Any]:
with open(self.path, encoding="utf-8") as file:
content = file.read()
return cast(
"dict", json.loads(content) if ".json" in self.path else yaml.load(content, Loader=yaml.FullLoader)
"dict",
json.loads(content)
if ".json" in self.path
else yaml.load(content, Loader=yaml.FullLoader),
)


Expand Down
6 changes: 2 additions & 4 deletions openapi_tester/response_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,10 @@ def response(self) -> Union["Response", "HttpResponse"]:
return self._response

@property
def data(self) -> Optional[dict]:
...
def data(self) -> Optional[dict]: ...

@property
def request_data(self) -> Optional[dict]:
...
def request_data(self) -> Optional[dict]: ...


class DRFResponseHandler(ResponseHandler):
Expand Down
7 changes: 5 additions & 2 deletions openapi_tester/response_handler_factory.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
# pylint: disable=R0903
"""
Module that contains the factory to create response handlers.
Module that contains the factory to create response handlers.
"""

from typing import TYPE_CHECKING, Union

from rest_framework.response import Response

from openapi_tester.response_handler import DjangoNinjaResponseHandler, DRFResponseHandler
from openapi_tester.response_handler import (
DjangoNinjaResponseHandler,
DRFResponseHandler,
)

if TYPE_CHECKING:
from django.http.response import HttpResponse
Expand Down
Loading
Loading