diff --git a/HISTORY.md b/HISTORY.md index e76cff20..d0d9c5de 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -31,6 +31,8 @@ can now be used as decorators and have gained new features. ([#472](https://github.com/python-attrs/cattrs/pull/472)) - The default union handler now also handles dataclasses. ([#426](https://github.com/python-attrs/cattrs/issues/426) [#477](https://github.com/python-attrs/cattrs/pull/477)) +- **Potentially breaking**: `IterableValidationError`s now require their subexceptions to have appropriate notes attached. + This was always the case internally in _cattrs_, but is now required of errors produced outside too. - Add support for [PEP 695](https://peps.python.org/pep-0695/) type aliases. ([#452](https://github.com/python-attrs/cattrs/pull/452)) - Add support for [PEP 696](https://peps.python.org/pep-0696/) `TypeVar`s with defaults. diff --git a/Makefile b/Makefile index 2012f9fd..3035ee96 100644 --- a/Makefile +++ b/Makefile @@ -54,7 +54,6 @@ lint: ## check style with ruff and black test: ## run tests quickly with the default Python pdm run pytest -x --ff -n auto tests - test-all: ## run tests on every Python version with tox tox @@ -78,7 +77,7 @@ servedocs: docs ## compile the docs watching for changes watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . bench-cmp: - pytest bench --benchmark-compare + pytest --benchmark-sort=fullname --benchmark-warmup=true --benchmark-warmup-iterations=5 --benchmark-group-by=fullname bench --benchmark-compare bench: - pytest bench --benchmark-save base + pytest --benchmark-sort=fullname --benchmark-warmup=true --benchmark-warmup-iterations=5 --benchmark-group-by=fullname bench --benchmark-save base diff --git a/README.md b/README.md index 0419682f..36b1b7c7 100644 --- a/README.md +++ b/README.md @@ -13,31 +13,21 @@ --- -**cattrs** is an open source Python library for structuring and unstructuring -data. _cattrs_ works best with _attrs_ classes, dataclasses and the usual -Python collections, but other kinds of classes are supported by manually -registering converters. - -Python has a rich set of powerful, easy to use, built-in data types like -dictionaries, lists and tuples. These data types are also the lingua franca -of most data serialization libraries, for formats like json, msgpack, cbor, -yaml or toml. - -Data types like this, and mappings like `dict` s in particular, represent -unstructured data. Your data is, in all likelihood, structured: not all -combinations of field names or values are valid inputs to your programs. In -Python, structured data is better represented with classes and enumerations. -_attrs_ is an excellent library for declaratively describing the structure of -your data, and validating it. - -When you're handed unstructured data (by your network, file system, database...), -_cattrs_ helps to convert this data into structured data. When you have to -convert your structured data into data types other libraries can handle, -_cattrs_ turns your classes and enumerations into dictionaries, integers and -strings. - -Here's a simple taste. The list containing a float, an int and a string -gets converted into a tuple of three ints. +**cattrs** is an open source Python library for structuring and unstructuring data. +_cattrs_ works best with _attrs_ classes, dataclasses and the usual Python collections, but other kinds of classes are supported by manually registering converters. + +Python has a rich set of powerful, easy to use, built-in data types like dictionaries, lists and tuples. 
+These data types are also the lingua franca of most data serialization libraries, for formats like json, msgpack, cbor, yaml or toml. + +Data types like this, and mappings like `dict` s in particular, represent unstructured data. +Your data is, in all likelihood, structured: not all combinations of field names or values are valid inputs to your programs. +In Python, structured data is better represented with classes and enumerations. +_attrs_ is an excellent library for declaratively describing the structure of your data and validating it. + +When you're handed unstructured data (by your network, file system, database...), _cattrs_ helps to convert this data into structured data. +When you have to convert your structured data into data types other libraries can handle, _cattrs_ turns your classes and enumerations into dictionaries, integers and strings. + +Here's a simple taste. The list containing a float, an int and a string gets converted into a tuple of three ints. ```python >>> import cattrs @@ -68,7 +58,7 @@ Here's a much more complex example, involving _attrs_ classes with type metadata ```python >>> from enum import unique, Enum ->>> from typing import Optional, Sequence, Union +>>> from typing import Sequence >>> from cattrs import structure, unstructure >>> from attrs import define, field @@ -91,14 +81,18 @@ Here's a much more complex example, involving _attrs_ classes with type metadata >>> @define ... class Dog: ... cuteness: int -... chip: Optional[DogMicrochip] = None +... chip: DogMicrochip | None = None ->>> p = unstructure([Dog(cuteness=1, chip=DogMicrochip(chip_id=1, time_chipped=10.0)), -... Cat(breed=CatBreed.MAINE_COON, names=('Fluffly', 'Fluffer'))]) +>>> p = unstructure( +... [ +... Dog(cuteness=1, chip=DogMicrochip(chip_id=1, time_chipped=10.0)), +... Cat(CatBreed.MAINE_COON, names=('Fluffly', 'Fluffer')) +... ] +... ) >>> print(p) [{'cuteness': 1, 'chip': {'chip_id': 1, 'time_chipped': 10.0}}, {'breed': 'maine_coon', 'names': ('Fluffly', 'Fluffer')}] ->>> print(structure(p, list[Union[Dog, Cat]])) +>>> print(structure(p, list[Dog | Cat])) [Dog(cuteness=1, chip=DogMicrochip(chip_id=1, time_chipped=10.0)), Cat(breed=, names=['Fluffly', 'Fluffer'])] ``` @@ -151,6 +145,9 @@ _cattrs_ is based on a few fundamental design decisions. - Un/structuring rules are separate from the models. This allows models to have a one-to-many relationship with un/structuring rules, and to create un/structuring rules for models which you do not own and you cannot change. (_cattrs_ can be configured to use un/structuring rules from models using the [`use_class_methods` strategy](https://catt.rs/en/latest/strategies.html#using-class-specific-structure-and-unstructure-methods).) +- Strongly lean on function composition. + Almost all problems in _cattrs_ can be solved by writing and composing functions (called _hooks_), instead of writing classes and subclassing. + This makes _cattrs_ code elegant, concise, powerful and amenable to all the rich Python ways of working with functions. - Invent as little as possible; reuse existing ordinary Python instead. For example, _cattrs_ did not have a custom exception type to group exceptions until the sanctioned Python [`exceptiongroups`](https://docs.python.org/3/library/exceptions.html#ExceptionGroup). A side-effect of this design decision is that, in a lot of cases, when you're solving _cattrs_ problems you're actually learning Python instead of learning _cattrs_. 
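The design bullet added above describes solving problems by writing and composing hook functions rather than subclassing. As a hedged illustration (not part of this change set), a minimal sketch of that idea using the public `Converter.register_unstructure_hook` / `register_structure_hook` API:

```python
# Illustrative sketch only: ordinary functions registered as hooks on a Converter.
from datetime import datetime

from cattrs import Converter

converter = Converter()

# Unstructure datetimes to ISO 8601 strings, and structure them back again.
converter.register_unstructure_hook(datetime, lambda dt: dt.isoformat())
converter.register_structure_hook(
    datetime, lambda value, _type: datetime.fromisoformat(value)
)

assert (
    converter.structure(converter.unstructure(datetime(2024, 1, 1)), datetime)
    == datetime(2024, 1, 1)
)
```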
diff --git a/bench/test_attrs_collections.py b/bench/test_attrs_collections.py index c0527f53..b0ddd759 100644 --- a/bench/test_attrs_collections.py +++ b/bench/test_attrs_collections.py @@ -1,10 +1,10 @@ from enum import IntEnum from typing import Dict, List, Mapping, MutableMapping -import attr import pytest +from attrs import define, frozen -from cattr import BaseConverter, Converter, UnstructureStrategy +from cattrs import BaseConverter, Converter, UnstructureStrategy @pytest.mark.parametrize("converter_cls", [BaseConverter, Converter]) @@ -21,7 +21,7 @@ class E(IntEnum): ONE = 1 TWO = 2 - @attr.define + @define class C: a: List[int] b: List[float] @@ -62,32 +62,32 @@ class C: [1] * 3, [1.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.ONE] * 3, [2] * 3, [2.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.TWO] * 3, [3] * 3, [3.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.ONE] * 3, [4] * 3, [4.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.TWO] * 3, [5] * 3, [5.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.ONE] * 3, [6] * 3, [6.0] * 3, ["a small string"] * 3, - ["test".encode()] * 3, + [b"test"] * 3, [E.TWO] * 3, ), ) @@ -102,11 +102,11 @@ def test_unstructure_attrs_mappings(benchmark, converter_cls, unstructure_strat) Benchmark an attrs class containing mappings. """ - @attr.frozen + @frozen class FrozenCls: a: int - @attr.define + @define class C: a: Mapping[int, str] b: Dict[float, bytes] @@ -130,11 +130,11 @@ def test_structure_attrs_mappings(benchmark, converter_cls): Benchmark an attrs class containing mappings. """ - @attr.frozen + @frozen class FrozenCls: a: int - @attr.define + @define class C: a: Mapping[int, str] b: Dict[float, bytes] diff --git a/bench/test_attrs_nested.py b/bench/test_attrs_nested.py index 75b6fb52..3b675354 100644 --- a/bench/test_attrs_nested.py +++ b/bench/test_attrs_nested.py @@ -1,8 +1,8 @@ """Benchmark attrs containing other attrs classes.""" -import attr import pytest +from attrs import define -from cattr import BaseConverter, Converter, UnstructureStrategy +from cattrs import BaseConverter, Converter, UnstructureStrategy @pytest.mark.parametrize("converter_cls", [BaseConverter, Converter]) @@ -12,42 +12,42 @@ def test_unstructure_attrs_nested(benchmark, converter_cls, unstructure_strat): c = converter_cls(unstruct_strat=unstructure_strat) - @attr.define + @define class InnerA: a: int b: float c: str d: bytes - @attr.define + @define class InnerB: a: int b: float c: str d: bytes - @attr.define + @define class InnerC: a: int b: float c: str d: bytes - @attr.define + @define class InnerD: a: int b: float c: str d: bytes - @attr.define + @define class InnerE: a: int b: float c: str d: bytes - @attr.define + @define class Outer: a: InnerA b: InnerB @@ -56,11 +56,11 @@ class Outer: e: InnerE inst = Outer( - InnerA(1, 1.0, "one", "one".encode()), - InnerB(2, 2.0, "two", "two".encode()), - InnerC(3, 3.0, "three", "three".encode()), - InnerD(4, 4.0, "four", "four".encode()), - InnerE(5, 5.0, "five", "five".encode()), + InnerA(1, 1.0, "one", b"one"), + InnerB(2, 2.0, "two", b"two"), + InnerC(3, 3.0, "three", b"three"), + InnerD(4, 4.0, "four", b"four"), + InnerE(5, 5.0, "five", b"five"), ) benchmark(c.unstructure, inst) @@ -73,49 +73,49 @@ class Outer: def test_unstruct_attrs_deep_nest(benchmark, converter_cls, unstructure_strat): c = converter_cls(unstruct_strat=unstructure_strat) - @attr.define + @define class 
InnerA: a: int b: float c: str d: bytes - @attr.define + @define class InnerB: a: InnerA b: InnerA c: InnerA d: InnerA - @attr.define + @define class InnerC: a: InnerB b: InnerB c: InnerB d: InnerB - @attr.define + @define class InnerD: a: InnerC b: InnerC c: InnerC d: InnerC - @attr.define + @define class InnerE: a: InnerD b: InnerD c: InnerD d: InnerD - @attr.define + @define class Outer: a: InnerE b: InnerE c: InnerE d: InnerE - make_inner_a = lambda: InnerA(1, 1.0, "one", "one".encode()) + make_inner_a = lambda: InnerA(1, 1.0, "one", b"one") make_inner_b = lambda: InnerB(*[make_inner_a() for _ in range(4)]) make_inner_c = lambda: InnerC(*[make_inner_b() for _ in range(4)]) make_inner_d = lambda: InnerD(*[make_inner_c() for _ in range(4)]) diff --git a/bench/test_attrs_primitives.py b/bench/test_attrs_primitives.py index 8fff85ff..e11fae92 100644 --- a/bench/test_attrs_primitives.py +++ b/bench/test_attrs_primitives.py @@ -1,9 +1,9 @@ from enum import IntEnum -import attr import pytest +from attrs import define -from cattr import BaseConverter, Converter, UnstructureStrategy +from cattrs import BaseConverter, Converter, UnstructureStrategy class E(IntEnum): @@ -11,7 +11,7 @@ class E(IntEnum): TWO = 2 -@attr.define +@define class C: a: int b: float @@ -60,32 +60,32 @@ def test_unstructure_attrs_primitives(benchmark, converter_cls, unstructure_stra 1, 1.0, "a small string", - "test".encode(), + b"test", E.ONE, 2, 2.0, "a small string", - "test".encode(), + b"test", E.TWO, 3, 3.0, "a small string", - "test".encode(), + b"test", E.ONE, 4, 4.0, "a small string", - "test".encode(), + b"test", E.TWO, 5, 5.0, "a small string", - "test".encode(), + b"test", E.ONE, 6, 6.0, "a small string", - "test".encode(), + b"test", E.TWO, ), ) @@ -104,32 +104,32 @@ def test_structure_attrs_primitives(benchmark, converter_cls, unstructure_strat) 1, 1.0, "a small string", - "test".encode(), + b"test", E.ONE, 2, 2.0, "a small string", - "test".encode(), + b"test", E.TWO, 3, 3.0, "a small string", - "test".encode(), + b"test", E.ONE, 4, 4.0, "a small string", - "test".encode(), + b"test", E.TWO, 5, 5.0, "a small string", - "test".encode(), + b"test", E.ONE, 6, 6.0, "a small string", - "test".encode(), + b"test", E.TWO, ) diff --git a/bench/test_validators.py b/bench/test_validators.py new file mode 100644 index 00000000..a8238371 --- /dev/null +++ b/bench/test_validators.py @@ -0,0 +1,29 @@ +"""Benchmarks for validators.""" +import pytest +from attrs import define +from attrs import fields as f + +from cattrs import Converter +from cattrs.v import V, customize, greater_than, len_between + + +@define +class Small: + a: int + b: str + + +@pytest.mark.parametrize("dv", [True, False]) +def test_structure_success(dv: bool, benchmark): + c = Converter(detailed_validation=dv) + + hook = customize( + c, + Small, + V((fs := f(Small)).a).ensure(greater_than(10)), + V(fs.b).ensure(len_between(0, 10)), + ) + + d = {"a": 11, "b": "abcde"} + + benchmark(hook, d, None) diff --git a/docs/validation.md b/docs/validation.md index a059fe20..a6d0e133 100644 --- a/docs/validation.md +++ b/docs/validation.md @@ -1,14 +1,75 @@ # Validation -_cattrs_ has a detailed validation mode since version 22.1.0, and this mode is enabled by default. -When running under detailed validation, the structuring hooks are slightly slower but produce richer and more precise error messages. -Unstructuring hooks are not affected. +_cattrs_ supports _structuring_ since its initial release, and _validation_ since release 24.1. 
+ +**Structuring** is the process of ensuring data matches a set of Python types; +it can be thought of as validating data against structural constraints. +Structuring ensures the shape of your data. +Structuring ensures data typed as `list[int]` really contains a list of integers. + +**Validation** is the process of ensuring data matches a set of user-provided constraints; +it can be thought of as validating the value of data. +Validation happens after the shape of the data has been ensured. +Validation can ensure a `list[int]` contains at least one integer, and that all integers are positive. + +## (Value) Validation + +```{versionadded} 24.1.0 + +``` +```{note} _This API is still provisional; as such it is subject to breaking changes._ + +``` + +_cattrs_ can be configured to validate the values of your data (ensuring a list of integers has at least one member, and that all elements are positive). + +The basic unit of value validation is a function that takes a value and, if the value is unacceptable, either raises an exception or returns exactly `False`. +These functions are called _validators_. + +The attributes of _attrs_ classes can be validated with the use of a helper function, {func}`cattrs.v.customize`, and a helper class, {class}`cattrs.v.V`. +_V_ is the validation attribute, mapping to _attrs_ or _dataclass_ attributes. + +```python +from attrs import define +from cattrs import Converter +from cattrs.v import customize, V + +@define +class C: + a: int + +converter = Converter() + +customize(converter, C, V("a").ensure(lambda a: a > 0)) +``` + +Now, every structuring of class `C` will run the provided validator(s). + +```python +converter.structure({"a": -1}, C) +``` + +This process also works with dataclasses: + +```python +from dataclasses import dataclass + +@dataclass +class D: + a: int + +customize(converter, D, V("a").ensure(lambda a: a == 5)) +``` ## Detailed Validation ```{versionadded} 22.1.0 ``` +Detailed validation is enabled by default and can be disabled for a speed boost by creating a converter with `detailed_validation=False`. +When running under detailed validation, the structuring hooks are slightly slower but produce richer and more precise error messages. +Unstructuring hooks are not affected. + In detailed validation mode, any structuring errors will be grouped and raised together as a {class}`cattrs.BaseValidationError`, which is a [PEP 654 ExceptionGroup](https://www.python.org/dev/peps/pep-0654/). ExceptionGroups are special exceptions which contain lists of other exceptions, which may themselves be other ExceptionGroups. In essence, ExceptionGroups are trees of exceptions. diff --git a/pdm.lock b/pdm.lock index 53b69d49..f404caa3 100644 --- a/pdm.lock +++ b/pdm.lock @@ -2,10 +2,10 @@ # It is not intended for manual editing. 
[metadata] -groups = ["default", "bench", "bson", "cbor2", "docs", "lint", "msgpack", "msgspec", "orjson", "pyyaml", "test", "tomlkit", "ujson"] +groups = ["default", "bench", "bson", "cbor2", "docs", "lint", "msgpack", "orjson", "pyyaml", "test", "tomlkit", "ujson", "msgspec", "tests-mypy"] strategy = ["cross_platform"] lock_version = "4.4.1" -content_hash = "sha256:80497e8d5b756fc000f8a8b58b2ae6e6501168628e264daf7de6049fa45b096e" +content_hash = "sha256:596cc1abc11be8d512cfafc6e71950c3003c26181beab37d862052d883c0a77a" [[package]] name = "alabaster" @@ -307,6 +307,16 @@ files = [ {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] +[[package]] +name = "decorator" +version = "5.1.1" +requires_python = ">=3.5" +summary = "Decorators for Humans" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "dnspython" version = "2.4.2" @@ -690,6 +700,46 @@ files = [ {file = "msgspec-0.18.5.tar.gz", hash = "sha256:8e545651531f2d01b983d0ac0c7f3b6d99674267ff261b5f344f5016160b5608"}, ] +[[package]] +name = "mypy" +version = "1.8.0" +requires_python = ">=3.8" +summary = "Optional static typing for Python" +dependencies = [ + "mypy-extensions>=1.0.0", + "tomli>=1.1.0; python_version < \"3.11\"", + "typing-extensions>=4.1.0", +] +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1060,6 +1110,26 @@ files = [ {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, ] +[[package]] +name = "pytest-mypy-plugins" +version = "3.0.0" +requires_python = ">=3.8" +summary = "pytest plugin for writing tests for mypy plugins" +dependencies = [ + "Jinja2", + "decorator", + "mypy>=1.3", + "packaging", + "pytest>=7.0.0", + "pyyaml", + "regex", + "tomlkit>=0.11", +] +files = [ + {file = "pytest-mypy-plugins-3.0.0.tar.gz", hash = "sha256:05a728c7cbc4f33610f97fe9266b2c3eb209e41c28935011b4fc9531662625f6"}, + {file = "pytest_mypy_plugins-3.0.0-py3-none-any.whl", hash = "sha256:a1e3f51b68898bc25713cc53718a28d9dc0cfd51d28a537ef18c7df3b123ed84"}, +] + [[package]] name = "pytest-xdist" version = "3.4.0" @@ -1142,6 +1212,93 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "regex" +version = "2023.12.25" +requires_python = ">=3.7" +summary = "Alternative regular expression module, to replace re." 
+files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + 
{file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = 
"sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = 
"regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + [[package]] name = "requests" version = "2.31.0" diff --git a/pyproject.toml b/pyproject.toml index a5e8d140..162a61f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ lint = [ test = [ "hypothesis>=6.79.4", "pytest>=7.4.0", - "pytest-benchmark>=4.0.0", "immutables>=0.20", "typing-extensions>=4.7.1", "coverage>=7.4.0", @@ -26,6 +25,11 @@ docs = [ ] bench = [ "pyperf>=2.6.1", + "pytest-benchmark>=4.0.0", +] +tests-mypy = [ + "pytest-mypy-plugins>=3.0.0", + "mypy>=1.7.1", ] [build-system] @@ -99,7 +103,7 @@ msgspec = [ ] [tool.pytest.ini_options] -addopts = "-l --benchmark-sort=fullname --benchmark-warmup=true --benchmark-warmup-iterations=5 --benchmark-group-by=fullname" +addopts = "-l" [tool.coverage.run] parallel = true @@ -109,6 +113,8 @@ source_pkgs = ["cattrs", "tests"] exclude_also = [ "@overload", "if TYPE_CHECKING:", + "class .*\\bProtocol(\\[.*\\])?\\):", + "assert_never\\(" ] [tool.ruff] diff --git a/src/cattrs/__init__.py b/src/cattrs/__init__.py index db496363..e5eb144b 100644 --- a/src/cattrs/__init__.py +++ b/src/cattrs/__init__.py @@ -11,20 +11,21 @@ StructureHandlerNotFoundError, ) from .gen import override -from .v import transform_error +from .v import ensure, transform_error __all__ = [ - "structure", - "unstructure", + "ensure", "get_structure_hook", "get_unstructure_hook", + "global_converter", "register_structure_hook_func", "register_structure_hook", "register_unstructure_hook_func", "register_unstructure_hook", "structure_attrs_fromdict", "structure_attrs_fromtuple", - "global_converter", + "structure", + "unstructure", "BaseConverter", "Converter", "AttributeValidationNote", diff --git a/src/cattrs/_compat.py 
b/src/cattrs/_compat.py index bad9d037..3d31f6c8 100644 --- a/src/cattrs/_compat.py +++ b/src/cattrs/_compat.py @@ -32,10 +32,12 @@ from typing import Sequence as TypingSequence from typing import Set as TypingSet -from attrs import NOTHING, Attribute, Factory, resolve_types +from attrs import NOTHING, Attribute, AttrsInstance, Factory, resolve_types from attrs import fields as attrs_fields from attrs import fields_dict as attrs_fields_dict +from ._types import DataclassLike + __all__ = [ "ANIES", "adapted_fields", @@ -131,7 +133,9 @@ def fields(type): return dataclass_fields(type) -def fields_dict(type) -> Dict[str, Union[Attribute, Field]]: +def fields_dict( + type: Union[Type[AttrsInstance], Type[DataclassLike]] +) -> Dict[str, Union[Attribute, Field]]: """Return the fields_dict for attrs and dataclasses.""" if is_dataclass(type): return {f.name: f for f in dataclass_fields(type)} diff --git a/src/cattrs/_types.py b/src/cattrs/_types.py new file mode 100644 index 00000000..834486d4 --- /dev/null +++ b/src/cattrs/_types.py @@ -0,0 +1,58 @@ +"""Types for internal use.""" + +from __future__ import annotations + +from dataclasses import Field +from types import FrameType, TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Tuple, + Type, + TypeVar, + Union, + final, +) + +from typing_extensions import LiteralString, Protocol, TypeAlias + +ExcInfo: TypeAlias = Tuple[Type[BaseException], BaseException, TracebackType] +OptExcInfo: TypeAlias = Union[ExcInfo, Tuple[None, None, None]] + +# Superset of typing.AnyStr that also includes LiteralString +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) + +# Represents when str or LiteralStr is acceptable. Useful for string processing +# APIs where literalness of return value depends on literalness of inputs +StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) + +# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar +ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] + +# Objects suitable to be passed to sys.settrace, threading.settrace, and similar +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], Union["TraceFunction", None]] + + +# Copied over from https://github.com/hauntsaninja/useful_types/blob/main/useful_types/experimental.py +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +@final +class DataclassLike(Protocol): + """Abstract base class for all dataclass types. + + Mainly useful for type-checking. + """ + + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] = {} + + # we don't want type checkers thinking this is a protocol member; it isn't + if not TYPE_CHECKING: + + def __init_subclass__(cls): + raise TypeError( + "Use the @dataclass decorator to create dataclasses, " + "rather than subclassing dataclasses.DataclassLike" + ) diff --git a/src/cattrs/errors.py b/src/cattrs/errors.py index 9148bf10..54ffb9a7 100644 --- a/src/cattrs/errors.py +++ b/src/cattrs/errors.py @@ -45,26 +45,24 @@ def __getnewargs__(self) -> Tuple[str, Union[int, str], Any]: class IterableValidationError(BaseValidationError): - """Raised when structuring an iterable.""" + """Raised when structuring an iterable. 
- def group_exceptions( - self, - ) -> Tuple[List[Tuple[Exception, IterableValidationNote]], List[Exception]]: - """Split the exceptions into two groups: with and without validation notes.""" + If instantiating this error manually (outside of cattrs), ensure every + subexception has an appropriate IterableValidationNote note in its notes. + """ + + def group_exceptions(self) -> List[Tuple[Exception, IterableValidationNote]]: + """Group up the subexceptions alongside their IV notes.""" excs_with_notes = [] - other_excs = [] for subexc in self.exceptions: - if hasattr(subexc, "__notes__"): - for note in subexc.__notes__: - if note.__class__ is IterableValidationNote: - excs_with_notes.append((subexc, note)) - break - else: - other_excs.append(subexc) + for note in subexc.__notes__: + if note.__class__ is IterableValidationNote: + excs_with_notes.append((subexc, note)) + break else: - other_excs.append(subexc) + raise AttributeError("Subexceptions require notes") - return excs_with_notes, other_excs + return excs_with_notes class AttributeValidationNote(str): @@ -127,3 +125,7 @@ def __init__( message or f"Extra fields in constructor for {cln}: {', '.join(extra_fields)}" ) + + +class ValueValidationError(BaseValidationError): + """Raised when a custom value validator fails under detailed validation.""" diff --git a/src/cattrs/v.py b/src/cattrs/v/__init__.py similarity index 55% rename from src/cattrs/v.py rename to src/cattrs/v/__init__.py index c3ab18cc..89052c7e 100644 --- a/src/cattrs/v.py +++ b/src/cattrs/v/__init__.py @@ -1,14 +1,49 @@ """Cattrs validation.""" -from typing import Callable, List, Union +from typing import Any, Callable, Dict, List, Tuple, Type, TypeVar, Union, overload -from .errors import ( +from attrs import NOTHING, frozen + +from .._compat import Annotated, ExceptionGroup +from ..errors import ( ClassValidationError, ForbiddenExtraKeysError, IterableValidationError, + ValueValidationError, +) +from ._fluent import V, customize +from ._validators import ( + between, + for_all, + greater_than, + ignoring_none, + is_unique, + len_between, ) -__all__ = ["format_exception", "transform_error"] +__all__ = [ + "between", + "customize", + "for_all", + "format_exception", + "greater_than", + "ignoring_none", + "is_unique", + "len_between", + "transform_error", + "V", + "ValidatorFactory", +] + + +@frozen +class VAnnotation: + """Use this with Annotated to get validation.""" + + validators: Tuple[Callable[[Any], Any]] + + def __init__(self, *validators: Callable[[Any], Any]): + self.__attrs_init__(validators) def format_exception(exc: BaseException, type: Union[type, None]) -> str: @@ -28,7 +63,9 @@ def format_exception(exc: BaseException, type: Union[type, None]) -> str: elif isinstance(exc, ValueError): if type is not None: tn = type.__name__ if hasattr(type, "__name__") else repr(type) - res = f"invalid value for type, expected {tn}" + res = f"invalid value for type, expected {tn} ({exc.args[0]})" + elif exc.args: + res = f"invalid value ({exc.args[0]})" else: res = "invalid value" elif isinstance(exc, TypeError): @@ -60,7 +97,12 @@ def format_exception(exc: BaseException, type: Union[type, None]) -> str: def transform_error( - exc: Union[ClassValidationError, IterableValidationError, BaseException], + exc: Union[ + ClassValidationError, + IterableValidationError, + ValueValidationError, + BaseException, + ], path: str = "$", format_exception: Callable[ [BaseException, Union[type, None]], str @@ -86,27 +128,75 @@ def transform_error( .. 
versionadded:: 23.1.0 """ - errors = [] + errors: List[str] = [] if isinstance(exc, IterableValidationError): - with_notes, without = exc.group_exceptions() - for exc, note in with_notes: + for e, note in exc.group_exceptions(): p = f"{path}[{note.index!r}]" - if isinstance(exc, (ClassValidationError, IterableValidationError)): - errors.extend(transform_error(exc, p, format_exception)) + if isinstance(e, (ClassValidationError, IterableValidationError)): + errors.extend(transform_error(e, p, format_exception)) else: - errors.append(f"{format_exception(exc, note.type)} @ {p}") - for exc in without: - errors.append(f"{format_exception(exc, None)} @ {path}") + errors.append(f"{format_exception(e, note.type)} @ {p}") elif isinstance(exc, ClassValidationError): with_notes, without = exc.group_exceptions() for exc, note in with_notes: p = f"{path}.{note.name}" - if isinstance(exc, (ClassValidationError, IterableValidationError)): + if isinstance(exc, ExceptionGroup): errors.extend(transform_error(exc, p, format_exception)) else: errors.append(f"{format_exception(exc, note.type)} @ {p}") for exc in without: errors.append(f"{format_exception(exc, None)} @ {path}") + elif isinstance(exc, ValueValidationError): + # This is a value validation error, which we should just flatten. + for inner in exc.exceptions: + errors.append(f"{format_exception(inner, None)} @ {path}") + elif isinstance(exc, ExceptionGroup): + # Likely from a nested validator, needs flattening. + errors.extend( + [ + line + for inner in exc.exceptions + for line in transform_error(inner, path, format_exception) + ] + ) else: errors.append(f"{format_exception(exc, None)} @ {path}") return errors + + +T = TypeVar("T") +E = TypeVar("E") +TV = TypeVar("TV") + + +@overload +def ensure( + type: Type[List[T]], *validators: Callable[[List[T]], Any], elems: Type[E] +) -> Type[List[E]]: ... + + +@overload +def ensure( + type: Type[Dict], + *validators: Callable[[Dict], Any], + keys: Type[E], + values: Type[TV], +) -> Type[Dict[E, TV]]: ... + + +@overload +def ensure(type: Type[T], *validators: Callable[[T], Any]) -> Type[T]: ... + + +def ensure(type, *validators, elems=NOTHING, keys=NOTHING, values=NOTHING): + """Ensure validators run when structuring the given type.""" + if elems is not NOTHING: + # These are lists. + if not validators: + return type[elems] + return Annotated[type[elems], VAnnotation(*validators)] + if keys is not NOTHING or values is not NOTHING: + if not validators: + return type[keys, values] + return Annotated[type[keys, values], VAnnotation(*validators)] + return Annotated[type, VAnnotation(*validators)] diff --git a/src/cattrs/v/_fluent.py b/src/cattrs/v/_fluent.py new file mode 100644 index 00000000..6db59a27 --- /dev/null +++ b/src/cattrs/v/_fluent.py @@ -0,0 +1,232 @@ +"""The fluent validation API.""" + +from __future__ import annotations + +from typing import Any, Callable, Generic, Literal, Sequence, TypeVar + +try: + from typing import assert_never +except ImportError: + from typing_extensions import assert_never + +try: + from typing import TypeGuard +except ImportError: + from typing_extensions import TypeGuard + +from inspect import signature + +from attrs import Attribute, AttrsInstance, define +from attrs import fields as f + +from .. 
import BaseConverter +from .._compat import ExceptionGroup, fields_dict, get_origin +from .._types import DataclassLike +from ..dispatch import StructureHook +from ..gen import make_dict_structure_fn, override +from ._types import Validator, ValidatorFactory +from .fns import invalid_value + +T = TypeVar("T") + + +@define +class VOmitted: + """This attribute has been marked for omission. + + The class contains no methods. + """ + + attr: str + + +@define +class VRenamed(Generic[T]): + """This attribute has been renamed. + + This class has no `omit` and no `rename`. + """ + + attr: Attribute[T] | str + new_name: str + + def ensure( + self: VRenamed[T], + validator: Validator[T] | ValidatorFactory[T], + *validators: Validator[T] | ValidatorFactory[T], + ) -> VCustomized[T]: + return VCustomized( + self.attr if isinstance(self.attr, str) else self.attr.name, + self.new_name, + (validator, *validators), + ) + + +@define +class VCustomized(Generic[T]): + """This attribute has been customized. + + This class has no `omit`. + """ + + attr: str + new_name: str | None + validators: tuple[Callable[[T], None | bool] | ValidatorFactory[T], ...] = () + + +@define +class V(Generic[T]): + """ + The cattrs.v validation attribute. + + Instances are initialized from strings or `attrs.Attribute`s. + + One V attribute maps directly to each class attribute. + """ + + def __init__(self, attr: Attribute[T] | str) -> None: + self.attr = attr + self.validators = () + + attr: Attribute[T] | str + validators: tuple[Callable[[T], None | bool] | ValidatorFactory[T], ...] = () + + def ensure( + self: V[T], + validator: Validator[T] | ValidatorFactory[T], + *validators: Validator[T] | ValidatorFactory[T], + ) -> VCustomized[T]: + return VCustomized(self.attr, None, (*self.validators, validator, *validators)) + + def rename(self: V[T], new_name: str) -> VRenamed[T]: + """Rename the attribute after processing.""" + return VRenamed(self.attr, new_name) + + def omit(self) -> VOmitted: + """Omit the attribute.""" + return VOmitted(self.attr if isinstance(self.attr, str) else self.attr.name) + + +def _is_validator_factory( + validator: Callable[[Any], None | bool] | ValidatorFactory[T] +) -> TypeGuard[ValidatorFactory[T]]: + """Figure out if this is a validator factory or not.""" + sig = signature(validator) + ra = sig.return_annotation + return ( + ra.startswith("Validator") + if isinstance(ra, str) + else get_origin(ra) is Validator + ) + + +def _compose_validators( + base_structure: StructureHook, + validators: Sequence[Callable[[Any], None | bool] | ValidatorFactory], + detailed_validation: bool, +) -> Callable[[Any, Any], Any]: + """Produce a hook composing the base structuring hook and additional validators. + + The validators will run only if the base structuring succeeds; no point otherwise. + + The new hook will raise an ExceptionGroup. 
+ """ + bs = base_structure + final_validators = [] + for val in validators: + if _is_validator_factory(val): + final_validators.append(val(detailed_validation)) + else: + final_validators.append(val) + + if detailed_validation: + + def structure_hook( + val: dict[str, Any], t: Any, _hooks=final_validators, _bs=bs + ) -> Any: + res = _bs(val, t) + errors: list[Exception] = [] + for hook in _hooks: + try: + if hook(val) is False: + invalid_value(val) + except Exception as exc: + errors.append(exc) + if errors: + raise ExceptionGroup("Validation errors structuring {}", errors) + return res + + else: + + def structure_hook( + val: dict[str, Any], t: Any, _hooks=final_validators, _bs=bs + ) -> Any: + res = _bs(val, t) + for hook in _hooks: + if hook(val) is False: + invalid_value(val) + return res + + return structure_hook + + +def customize( + converter: BaseConverter, + cl: type[AttrsInstance] | type[DataclassLike], + *fields: VCustomized[Any] | VRenamed[Any] | VOmitted, + detailed_validation: bool | Literal["from_converter"] = "from_converter", + forbid_extra_keys: bool | Literal["from_converter"] = "from_converter", +) -> StructureHook: + """Customize the structuring process for an attrs class. + + :param converter: The converter to fetch subhooks from, and to which the + customization will be applied to. + :param cl: The _attrs_ class to be customized. + :param fields: The fields to apply customizations to. + :param detailed_validation: Whether to enable detailed validation. + :param forbid_extra_keys: Whether to check for extra keys when structuring. + + .. versionadded:: 24.1.0 + """ + seen = set() + overrides = {} + if detailed_validation == "from_converter": + detailed_validation = converter.detailed_validation + for field in fields: + field_name = field.attr if isinstance(field.attr, str) else field.attr.name + if field_name in seen: + raise TypeError(f"Duplicate customization for field {field_name}") + + if isinstance(field.attr, str): + try: + attribute = fields_dict(cl)[field.attr] + except KeyError: + raise TypeError(f"Class {cl} has no field {field}") from None + else: + attribute = field.attr + + if not isinstance(field.attr, str) and field.attr is not getattr( + f(cl), field.attr.name + ): + raise TypeError(f"Customizing {cl}, but {field} is from a different class") + seen.add(field_name) + if isinstance(field, VOmitted): + overrides[field_name] = override(omit=True) + elif isinstance(field, VRenamed): + overrides[field_name] = override(rename=field.new_name) + elif isinstance(field, VCustomized): + base_hook = converter._structure_func.dispatch(attribute.type) + hook = _compose_validators(base_hook, field.validators, detailed_validation) + overrides[field_name] = override(rename=field.new_name, struct_hook=hook) + else: + # The match is exhaustive. + assert_never(field) + res = make_dict_structure_fn( + cl, + converter, + _cattrs_detailed_validation=detailed_validation, + _cattrs_forbid_extra_keys=forbid_extra_keys, + **overrides, + ) + converter.register_structure_hook(cl, res) + return res diff --git a/src/cattrs/v/_hooks.py b/src/cattrs/v/_hooks.py new file mode 100644 index 00000000..ac1e9e23 --- /dev/null +++ b/src/cattrs/v/_hooks.py @@ -0,0 +1,65 @@ +"""Hooks and hook factories for validation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from .._compat import Annotated, is_annotated +from ..dispatch import StructureHook +from ..errors import ValueValidationError +from . 
import VAnnotation +from .fns import invalid_value + +if TYPE_CHECKING: + from ..converters import BaseConverter + + +def get_validator_annotation(type: Any) -> tuple[VAnnotation, Any] | None: + if is_annotated(type): + args = type.__metadata__ + for arg in args: + if isinstance(arg, VAnnotation): + new_args = tuple(a for a in args[1:] if a is not arg) + if new_args: + return Annotated(type.__origin__, *new_args) # type: ignore + return arg, type.__origin__ + return None + + +def is_validated(type: Any) -> bool: + """The predicate for validated annotations.""" + return get_validator_annotation(type) is not None + + +def validator_factory(type: Any, converter: BaseConverter) -> StructureHook: + res = get_validator_annotation(type) + assert res is not None + val_annotation, type = res + + base_hook = converter.get_structure_hook(type) + + if converter.detailed_validation: + + def validating_hook(val: Any, _: Any) -> Any: + res = base_hook(val, type) + errors = [] + for validator in val_annotation.validators: + try: + if validator(res) is False: + invalid_value(res) + except Exception as exc: + errors.append(exc) + if errors: + raise ValueValidationError("Value validation failed", errors, type) + return res + + else: + + def validating_hook(val: Any, _: Any) -> Any: + res = base_hook(val, type) + for validator in val_annotation.validators: + if validator(res) is False: + invalid_value(res) + return res + + return validating_hook diff --git a/src/cattrs/v/_types.py b/src/cattrs/v/_types.py new file mode 100644 index 00000000..6538e272 --- /dev/null +++ b/src/cattrs/v/_types.py @@ -0,0 +1,7 @@ +from typing import Any, Callable, TypeAlias, TypeVar + +#: Value validators take a single value and return a single value. +T = TypeVar("T") +Validator: TypeAlias = Callable[[T], Any] + +ValidatorFactory: TypeAlias = Callable[[bool], Validator[T]] diff --git a/src/cattrs/v/_validators.py b/src/cattrs/v/_validators.py new file mode 100644 index 00000000..0bd0751a --- /dev/null +++ b/src/cattrs/v/_validators.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from collections.abc import Hashable, Iterable +from typing import Callable, Collection, Protocol, Sized, TypeVar + +from .._compat import ExceptionGroup +from ..errors import IterableValidationError, IterableValidationNote +from ._types import Validator, ValidatorFactory + +T = TypeVar("T") + + +class Comparable(Protocol[T]): + def __lt__(self: T, other: T) -> bool: ... + + def __eq__(self: T, other: T) -> bool: ... 
+ + +C = TypeVar("C", bound=Comparable) + + +def greater_than(min: C) -> Callable[[C], None]: + def assert_gt(val: C, _min: C = min) -> None: + if _min >= val: + raise ValueError(f"{val} not greater than {_min}") + + return assert_gt + + +def between(min: C, max: C) -> Callable[[C], None]: + """Ensure the value of the attribute is between min (inclusive) and max (exclusive).""" + + def assert_between(val: C, _min: C = min, _max: C = max) -> None: + if val < _min or val >= _max: + raise ValueError(f"{val} not between {_min} and {_max}") + + return assert_between + + +def len_between(min: int, max: int) -> Callable[[Sized], None]: + """Ensure the length of the argument is between min (inclusive) and max (exclusive).""" + + def assert_len_between(val: Sized, _min: int = min, _max: int = max) -> None: + length = len(val) + if not (_min <= length < _max): + raise ValueError(f"length ({length}) not between {_min} and {_max}") + + return assert_len_between + + +def is_unique(val: Collection[Hashable]) -> None: + """Ensure all elements in a collection are unique.""" + if (length := len(val)) != (unique_length := len(set(val))): + raise ValueError( + f"Collection ({length} elem(s)) not unique, only {unique_length} unique elem(s)" + ) + + +def ignoring_none( + validator: Callable[[T], None], *validators: Callable[[T], None] +) -> ValidatorFactory[T | None]: + """ + Wrap validators with this so they can be applied to types that include `None`. + + Values that are equal to `None` are passed through. + """ + + validators = (validator, *validators) + + def factory(detailed_validation: bool) -> Validator[T | None]: + if detailed_validation: + + def skip_none(val: T | None, _validators=validators) -> None: + if val is None: + return + errors = [] + for validator in _validators: + try: + validator(val) + except Exception as exc: + errors.append(exc) + if errors: + raise ExceptionGroup("", errors) + + else: + + def skip_none(val: T | None, _validators=validators) -> None: + if val is None: + return + for validator in _validators: + validator(val) + + return skip_none + + return factory + + +def for_all( + validator: Callable[[T], None | bool], *validators: Callable[[T], None | bool] +) -> ValidatorFactory[Iterable[T]]: + """A helper validator included with cattrs. + + Run all the given validators against all members of the + iterable. 
+ """ + + validators = (validator, *validators) + + def factory(detailed_validation: bool) -> Validator[Iterable[T]]: + if detailed_validation: + + def assert_all_elements(val: Iterable[T], _validators=validators) -> None: + errors = [] + ix = 0 + for e in val: + try: + for v in _validators: + try: + if v(e) is False: + raise ValueError() + except Exception as exc: + exc.__notes__ = [ + *getattr(exc, "__notes__", []), + IterableValidationNote( + f"Validating @ index {ix}", ix, None + ), + ] + errors.append(exc) + finally: + ix += 1 + if errors: + raise IterableValidationError("", errors, val.__class__) + + else: + + def assert_all_elements(val: Iterable[T], _validators=validators) -> None: + for e in val: + for v in _validators: + if v(e) is False: + raise ValueError() + + return assert_all_elements + + return factory diff --git a/src/cattrs/v/fns.py b/src/cattrs/v/fns.py new file mode 100644 index 00000000..4fff84c6 --- /dev/null +++ b/src/cattrs/v/fns.py @@ -0,0 +1,6 @@ +from typing import Never + + +def invalid_value(val) -> Never: + """Called with an invalid value when a value validator returns `False`.""" + raise ValueError(f"Validation failed for {val}") diff --git a/tests/__init__.py b/tests/__init__.py index 9d678465..9a26decb 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -12,4 +12,4 @@ if "CI" in os.environ: settings.load_profile("CI") -unstructure_strats = one_of(just(s) for s in UnstructureStrategy) +unstructure_strats = one_of([just(s) for s in UnstructureStrategy]) diff --git a/tests/test_converter_typing.yml b/tests/test_converter_typing.yml new file mode 100644 index 00000000..da89bced --- /dev/null +++ b/tests/test_converter_typing.yml @@ -0,0 +1,9 @@ +- case: sequence_structuring + main: | + from typing import Sequence + from cattrs import Converter + + c = Converter() + + # Maybe one day! 
+ c.structure([], Sequence[int]) # E: Only concrete class can be given where "type[Sequence[int]]" is expected [type-abstract] diff --git a/tests/v/__init__.py b/tests/v/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/v/conftest.py b/tests/v/conftest.py new file mode 100644 index 00000000..86cc4cd5 --- /dev/null +++ b/tests/v/conftest.py @@ -0,0 +1,9 @@ +from pytest import fixture + +from cattrs import Converter + + +@fixture +def c() -> Converter: + """We need only converters with detailed_validation=True.""" + return Converter() diff --git a/tests/v/test_ensure.py b/tests/v/test_ensure.py new file mode 100644 index 00000000..5cfbf47d --- /dev/null +++ b/tests/v/test_ensure.py @@ -0,0 +1,156 @@ +"""Tests for `cattrs.v.ensure`.""" + +import sys +from typing import Dict, List, MutableSequence, Sequence + +from pytest import fixture, mark, raises + +from cattrs import BaseConverter +from cattrs._compat import ExceptionGroup +from cattrs.errors import IterableValidationError, ValueValidationError +from cattrs.v import ensure, transform_error +from cattrs.v._hooks import is_validated, validator_factory + + +@fixture +def valconv(converter) -> BaseConverter: + converter.register_structure_hook_factory(is_validated, validator_factory) + return converter + + +def test_ensured_ints(valconv: BaseConverter): + """Validation for primitives works.""" + assert valconv.structure("5", ensure(int, lambda i: i > 0)) + + with raises(Exception) as exc: + valconv.structure("-5", ensure(int, lambda i: i > 0)) + + if valconv.detailed_validation: + assert isinstance(exc.value, ExceptionGroup) + assert isinstance(exc.value.exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + +def test_ensured_lists(valconv: BaseConverter): + """Validation for lists works.""" + assert valconv.structure([1, 2], ensure(List[int], lambda lst: len(lst) > 0)) + + with raises(Exception) as exc: + valconv.structure([], ensure(List[int], lambda lst: len(lst) > 0)) + + if valconv.detailed_validation: + assert isinstance(exc.value, ValueValidationError) + assert isinstance(exc.value.exceptions[0], ValueError) + assert transform_error(exc.value) == [ + "invalid value (Validation failed for []) @ $" + ] + else: + assert isinstance(exc.value, ValueError) + + +@mark.parametrize("type", [List, Sequence, MutableSequence]) +def test_ensured_list_elements(valconv: BaseConverter, type): + """Validation for list elements works.""" + assert valconv.structure([1, 2], ensure(type, elems=ensure(int, lambda i: i > 0))) + + with raises(Exception) as exc: + valconv.structure([1, -2], ensure(type, elems=ensure(int, lambda i: i > 0))) + + if valconv.detailed_validation: + assert isinstance(exc.value, IterableValidationError) + assert isinstance(exc.value.exceptions[0], ExceptionGroup) + assert isinstance(exc.value.exceptions[0].exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + # Now both elements and the list itself. 
+ assert valconv.structure( + [1, 2], + ensure(type, lambda lst: len(lst) < 3, elems=ensure(int, lambda i: i > 0)), + ) + + with raises(Exception) as exc: + valconv.structure( + [1, 2, 3], + ensure(type, lambda lst: len(lst) < 3, elems=ensure(int, lambda i: i > 0)), + ) + + if valconv.detailed_validation: + assert isinstance(exc.value, ValueValidationError) + assert isinstance(exc.value.exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + with raises(Exception) as exc: + valconv.structure( + [1, -2], + ensure(type, lambda lst: len(lst) < 3, elems=ensure(int, lambda i: i > 0)), + ) + + if valconv.detailed_validation: + assert isinstance(exc.value, IterableValidationError) + assert isinstance(exc.value.exceptions[0], ExceptionGroup) + assert isinstance(exc.value.exceptions[0].exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + +def test_ensured_typing_list(valconv: BaseConverter): + """Ensure works for typing lists.""" + assert valconv.structure([1, 2], ensure(List, elems=ensure(int, lambda i: i > 0))) + + with raises(Exception) as exc: + valconv.structure([1, -2], ensure(List, elems=ensure(int, lambda i: i > 0))) + + if valconv.detailed_validation: + assert isinstance(exc.value, IterableValidationError) + assert isinstance(exc.value.exceptions[0], ExceptionGroup) + assert isinstance(exc.value.exceptions[0].exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + +@mark.skipif(sys.version_info[:2] < (3, 10), reason="Not supported on older Pythons") +def test_ensured_list(valconv: BaseConverter): + """Ensure works for builtin lists.""" + assert valconv.structure([1, 2], ensure(list, elems=ensure(int, lambda i: i > 0))) + + with raises(Exception) as exc: + valconv.structure([1, -2], ensure(list, elems=ensure(int, lambda i: i > 0))) + + if valconv.detailed_validation: + assert isinstance(exc.value, IterableValidationError) + assert isinstance(exc.value.exceptions[0], ExceptionGroup) + assert isinstance(exc.value.exceptions[0].exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + +def test_ensured_typing_dict(valconv: BaseConverter): + """Ensure works for typing.Dicts.""" + assert valconv.structure( + {"a": 1}, ensure(Dict, lambda d: len(d) > 0, keys=str, values=int) + ) + + with raises(Exception) as exc: + valconv.structure({}, ensure(Dict, lambda d: len(d) > 0, keys=str, values=int)) + + if valconv.detailed_validation: + assert isinstance(exc.value, ValueValidationError) + assert isinstance(exc.value.exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) + + with raises(Exception) as exc: + valconv.structure( + {"b": 1, "c": "a"}, + ensure(Dict, keys=ensure(str, lambda s: s.startswith("a")), values=int), + ) + + if valconv.detailed_validation: + assert isinstance(exc.value, IterableValidationError) + assert isinstance(exc.value.exceptions[0], ExceptionGroup) + assert isinstance(exc.value.exceptions[0].exceptions[0], ValueError) + else: + assert isinstance(exc.value, ValueError) diff --git a/tests/v/test_ensure_typing.yml b/tests/v/test_ensure_typing.yml new file mode 100644 index 00000000..0b0e0756 --- /dev/null +++ b/tests/v/test_ensure_typing.yml @@ -0,0 +1,49 @@ +- case: int_validation + main: | + from cattrs import v, Converter, ensure + + c = Converter() + + reveal_type(c.structure("5", ensure(int))) # N: Revealed type is "builtins.int" + + reveal_type(c.structure("5", ensure(int, lambda i: i > 5))) # N: Revealed type is "builtins.int" + + 
reveal_type(c.structure("5", ensure(int, v.greater_than(5)))) # N: Revealed type is "builtins.int" + + c.structure("5", ensure(int, lambda i: len(i) > 5)) # E: Argument 1 to "len" has incompatible type "int"; expected "Sized" [arg-type] +- case: list_validation + main: | + from typing import List + + from cattrs import v, Converter, ensure + + c = Converter() + + reveal_type(c.structure([], ensure(List[int]))) # N: Revealed type is "builtins.list[builtins.int]" + + reveal_type(c.structure([], ensure(List[int], lambda lst: len(lst) > 0))) # N: Revealed type is "builtins.list[builtins.int]" + + reveal_type(c.structure([], ensure(List[int], v.len_between(1, 5)))) # N: Revealed type is "builtins.list[builtins.int]" +- case: list_and_int_validation + main: | + from typing import List + + from cattrs import Converter, ensure + + c = Converter() + + reveal_type(c.structure([], ensure(List, elems=ensure(int)))) # N: Revealed type is "builtins.list[builtins.int]" + reveal_type(c.structure([], ensure(List, elems=ensure(int, lambda i: i > 5)))) # N: Revealed type is "builtins.list[builtins.int]" + + # Quite unfortunate this doesn't work. + c.structure([], List[ensure(int), lambda i: i > 5]) # E: Type expected within [...] [misc] # E: The type "type[list[Any]]" is not generic and not indexable [misc] +- case: sequence_and_int_validation + main: | + from typing import Sequence + + from cattrs import Converter, ensure + + c = Converter() + + # This doesn't work because of no TypeForm. + c.structure([], ensure(Sequence, elems=ensure(int))) # E: Argument 1 to "ensure" has incompatible type "type[Sequence[Any]]"; expected "type[list[Never]]" [arg-type] \ No newline at end of file diff --git a/tests/v/test_fluent.py b/tests/v/test_fluent.py new file mode 100644 index 00000000..cd9bfa9c --- /dev/null +++ b/tests/v/test_fluent.py @@ -0,0 +1,258 @@ +"""Tests for the fluent validation API.""" + +from dataclasses import dataclass +from typing import Dict, List, Union + +from attrs import Factory, define, evolve +from attrs import fields as f +from pytest import fixture, raises + +from cattrs import BaseConverter, ClassValidationError, Converter +from cattrs.v import V, customize, greater_than, transform_error + + +@fixture +def c(converter: BaseConverter) -> BaseConverter: + converter.register_structure_hook( + Union[str, int], lambda v, _: v if isinstance(v, int) else str(v) + ) + + return converter + + +@define +class Model: + """The class we want to validate, with an assortment of fields.""" + + a: int + b: str + c: List[str] = Factory(list) + d: List[int] = Factory(list) + e: Union[str, None] = None + f: Union[int, None] = None + g: Union[str, int] = 0 + h: Dict[str, int] = Factory(dict) + + +@dataclass +class DataclassModel: + """A dataclass we want to validate.""" + + a: int + b: str + + +def is_lowercase(val: str) -> None: + """Probably the simplest possible validator, only takes a string.""" + if val != val.lower(): + raise ValueError(f"{val!r} not lowercase") + + +def is_email(val: str) -> None: + """A custom validator, not in cattrs. + + It just takes a value and maybe raises, simple as that. 
+ """ + if "@" not in val: + raise ValueError(f"{val!r} is not a valid email") + + +def test_roundtrip(c: Converter) -> None: + """Test models can roundtrip.""" + customize(c, Model) + + instance = Model(1, "1", ["1"], [1], "", 0, 0, {"a": 1}) + + assert instance == c.structure(c.unstructure(instance), Model) + + +def test_omit(c: Converter) -> None: + """Omitting a field works.""" + customize(c, Model, V(f(Model).c).omit()) + + instance = Model(1, "1", ["1"], [1], "", 0, 0, {"a": 1}) + + assert evolve(instance, c=[]) == c.structure(c.unstructure(instance), Model) + + +def test_rename(c: Converter) -> None: + """Renaming a field works.""" + customize(c, Model, V(f(Model).c).rename("C")) + + instance = Model(1, "1", ["1"], [1], "", 0, 0, {"a": 1}) + + unstructured = c.unstructure(instance) + unstructured["C"] = unstructured["c"].pop() + + assert c.structure(unstructured, Model) == instance + + +def test_rename_also_validates(c: Converter) -> None: + """Renaming a field and validating works.""" + customize(c, Model, V(f(Model).b).rename("B").ensure(is_lowercase)) + + instance = Model(1, "A", ["1"], [1], "", 0, 0, {"a": 1}) + + unstructured = c.unstructure(instance) + + # Customize only affects structuring currently. + unstructured["B"] = unstructured.pop("b") + + if c.detailed_validation: + with raises(ClassValidationError) as exc_info: + c.structure(unstructured, Model) + + assert transform_error(exc_info.value) == [ + "invalid value ('A' not lowercase) @ $.b" + ] + else: + with raises(ValueError) as exc_info: + c.structure(unstructured, Model) + + assert repr(exc_info.value) == "ValueError(\"'A' not lowercase\")" + + unstructured["B"] = instance.b = "a" + assert instance == c.structure(unstructured, Model) + + +def test_simple_string_validation(c: Converter) -> None: + """Simple string validation works.""" + customize(c, Model, V(f(Model).b).ensure(is_lowercase)) + + instance = Model(1, "A", ["1"], [1], "", 0, 0, {"a": 1}) + + unstructured = c.unstructure(instance) + + if c.detailed_validation: + with raises(ClassValidationError) as exc_info: + c.structure(unstructured, Model) + + assert transform_error(exc_info.value) == [ + "invalid value ('A' not lowercase) @ $.b" + ] + else: + with raises(ValueError) as exc_info: + c.structure(unstructured, Model) + + assert repr(exc_info.value) == "ValueError(\"'A' not lowercase\")" + + instance.b = "a" + assert instance == c.structure(c.unstructure(instance), Model) + + +def test_simple_string_validation_dc(c: Converter) -> None: + """Simple string validation works for dataclasses.""" + customize(c, DataclassModel, V("b").ensure(is_lowercase)) + + instance = DataclassModel(1, "A") + + unstructured = c.unstructure(instance) + + if c.detailed_validation: + with raises(ClassValidationError) as exc_info: + c.structure(unstructured, DataclassModel) + + assert transform_error(exc_info.value) == [ + "invalid value ('A' not lowercase) @ $.b" + ] + else: + with raises(ValueError) as exc_info: + c.structure(unstructured, DataclassModel) + + assert repr(exc_info.value) == "ValueError(\"'A' not lowercase\")" + + instance.b = "a" + assert instance == c.structure(c.unstructure(instance), DataclassModel) + + +def test_multiple_string_validators(c: Converter) -> None: + """Simple string validation works.""" + customize(c, Model, V(f(Model).b).ensure(is_lowercase, is_email)) + + instance = Model(1, "A", ["1"], [1], "", 0, 0, {"a": 1}) + + unstructured = c.unstructure(instance) + + if c.detailed_validation: + with raises(ClassValidationError) as exc_info: + 
c.structure(unstructured, Model) + + assert transform_error(exc_info.value) == [ + "invalid value ('A' not lowercase) @ $.b", + "invalid value ('A' is not a valid email) @ $.b", + ] + else: + with raises(ValueError) as exc_info: + c.structure(unstructured, Model) + + assert repr(exc_info.value) == "ValueError(\"'A' not lowercase\")" + + instance.b = "a@b" + assert instance == c.structure(c.unstructure(instance), Model) + + +def test_multiple_field_validators(c: Converter) -> None: + """Multiple fields are validated.""" + customize( + c, + Model, + V((fs := f(Model)).a).ensure(greater_than(5)), + V(fs.b).ensure(is_lowercase), + ) + + instance = Model(5, "A", ["1"], [1], "", 0, 0, {"a": 1}) + + unstructured = c.unstructure(instance) + + if c.detailed_validation: + with raises(ClassValidationError) as exc_info: + c.structure(unstructured, Model) + + assert transform_error(exc_info.value) == [ + "invalid value (5 not greater than 5) @ $.a", + "invalid value ('A' not lowercase) @ $.b", + ] + else: + with raises(ValueError) as exc_info: + c.structure(unstructured, Model) + + assert repr(exc_info.value) == "ValueError('5 not greater than 5')" + + instance.a = 6 + instance.b = "a" + assert instance == c.structure(c.unstructure(instance), Model) + + +def test_multiple_fields_error(c: Converter): + """Customizing the same field twice is a runtime error.""" + + fs = f(Model) + + with raises(TypeError): + customize( + c, Model, V(fs.a).ensure(greater_than(5)), V(fs.a).ensure(greater_than(5)) + ) + + +def test_different_classes_error(c: Converter): + """Customizing the field of a different class is a runtime error.""" + + @define + class AnotherModel: + a: int + + fs = f(Model) + + with raises(TypeError): + customize(c, AnotherModel, V(fs.a).ensure(greater_than(5))) + + +def test_dataclass_typo(c: Converter): + """Customizing a non-existent field is a runtime error.""" + + @dataclass + class AnotherModel: + a: int + + with raises(TypeError): + customize(c, AnotherModel, V("b").ensure(greater_than(5))) diff --git a/tests/v/test_fluent_typing.yml b/tests/v/test_fluent_typing.yml new file mode 100644 index 00000000..988573e4 --- /dev/null +++ b/tests/v/test_fluent_typing.yml @@ -0,0 +1,88 @@ +- case: empty_customize + main: | + from attrs import define, fields as f + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A) + +- case: empty_customize_dc + main: | + from dataclasses import dataclass + from cattrs import v, Converter + + @dataclass + class A: + a: int + + c = Converter() + + v.customize(c, A) + +- case: empty_customize_unsupported + main: | + from cattrs import v, Converter + + class A: + a: int + + c = Converter() + + v.customize(c, A) # E: Argument 2 to "customize" has incompatible type "type[A]"; expected "type[AttrsInstance] | type[DataclassLike]" [arg-type] + +- case: customize_int + main: | + from attrs import define, fields as f + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V(f(A).a).ensure(v.between(5, 10))) + +- case: customize_int_dc + main: | + from dataclasses import dataclass + from cattrs import v, Converter + + @dataclass + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V("a").ensure(v.between(5, 10))) + +- case: customize_int_no_empty_ensure + main: | + from attrs import define, fields as f + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V(f(A).a).ensure()) # E: Missing positional argument 
"validator" in call to "ensure" of "V" [call-arg] + +- case: customize_int_no_wrong_validator + main: | + from attrs import define, fields as f + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V(f(A).a).ensure(v.len_between(0, 10))) # E: Argument 1 to "ensure" of "V" has incompatible type "Callable[[Sized], None]"; expected "Callable[[int], Any] | Callable[[bool], Callable[[int], Any]]" [arg-type] diff --git a/tests/test_v.py b/tests/v/test_v.py similarity index 65% rename from tests/test_v.py rename to tests/v/test_v.py index 4aa97164..1b9fb67d 100644 --- a/tests/test_v.py +++ b/tests/v/test_v.py @@ -11,20 +11,14 @@ ) from attrs import Factory, define, field -from pytest import fixture, raises +from pytest import raises -from cattrs import Converter, transform_error +from cattrs import Converter, IterableValidationError, transform_error from cattrs._compat import Mapping, TypedDict from cattrs.gen import make_dict_structure_fn from cattrs.v import format_exception -@fixture -def c() -> Converter: - """We need only converters with detailed_validation=True.""" - return Converter() - - def test_attribute_errors(c: Converter) -> None: @define class C: @@ -39,7 +33,9 @@ class C: try: c.structure({"a": 1, "b": "str"}, C) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $.b"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.b" + ] @define class D: @@ -63,7 +59,9 @@ class D: try: c.structure({"c": {"a": "str"}}, D) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $.c.a"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.c.a" + ] @define class E: @@ -79,7 +77,7 @@ class E: else repr(Optional[int]) ) assert transform_error(exc.value) == [ - f"invalid value for type, expected {tn} @ $.a" + f"invalid value for type, expected {tn} (invalid literal for int() with base 10: 'str') @ $.a" ] @@ -134,8 +132,8 @@ def test_sequence_errors(c: Converter) -> None: c.structure(["str", 1, "str"], List[int]) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $[0]", - "invalid value for type, expected int @ $[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[2]", ] try: @@ -149,24 +147,24 @@ def test_sequence_errors(c: Converter) -> None: c.structure(["str", 1, "str"], Tuple[int, ...]) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $[0]", - "invalid value for type, expected int @ $[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[2]", ] try: c.structure(["str", 1, "str"], Sequence[int]) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $[0]", - "invalid value for type, expected int @ $[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[2]", ] try: c.structure(["str", 1, "str"], MutableSequence[int]) except Exception as exc: assert 
transform_error(exc) == [ - "invalid value for type, expected int @ $[0]", - "invalid value for type, expected int @ $[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[2]", ] @define @@ -178,16 +176,16 @@ class C: c.structure({"a": ["str", 1, "str"]}, C) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $.a[0]", - "invalid value for type, expected int @ $.a[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.a[0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.a[2]", ] try: c.structure({"a": [], "b": [[], ["str", 1, "str"]]}, C) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $.b[1][0]", - "invalid value for type, expected int @ $.b[1][2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.b[1][0]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.b[1][2]", ] @@ -195,7 +193,9 @@ def test_mapping_errors(c: Converter) -> None: try: c.structure({"a": 1, "b": "str"}, Dict[str, int]) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $['b']"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $['b']" + ] @define class C: @@ -205,8 +205,8 @@ class C: c.structure({"a": {"a": "str", "b": 1, "c": "str"}}, C) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $.a['a']", - "invalid value for type, expected int @ $.a['c']", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.a['a']", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.a['c']", ] try: @@ -217,19 +217,23 @@ class C: try: c.structure({"a": 1, "b": "str"}, Mapping[str, int]) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $['b']"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $['b']" + ] try: c.structure({"a": 1, "b": "str"}, MutableMapping[str, int]) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $['b']"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $['b']" + ] try: c.structure({"a": 1, 2: "str"}, MutableMapping[int, int]) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $['a']", - "invalid value for type, expected int @ $[2]", + "invalid value for type, expected int (invalid literal for int() with base 10: 'a') @ $['a']", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $[2]", ] @@ -249,7 +253,7 @@ class C: except Exception as exc: assert transform_error(exc, format_exception=my_format) == [ "no key @ $.a", - "invalid value for type, expected int @ $.b", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.b", ] @@ -267,7 +271,7 @@ class C: c.structure({"a": {"a": "str", "b": 1, "c": None}}, C) except Exception as exc: assert transform_error(exc, format_exception=my_format) == [ - "invalid 
value for type, expected int @ $.a['a']", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.a['a']", "Must be correct type @ $.a['c']", ] @@ -295,7 +299,9 @@ class C(TypedDict): try: c.structure({"a": 1, "b": "str"}, C) except Exception as exc: - assert transform_error(exc) == ["invalid value for type, expected int @ $.b"] + assert transform_error(exc) == [ + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.b" + ] class D(TypedDict): c: C @@ -322,7 +328,7 @@ class D(TypedDict): c.structure({"c": {"a": "str"}}, D) except Exception as exc: assert transform_error(exc) == [ - "invalid value for type, expected int @ $.c.a", + "invalid value for type, expected int (invalid literal for int() with base 10: 'str') @ $.c.a", "required field missing @ $.c.b", ] @@ -339,10 +345,27 @@ class E(TypedDict): else repr(Optional[int]) ) assert transform_error(exc.value) == [ - f"invalid value for type, expected {tn} @ $.a" + f"invalid value for type, expected {tn} (invalid literal for int() with base 10: 'str') @ $.a" ] def test_other_errors(): """Errors without explicit support transform predictably.""" assert format_exception(IndexError("Test"), List[int]) == "unknown error (Test)" + + +def test_iterable_val_no_note(): + """`IterableValidationErrors` require subexceptions with notes.""" + with raises(AttributeError): + IterableValidationError("Test", [RuntimeError()], List[str]).group_exceptions() + + r = RuntimeError() + r.__notes__ = ["test"] + with raises(AttributeError): + IterableValidationError("Test", [r], List[str]).group_exceptions() + + +def test_typeerror_formatting(): + """`format_exception` works with non-iteration TypeErrors.""" + exc = TypeError("exception") + assert format_exception(exc, None) == "invalid type (exception)" diff --git a/tests/test_validation.py b/tests/v/test_validation.py similarity index 100% rename from tests/test_validation.py rename to tests/v/test_validation.py diff --git a/tests/v/test_validators.py b/tests/v/test_validators.py new file mode 100644 index 00000000..cac00c21 --- /dev/null +++ b/tests/v/test_validators.py @@ -0,0 +1,229 @@ +from typing import Dict, List, Optional + +from attrs import define +from attrs import fields as f +from pytest import raises + +from cattrs import BaseConverter +from cattrs.errors import ClassValidationError +from cattrs.v import ( + V, + between, + customize, + for_all, + greater_than, + ignoring_none, + is_unique, + len_between, + transform_error, +) + + +@define +class WithInt: + a: int + + +@define +class WithList: + a: List[int] + + +@define +class WithDict: + a: Dict[str, int] + + +@define +class WithOptional: + a: Optional[int] + + +def test_gt(converter: BaseConverter): + """The greater_than validator works.""" + customize(converter, WithInt, V(f(WithInt).a).ensure(greater_than(10))) + + assert converter.structure({"a": 11}, WithInt) == WithInt(11) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": 10}, WithInt) + + assert transform_error(exc_info.value) == [ + "invalid value (10 not greater than 10) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": 10}, WithInt) + + assert repr(exc_info.value) == "ValueError('10 not greater than 10')" + + +def test_between(converter: BaseConverter): + """The between validator works.""" + customize(converter, WithInt, V(f(WithInt).a).ensure(between(10, 20))) + + assert converter.structure({"a": 10}, WithInt) == 
WithInt(10) + assert converter.structure({"a": 19}, WithInt) == WithInt(19) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": 9}, WithInt) + + assert transform_error(exc_info.value) == [ + "invalid value (9 not between 10 and 20) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": 9}, WithInt) + + assert repr(exc_info.value) == "ValueError('9 not between 10 and 20')" + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": 20}, WithInt) + + assert transform_error(exc_info.value) == [ + "invalid value (20 not between 10 and 20) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": 20}, WithInt) + + assert repr(exc_info.value) == "ValueError('20 not between 10 and 20')" + + +def test_len_between(converter: BaseConverter): + """The len_between validator works.""" + customize(converter, WithList, V(f(WithList).a).ensure(len_between(1, 2))) + + assert converter.structure({"a": [1]}, WithList) == WithList([1]) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": []}, WithList) + + assert transform_error(exc_info.value) == [ + "invalid value (length (0) not between 1 and 2) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": []}, WithList) + + assert repr(exc_info.value) == "ValueError('length (0) not between 1 and 2')" + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": [1, 2]}, WithList) + + assert transform_error(exc_info.value) == [ + "invalid value (length (2) not between 1 and 2) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": [1, 2]}, WithList) + + assert repr(exc_info.value) == "ValueError('length (2) not between 1 and 2')" + + +def test_unique(converter: BaseConverter): + """The `is_unique` validator works.""" + + customize(converter, WithList, V(f(WithList).a).ensure(is_unique)) + + assert converter.structure({"a": [1]}, WithList) == WithList([1]) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": [1, 1]}, WithList) + + assert transform_error(exc_info.value) == [ + "invalid value (Collection (2 elem(s)) not unique, only 1 unique elem(s)) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": [1, 1]}, WithList) + + assert ( + repr(exc_info.value) + == "ValueError('Collection (2 elem(s)) not unique, only 1 unique elem(s)')" + ) + + +def test_ignoring_none(converter: BaseConverter): + """`ignoring_none` works.""" + + customize( + converter, + WithOptional, + V(f(WithOptional).a).ensure(ignoring_none(between(0, 5))), + ) + + assert converter.structure({"a": None}, WithOptional) == WithOptional(None) + assert converter.structure({"a": 1}, WithOptional) == WithOptional(1) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + converter.structure({"a": 10}, WithOptional) + + assert transform_error(exc_info.value) == [ + "invalid value (10 not between 0 and 5) @ $.a" + ] + else: + with raises(ValueError) as exc_info: + converter.structure({"a": 10}, WithOptional) + + assert repr(exc_info.value) == "ValueError('10 not between 0 and 5')" + + +def test_for_all_lists(converter: BaseConverter): + """`for_all` works on lists.""" + + hook = customize( + converter, + WithList, + 
V(f(WithList).a).ensure(for_all(greater_than(5), between(5, 10))), + ) + + assert hook({"a": []}, None) == WithList([]) + assert hook({"a": [6, 7, 8]}, None) == WithList([6, 7, 8]) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + hook({"a": [1, 2]}, None) + + assert transform_error(exc_info.value) == [ + "invalid value (1 not greater than 5) @ $.a[0]", + "invalid value (1 not between 5 and 10) @ $.a[0]", + "invalid value (2 not greater than 5) @ $.a[1]", + "invalid value (2 not between 5 and 10) @ $.a[1]", + ] + else: + with raises(ValueError) as exc_info: + hook({"a": [1, 2]}, None) + + assert repr(exc_info.value) == "ValueError('1 not greater than 5')" + + +def test_for_all_dicts(converter: BaseConverter): + """`for_all` works on dicts.""" + + hook = customize( + converter, WithDict, V(f(WithDict).a).ensure(for_all(len_between(0, 2))) + ) + + assert hook({"a": {}}, None) == WithDict({}) + assert hook({"a": {"a": 1, "b": 2}}, None) == WithDict({"a": 1, "b": 2}) + + if converter.detailed_validation: + with raises(ClassValidationError) as exc_info: + hook({"a": {"aaa": 1}}, None) + + assert transform_error(exc_info.value) == [ + "invalid value (length (3) not between 0 and 2) @ $.a[0]" + ] + else: + with raises(ValueError) as exc_info: + hook({"a": {"aaa": 1}}, None) + + assert repr(exc_info.value) == "ValueError('length (3) not between 0 and 2')" diff --git a/tests/v/test_validators_typing.yml b/tests/v/test_validators_typing.yml new file mode 100644 index 00000000..7932aaee --- /dev/null +++ b/tests/v/test_validators_typing.yml @@ -0,0 +1,144 @@ +- case: greater_than + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.greater_than(5))) + +- case: greater_than_error + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: int + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.greater_than("5"))) # E: Argument 1 to "greater_than" has incompatible type "str"; expected "int" [arg-type] + +- case: len_between + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: list[int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.len_between(5, 10))) + +- case: unique + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: list[int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.is_unique)) + +- case: unique_error_not_hashable + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: list[dict] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.is_unique)) # E: Argument 1 to "ensure" of "V" has incompatible type "Callable[[Collection[Hashable]], None]"; expected "Callable[[list[dict[Any, Any]]], bool | None] | Callable[[bool], Callable[[list[dict[Any, Any]]], None]]" [arg-type] + +- case: ignoring_none + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: int | None + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.ignoring_none(v.greater_than(5)))) + +- case: ignoring_none_missing + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: int | None + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.greater_than(5))) # E: Argument 1 
to "ensure" of "V" has incompatible type "Callable[[int], None]"; expected "Callable[[int | None], bool | None] | Callable[[bool], Callable[[int | None], None]]" [arg-type] + v.customize(c, A, v.V(fields(A).a).ensure(v.ignoring_none(v.len_between(0, 5)))) # E: Argument 1 to "ignoring_none" has incompatible type "Callable[[Sized], None]"; expected "Callable[[int], None]" [arg-type] + +- case: for_all + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: list[int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.for_all(v.greater_than(5)))) + +- case: for_all_dict + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: dict[str, int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.for_all(v.len_between(0, 5)))) + +- case: for_all_error + main: | + from attrs import define, fields + from cattrs import v, Converter + from cattrs.v import for_all as fa + + @define + class A: + a: list[int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(fa(fa(v.greater_than(5))))) # E: Argument 1 to "for_all" has incompatible type "Callable[[bool], Callable[[Iterable[int]], None]]"; expected "Callable[[int], bool | None]" [arg-type] + +- case: for_all_dict_error + main: | + from attrs import define, fields + from cattrs import v, Converter + + @define + class A: + a: dict[str, int] + + c = Converter() + + v.customize(c, A, v.V(fields(A).a).ensure(v.for_all(v.greater_than(5)))) # E: Argument 1 to "greater_than" has incompatible type "int"; expected "str" [arg-type] diff --git a/tox.ini b/tox.ini index 58f31167..dcf25ea9 100644 --- a/tox.ini +++ b/tox.ini @@ -37,6 +37,13 @@ package = wheel wheel_build_env = .pkg [testenv:py312] +commands_pre = + pdm sync -G :all,test,tests-mypy + python -c 'import pathlib; pathlib.Path("{env_site_packages_dir}/cov.pth").write_text("import coverage; coverage.process_startup()")' +commands = + coverage run -m pytest tests {posargs:-n auto --mypy-only-local-stub} + +[testenv:pypy3] setenv = PDM_IGNORE_SAVED_PYTHON="1" COVERAGE_PROCESS_START={toxinidir}/pyproject.toml