diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index e40dd64..77cc406 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -11,13 +11,16 @@ jobs: include: - python-version: 3.13 env: - TOXENV: pylint + TOXENV: typing - python-version: 3.13 env: - TOXENV: typing + TOXENV: docs - python-version: 3.13 env: TOXENV: twinecheck + - python-version: 3.13 + env: + TOXENV: pylint steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 2d29f64..8d4631e 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -9,7 +9,55 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "pypy3.10"] + include: + - python-version: "3.9" + env: + TOXENV: min-attrs + - python-version: "3.9" + env: + TOXENV: min-pydantic + - python-version: "3.9" + env: + TOXENV: min-scrapy + - python-version: "3.9" + env: + TOXENV: min-extra + - python-version: "3.9" + env: + TOXENV: py + - python-version: "3.10" + env: + TOXENV: py + - python-version: "pypy3.10" + env: + TOXENV: py + - python-version: "3.11" + env: + TOXENV: py + - python-version: "3.12" + env: + TOXENV: py + - python-version: "3.13" + env: + TOXENV: py + - python-version: "3.13" + env: + TOXENV: attrs + - python-version: "3.13" + env: + TOXENV: pydantic1 + - python-version: "3.13" + env: + TOXENV: pydantic + - python-version: "3.13" + env: + TOXENV: scrapy + - python-version: "3.13" + env: + TOXENV: extra + - python-version: "3.13" + env: + TOXENV: extra-pydantic1 steps: - uses: actions/checkout@v4 @@ -23,31 +71,8 @@ jobs: run: pip install tox - name: Run tests - run: tox -e py - - - name: Upload coverage report - run: | - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - ./codecov - - tests-other: - name: "Test: py39-scrapy22, Ubuntu" - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: 3.9 - - - name: Install tox - run: pip install tox - - - name: Run tests - run: tox -e py39-scrapy22 + env: ${{ matrix.env }} + run: tox - name: Upload coverage report run: | diff --git a/.gitignore b/.gitignore index 945cd46..d54178b 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ .coverage htmlcov/ coverage.xml +/dist/ diff --git a/README.md b/README.md index 41da1eb..02564ba 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ Currently supported types are: * [`dict`](https://docs.python.org/3/library/stdtypes.html#dict) * [`dataclass`](https://docs.python.org/3/library/dataclasses.html)-based classes * [`attrs`](https://www.attrs.org)-based classes -* [`pydantic`](https://pydantic-docs.helpmanual.io/)-based classes (`pydantic>=2` not yet supported) +* [`pydantic`](https://pydantic-docs.helpmanual.io/)-based classes Additionally, interaction with arbitrary types is supported, by implementing a pre-defined interface (see [extending `itemadapter`](#extending-itemadapter)). @@ -24,11 +24,14 @@ a pre-defined interface (see [extending `itemadapter`](#extending-itemadapter)). 
 ## Requirements

-* Python 3.9+, either the CPython implementation (default) or the PyPy implementation
-* [`scrapy`](https://scrapy.org/): optional, needed to interact with `scrapy` items
-* [`attrs`](https://pypi.org/project/attrs/): optional, needed to interact with `attrs`-based items
-* [`pydantic`](https://pypi.org/project/pydantic/): optional, needed to interact with
-  `pydantic`-based items (`pydantic>=2` not yet supported)
+* Python 3.9+, either the CPython implementation (default) or the PyPy
+  implementation
+* [`scrapy`](https://scrapy.org/) 2.2+: optional, needed to interact with
+  `scrapy` items
+* [`attrs`](https://pypi.org/project/attrs/) 18.1.0+: optional, needed to
+  interact with `attrs`-based items
+* [`pydantic`](https://pypi.org/project/pydantic/) 1.8+: optional, needed to
+  interact with `pydantic`-based items

 ---

@@ -40,6 +43,20 @@ a pre-defined interface (see [extending `itemadapter`](#extending-itemadapter)).
 pip install itemadapter
 ```

+For `attrs`, `pydantic` and `scrapy` support, install the corresponding extra
+to ensure that a supported version of the corresponding dependencies is
+installed. For example:
+
+```
+pip install itemadapter[scrapy]
+```
+
+Mind that you can install multiple extras as needed. For example:
+
+```
+pip install itemadapter[attrs,pydantic,scrapy]
+```
+
 ---

 ## License
@@ -306,9 +323,9 @@ mappingproxy({'serializer': , 'limit': 100})
 ...
 >>> adapter = ItemAdapter(InventoryItem(name="foo", value=10))
 >>> adapter.get_field_meta("name")
-mappingproxy({'serializer': })
+mappingproxy({'default': PydanticUndefined, 'json_schema_extra': {'serializer': }, 'repr': True})
 >>> adapter.get_field_meta("value")
-mappingproxy({'serializer': , 'limit': 100})
+mappingproxy({'default': PydanticUndefined, 'json_schema_extra': {'serializer': , 'limit': 100}, 'repr': True})
 >>>
 ```

@@ -361,19 +378,23 @@ so all methods from the `MutableMapping` interface must be implemented as well.

 ### Registering an adapter

-Add your custom adapter class to the `itemadapter.adapter.ItemAdapter.ADAPTER_CLASSES`
-class attribute in order to handle custom item classes:
+Add your custom adapter class to the
+`itemadapter.adapter.ItemAdapter.ADAPTER_CLASSES` class attribute in order to
+handle custom item classes.
**Example** +``` +pip install zyte-common-items +``` ```python >>> from itemadapter.adapter import ItemAdapter ->>> from tests.test_interface import BaseFakeItemAdapter, FakeItemClass +>>> from zyte_common_items import Item, ZyteItemAdapter >>> ->>> ItemAdapter.ADAPTER_CLASSES.appendleft(BaseFakeItemAdapter) ->>> item = FakeItemClass() +>>> ItemAdapter.ADAPTER_CLASSES.appendleft(ZyteItemAdapter) +>>> item = Item() >>> adapter = ItemAdapter(item) >>> adapter - + >>> ``` diff --git a/itemadapter/_imports.py b/itemadapter/_imports.py index b3e9cec..f9c9eb3 100644 --- a/itemadapter/_imports.py +++ b/itemadapter/_imports.py @@ -1,32 +1,49 @@ # attempt the following imports only once, # to be imported from itemadapter's submodules +from typing import Any + _scrapy_item_classes: tuple +scrapy: Any try: import scrapy # pylint: disable=W0611 (unused-import) except ImportError: - scrapy = None # type: ignore[assignment] _scrapy_item_classes = () + scrapy = None else: try: # handle deprecated base classes _base_item_cls = getattr( scrapy.item, "_BaseItem", - scrapy.item.BaseItem, # type: ignore[attr-defined] + scrapy.item.BaseItem, ) except AttributeError: _scrapy_item_classes = (scrapy.item.Item,) else: _scrapy_item_classes = (scrapy.item.Item, _base_item_cls) +attr: Any try: import attr # pylint: disable=W0611 (unused-import) except ImportError: - attr = None # type: ignore[assignment] + attr = None + +pydantic_v1: Any = None +pydantic: Any = None try: import pydantic # pylint: disable=W0611 (unused-import) -except ImportError: - pydantic = None # type: ignore[assignment] +except ImportError: # No pydantic + pass +else: + try: + import pydantic.v1 as pydantic_v1 # pylint: disable=W0611 (unused-import) + except ImportError: # Pydantic <1.10.17 + pydantic_v1 = pydantic + pydantic = None # pylint: disable=C0103 (invalid-name) + else: # Pydantic 1.10.17+ + if not hasattr(pydantic.BaseModel, "model_fields"): # Pydantic <2 + pydantic_v1 = pydantic + pydantic = None # pylint: disable=C0103 (invalid-name) diff --git a/itemadapter/adapter.py b/itemadapter/adapter.py index db0bf1c..5b46665 100644 --- a/itemadapter/adapter.py +++ b/itemadapter/adapter.py @@ -8,8 +8,10 @@ from itemadapter._imports import _scrapy_item_classes, attr from itemadapter.utils import ( _get_pydantic_model_metadata, + _get_pydantic_v1_model_metadata, _is_attrs_class, _is_pydantic_model, + _is_pydantic_v1_model, ) __all__ = [ @@ -167,47 +169,83 @@ class PydanticAdapter(AdapterInterface): @classmethod def is_item_class(cls, item_class: type) -> bool: - return _is_pydantic_model(item_class) + return _is_pydantic_model(item_class) or _is_pydantic_v1_model(item_class) @classmethod def get_field_meta_from_class(cls, item_class: type, field_name: str) -> MappingProxyType: try: - return _get_pydantic_model_metadata(item_class, field_name) + try: + return _get_pydantic_model_metadata(item_class, field_name) + except AttributeError: + return _get_pydantic_v1_model_metadata(item_class, field_name) except KeyError: raise KeyError(f"{item_class.__name__} does not support field: {field_name}") @classmethod def get_field_names_from_class(cls, item_class: type) -> Optional[List[str]]: - return list(item_class.__fields__.keys()) # type: ignore[attr-defined] + try: + return list(item_class.model_fields.keys()) # type: ignore[attr-defined] + except AttributeError: + return list(item_class.__fields__.keys()) # type: ignore[attr-defined] def field_names(self) -> KeysView: - return KeysView(self.item.__fields__) + try: + return 
KeysView(self.item.model_fields) + except AttributeError: + return KeysView(self.item.__fields__) def __getitem__(self, field_name: str) -> Any: - if field_name in self.item.__fields__: - return getattr(self.item, field_name) + try: + self.item.model_fields + except AttributeError: + if field_name in self.item.__fields__: + return getattr(self.item, field_name) + else: + if field_name in self.item.model_fields: + return getattr(self.item, field_name) raise KeyError(field_name) def __setitem__(self, field_name: str, value: Any) -> None: - if field_name in self.item.__fields__: - setattr(self.item, field_name, value) + try: + self.item.model_fields + except AttributeError: + if field_name in self.item.__fields__: + setattr(self.item, field_name, value) + return else: - raise KeyError(f"{self.item.__class__.__name__} does not support field: {field_name}") + if field_name in self.item.model_fields: + setattr(self.item, field_name, value) + return + raise KeyError(f"{self.item.__class__.__name__} does not support field: {field_name}") def __delitem__(self, field_name: str) -> None: - if field_name in self.item.__fields__: - try: - if hasattr(self.item, field_name): - delattr(self.item, field_name) - else: + try: + self.item.model_fields + except AttributeError: + if field_name in self.item.__fields__: + try: + if hasattr(self.item, field_name): + delattr(self.item, field_name) + return raise AttributeError - except AttributeError: - raise KeyError(field_name) + except AttributeError: + raise KeyError(field_name) else: - raise KeyError(f"{self.item.__class__.__name__} does not support field: {field_name}") + if field_name in self.item.model_fields: + try: + if hasattr(self.item, field_name): + delattr(self.item, field_name) + return + raise AttributeError + except AttributeError: + raise KeyError(field_name) + raise KeyError(f"{self.item.__class__.__name__} does not support field: {field_name}") def __iter__(self) -> Iterator: - return iter(attr for attr in self.item.__fields__ if hasattr(self.item, attr)) + try: + return iter(attr for attr in self.item.model_fields if hasattr(self.item, attr)) + except AttributeError: + return iter(attr for attr in self.item.__fields__ if hasattr(self.item, attr)) def __len__(self) -> int: return len(list(iter(self))) diff --git a/itemadapter/utils.py b/itemadapter/utils.py index 54667f5..f5448b6 100644 --- a/itemadapter/utils.py +++ b/itemadapter/utils.py @@ -2,7 +2,7 @@ from types import MappingProxyType from typing import Any -from itemadapter._imports import attr, pydantic +from itemadapter._imports import attr, pydantic, pydantic_v1 __all__ = ["is_item", "get_field_meta_from_class"] @@ -19,7 +19,60 @@ def _is_pydantic_model(obj: Any) -> bool: return issubclass(obj, pydantic.BaseModel) +def _is_pydantic_v1_model(obj: Any) -> bool: + if pydantic_v1 is None: + return False + return issubclass(obj, pydantic_v1.BaseModel) + + def _get_pydantic_model_metadata(item_model: Any, field_name: str) -> MappingProxyType: + metadata = {} + field = item_model.model_fields[field_name] + + for attribute in [ + "default", + "default_factory", + "alias", + "alias_priority", + "validation_alias", + "serialization_alias", + "title", + "field_title_generator", + "description", + "examples", + "exclude", + "discriminator", + "deprecated", + "json_schema_extra", + "frozen", + "validate_default", + "repr", + "init", + "init_var", + "kw_only", + "pattern", + "strict", + "coerce_numbers_to_str", + "gt", + "ge", + "lt", + "le", + "multiple_of", + "allow_inf_nan", + "max_digits", + 
"decimal_places", + "min_length", + "max_length", + "union_mode", + "fail_fast", + ]: + if hasattr(field, attribute) and (value := getattr(field, attribute)) is not None: + metadata[attribute] = value + + return MappingProxyType(metadata) + + +def _get_pydantic_v1_model_metadata(item_model: Any, field_name: str) -> MappingProxyType: metadata = {} field = item_model.__fields__[field_name].field_info diff --git a/pytest.ini b/pytest.ini index 04d3845..51411c3 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,3 @@ [pytest] filterwarnings = - ignore:.*BaseItem.*:scrapy.exceptions.ScrapyDeprecationWarning + ignore:.*BaseItem.* diff --git a/setup.py b/setup.py index 2e19486..698e6b1 100644 --- a/setup.py +++ b/setup.py @@ -20,6 +20,11 @@ }, include_package_data=True, python_requires=">=3.9", + extras_require={ + "attrs": ["attrs>=18.1.0"], + "pydantic": ["pydantic>=1.8"], + "scrapy": ["scrapy>=2.2"], + }, classifiers=[ "Development Status :: 3 - Alpha", "License :: OSI Approved :: BSD License", diff --git a/tests/__init__.py b/tests/__init__.py index be0e906..7450df0 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -5,6 +5,7 @@ from typing import Callable, Generator, Optional from itemadapter import ItemAdapter +from itemadapter._imports import pydantic, pydantic_v1 def make_mock_import(block_name: str) -> Callable: @@ -101,10 +102,56 @@ class AttrsItemEmpty: pass -try: - from pydantic import BaseModel - from pydantic import Field as PydanticField -except ImportError: +if pydantic_v1 is None: + PydanticV1Model = None + PydanticV1SpecialCasesModel = None + PydanticV1ModelNested = None + PydanticV1ModelSubclassed = None + PydanticV1ModelEmpty = None +else: + + class PydanticV1Model(pydantic_v1.BaseModel): + name: Optional[str] = pydantic_v1.Field( + default_factory=lambda: None, + serializer=str, + ) + value: Optional[int] = pydantic_v1.Field( + default_factory=lambda: None, + serializer=int, + ) + + class PydanticV1SpecialCasesModel(pydantic_v1.BaseModel): + special_cases: Optional[int] = pydantic_v1.Field( + default_factory=lambda: None, + alias="special_cases", + allow_mutation=False, + ) + + class Config: + validate_assignment = True + + class PydanticV1ModelNested(pydantic_v1.BaseModel): + nested: PydanticV1Model + adapter: ItemAdapter + dict_: dict + list_: list + set_: set + tuple_: tuple + int_: int + + class Config: + arbitrary_types_allowed = True + + class PydanticV1ModelSubclassed(PydanticV1Model): + subclassed: bool = pydantic_v1.Field( + default_factory=lambda: True, + ) + + class PydanticV1ModelEmpty(pydantic_v1.BaseModel): + pass + + +if pydantic is None: PydanticModel = None PydanticSpecialCasesModel = None PydanticModelNested = None @@ -112,18 +159,18 @@ class AttrsItemEmpty: PydanticModelEmpty = None else: - class PydanticModel(BaseModel): - name: Optional[str] = PydanticField( + class PydanticModel(pydantic.BaseModel): + name: Optional[str] = pydantic.Field( default_factory=lambda: None, serializer=str, ) - value: Optional[int] = PydanticField( + value: Optional[int] = pydantic.Field( default_factory=lambda: None, serializer=int, ) - class PydanticSpecialCasesModel(BaseModel): - special_cases: Optional[int] = PydanticField( + class PydanticSpecialCasesModel(pydantic.BaseModel): + special_cases: Optional[int] = pydantic.Field( default_factory=lambda: None, alias="special_cases", allow_mutation=False, @@ -132,7 +179,7 @@ class PydanticSpecialCasesModel(BaseModel): class Config: validate_assignment = True - class PydanticModelNested(BaseModel): + class 
PydanticModelNested(pydantic.BaseModel): nested: PydanticModel adapter: ItemAdapter dict_: dict @@ -145,11 +192,11 @@ class Config: arbitrary_types_allowed = True class PydanticModelSubclassed(PydanticModel): - subclassed: bool = PydanticField( + subclassed: bool = pydantic.Field( default_factory=lambda: True, ) - class PydanticModelEmpty(BaseModel): + class PydanticModelEmpty(pydantic.BaseModel): pass diff --git a/tests/requirements.txt b/tests/requirements.txt index 4e41abe..f473368 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,5 +1,3 @@ -attrs -pydantic<2 -pytest-cov>=2.8 pytest>=5.4 -scrapy>=2.0 +pytest-cov>=2.8 + diff --git a/tests/test_adapter.py b/tests/test_adapter.py index 3a48111..1b760e1 100644 --- a/tests/test_adapter.py +++ b/tests/test_adapter.py @@ -14,10 +14,10 @@ DataClassItemNested, DataClassItemSubclassed, DataClassWithoutInit, - PydanticModel, - PydanticModelEmpty, - PydanticModelNested, - PydanticModelSubclassed, + PydanticV1Model, + PydanticV1ModelEmpty, + PydanticV1ModelNested, + PydanticV1ModelSubclassed, ScrapySubclassedItem, ScrapySubclassedItemEmpty, ScrapySubclassedItemNested, @@ -77,13 +77,13 @@ def test_repr_attrs_init_false(self): repr(adapter), "" ) - @unittest.skipIf(not PydanticModel, "pydantic module is not available") + @unittest.skipIf(not PydanticV1Model, "pydantic module is not available") def test_repr_pydantic(self): - item = PydanticModel(name="asdf", value=1234) + item = PydanticV1Model(name="asdf", value=1234) adapter = ItemAdapter(item) self.assertEqual( repr(adapter), - "", + "", ) @@ -264,11 +264,11 @@ def test_get_value_keyerror_item_dict(self): adapter["name"] -class PydanticModelTestCase(NonDictTestMixin, unittest.TestCase): - item_class = PydanticModel - item_class_nested = PydanticModelNested - item_class_subclassed = PydanticModelSubclassed - item_class_empty = PydanticModelEmpty +class PydanticV1ModelTestCase(NonDictTestMixin, unittest.TestCase): + item_class = PydanticV1Model + item_class_nested = PydanticV1ModelNested + item_class_subclassed = PydanticV1ModelSubclassed + item_class_empty = PydanticV1ModelEmpty class DataClassItemTestCase(NonDictTestMixin, unittest.TestCase): diff --git a/tests/test_adapter_attrs.py b/tests/test_adapter_attrs.py index e0f6df3..4a8615f 100644 --- a/tests/test_adapter_attrs.py +++ b/tests/test_adapter_attrs.py @@ -8,6 +8,7 @@ AttrsItem, DataClassItem, PydanticModel, + PydanticV1Model, ScrapyItem, ScrapySubclassedItem, clear_itemadapter_imports, @@ -23,10 +24,7 @@ def test_false(self): self.assertFalse(AttrsAdapter.is_item(sum)) self.assertFalse(AttrsAdapter.is_item(1234)) self.assertFalse(AttrsAdapter.is_item(object())) - self.assertFalse(AttrsAdapter.is_item(ScrapyItem())) self.assertFalse(AttrsAdapter.is_item(DataClassItem())) - self.assertFalse(AttrsAdapter.is_item(PydanticModel())) - self.assertFalse(AttrsAdapter.is_item(ScrapySubclassedItem())) self.assertFalse(AttrsAdapter.is_item("a string")) self.assertFalse(AttrsAdapter.is_item(b"some bytes")) self.assertFalse(AttrsAdapter.is_item({"a": "dict"})) @@ -35,6 +33,19 @@ def test_false(self): self.assertFalse(AttrsAdapter.is_item({"a", "set"})) self.assertFalse(AttrsAdapter.is_item(AttrsItem)) + if PydanticModel is not None: + self.assertFalse(AttrsAdapter.is_item(PydanticModel())) + if PydanticV1Model is not None: + self.assertFalse(AttrsAdapter.is_item(PydanticV1Model())) + + try: + import scrapy # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(AttrsAdapter.is_item(ScrapyItem())) + 
self.assertFalse(AttrsAdapter.is_item(ScrapySubclassedItem())) + @unittest.skipIf(not AttrsItem, "attrs module is not available") @mock.patch("builtins.__import__", make_mock_import("attr")) def test_module_import_error(self): diff --git a/tests/test_adapter_dataclasses.py b/tests/test_adapter_dataclasses.py index 78c702f..cac7af6 100644 --- a/tests/test_adapter_dataclasses.py +++ b/tests/test_adapter_dataclasses.py @@ -7,6 +7,7 @@ AttrsItem, DataClassItem, PydanticModel, + PydanticV1Model, ScrapyItem, ScrapySubclassedItem, ) @@ -20,10 +21,6 @@ def test_false(self): self.assertFalse(DataclassAdapter.is_item(sum)) self.assertFalse(DataclassAdapter.is_item(1234)) self.assertFalse(DataclassAdapter.is_item(object())) - self.assertFalse(DataclassAdapter.is_item(ScrapyItem())) - self.assertFalse(DataclassAdapter.is_item(AttrsItem())) - self.assertFalse(DataclassAdapter.is_item(PydanticModel())) - self.assertFalse(DataclassAdapter.is_item(ScrapySubclassedItem())) self.assertFalse(DataclassAdapter.is_item("a string")) self.assertFalse(DataclassAdapter.is_item(b"some bytes")) self.assertFalse(DataclassAdapter.is_item({"a": "dict"})) @@ -32,6 +29,26 @@ def test_false(self): self.assertFalse(DataclassAdapter.is_item({"a", "set"})) self.assertFalse(DataclassAdapter.is_item(DataClassItem)) + try: + import attrs # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(DataclassAdapter.is_item(AttrsItem())) + + if PydanticModel is not None: + self.assertFalse(DataclassAdapter.is_item(PydanticModel())) + if PydanticV1Model is not None: + self.assertFalse(DataclassAdapter.is_item(PydanticV1Model())) + + try: + import scrapy # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(DataclassAdapter.is_item(ScrapyItem())) + self.assertFalse(DataclassAdapter.is_item(ScrapySubclassedItem())) + def test_true(self): from itemadapter.adapter import DataclassAdapter diff --git a/tests/test_adapter_pydantic.py b/tests/test_adapter_pydantic.py index 610b144..47a8124 100644 --- a/tests/test_adapter_pydantic.py +++ b/tests/test_adapter_pydantic.py @@ -24,10 +24,7 @@ def test_false(self): self.assertFalse(PydanticAdapter.is_item(sum)) self.assertFalse(PydanticAdapter.is_item(1234)) self.assertFalse(PydanticAdapter.is_item(object())) - self.assertFalse(PydanticAdapter.is_item(ScrapyItem())) - self.assertFalse(PydanticAdapter.is_item(AttrsItem())) self.assertFalse(PydanticAdapter.is_item(DataClassItem())) - self.assertFalse(PydanticAdapter.is_item(ScrapySubclassedItem())) self.assertFalse(PydanticAdapter.is_item("a string")) self.assertFalse(PydanticAdapter.is_item(b"some bytes")) self.assertFalse(PydanticAdapter.is_item({"a": "dict"})) @@ -36,7 +33,22 @@ def test_false(self): self.assertFalse(PydanticAdapter.is_item({"a", "set"})) self.assertFalse(PydanticAdapter.is_item(PydanticModel)) - @unittest.skipIf(not PydanticModel, "pydantic module is not available") + try: + import attrs # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(PydanticAdapter.is_item(AttrsItem())) + + try: + import scrapy # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(PydanticAdapter.is_item(ScrapyItem())) + self.assertFalse(PydanticAdapter.is_item(ScrapySubclassedItem())) + + @unittest.skipIf(not PydanticModel, "pydantic <2 module is not available") @mock.patch("builtins.__import__", make_mock_import("pydantic")) def test_module_import_error(self): with clear_itemadapter_imports(): @@ -48,6 +60,7 @@ def test_module_import_error(self): @unittest.skipIf(not PydanticModel, "pydantic module 
is not available") @mock.patch("itemadapter.utils.pydantic", None) + @mock.patch("itemadapter.utils.pydantic_v1", None) def test_module_not_available(self): from itemadapter.adapter import PydanticAdapter @@ -57,22 +70,52 @@ def test_module_not_available(self): @unittest.skipIf(not PydanticModel, "pydantic module is not available") def test_true(self): + from pydantic_core import PydanticUndefined + from itemadapter.adapter import PydanticAdapter self.assertTrue(PydanticAdapter.is_item(PydanticModel())) self.assertTrue(PydanticAdapter.is_item(PydanticModel(name="asdf", value=1234))) # field metadata + mapping_proxy_type = get_field_meta_from_class(PydanticModel, "name") self.assertEqual( - get_field_meta_from_class(PydanticModel, "name"), - MappingProxyType({"serializer": str}), + mapping_proxy_type, + MappingProxyType( + { + "default": PydanticUndefined, + "default_factory": mapping_proxy_type["default_factory"], + "json_schema_extra": {"serializer": str}, + "repr": True, + } + ), ) + mapping_proxy_type = get_field_meta_from_class(PydanticModel, "value") self.assertEqual( get_field_meta_from_class(PydanticModel, "value"), - MappingProxyType({"serializer": int}), + MappingProxyType( + { + "default": PydanticUndefined, + "default_factory": mapping_proxy_type["default_factory"], + "json_schema_extra": {"serializer": int}, + "repr": True, + } + ), ) + mapping_proxy_type = get_field_meta_from_class(PydanticSpecialCasesModel, "special_cases") self.assertEqual( - get_field_meta_from_class(PydanticSpecialCasesModel, "special_cases"), - MappingProxyType({"alias": "special_cases", "allow_mutation": False}), + mapping_proxy_type, + MappingProxyType( + { + "default": PydanticUndefined, + "default_factory": mapping_proxy_type["default_factory"], + "alias": "special_cases", + "alias_priority": 2, + "validation_alias": "special_cases", + "serialization_alias": "special_cases", + "frozen": True, + "repr": True, + } + ), ) with self.assertRaises(KeyError, msg="PydanticModel does not support field: non_existent"): get_field_meta_from_class(PydanticModel, "non_existent") diff --git a/tests/test_adapter_pydantic_v1.py b/tests/test_adapter_pydantic_v1.py new file mode 100644 index 0000000..b3f4dbf --- /dev/null +++ b/tests/test_adapter_pydantic_v1.py @@ -0,0 +1,106 @@ +import unittest +import warnings +from types import MappingProxyType +from unittest import mock + +from itemadapter.utils import get_field_meta_from_class +from tests import ( + AttrsItem, + DataClassItem, + PydanticV1Model, + PydanticV1SpecialCasesModel, + ScrapyItem, + ScrapySubclassedItem, + clear_itemadapter_imports, + make_mock_import, +) + + +class PydanticTestCase(unittest.TestCase): + def test_false(self): + from itemadapter.adapter import PydanticAdapter + + self.assertFalse(PydanticAdapter.is_item(int)) + self.assertFalse(PydanticAdapter.is_item(sum)) + self.assertFalse(PydanticAdapter.is_item(1234)) + self.assertFalse(PydanticAdapter.is_item(object())) + self.assertFalse(PydanticAdapter.is_item(DataClassItem())) + self.assertFalse(PydanticAdapter.is_item("a string")) + self.assertFalse(PydanticAdapter.is_item(b"some bytes")) + self.assertFalse(PydanticAdapter.is_item({"a": "dict"})) + self.assertFalse(PydanticAdapter.is_item(["a", "list"])) + self.assertFalse(PydanticAdapter.is_item(("a", "tuple"))) + self.assertFalse(PydanticAdapter.is_item({"a", "set"})) + self.assertFalse(PydanticAdapter.is_item(PydanticV1Model)) + + try: + import attrs # noqa: F401 + except ImportError: + pass + else: + 
self.assertFalse(PydanticAdapter.is_item(AttrsItem())) + + try: + import scrapy # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(PydanticAdapter.is_item(ScrapyItem())) + self.assertFalse(PydanticAdapter.is_item(ScrapySubclassedItem())) + + @unittest.skipIf(not PydanticV1Model, "pydantic <2 module is not available") + @mock.patch("builtins.__import__", make_mock_import("pydantic")) + def test_module_import_error(self): + with clear_itemadapter_imports(): + from itemadapter.adapter import PydanticAdapter + + self.assertFalse(PydanticAdapter.is_item(PydanticV1Model(name="asdf", value=1234))) + with self.assertRaises(TypeError, msg="PydanticV1Model is not a valid item class"): + get_field_meta_from_class(PydanticV1Model, "name") + + @unittest.skipIf(not PydanticV1Model, "pydantic module is not available") + @mock.patch("itemadapter.utils.pydantic", None) + @mock.patch("itemadapter.utils.pydantic_v1", None) + def test_module_not_available(self): + from itemadapter.adapter import PydanticAdapter + + self.assertFalse(PydanticAdapter.is_item(PydanticV1Model(name="asdf", value=1234))) + with self.assertRaises(TypeError, msg="PydanticV1Model is not a valid item class"): + get_field_meta_from_class(PydanticV1Model, "name") + + @unittest.skipIf(not PydanticV1Model, "pydantic module is not available") + def test_true(self): + from itemadapter.adapter import PydanticAdapter + + self.assertTrue(PydanticAdapter.is_item(PydanticV1Model())) + self.assertTrue(PydanticAdapter.is_item(PydanticV1Model(name="asdf", value=1234))) + # field metadata + self.assertEqual( + get_field_meta_from_class(PydanticV1Model, "name"), + MappingProxyType({"serializer": str}), + ) + self.assertEqual( + get_field_meta_from_class(PydanticV1Model, "value"), + MappingProxyType({"serializer": int}), + ) + self.assertEqual( + get_field_meta_from_class(PydanticV1SpecialCasesModel, "special_cases"), + MappingProxyType({"alias": "special_cases", "allow_mutation": False}), + ) + with self.assertRaises( + KeyError, msg="PydanticV1Model does not support field: non_existent" + ): + get_field_meta_from_class(PydanticV1Model, "non_existent") + + def test_deprecated_is_instance(self): + from itemadapter.utils import is_pydantic_instance + + with warnings.catch_warnings(record=True) as caught: + is_pydantic_instance(1) + self.assertEqual(len(caught), 1) + self.assertTrue(issubclass(caught[0].category, DeprecationWarning)) + self.assertEqual( + "itemadapter.utils.is_pydantic_instance is deprecated" + " and it will be removed in a future version", + str(caught[0].message), + ) diff --git a/tests/test_adapter_scrapy.py b/tests/test_adapter_scrapy.py index 36efae7..5465e39 100644 --- a/tests/test_adapter_scrapy.py +++ b/tests/test_adapter_scrapy.py @@ -8,6 +8,7 @@ AttrsItem, DataClassItem, PydanticModel, + PydanticV1Model, ScrapyItem, ScrapySubclassedItem, clear_itemadapter_imports, @@ -23,9 +24,7 @@ def test_false(self): self.assertFalse(ScrapyItemAdapter.is_item(sum)) self.assertFalse(ScrapyItemAdapter.is_item(1234)) self.assertFalse(ScrapyItemAdapter.is_item(object())) - self.assertFalse(ScrapyItemAdapter.is_item(AttrsItem())) self.assertFalse(ScrapyItemAdapter.is_item(DataClassItem())) - self.assertFalse(ScrapyItemAdapter.is_item(PydanticModel())) self.assertFalse(ScrapyItemAdapter.is_item("a string")) self.assertFalse(ScrapyItemAdapter.is_item(b"some bytes")) self.assertFalse(ScrapyItemAdapter.is_item({"a": "dict"})) @@ -34,6 +33,18 @@ def test_false(self): self.assertFalse(ScrapyItemAdapter.is_item({"a", "set"})) 
self.assertFalse(ScrapyItemAdapter.is_item(ScrapySubclassedItem)) + try: + import attrs # noqa: F401 + except ImportError: + pass + else: + self.assertFalse(ScrapyItemAdapter.is_item(AttrsItem())) + + if PydanticModel is not None: + self.assertFalse(ScrapyItemAdapter.is_item(PydanticModel())) + if PydanticV1Model is not None: + self.assertFalse(ScrapyItemAdapter.is_item(PydanticV1Model())) + @unittest.skipIf(not ScrapySubclassedItem, "scrapy module is not available") @mock.patch("builtins.__import__", make_mock_import("scrapy")) def test_module_import_error(self): diff --git a/tests/test_utils.py b/tests/test_utils.py index f9f92a5..767b817 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -6,7 +6,7 @@ from tests import ( AttrsItem, DataClassItem, - PydanticModel, + PydanticV1Model, ScrapyItem, ScrapySubclassedItem, ) @@ -43,7 +43,7 @@ def test_false(self): self.assertFalse(is_item(DataClassItem)) self.assertFalse(is_item(ScrapySubclassedItem)) self.assertFalse(is_item(AttrsItem)) - self.assertFalse(is_item(PydanticModel)) + self.assertFalse(is_item(PydanticV1Model)) self.assertFalse(ItemAdapter.is_item_class(list)) self.assertFalse(ItemAdapter.is_item_class(int)) self.assertFalse(ItemAdapter.is_item_class(tuple)) @@ -69,7 +69,7 @@ def test_true_attrs(self): self.assertTrue(is_item(AttrsItem(name="asdf", value=1234))) self.assertTrue(ItemAdapter.is_item_class(AttrsItem)) - @unittest.skipIf(not PydanticModel, "pydantic module is not available") + @unittest.skipIf(not PydanticV1Model, "pydantic module is not available") def test_true_pydantic(self): - self.assertTrue(is_item(PydanticModel(name="asdf", value=1234))) - self.assertTrue(ItemAdapter.is_item_class(PydanticModel)) + self.assertTrue(is_item(PydanticV1Model(name="asdf", value=1234))) + self.assertTrue(ItemAdapter.is_item_class(PydanticV1Model)) diff --git a/tox.ini b/tox.ini index d196ac6..9676977 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,32 @@ [tox] -envlist = typing,py,py38-scrapy22,pylint,pre-commit,twinecheck +envlist = min-attrs,min-pydantic,min-scrapy,min-extra,py39,py310,py311,py312,py313,attrs,pydantic1,pydantic,scrapy,extra,extra-pydantic1,pre-commit,typing,docs,twinecheck,pylint [testenv] +basepython = + min-attrs,min-pydantic,min-scrapy,min-extra: python3.9 deps = -rtests/requirements.txt - py39-scrapy22: scrapy==2.2 + min-attrs,min-extra: attrs==18.1.0 + min-pydantic,min-extra: pydantic==1.8 + min-scrapy,min-extra: scrapy==2.2 + pydantic1,extra-pydantic1: pydantic<2 +extras = + min-attrs,attrs,min-extra,extra,extra-pydantic1: attrs + min-pydantic,pydantic1,pydantic,min-extra,extra,extra-pydantic1: pydantic + min-scrapy,scrapy,min-extra,extra,extra-pydantic1: scrapy commands = - pytest --verbose --cov=itemadapter --cov-report=term-missing --cov-report=html --cov-report=xml --doctest-glob=README.md {posargs: itemadapter README.md tests} + pytest --verbose --cov=itemadapter --cov-report=term-missing --cov-report=html --cov-report=xml {posargs: itemadapter tests} + +[testenv:docs] +deps = + {[testenv]deps} + zyte-common-items +extras = + attrs + pydantic + scrapy +commands = + pytest --verbose --cov=itemadapter --cov-report=term-missing --cov-report=html --cov-report=xml --doctest-glob=README.md {posargs:README.md} [testenv:typing] basepython = python3
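
The heart of the change is the version detection added in `itemadapter/_imports.py`: after it runs, `pydantic` refers to the Pydantic 2 module (or `None`), while `pydantic_v1` exposes the v1 API (`pydantic.v1` on 1.10.17+/2.x, or the top-level module on older 1.x releases). Shown below as a standalone snippet for reference; the real module also wires up the `scrapy` and `attr` imports.

```python
from typing import Any

pydantic: Any = None
pydantic_v1: Any = None

try:
    import pydantic
except ImportError:  # pydantic is not installed
    pass
else:
    try:
        # pydantic 1.10.17+ and 2.x ship a pydantic.v1 compatibility module
        import pydantic.v1 as pydantic_v1
    except ImportError:  # pydantic <1.10.17: only the v1 API exists
        pydantic_v1 = pydantic
        pydantic = None
    else:
        if not hasattr(pydantic.BaseModel, "model_fields"):  # pydantic 1.10.17+, still v1
            pydantic_v1 = pydantic
            pydantic = None
```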
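
`PydanticAdapter` then handles both APIs with the same fallback throughout: try the Pydantic 2 attribute (`model_fields`) and fall back to the v1 one (`__fields__`) on `AttributeError`. A minimal illustration of that pattern outside the adapter (the helper name is illustrative, not part of the library):

```python
from typing import Any, List


def pydantic_field_names(model: Any) -> List[str]:
    # Prefer the Pydantic 2 API and fall back to the v1 attribute,
    # mirroring the try/except AttributeError dispatch used in PydanticAdapter.
    try:
        fields = model.model_fields  # Pydantic 2 models and model classes
    except AttributeError:
        fields = model.__fields__  # Pydantic 1 / pydantic.v1 models
    return list(fields)
```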
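
With Pydantic 2, field metadata is read from `FieldInfo` attributes, which is why the README output now nests custom keys under `json_schema_extra`. A self-contained approximation of the updated README example, assuming Pydantic 2 is installed (this is not the README's exact model definition):

```python
from pydantic import BaseModel, Field

from itemadapter import ItemAdapter


class InventoryItem(BaseModel):
    # Under Pydantic 2, custom metadata lives in json_schema_extra.
    name: str = Field(json_schema_extra={"serializer": str})
    value: int = Field(json_schema_extra={"serializer": int, "limit": 100})


adapter = ItemAdapter(InventoryItem(name="foo", value=10))
# Expected to include 'default', 'json_schema_extra' and 'repr' entries, e.g.
# mappingproxy({'default': PydanticUndefined,
#               'json_schema_extra': {'serializer': <class 'str'>},
#               'repr': True})
print(adapter.get_field_meta("name"))
```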
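
Models written against the v1 API keep the previous flat metadata layout, including when they are defined through `pydantic.v1` on a Pydantic 2 installation. A sketch of that path, assuming Pydantic 1.10.17+ or 2.x so that `pydantic.v1` is importable:

```python
import pydantic.v1 as pydantic_v1  # available in pydantic 1.10.17+ and 2.x

from itemadapter.utils import get_field_meta_from_class


class LegacyItem(pydantic_v1.BaseModel):
    name: str = pydantic_v1.Field(serializer=str)


# Extra keyword arguments on a v1 Field stay at the top level of the metadata,
# e.g. mappingproxy({'serializer': <class 'str'>})
print(get_field_meta_from_class(LegacyItem, "name"))
```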