chore: switch to new import attrs API (#414)
* chore: simplify _new_type_to_xref in Sphinx <4.4
* ci: run pre-commit pylint hook serial
* fix: run update cron job on Mondays
redeboer authored Feb 19, 2022
1 parent bea8cd9 commit 0dd2d22
Showing 6 changed files with 37 additions and 38 deletions.
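The heart of this commit is the switch from the classic attr.s / attr.ib import style to the modern attrs namespace (frozen, field, attrs.validators, attrs.fields). A minimal sketch of the two styles side by side, using a hypothetical Point class rather than code from this repository (the modern names require attrs >= 21.3.0):

import attr  # classic namespace
from attrs import field, frozen  # modern namespace, attrs >= 21.3.0


@attr.s(frozen=True)  # classic style, as used before this commit
class PointOld:
    x: float = attr.ib(default=0.0)
    y: float = attr.ib(default=0.0)


@frozen  # modern style, as used after this commit
class PointNew:
    x: float = field(default=0.0)
    y: float = field(default=0.0)


# Both produce frozen classes with generated __init__, __repr__ and comparison methods
assert PointOld(1.0, 2.0).x == PointNew(1.0, 2.0).x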
2 changes: 1 addition & 1 deletion .github/workflows/requirements-cron.yml
@@ -2,7 +2,7 @@ name: Requirements (scheduled)

on:
schedule:
- cron: "0 2 */14 * *"
- cron: "0 2 * * 1"
workflow_dispatch:

jobs:
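The five cron fields are minute, hour, day-of-month, month and day-of-week, so the new schedule 0 2 * * 1 fires at 02:00 UTC every Monday, while the old 0 2 */14 * * ran on the 1st, 15th and 29th of each month. A quick way to double-check the new expression locally, assuming the third-party croniter package (not a dependency of this project):

from datetime import datetime

from croniter import croniter  # assumption: pip install croniter

schedule = croniter("0 2 * * 1", datetime(2022, 2, 19))
for _ in range(3):
    next_run = schedule.get_next(datetime)
    print(next_run.strftime("%A %Y-%m-%d %H:%M"))  # three consecutive Mondays at 02:00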
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -178,5 +178,6 @@ repos:
- --rcfile=.pylintrc
- --score=no
language: system
require_serial: true
types:
- python
7 changes: 7 additions & 0 deletions .pylintrc
@@ -19,13 +19,20 @@ ignore-patterns=
[MESSAGES CONTROL]
disable=
duplicate-code, # https://github.com/PyCQA/pylint/issues/214
invalid-unary-operand-type, # conflicts with attrs.field
logging-fstring-interpolation,
missing-class-docstring, # pydocstyle
missing-function-docstring, # pydocstyle
missing-module-docstring, # pydocstyle
no-member, # conflicts with attrs.field
not-an-iterable, # conflicts with attrs.field
not-callable, # conflicts with attrs.field
redefined-builtin, # flake8-builtins
too-few-public-methods, # data containers (attrs) and interface classes
unspecified-encoding, # http://pylint.pycqa.org/en/latest/whatsnew/2.10.html
unsubscriptable-object, # conflicts with attrs.field
unsupported-assignment-operation, # conflicts with attrs.field
unsupported-membership-test, # conflicts with attrs.field
unused-import, # https://www.flake8rules.com/rules/F401
wrong-import-order, # handled by isort

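All of the newly disabled checks marked "conflicts with attrs.field" are false positives: pylint analyses the class body statically, sees only the field(...) sentinels, and cannot infer the types of the instance attributes that attrs generates. A minimal sketch of the kind of code that sets them off (hypothetical Config class, not taken from this repository):

from typing import Dict, List

from attrs import field, frozen


@frozen
class Config:
    names: List[str] = field(factory=list)
    options: Dict[str, int] = field(factory=dict)


cfg = Config(names=["a", "b"], options={"x": 1})

# Perfectly valid at runtime, yet pylint may report not-an-iterable,
# unsubscriptable-object and unsupported-membership-test here:
for name in cfg.names:
    print(cfg.options["x"])
    print(name in cfg.names)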
11 changes: 1 addition & 10 deletions docs/_relink_references.py
@@ -42,13 +42,11 @@ def _new_type_to_xref(
env: BuildEnvironment = None,
suppress_prefix: bool = False,
) -> pending_xref:
"""Convert a type string to a cross reference node."""
reftype, target, title, refspecific = parse_reftarget(
target, suppress_prefix
)
target = __TARGET_SUBSTITUTIONS.get(target, target)
reftype = __REF_TYPE_SUBSTITUTIONS.get(target, reftype)

assert env is not None
return pending_xref(
"",
@@ -68,15 +66,8 @@ def _new_type_to_xref(
suppress_prefix: bool = False,
) -> pending_xref:
# pylint: disable=unused-argument
"""Convert a type string to a cross reference node."""
if target == "None":
reftype = "obj"
else:
reftype = "class"

target = __TARGET_SUBSTITUTIONS.get(target, target)
reftype = __REF_TYPE_SUBSTITUTIONS.get(target, reftype)

reftype = __REF_TYPE_SUBSTITUTIONS.get(target, "class")
assert env is not None
return pending_xref(
"",
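The two _new_type_to_xref variants above coexist because parse_reftarget only appeared in Sphinx 4.4; presumably the module picks one of them based on the installed Sphinx version. A rough sketch of such a guard (an assumption about the surrounding module, which the diff does not show):

import sphinx

if sphinx.version_info >= (4, 4):
    from sphinx.domains.python import parse_reftarget  # available since Sphinx 4.4

    def _new_type_to_xref(target, env=None, suppress_prefix=False):
        ...  # variant that delegates reftype detection to parse_reftarget

else:

    def _new_type_to_xref(target, env=None, suppress_prefix=False):
        ...  # simplified Sphinx <4.4 variant introduced by this commit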
17 changes: 8 additions & 9 deletions src/tensorwaves/function/__init__.py
@@ -3,8 +3,9 @@
import inspect
from typing import Callable, Dict, Iterable, Mapping, Tuple

import attr
import attrs
import numpy as np
from attrs import field, frozen

from tensorwaves.interface import (
DataSample,
@@ -15,14 +16,14 @@


def _all_str(
_: "PositionalArgumentFunction", __: attr.Attribute, value: Iterable[str]
_: "PositionalArgumentFunction", __: attrs.Attribute, value: Iterable[str]
) -> None:
if not all(map(lambda s: isinstance(s, str), value)):
raise TypeError(f"Not all arguments are of type {str.__name__}")


def _all_unique(
_: "PositionalArgumentFunction", __: attr.Attribute, value: Iterable[str]
_: "PositionalArgumentFunction", __: attrs.Attribute, value: Iterable[str]
) -> None:
argument_names = list(value)
if len(set(argument_names)) != len(argument_names):
@@ -38,7 +39,7 @@ def _all_unique(


def _validate_arguments(
instance: "PositionalArgumentFunction", _: attr.Attribute, value: Callable
instance: "PositionalArgumentFunction", _: attrs.Attribute, value: Callable
) -> None:
if not callable(value):
raise TypeError("Function is not callable")
@@ -59,7 +60,7 @@ def _to_tuple(argument_order: Iterable[str]) -> Tuple[str, ...]:
return tuple(argument_order)


@attr.s(frozen=True)
@frozen
class PositionalArgumentFunction(Function):
"""Wrapper around a function with positional arguments.
@@ -70,11 +71,9 @@ class PositionalArgumentFunction(Function):
argument positions in its underlying :attr:`function`.
"""

function: Callable[..., np.ndarray] = attr.ib(
validator=_validate_arguments
)
function: Callable[..., np.ndarray] = field(validator=_validate_arguments)
"""A function with positional arguments only."""
argument_order: Tuple[str, ...] = attr.ib(
argument_order: Tuple[str, ...] = field(
converter=_to_tuple, validator=[_all_str, _all_unique]
)
"""Ordered labels for each positional argument."""
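Note that the validators themselves keep the familiar three-argument signature (instance, attribute, value); only the type hint changes from attr.Attribute to attrs.Attribute. A standalone sketch of a validator plus converter under the modern API (hypothetical Labels class, not from this package):

from typing import Tuple

import attrs
from attrs import field, frozen


def _all_unique(
    _: "Labels", __: attrs.Attribute, value: Tuple[str, ...]
) -> None:
    # attrs calls validators as validator(instance, attribute, value)
    if len(set(value)) != len(value):
        raise ValueError(f"Duplicate labels: {value}")


@frozen
class Labels:
    names: Tuple[str, ...] = field(converter=tuple, validator=_all_unique)


Labels(names=["x", "y"])    # converter turns the list into a tuple, validator passes
# Labels(names=["x", "x"])  # would raise ValueError: Duplicate labels: ('x', 'x')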
37 changes: 19 additions & 18 deletions src/tensorwaves/interface.py
@@ -13,9 +13,10 @@
Union,
)

import attr
import attrs
import numpy as np
from attr.validators import instance_of, optional
from attrs import field, frozen
from attrs.validators import instance_of, optional

if TYPE_CHECKING: # pragma: no cover
from IPython.lib.pretty import PrettyPrinter
@@ -96,29 +97,29 @@ def gradient(
"""Calculate gradient for given parameter mapping."""


_PARAMETER_DICT_VALIDATOR = attr.validators.deep_mapping(
_PARAMETER_DICT_VALIDATOR = attrs.validators.deep_mapping(
key_validator=instance_of(str),
mapping_validator=instance_of(dict),
value_validator=instance_of(ParameterValue.__args__), # type: ignore[attr-defined]
)


@attr.frozen
@frozen
class FitResult: # pylint: disable=too-many-instance-attributes
minimum_valid: bool = attr.ib(validator=instance_of(bool))
execution_time: float = attr.ib(validator=instance_of(float))
function_calls: int = attr.ib(validator=instance_of(int))
estimator_value: float = attr.ib(validator=instance_of(float))
parameter_values: Dict[str, ParameterValue] = attr.ib(
minimum_valid: bool = field(validator=instance_of(bool))
execution_time: float = field(validator=instance_of(float))
function_calls: int = field(validator=instance_of(int))
estimator_value: float = field(validator=instance_of(float))
parameter_values: Dict[str, ParameterValue] = field(
default=None, validator=_PARAMETER_DICT_VALIDATOR
)
parameter_errors: Optional[Dict[str, ParameterValue]] = attr.ib(
parameter_errors: Optional[Dict[str, ParameterValue]] = field(
default=None, validator=optional(_PARAMETER_DICT_VALIDATOR)
)
iterations: Optional[int] = attr.ib(
iterations: Optional[int] = field(
default=None, validator=optional(instance_of(int))
)
specifics: Optional[Any] = attr.ib(default=None)
specifics: Optional[Any] = field(default=None)
"""Any additional info provided by the specific optimizer.
An instance returned by one of the implemented optimizers under the
@@ -133,7 +134,7 @@ class FitResult: # pylint: disable=too-many-instance-attributes

@parameter_errors.validator # pyright: reportOptionalMemberAccess=false
def _check_parameter_errors(
self, _: attr.Attribute, value: Optional[Dict[str, ParameterValue]]
self, _: attrs.Attribute, value: Optional[Dict[str, ParameterValue]]
) -> None:
if value is None:
return
@@ -150,13 +151,13 @@ def _repr_pretty_(self, p: "PrettyPrinter", cycle: bool) -> None:
p.text(f"{class_name}(...)")
else:
with p.group(indent=1, open=f"{class_name}("):
for field in attr.fields(type(self)):
if field.name in {"specifics"}:
for attribute in attrs.fields(type(self)):
if attribute.name in {"specifics"}:
continue
value = getattr(self, field.name)
if value != field.default:
value = getattr(self, attribute.name)
if value != attribute.default:
p.breakable()
p.text(f"{field.name}=")
p.text(f"{attribute.name}=")
if isinstance(value, dict):
with p.group(indent=1, open="{"):
for key, val in value.items():
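The _repr_pretty_ hunk also renames the loop variable from field to attribute, since field would now shadow the name imported from attrs. The attrs.fields() helper it iterates over returns the Attribute descriptors of a class, for example (hypothetical Result class, not from this package):

import attrs
from attrs import field, frozen


@frozen
class Result:
    value: float = field(default=0.0)
    n_calls: int = field(default=0)


for attribute in attrs.fields(Result):
    # each entry is an attrs.Attribute with .name, .default, .type, ...
    print(attribute.name, attribute.default)
# value 0.0
# n_calls 0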
