diff --git a/Cargo.lock b/Cargo.lock index dd8d94cb9..74dd6c31b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,9 +263,9 @@ dependencies = [ [[package]] name = "idna" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a986806a1cc899952ba462bc1f28afbfd5850ab6cb030ccb20dd02cc527a24" +checksum = "bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd" dependencies = [ "icu_normalizer", "icu_properties", @@ -425,13 +425,13 @@ dependencies = [ [[package]] name = "pydantic-core" -version = "2.23.1" +version = "2.23.2" dependencies = [ "ahash", "base64", "enum_dispatch", "hex", - "idna 1.0.1", + "idna 1.0.2", "jiter", "num-bigint", "pyo3", @@ -540,9 +540,9 @@ checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" [[package]] name = "regex" -version = "1.10.5" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", @@ -581,18 +581,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "serde" -version = "1.0.204" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" +checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.204" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" +checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" dependencies = [ "proc-macro2", "quote", @@ -601,9 +601,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.121" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ab380d7d9f22ef3f21ad3e6c1ebe8e4fc7a2000ccba2e4d71fc96f15b2cb609" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "indexmap", "itoa", @@ -773,9 +773,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.9.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" [[package]] name = "version_check" diff --git a/Cargo.toml b/Cargo.toml index 915391fa2..4597d2431 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pydantic-core" -version = "2.23.1" +version = "2.23.2" edition = "2021" license = "MIT" homepage = "https://github.com/pydantic/pydantic-core" @@ -30,22 +30,22 @@ rust-version = "1.75" # TODO it would be very nice to remove the "py-clone" feature as it can panic, # but needs a bit of work to make sure it's not used in the codebase pyo3 = { version = "0.22.2", features = ["generate-import-lib", "num-bigint", "py-clone"] } -regex = "1.10.4" +regex = "1.10.6" strum = { version = "0.26.3", features = ["derive"] } strum_macros = "0.26.4" -serde_json = {version = "1.0.121", features = ["arbitrary_precision", "preserve_order"]} +serde_json = {version = "1.0.128", features = ["arbitrary_precision", 
"preserve_order"]} enum_dispatch = "0.3.13" -serde = { version = "1.0.204", features = ["derive"] } +serde = { version = "1.0.209", features = ["derive"] } speedate = "0.14.4" smallvec = "1.13.2" ahash = "0.8.10" url = "2.5.0" # idna is already required by url, added here to be explicit -idna = "1.0.1" +idna = "1.0.2" base64 = "0.22.1" num-bigint = "0.4.6" python3-dll-a = "0.2.10" -uuid = "1.9.1" +uuid = "1.10.0" jiter = { version = "0.5", features = ["python"] } hex = "0.4.3" diff --git a/python/pydantic_core/_pydantic_core.pyi b/python/pydantic_core/_pydantic_core.pyi index aeec227f8..fb52a5b10 100644 --- a/python/pydantic_core/_pydantic_core.pyi +++ b/python/pydantic_core/_pydantic_core.pyi @@ -70,14 +70,18 @@ class SchemaValidator: `CombinedValidator` which may in turn own more `CombinedValidator`s which make up the full schema validator. """ - def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: - """ - Create a new SchemaValidator. + # note: pyo3 currently supports __new__, but not __init__, though we include __init__ stubs + # and docstrings here (and in the following classes) for documentation purposes + + def __init__(self, schema: CoreSchema, config: CoreConfig | None = None) -> None: + """Initializes the `SchemaValidator`. Arguments: - schema: The [`CoreSchema`][pydantic_core.core_schema.CoreSchema] to use for validation. + schema: The `CoreSchema` to use for validation. config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to configure validation. """ + + def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: ... @property def title(self) -> str: """ @@ -242,14 +246,15 @@ class SchemaSerializer: `CombinedSerializer` which may in turn own more `CombinedSerializer`s which make up the full schema serializer. """ - def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: - """ - Create a new SchemaSerializer. + def __init__(self, schema: CoreSchema, config: CoreConfig | None = None) -> None: + """Initializes the `SchemaSerializer`. Arguments: - schema: The [`CoreSchema`][pydantic_core.core_schema.CoreSchema] to use for serialization. + schema: The `CoreSchema` to use for serialization. config: Optionally a [`CoreConfig`][pydantic_core.core_schema.CoreConfig] to to configure serialization. """ + + def __new__(cls, schema: CoreSchema, config: CoreConfig | None = None) -> Self: ... def to_python( self, value: Any, @@ -471,9 +476,8 @@ class Url(SupportsAllComparisons): by Mozilla. """ - def __new__(cls, url: str) -> Self: - """ - Create a new `Url` instance. + def __init__(self, url: str) -> None: + """Initializes the `Url`. Args: url: String representation of a URL. @@ -484,6 +488,8 @@ class Url(SupportsAllComparisons): Raises: ValidationError: If the URL is invalid. """ + + def __new__(cls, url: str) -> Self: ... @property def scheme(self) -> str: """ @@ -607,9 +613,8 @@ class MultiHostUrl(SupportsAllComparisons): by Mozilla. """ - def __new__(cls, url: str) -> Self: - """ - Create a new `MultiHostUrl` instance. + def __init__(self, url: str) -> None: + """Initializes the `MultiHostUrl`. Args: url: String representation of a URL. @@ -620,6 +625,8 @@ class MultiHostUrl(SupportsAllComparisons): Raises: ValidationError: If the URL is invalid. """ + + def __new__(cls, url: str) -> Self: ... 
     @property
     def scheme(self) -> str:
         """
@@ -822,54 +829,290 @@ class ValidationError(ValueError):
 
 @final
 class PydanticCustomError(ValueError):
+    """A custom exception providing flexible error handling for Pydantic validators.
+
+    You can raise this error in custom validators when you'd like flexibility regarding the error type, message, and context.
+
+    Example:
+        ```py
+        from pydantic_core import PydanticCustomError
+
+        def custom_validator(v) -> int:
+            if v <= 10:
+                raise PydanticCustomError('custom_value_error', 'Value must be greater than {value}', {'value': 10, 'extra_context': 'extra_data'})
+            return v
+        ```
+    """
+
+    def __init__(
+        self, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None
+    ) -> None:
+        """Initializes the `PydanticCustomError`.
+
+        Arguments:
+            error_type: The error type.
+            message_template: The message template.
+            context: The data to inject into the message template.
+        """
+
     def __new__(
         cls, error_type: LiteralString, message_template: LiteralString, context: dict[str, Any] | None = None
     ) -> Self: ...
     @property
-    def context(self) -> dict[str, Any] | None: ...
+    def context(self) -> dict[str, Any] | None:
+        """Values which are required to render the error message, and could hence be useful in passing error data forward."""
+
     @property
-    def type(self) -> str: ...
+    def type(self) -> str:
+        """The error type associated with the error. For consistency with Pydantic, this is typically a snake_case string."""
+
     @property
-    def message_template(self) -> str: ...
-    def message(self) -> str: ...
+    def message_template(self) -> str:
+        """The message template associated with the error. This is a string that can be formatted with context variables in `{curly_braces}`."""
+
+    def message(self) -> str:
+        """The formatted message associated with the error. This presents as the message template with context variables appropriately injected."""
 
 @final
 class PydanticKnownError(ValueError):
+    """A helper class for raising exceptions that mimic Pydantic's built-in exceptions, with more flexibility regarding context.
+
+    Unlike [`PydanticCustomError`][pydantic_core.PydanticCustomError], the `error_type` argument must be a known `ErrorType`.
+
+    Example:
+        ```py
+        from pydantic_core import PydanticKnownError
+
+        def custom_validator(v) -> int:
+            if v <= 10:
+                raise PydanticKnownError(error_type='greater_than', context={'gt': 10})
+            return v
+        ```
+    """
+
+    def __init__(self, error_type: ErrorType, context: dict[str, Any] | None = None) -> None:
+        """Initializes the `PydanticKnownError`.
+
+        Arguments:
+            error_type: The error type.
+            context: The data to inject into the message template.
+        """
+
     def __new__(cls, error_type: ErrorType, context: dict[str, Any] | None = None) -> Self: ...
     @property
-    def context(self) -> dict[str, Any] | None: ...
+    def context(self) -> dict[str, Any] | None:
+        """Values which are required to render the error message, and could hence be useful in passing error data forward."""
+
     @property
-    def type(self) -> ErrorType: ...
+    def type(self) -> ErrorType:
+        """The type of the error."""
+
     @property
-    def message_template(self) -> str: ...
-    def message(self) -> str: ...
+    def message_template(self) -> str:
+        """The message template associated with the provided error type. This is a string that can be formatted with context variables in `{curly_braces}`."""
+
+    def message(self) -> str:
+        """The formatted message associated with the error. This presents as the message template with context variables appropriately injected."""
 
 @final
 class PydanticOmit(Exception):
+    """An exception to signal that a field should be omitted from a generated result.
+
+    This could span from omitting a field from a JSON Schema to omitting a field from a serialized result.
+    Upcoming: more robust support for using PydanticOmit in custom serializers is still in development.
+    Right now, this is primarily used in the JSON Schema generation process.
+
+    Example:
+        ```py
+        from typing import Callable
+
+        from pydantic_core import PydanticOmit
+
+        from pydantic import BaseModel
+        from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
+
+
+        class MyGenerateJsonSchema(GenerateJsonSchema):
+            def handle_invalid_for_json_schema(self, schema, error_info) -> JsonSchemaValue:
+                raise PydanticOmit
+
+
+        class Predicate(BaseModel):
+            name: str = 'no-op'
+            func: Callable = lambda x: x
+
+
+        instance_example = Predicate()
+
+        validation_schema = instance_example.model_json_schema(schema_generator=MyGenerateJsonSchema, mode='validation')
+        print(validation_schema)
+        '''
+        {'properties': {'name': {'default': 'no-op', 'title': 'Name', 'type': 'string'}}, 'title': 'Predicate', 'type': 'object'}
+        '''
+        ```
+
+    For a more in-depth example / explanation, see the [customizing JSON schema](../concepts/json_schema.md#customizing-the-json-schema-generation-process) docs.
+    """
+
     def __new__(cls) -> Self: ...
 
 @final
 class PydanticUseDefault(Exception):
+    """An exception to signal that standard validation either failed or should be skipped, and the default value should be used instead.
+
+    This exception can be raised in custom validation functions to redirect the flow of validation.
+
+    Example:
+        ```py
+        from pydantic_core import PydanticUseDefault
+        from datetime import datetime
+        from pydantic import BaseModel, field_validator
+
+
+        class Event(BaseModel):
+            name: str = 'meeting'
+            time: datetime
+
+            @field_validator('name', mode='plain')
+            def name_must_be_present(cls, v) -> str:
+                if not v or not isinstance(v, str):
+                    raise PydanticUseDefault()
+                return v
+
+
+        event1 = Event(name='party', time=datetime(2024, 1, 1, 12, 0, 0))
+        print(repr(event1))
+        # > Event(name='party', time=datetime.datetime(2024, 1, 1, 12, 0))
+        event2 = Event(time=datetime(2024, 1, 1, 12, 0, 0))
+        print(repr(event2))
+        # > Event(name='meeting', time=datetime.datetime(2024, 1, 1, 12, 0))
+        ```
+
+    For an additional example, see the [validating partial json data](../concepts/json.md#partial-json-parsing) section of the Pydantic documentation.
+    """
+
     def __new__(cls) -> Self: ...
 
 @final
 class PydanticSerializationError(ValueError):
+    """An error raised when an issue occurs during serialization.
+
+    In custom serializers, this error can be used to indicate that serialization has failed.
+    """
+
+    def __init__(self, message: str) -> None:
+        """Initializes the `PydanticSerializationError`.
+
+        Arguments:
+            message: The message associated with the error.
+        """
+
     def __new__(cls, message: str) -> Self: ...
 
 @final
 class PydanticSerializationUnexpectedValue(ValueError):
+    """An error raised when an unexpected value is encountered during serialization.
+
+    This error is often caught and coerced into a warning, as `pydantic-core` generally makes a best attempt
+    at serializing values, in contrast with validation where errors are eagerly raised.
+
+    Example:
+        ```py
+        from pydantic import BaseModel, field_serializer
+        from pydantic_core import PydanticSerializationUnexpectedValue
+
+        class BasicPoint(BaseModel):
+            x: int
+            y: int
+
+            @field_serializer('*')
+            def serialize(self, v):
+                if not isinstance(v, int):
+                    raise PydanticSerializationUnexpectedValue(f'Expected type `int`, got {type(v)} with value {v}')
+                return v
+
+        point = BasicPoint(x=1, y=2)
+        # some sort of mutation
+        point.x = 'a'
+
+        print(point.model_dump())
+        '''
+        UserWarning: Pydantic serializer warnings:
+          PydanticSerializationUnexpectedValue(Expected type `int`, got <class 'str'> with value a)
+          return self.__pydantic_serializer__.to_python(
+        {'x': 'a', 'y': 2}
+        '''
+        ```
+
+    This is often used internally in `pydantic-core` when unexpected types are encountered during serialization,
+    but it can also be used by users in custom serializers, as seen above.
+    """
+
+    def __init__(self, message: str) -> None:
+        """Initializes the `PydanticSerializationUnexpectedValue`.
+
+        Arguments:
+            message: The message associated with the unexpected value.
+        """
+
     def __new__(cls, message: str | None = None) -> Self: ...
 
 @final
 class ArgsKwargs:
+    """A construct used to store arguments and keyword arguments for a function call.
+
+    This data structure is generally used to store information for core schemas associated with functions (like in an arguments schema).
+    This data structure is also currently used for some validation against dataclasses.
+
+    Example:
+        ```py
+        from pydantic.dataclasses import dataclass
+        from pydantic import model_validator
+
+
+        @dataclass
+        class Model:
+            a: int
+            b: int
+
+            @model_validator(mode="before")
+            @classmethod
+            def no_op_validator(cls, values):
+                print(values)
+                return values
+
+        Model(1, b=2)
+        #> ArgsKwargs((1,), {"b": 2})
+
+        Model(1, 2)
+        #> ArgsKwargs((1, 2), {})
+
+        Model(a=1, b=2)
+        #> ArgsKwargs((), {"a": 1, "b": 2})
+        ```
+    """
+
+    def __init__(self, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> None:
+        """Initializes the `ArgsKwargs`.
+
+        Arguments:
+            args: The arguments (inherently ordered) for a function call.
+            kwargs: The keyword arguments for a function call.
+        """
+
     def __new__(cls, args: tuple[Any, ...], kwargs: dict[str, Any] | None = None) -> Self: ...
     @property
-    def args(self) -> tuple[Any, ...]: ...
+    def args(self) -> tuple[Any, ...]:
+        """The arguments (inherently ordered) for a function call."""
+
     @property
-    def kwargs(self) -> dict[str, Any] | None: ...
+    def kwargs(self) -> dict[str, Any] | None:
+        """The keyword arguments for a function call."""
 
 @final
 class PydanticUndefinedType:
+    """A type used as a sentinel for undefined values."""
+
     def __copy__(self) -> Self: ...
     def __deepcopy__(self, memo: Any) -> Self: ...
 
@@ -884,14 +1127,37 @@ def list_all_errors() -> list[ErrorTypeInfo]:
     """
 
 @final
 class TzInfo(datetime.tzinfo):
-    def tzname(self, _dt: datetime.datetime | None) -> str | None: ...
-    def utcoffset(self, _dt: datetime.datetime | None) -> datetime.timedelta: ...
-    def dst(self, _dt: datetime.datetime | None) -> datetime.timedelta: ...
-    def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ...
+    """A `pydantic-core` implementation of the abstract [`datetime.tzinfo`] class."""
+
+    # Docstrings for attributes sourced from the abstract base class, [`datetime.tzinfo`](https://docs.python.org/3/library/datetime.html#datetime.tzinfo).
+
+    def tzname(self, dt: datetime.datetime | None) -> str | None:
+        """Return the time zone name corresponding to the [`datetime`][datetime.datetime] object _dt_, as a string.
+
+        For more info, see [`tzinfo.tzname`][datetime.tzinfo.tzname].
+        """
+
+    def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None:
+        """Return offset of local time from UTC, as a [`timedelta`][datetime.timedelta] object that is positive east of UTC. If local time is west of UTC, this should be negative.
+
+        More info can be found at [`tzinfo.utcoffset`][datetime.tzinfo.utcoffset].
+        """
+
+    def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None:
+        """Return the daylight saving time (DST) adjustment, as a [`timedelta`][datetime.timedelta] object or `None` if DST information isn’t known.
+
+        More info can be found at [`tzinfo.dst`][datetime.tzinfo.dst]."""
+
+    def fromutc(self, dt: datetime.datetime) -> datetime.datetime:
+        """Adjust the date and time data associated with the datetime object _dt_, returning an equivalent datetime in self’s local time.
+
+        More info can be found at [`tzinfo.fromutc`][datetime.tzinfo.fromutc]."""
+
     def __deepcopy__(self, _memo: dict[Any, Any]) -> TzInfo: ...
 
 def validate_core_schema(schema: CoreSchema, *, strict: bool | None = None) -> CoreSchema:
-    """Validate a CoreSchema
+    """Validate a core schema.
+
     This currently uses lax mode for validation (i.e. will coerce strings to dates and such)
     but may use strict mode in the future.
     We may also remove this function altogether, do not rely on it being present if you are
diff --git a/src/input/datetime.rs b/src/input/datetime.rs
index f8ae6929e..577a91014 100644
--- a/src/input/datetime.rs
+++ b/src/input/datetime.rs
@@ -516,15 +516,18 @@ impl TzInfo {
         Self::try_from(seconds.trunc() as i32)
     }
 
-    fn utcoffset<'py>(&self, py: Python<'py>, _dt: &Bound<'_, PyAny>) -> PyResult<Bound<'py, PyDelta>> {
+    #[allow(unused_variables)]
+    fn utcoffset<'py>(&self, py: Python<'py>, dt: &Bound<'_, PyAny>) -> PyResult<Bound<'py, PyDelta>> {
         PyDelta::new_bound(py, 0, self.seconds, 0, true)
     }
 
-    fn tzname(&self, _dt: &Bound<'_, PyAny>) -> String {
+    #[allow(unused_variables)]
+    fn tzname(&self, dt: &Bound<'_, PyAny>) -> String {
         self.__str__()
     }
 
-    fn dst(&self, _dt: &Bound<'_, PyAny>) -> Option<&PyDelta> {
+    #[allow(unused_variables)]
+    fn dst(&self, dt: &Bound<'_, PyAny>) -> Option<&PyDelta> {
         None
     }
 
diff --git a/src/input/shared.rs b/src/input/shared.rs
index 95b9912a5..0c81c1cfe 100644
--- a/src/input/shared.rs
+++ b/src/input/shared.rs
@@ -122,6 +122,22 @@ fn clean_int_str(mut s: &str) -> Option> {
         s = suffix;
     }
 
+    // Remember if the number is negative.
+    // The `strip_leading_zeros` function will not strip leading zeros for negative numbers,
+    // therefore we simply "take away" the unary minus sign temporarily and add it back before
+    // returning. This allows consistent handling of leading zeros for both positive and negative numbers.
+ let mut is_negative = false; + if let Some(suffix) = s.strip_prefix('-') { + // Invalidate "--" and "-+" as an integer prefix by returning None + if suffix.starts_with('-') | suffix.starts_with('+') { + return None; + } + + is_negative = true; + // Continue as usual without the unary minus sign + s = suffix; + } + // strip loading zeros s = strip_leading_zeros(s)?; @@ -136,13 +152,20 @@ fn clean_int_str(mut s: &str) -> Option> { // remove underscores if let Some(str_stripped) = strip_underscores(s) { - Some(str_stripped.into()) - } else { - match len_before == s.len() { - true => None, - false => Some(s.into()), + match is_negative { + true => return Some(("-".to_string() + &str_stripped).into()), + false => return Some(str_stripped.into()), } } + + if len_before == s.len() { + return None; + } + + match is_negative { + true => Some(("-".to_string() + s).into()), + false => Some(s.into()), + } } /// strip leading zeros from a string, we can't simple use `s.trim_start_matches('0')`, because: diff --git a/src/lookup_key.rs b/src/lookup_key.rs index 4cc131df8..7415d0942 100644 --- a/src/lookup_key.rs +++ b/src/lookup_key.rs @@ -191,34 +191,10 @@ impl LookupKey { } } - pub fn py_get_attr<'py, 's>( + pub fn simple_py_get_attr<'py, 's>( &'s self, obj: &Bound<'py, PyAny>, - kwargs: Option<&Bound<'py, PyDict>>, - ) -> ValResult)>> { - match self._py_get_attr(obj, kwargs) { - Ok(v) => Ok(v), - Err(err) => { - let error = py_err_string(obj.py(), err); - Err(ValError::new( - ErrorType::GetAttributeError { error, context: None }, - obj, - )) - } - } - } - - pub fn _py_get_attr<'py, 's>( - &'s self, - obj: &Bound<'py, PyAny>, - kwargs: Option<&Bound<'py, PyDict>>, ) -> PyResult)>> { - if let Some(dict) = kwargs { - if let Ok(Some(item)) = self.py_get_dict_item(dict) { - return Ok(Some(item)); - } - } - match self { Self::Simple { py_key, path, .. } => match py_get_attrs(obj, py_key)? { Some(value) => Ok(Some((path, value))), @@ -260,6 +236,29 @@ impl LookupKey { } } + pub fn py_get_attr<'py, 's>( + &'s self, + obj: &Bound<'py, PyAny>, + kwargs: Option<&Bound<'py, PyDict>>, + ) -> ValResult)>> { + if let Some(dict) = kwargs { + if let Ok(Some(item)) = self.py_get_dict_item(dict) { + return Ok(Some(item)); + } + } + + match self.simple_py_get_attr(obj) { + Ok(v) => Ok(v), + Err(err) => { + let error = py_err_string(obj.py(), err); + Err(ValError::new( + ErrorType::GetAttributeError { error, context: None }, + obj, + )) + } + } + } + pub fn json_get<'a, 'data, 's>( &'s self, dict: &'a JsonObject<'data>, diff --git a/src/serializers/type_serializers/union.rs b/src/serializers/type_serializers/union.rs index 38f8ab45a..46ec5312e 100644 --- a/src/serializers/type_serializers/union.rs +++ b/src/serializers/type_serializers/union.rs @@ -8,7 +8,6 @@ use std::borrow::Cow; use crate::build_tools::py_schema_err; use crate::common::union::{Discriminator, SMALL_UNION_THRESHOLD}; use crate::definitions::DefinitionsBuilder; -use crate::lookup_key::LookupKey; use crate::serializers::type_serializers::py_err_se_err; use crate::tools::{truncate_safe_repr, SchemaDict}; use crate::PydanticSerializationUnexpectedValue; @@ -438,10 +437,10 @@ impl TaggedUnionSerializer { fn get_discriminator_value(&self, value: &Bound<'_, PyAny>, extra: &Extra) -> Option> { let py = value.py(); let discriminator_value = match &self.discriminator { - Discriminator::LookupKey(lookup_key) => match lookup_key { - LookupKey::Simple { py_key, .. 
} => value.getattr(py_key).ok().map(|obj| obj.to_object(py)), - _ => None, - }, + Discriminator::LookupKey(lookup_key) => lookup_key + .simple_py_get_attr(value) + .ok() + .and_then(|opt| opt.map(|(_, bound)| bound.to_object(py))), Discriminator::Function(func) => func.call1(py, (value,)).ok(), }; if discriminator_value.is_none() { diff --git a/tests/requirements-linting.txt b/tests/requirements-linting.txt index b004e99ca..c614101d5 100644 --- a/tests/requirements-linting.txt +++ b/tests/requirements-linting.txt @@ -1,4 +1,4 @@ -griffe==0.48.0 -pyright==1.1.374 -ruff==0.5.5 -mypy==1.11.1 +griffe==1.2.0 +pyright==1.1.378 +ruff==0.6.3 +mypy==1.11.2 diff --git a/tests/requirements.txt b/tests/requirements.txt index 342fbfd2d..5ee5ebfda 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,7 +1,7 @@ backports.zoneinfo==0.2.1;python_version<"3.9" -coverage==7.6.0 -dirty-equals==0.7.1.post0 -hypothesis==6.108.5 +coverage==7.6.1 +dirty-equals==0.8.0 +hypothesis==6.111.2 # pandas doesn't offer prebuilt wheels for all versions and platforms we test in CI e.g. aarch64 musllinux pandas==2.1.3; python_version >= "3.9" and python_version < "3.13" and implementation_name == "cpython" and platform_machine == 'x86_64' pytest==8.3.2 @@ -9,7 +9,7 @@ pytest==8.3.2 pytest-codspeed~=2.2.1; implementation_name == "cpython" and platform_machine == 'x86_64' # pytest-examples currently depends on aiohttp via black; we don't want to build # it on platforms like aarch64 musllinux in CI -pytest-examples==0.0.12; implementation_name == "cpython" and platform_machine == 'x86_64' +pytest-examples==0.0.13; implementation_name == "cpython" and platform_machine == 'x86_64' pytest-speed==0.3.5 pytest-mock==3.14.0 pytest-pretty==1.2.0 diff --git a/tests/serializers/test_union.py b/tests/serializers/test_union.py index d97d52f03..342f12cf4 100644 --- a/tests/serializers/test_union.py +++ b/tests/serializers/test_union.py @@ -711,3 +711,62 @@ def test_custom_serializer() -> None: print(s) assert s.to_python([{'id': 1}, {'id': 2}]) == [1, 2] assert s.to_python({'id': 1}) == 1 + + +def test_tagged_union_with_aliases() -> None: + @dataclasses.dataclass + class ModelA: + field: int + tag: Literal['a'] = 'a' + + @dataclasses.dataclass + class ModelB: + field: int + tag: Literal['b'] = 'b' + + s = SchemaSerializer( + core_schema.tagged_union_schema( + choices={ + 'a': core_schema.dataclass_schema( + ModelA, + core_schema.dataclass_args_schema( + 'ModelA', + [ + core_schema.dataclass_field(name='field', schema=core_schema.int_schema()), + core_schema.dataclass_field( + name='tag', + schema=core_schema.literal_schema(['a']), + validation_alias='TAG', + serialization_alias='TAG', + ), + ], + ), + ['field', 'tag'], + ), + 'b': core_schema.dataclass_schema( + ModelB, + core_schema.dataclass_args_schema( + 'ModelB', + [ + core_schema.dataclass_field(name='field', schema=core_schema.int_schema()), + core_schema.dataclass_field( + name='tag', + schema=core_schema.literal_schema(['b']), + validation_alias='TAG', + serialization_alias='TAG', + ), + ], + ), + ['field', 'tag'], + ), + }, + discriminator=[['tag'], ['TAG']], + ) + ) + + assert 'TaggedUnionSerializer' in repr(s) + + model_a = ModelA(field=1) + model_b = ModelB(field=1) + assert s.to_python(model_a) == {'field': 1, 'TAG': 'a'} + assert s.to_python(model_b) == {'field': 1, 'TAG': 'b'} diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 83c5d416d..244091265 100644 --- a/tests/validators/test_complex.py +++ 
b/tests/validators/test_complex.py @@ -1,6 +1,7 @@ import math import platform import re +import sys import pytest @@ -84,7 +85,7 @@ def test_complex_strict(input_value, expected): @pytest.mark.xfail( - platform.python_implementation() == 'PyPy', + platform.python_implementation() == 'PyPy' and sys.pypy_version_info < (7, 3, 17), reason='PyPy cannot process this string due to a bug, even if this string is considered valid in python', ) def test_valid_complex_string_with_space(): diff --git a/tests/validators/test_int.py b/tests/validators/test_int.py index f93919f17..47944126a 100644 --- a/tests/validators/test_int.py +++ b/tests/validators/test_int.py @@ -52,6 +52,12 @@ ('++4_2', Err('Input should be a valid integer, unable to parse string as an integer')), ('-+1', Err('Input should be a valid integer, unable to parse string as an integer')), ('+-1', Err('Input should be a valid integer, unable to parse string as an integer')), + ('--0001', Err('Input should be a valid integer, unable to parse string as an integer')), + ('-+0001', Err('Input should be a valid integer, unable to parse string as an integer')), + ('-0-001', Err('Input should be a valid integer, unable to parse string as an integer')), + ('-0+001', Err('Input should be a valid integer, unable to parse string as an integer')), + ('-00001', -1), + ('-00042_000', -42000), ('4_2', 42), ('0_42', 42), ('4_2.0', 42),