Refactor enums processing (#1059)
This commit refactors enum processing from the ground up. It fully unifies
how each enum variant schema is resolved and how the variant schemas are
generated. This makes debugging, changing and updating the enum processing
easier in the future, and most of all, enums now behave consistently because
a bunch of duplication was removed and correct abstractions were added
instead.

This commit also unifies the previously separate `SimpleEnum` and `ReprEnum`
into a single enum to further simplify the code. `ComplexEnum` is now known
as `MixedEnum`.

This commit implements the discriminator with support for custom mapping.
The discriminator can only be used on a `#[serde(untagged)]` enum whose
variants each have a single unnamed field holding one schema reference that
implements the `ToSchema` trait. It cannot be used with primitive types or
with inlined schemas.
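
For illustration, a minimal usage sketch (a sketch only: the `Vehicle`, `Car` and `Boat` types and the `vehicle_type` property are made up, and the discriminator is assumed to be set through the container-level `#[schema(...)]` attribute):

```rust
use serde::Serialize;
use utoipa::ToSchema;

#[derive(Serialize, ToSchema)]
struct Car {
    wheels: u8,
}

#[derive(Serialize, ToSchema)]
struct Boat {
    hull: String,
}

// Untagged enum where every variant has exactly one unnamed field that
// references a `ToSchema` type, so a discriminator may be attached.
#[derive(Serialize, ToSchema)]
#[serde(untagged)]
#[schema(discriminator = "vehicle_type")]
enum Vehicle {
    Car(Car),
    Boat(Boat),
}
```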

Removed support for using `#[serde(tag = ...)]` as a discriminator.

Update the docs and add support for previously missing enum variant features
such as `Title`, `Deprecated`, `MinProperties` and `MaxProperties`.
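
A hypothetical sketch of variant-level attributes (the `Payment` type is made up, and the attribute names on variants are assumed to follow utoipa's usual `#[schema(...)]` syntax rather than being taken from this commit's tests):

```rust
use utoipa::ToSchema;

#[derive(ToSchema)]
enum Payment {
    // Variant-level title for the generated variant schema.
    #[schema(title = "CardPayment")]
    Card { number: String },
    // Property-count constraint on an object-like variant.
    #[schema(title = "CashPayment", max_properties = 1)]
    Cash { amount: i64 },
}
```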

### Breaking changes

* `#[serde(tag = ...)]` will no longer be used as a discriminator.
juhaku authored Sep 21, 2024
1 parent 576f6c1 commit e13cfe1
Showing 16 changed files with 1,857 additions and 1,632 deletions.
16 changes: 8 additions & 8 deletions utoipa-gen/src/component.rs
@@ -265,7 +265,7 @@ impl<'t> TypeTree<'t> {
fn convert(path: &'t Path, last_segment: &'t PathSegment) -> TypeTree<'t> {
let generic_type = Self::get_generic_type(last_segment);
let schema_type = SchemaType {
path,
path: Cow::Borrowed(path),
nullable: matches!(generic_type, Some(GenericType::Option)),
};

@@ -500,11 +500,11 @@ trait Rename {
/// * `value` to rename.
/// * `to` Optional rename to value for fields with _`rename`_ property.
/// * `container_rule` which is used to rename containers with _`rename_all`_ property.
fn rename<'r, R: Rename>(
value: &'r str,
to: Option<Cow<'r, str>>,
container_rule: Option<&'r RenameRule>,
) -> Option<Cow<'r, str>> {
fn rename<'s, R: Rename>(
value: &str,
to: Option<Cow<'s, str>>,
container_rule: Option<&RenameRule>,
) -> Option<Cow<'s, str>> {
let rename = to.and_then(|to| if !to.is_empty() { Some(to) } else { None });

rename.or_else(|| {
@@ -841,7 +841,7 @@ impl<'c> ComponentSchema {
let validate = |feature: &Feature| {
let type_path = &**type_tree.path.as_ref().unwrap();
let schema_type = SchemaType {
path: type_path,
path: Cow::Borrowed(type_path),
nullable: nullable
.map(|nullable| nullable.value())
.unwrap_or_default(),
@@ -894,7 +894,7 @@
ValueType::Primitive => {
let type_path = &**type_tree.path.as_ref().unwrap();
let schema_type = SchemaType {
path: type_path,
path: Cow::Borrowed(type_path),
nullable,
};
if schema_type.is_unsigned_integer() {
142 changes: 74 additions & 68 deletions utoipa-gen/src/component/features.rs
@@ -100,6 +100,7 @@ pub enum Feature {
Required(attributes::Required),
ContentEncoding(attributes::ContentEncoding),
ContentMediaType(attributes::ContentMediaType),
Discriminator(attributes::Discriminator),
MultipleOf(validation::MultipleOf),
Maximum(validation::Maximum),
Minimum(validation::Minimum),
@@ -166,73 +167,74 @@ impl Feature {
impl ToTokensDiagnostics for Feature {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) -> Result<(), Diagnostics> {
let feature = match &self {
Feature::Default(default) => quote! { .default(#default) },
Feature::Example(example) => quote! { .example(Some(#example)) },
Feature::Examples(examples) => quote! { .examples(#examples) },
Feature::XmlAttr(xml) => quote! { .xml(Some(#xml)) },
Feature::Format(format) => quote! { .format(Some(#format)) },
Feature::WriteOnly(write_only) => quote! { .write_only(Some(#write_only)) },
Feature::ReadOnly(read_only) => quote! { .read_only(Some(#read_only)) },
Feature::Title(title) => quote! { .title(Some(#title)) },
Feature::Nullable(_nullable) => return Err(Diagnostics::new("Nullable does not support `ToTokens`")),
Feature::Rename(rename) => rename.to_token_stream(),
Feature::Style(style) => quote! { .style(Some(#style)) },
Feature::ParameterIn(parameter_in) => quote! { .parameter_in(#parameter_in) },
Feature::MultipleOf(multiple_of) => quote! { .multiple_of(Some(#multiple_of)) },
Feature::AllowReserved(allow_reserved) => {
quote! { .allow_reserved(Some(#allow_reserved)) }
}
Feature::Explode(explode) => quote! { .explode(Some(#explode)) },
Feature::Maximum(maximum) => quote! { .maximum(Some(#maximum)) },
Feature::Minimum(minimum) => quote! { .minimum(Some(#minimum)) },
Feature::ExclusiveMaximum(exclusive_maximum) => {
quote! { .exclusive_maximum(Some(#exclusive_maximum)) }
}
Feature::ExclusiveMinimum(exclusive_minimum) => {
quote! { .exclusive_minimum(Some(#exclusive_minimum)) }
}
Feature::MaxLength(max_length) => quote! { .max_length(Some(#max_length)) },
Feature::MinLength(min_length) => quote! { .min_length(Some(#min_length)) },
Feature::Pattern(pattern) => quote! { .pattern(Some(#pattern)) },
Feature::MaxItems(max_items) => quote! { .max_items(Some(#max_items)) },
Feature::MinItems(min_items) => quote! { .min_items(Some(#min_items)) },
Feature::MaxProperties(max_properties) => {
quote! { .max_properties(Some(#max_properties)) }
}
Feature::MinProperties(min_properties) => {
quote! { .max_properties(Some(#min_properties)) }
}
Feature::SchemaWith(schema_with) => schema_with.to_token_stream(),
Feature::Description(description) => quote! { .description(Some(#description)) },
Feature::Deprecated(deprecated) => quote! { .deprecated(Some(#deprecated)) },
Feature::AdditionalProperties(additional_properties) => {
quote! { .additional_properties(Some(#additional_properties)) }
}
Feature::ContentEncoding(content_encoding) => quote! { .content_encoding(#content_encoding) },
Feature::ContentMediaType(content_media_type) => quote! { .content_media_type(#content_media_type) },
Feature::RenameAll(_) => {
return Err(Diagnostics::new("RenameAll feature does not support `ToTokens`"))
}
Feature::ValueType(_) => {
return Err(Diagnostics::new("ValueType feature does not support `ToTokens`")
.help("ValueType is supposed to be used with `TypeTree` in same manner as a resolved struct/field type."))
}
Feature::Inline(_) => {
// inline feature is ignored by `ToTokens`
TokenStream::new()
}
Feature::IntoParamsNames(_) => {
return Err(Diagnostics::new("Names feature does not support `ToTokens`")
.help("Names is only used with IntoParams to artificially give names for unnamed struct type `IntoParams`."))
}
Feature::As(_) => {
return Err(Diagnostics::new("As does not support `ToTokens`"))
}
Feature::Required(required) => {
let name = <attributes::Required as FeatureLike>::get_name();
quote! { .#name(#required) }
}
};
Feature::Default(default) => quote! { .default(#default) },
Feature::Example(example) => quote! { .example(Some(#example)) },
Feature::Examples(examples) => quote! { .examples(#examples) },
Feature::XmlAttr(xml) => quote! { .xml(Some(#xml)) },
Feature::Format(format) => quote! { .format(Some(#format)) },
Feature::WriteOnly(write_only) => quote! { .write_only(Some(#write_only)) },
Feature::ReadOnly(read_only) => quote! { .read_only(Some(#read_only)) },
Feature::Title(title) => quote! { .title(Some(#title)) },
Feature::Nullable(_nullable) => return Err(Diagnostics::new("Nullable does not support `ToTokens`")),
Feature::Rename(rename) => rename.to_token_stream(),
Feature::Style(style) => quote! { .style(Some(#style)) },
Feature::ParameterIn(parameter_in) => quote! { .parameter_in(#parameter_in) },
Feature::MultipleOf(multiple_of) => quote! { .multiple_of(Some(#multiple_of)) },
Feature::AllowReserved(allow_reserved) => {
quote! { .allow_reserved(Some(#allow_reserved)) }
}
Feature::Explode(explode) => quote! { .explode(Some(#explode)) },
Feature::Maximum(maximum) => quote! { .maximum(Some(#maximum)) },
Feature::Minimum(minimum) => quote! { .minimum(Some(#minimum)) },
Feature::ExclusiveMaximum(exclusive_maximum) => {
quote! { .exclusive_maximum(Some(#exclusive_maximum)) }
}
Feature::ExclusiveMinimum(exclusive_minimum) => {
quote! { .exclusive_minimum(Some(#exclusive_minimum)) }
}
Feature::MaxLength(max_length) => quote! { .max_length(Some(#max_length)) },
Feature::MinLength(min_length) => quote! { .min_length(Some(#min_length)) },
Feature::Pattern(pattern) => quote! { .pattern(Some(#pattern)) },
Feature::MaxItems(max_items) => quote! { .max_items(Some(#max_items)) },
Feature::MinItems(min_items) => quote! { .min_items(Some(#min_items)) },
Feature::MaxProperties(max_properties) => {
quote! { .max_properties(Some(#max_properties)) }
}
Feature::MinProperties(min_properties) => {
quote! { .max_properties(Some(#min_properties)) }
}
Feature::SchemaWith(schema_with) => schema_with.to_token_stream(),
Feature::Description(description) => quote! { .description(Some(#description)) },
Feature::Deprecated(deprecated) => quote! { .deprecated(Some(#deprecated)) },
Feature::AdditionalProperties(additional_properties) => {
quote! { .additional_properties(Some(#additional_properties)) }
}
Feature::ContentEncoding(content_encoding) => quote! { .content_encoding(#content_encoding) },
Feature::ContentMediaType(content_media_type) => quote! { .content_media_type(#content_media_type) },
Feature::Discriminator(discriminator) => quote! { .discriminator(Some(#discriminator)) },
Feature::RenameAll(_) => {
return Err(Diagnostics::new("RenameAll feature does not support `ToTokens`"))
}
Feature::ValueType(_) => {
return Err(Diagnostics::new("ValueType feature does not support `ToTokens`")
.help("ValueType is supposed to be used with `TypeTree` in same manner as a resolved struct/field type."))
}
Feature::Inline(_) => {
// inline feature is ignored by `ToTokens`
TokenStream::new()
}
Feature::IntoParamsNames(_) => {
return Err(Diagnostics::new("Names feature does not support `ToTokens`")
.help("Names is only used with IntoParams to artificially give names for unnamed struct type `IntoParams`."))
}
Feature::As(_) => {
return Err(Diagnostics::new("As does not support `ToTokens`"))
}
Feature::Required(required) => {
let name = <attributes::Required as FeatureLike>::get_name();
quote! { .#name(#required) }
}
};

tokens.extend(feature);

@@ -291,6 +293,7 @@ impl Display for Feature {
Feature::Required(required) => required.fmt(f),
Feature::ContentEncoding(content_encoding) => content_encoding.fmt(f),
Feature::ContentMediaType(content_media_type) => content_media_type.fmt(f),
Feature::Discriminator(discriminator) => discriminator.fmt(f),
}
}
}
@@ -338,6 +341,7 @@ impl Validatable for Feature {
Feature::Required(required) => required.is_validatable(),
Feature::ContentEncoding(content_encoding) => content_encoding.is_validatable(),
Feature::ContentMediaType(content_media_type) => content_media_type.is_validatable(),
Feature::Discriminator(discriminator) => discriminator.is_validatable(),
}
}
}
@@ -383,6 +387,7 @@ is_validatable! {
attributes::Required,
attributes::ContentEncoding,
attributes::ContentMediaType,
attributes::Discriminator,
validation::MultipleOf = true,
validation::Maximum = true,
validation::Minimum = true,
@@ -607,8 +612,9 @@ impl_feature_into_inner! {
attributes::Description,
attributes::Deprecated,
attributes::As,
attributes::AdditionalProperties,
attributes::Required,
attributes::AdditionalProperties,
attributes::Discriminator,
validation::MultipleOf,
validation::Maximum,
validation::Minimum,
135 changes: 134 additions & 1 deletion utoipa-gen/src/component/features/attributes.rs
@@ -4,10 +4,12 @@ use proc_macro2::{Ident, TokenStream};
use quote::ToTokens;
use syn::parse::ParseStream;
use syn::punctuated::Punctuated;
use syn::{LitStr, Token, TypePath};
use syn::token::Paren;
use syn::{Error, LitStr, Token, TypePath};

use crate::component::serde::RenameRule;
use crate::component::{schema, GenericType, TypeTree};
use crate::parse_utils::LitStrOrExpr;
use crate::path::parameter::{self, ParameterStyle};
use crate::schema_type::SchemaFormat;
use crate::{parse_utils, AnyValue, Array, Diagnostics};
@@ -649,6 +651,12 @@ impl From<Deprecated> for Feature {
}
}

impl From<bool> for Deprecated {
fn from(value: bool) -> Self {
Self(value)
}
}

impl_feature! {
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Clone)]
@@ -804,3 +812,128 @@ impl From<ContentMediaType> for Feature {
Self::ContentMediaType(value)
}
}

// discriminator = ...
// discriminator(property_name = ..., mapping(
// (value = ...),
// (value2 = ...)
// ))
impl_feature! {
#[derive(Clone)]
#[cfg_attr(feature = "debug", derive(Debug))]
pub struct Discriminator(LitStrOrExpr, Punctuated<(LitStrOrExpr, LitStrOrExpr), Token![,]>, Ident);
}

impl Discriminator {
fn new(attribute: Ident) -> Self {
Self(LitStrOrExpr::default(), Punctuated::default(), attribute)
}

pub fn get_attribute(&self) -> &Ident {
&self.2
}
}

impl Parse for Discriminator {
fn parse(input: ParseStream, attribute: Ident) -> syn::Result<Self>
where
Self: std::marker::Sized,
{
let lookahead = input.lookahead1();
if lookahead.peek(Token![=]) {
parse_utils::parse_next_literal_str_or_expr(input)
.map(|property_name| Self(property_name, Punctuated::new(), attribute))
} else if lookahead.peek(Paren) {
let discriminator_stream;
syn::parenthesized!(discriminator_stream in input);

let mut discriminator = Discriminator::new(attribute);

while !discriminator_stream.is_empty() {
let property = discriminator_stream.parse::<Ident>()?;
let name = &*property.to_string();

match name {
"property_name" => {
discriminator.0 =
parse_utils::parse_next_literal_str_or_expr(&discriminator_stream)?
}
"mapping" => {
let mapping_stream;
syn::parenthesized!(mapping_stream in &discriminator_stream);
let mappings: Punctuated<(LitStrOrExpr, LitStrOrExpr), Token![,]> =
Punctuated::parse_terminated_with(&mapping_stream, |input| {
let inner;
syn::parenthesized!(inner in input);

let key = inner.parse::<LitStrOrExpr>()?;
inner.parse::<Token![=]>()?;
let value = inner.parse::<LitStrOrExpr>()?;

Ok((key, value))
})?;
discriminator.1 = mappings;
}
unexpected => {
return Err(Error::new(
property.span(),
&format!(
"unexpected identifier {}, expected any of: property_name, mapping",
unexpected
),
))
}
}

if !discriminator_stream.is_empty() {
discriminator_stream.parse::<Token![,]>()?;
}
}

Ok(discriminator)
} else {
Err(lookahead.error())
}
}
}

impl ToTokens for Discriminator {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Discriminator(property_name, mapping, _) = self;

struct Mapping<'m>(&'m LitStrOrExpr, &'m LitStrOrExpr);

impl ToTokens for Mapping<'_> {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Mapping(property_name, value) = *self;

tokens.extend(quote! {
(#property_name, #value)
})
}
}

let discriminator = if !mapping.is_empty() {
let mapping = mapping
.iter()
.map(|(key, value)| Mapping(key, value))
.collect::<Array<Mapping>>();

quote! {
utoipa::openapi::schema::Discriminator::with_mapping(#property_name, #mapping)
}
} else {
quote! {
utoipa::openapi::schema::Discriminator::new(#property_name)
}
};

discriminator.to_tokens(tokens);
}
}

impl From<Discriminator> for Feature {
fn from(value: Discriminator) -> Self {
Self::Discriminator(value)
}
}
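
Based on the parser and `ToTokens` implementation above, the attribute accepts either the shorthand `discriminator = "..."` or a long form with an explicit `property_name` and a custom `mapping`. A rough sketch of the long form (the `Pet`, `Cat` and `Dog` types and the reference strings in the mapping are illustrative only):

```rust
use serde::Serialize;
use utoipa::ToSchema;

#[derive(Serialize, ToSchema)]
struct Cat {
    meow: String,
}

#[derive(Serialize, ToSchema)]
struct Dog {
    bark: String,
}

// Long form: explicit discriminator property plus a custom mapping from
// discriminator values to schema references.
#[derive(Serialize, ToSchema)]
#[serde(untagged)]
#[schema(discriminator(property_name = "pet_type", mapping(
    ("cat" = "#/components/schemas/Cat"),
    ("dog" = "#/components/schemas/Dog")
)))]
enum Pet {
    Cat(Cat),
    Dog(Dog),
}
```

When a mapping is given, the generated code builds the discriminator with `utoipa::openapi::schema::Discriminator::with_mapping`; otherwise it falls back to `Discriminator::new`, as shown in the `ToTokens` implementation above.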