Diffstat (limited to 'vendor/serde_derive/src/internals')
-rw-r--r--  vendor/serde_derive/src/internals/ast.rs        216
-rw-r--r--  vendor/serde_derive/src/internals/attr.rs      1881
-rw-r--r--  vendor/serde_derive/src/internals/case.rs       200
-rw-r--r--  vendor/serde_derive/src/internals/check.rs      477
-rw-r--r--  vendor/serde_derive/src/internals/ctxt.rs        68
-rw-r--r--  vendor/serde_derive/src/internals/mod.rs         27
-rw-r--r--  vendor/serde_derive/src/internals/receiver.rs   292
-rw-r--r--  vendor/serde_derive/src/internals/respan.rs      16
-rw-r--r--  vendor/serde_derive/src/internals/symbol.rs      71
9 files changed, 3248 insertions, 0 deletions
diff --git a/vendor/serde_derive/src/internals/ast.rs b/vendor/serde_derive/src/internals/ast.rs
new file mode 100644
index 0000000..a28d3ae
--- /dev/null
+++ b/vendor/serde_derive/src/internals/ast.rs
@@ -0,0 +1,216 @@
+//! A Serde AST, parsed from the Syn AST and ready to generate Rust code.
+
+use crate::internals::{attr, check, Ctxt, Derive};
+use syn::punctuated::Punctuated;
+use syn::Token;
+
+/// A source data structure annotated with `#[derive(Serialize)]` and/or `#[derive(Deserialize)]`,
+/// parsed into an internal representation.
+pub struct Container<'a> {
+ /// The struct or enum name (without generics).
+ pub ident: syn::Ident,
+ /// Attributes on the structure, parsed for Serde.
+ pub attrs: attr::Container,
+ /// The contents of the struct or enum.
+ pub data: Data<'a>,
+ /// Any generics on the struct or enum.
+ pub generics: &'a syn::Generics,
+ /// Original input.
+ pub original: &'a syn::DeriveInput,
+}
+
+/// The fields of a struct or enum.
+///
+/// Analogous to `syn::Data`.
+pub enum Data<'a> {
+ Enum(Vec<Variant<'a>>),
+ Struct(Style, Vec<Field<'a>>),
+}
+
+/// A variant of an enum.
+pub struct Variant<'a> {
+ pub ident: syn::Ident,
+ pub attrs: attr::Variant,
+ pub style: Style,
+ pub fields: Vec<Field<'a>>,
+ pub original: &'a syn::Variant,
+}
+
+/// A field of a struct.
+pub struct Field<'a> {
+ pub member: syn::Member,
+ pub attrs: attr::Field,
+ pub ty: &'a syn::Type,
+ pub original: &'a syn::Field,
+}
+
+#[derive(Copy, Clone)]
+pub enum Style {
+ /// Named fields.
+ Struct,
+ /// Many unnamed fields.
+ Tuple,
+ /// One unnamed field.
+ Newtype,
+ /// No fields.
+ Unit,
+}
+
+impl<'a> Container<'a> {
+ /// Convert the raw Syn ast into a parsed container object, collecting errors in `cx`.
+ pub fn from_ast(
+ cx: &Ctxt,
+ item: &'a syn::DeriveInput,
+ derive: Derive,
+ ) -> Option<Container<'a>> {
+ let mut attrs = attr::Container::from_ast(cx, item);
+
+ let mut data = match &item.data {
+ syn::Data::Enum(data) => Data::Enum(enum_from_ast(cx, &data.variants, attrs.default())),
+ syn::Data::Struct(data) => {
+ let (style, fields) = struct_from_ast(cx, &data.fields, None, attrs.default());
+ Data::Struct(style, fields)
+ }
+ syn::Data::Union(_) => {
+ cx.error_spanned_by(item, "Serde does not support derive for unions");
+ return None;
+ }
+ };
+
+ let mut has_flatten = false;
+ match &mut data {
+ Data::Enum(variants) => {
+ for variant in variants {
+ variant.attrs.rename_by_rules(attrs.rename_all_rules());
+ for field in &mut variant.fields {
+ if field.attrs.flatten() {
+ has_flatten = true;
+ }
+ field.attrs.rename_by_rules(
+ variant
+ .attrs
+ .rename_all_rules()
+ .or(attrs.rename_all_fields_rules()),
+ );
+ }
+ }
+ }
+ Data::Struct(_, fields) => {
+ for field in fields {
+ if field.attrs.flatten() {
+ has_flatten = true;
+ }
+ field.attrs.rename_by_rules(attrs.rename_all_rules());
+ }
+ }
+ }
+
+ if has_flatten {
+ attrs.mark_has_flatten();
+ }
+
+ let mut item = Container {
+ ident: item.ident.clone(),
+ attrs,
+ data,
+ generics: &item.generics,
+ original: item,
+ };
+ check::check(cx, &mut item, derive);
+ Some(item)
+ }
+}
+
+impl<'a> Data<'a> {
+ pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a Field<'a>> + 'a> {
+ match self {
+ Data::Enum(variants) => {
+ Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
+ }
+ Data::Struct(_, fields) => Box::new(fields.iter()),
+ }
+ }
+
+ pub fn has_getter(&self) -> bool {
+ self.all_fields().any(|f| f.attrs.getter().is_some())
+ }
+}
+
+fn enum_from_ast<'a>(
+ cx: &Ctxt,
+ variants: &'a Punctuated<syn::Variant, Token![,]>,
+ container_default: &attr::Default,
+) -> Vec<Variant<'a>> {
+ let variants: Vec<Variant> = variants
+ .iter()
+ .map(|variant| {
+ let attrs = attr::Variant::from_ast(cx, variant);
+ let (style, fields) =
+ struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
+ Variant {
+ ident: variant.ident.clone(),
+ attrs,
+ style,
+ fields,
+ original: variant,
+ }
+ })
+ .collect();
+
+ let index_of_last_tagged_variant = variants
+ .iter()
+ .rposition(|variant| !variant.attrs.untagged());
+ if let Some(index_of_last_tagged_variant) = index_of_last_tagged_variant {
+ for variant in &variants[..index_of_last_tagged_variant] {
+ if variant.attrs.untagged() {
+ cx.error_spanned_by(&variant.ident, "all variants with the #[serde(untagged)] attribute must be placed at the end of the enum");
+ }
+ }
+ }
+
+ variants
+}
+
+fn struct_from_ast<'a>(
+ cx: &Ctxt,
+ fields: &'a syn::Fields,
+ attrs: Option<&attr::Variant>,
+ container_default: &attr::Default,
+) -> (Style, Vec<Field<'a>>) {
+ match fields {
+ syn::Fields::Named(fields) => (
+ Style::Struct,
+ fields_from_ast(cx, &fields.named, attrs, container_default),
+ ),
+ syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => (
+ Style::Newtype,
+ fields_from_ast(cx, &fields.unnamed, attrs, container_default),
+ ),
+ syn::Fields::Unnamed(fields) => (
+ Style::Tuple,
+ fields_from_ast(cx, &fields.unnamed, attrs, container_default),
+ ),
+ syn::Fields::Unit => (Style::Unit, Vec::new()),
+ }
+}
+
+fn fields_from_ast<'a>(
+ cx: &Ctxt,
+ fields: &'a Punctuated<syn::Field, Token![,]>,
+ attrs: Option<&attr::Variant>,
+ container_default: &attr::Default,
+) -> Vec<Field<'a>> {
+ fields
+ .iter()
+ .enumerate()
+ .map(|(i, field)| Field {
+ member: match &field.ident {
+ Some(ident) => syn::Member::Named(ident.clone()),
+ None => syn::Member::Unnamed(i.into()),
+ },
+ attrs: attr::Field::from_ast(cx, i, field, attrs, container_default),
+ ty: &field.ty,
+ original: field,
+ })
+ .collect()
+}
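
A minimal sketch of how the AST layer above is typically driven, assuming the `Ctxt::new`/`Ctxt::check` and `Derive` items from the sibling ctxt.rs and mod.rs files listed in the diffstat; this is an illustration only, not part of the vendored file:

// Sketch only: exercises Container::from_ast and Data::all_fields from ast.rs.
// Ctxt::new, Ctxt::check, and Derive::Deserialize are assumed from ctxt.rs/mod.rs.
use crate::internals::ast::Container;
use crate::internals::{Ctxt, Derive};

fn inspect(input: &syn::DeriveInput) -> syn::Result<()> {
    // Errors are accumulated in the context instead of being returned eagerly.
    let cx = Ctxt::new();
    if let Some(cont) = Container::from_ast(&cx, input, Derive::Deserialize) {
        // Data::all_fields walks plain struct fields and the fields of every
        // enum variant through a single iterator.
        let flattened = cont.data.all_fields().filter(|f| f.attrs.flatten()).count();
        let _ = flattened;
    }
    // Surface every collected error at once (assumed to return syn::Result<()>).
    cx.check()
}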
diff --git a/vendor/serde_derive/src/internals/attr.rs b/vendor/serde_derive/src/internals/attr.rs
new file mode 100644
index 0000000..bb9de32
--- /dev/null
+++ b/vendor/serde_derive/src/internals/attr.rs
@@ -0,0 +1,1881 @@
+use crate::internals::symbol::*;
+use crate::internals::{ungroup, Ctxt};
+use proc_macro2::{Spacing, Span, TokenStream, TokenTree};
+use quote::ToTokens;
+use std::borrow::Cow;
+use std::collections::BTreeSet;
+use std::iter::FromIterator;
+use syn::meta::ParseNestedMeta;
+use syn::parse::ParseStream;
+use syn::punctuated::Punctuated;
+use syn::{parse_quote, token, Ident, Lifetime, Token};
+
+// This module handles parsing of `#[serde(...)]` attributes. The entrypoints
+// are `attr::Container::from_ast`, `attr::Variant::from_ast`, and
+// `attr::Field::from_ast`. Each returns an instance of the corresponding
+// struct. Note that none of them return a Result. Unrecognized, malformed, or
+// duplicated attributes result in a span_err but otherwise are ignored. The
+// user will see errors simultaneously for all bad attributes in the crate
+// rather than just the first.
+
+pub use crate::internals::case::RenameRule;
+
+struct Attr<'c, T> {
+ cx: &'c Ctxt,
+ name: Symbol,
+ tokens: TokenStream,
+ value: Option<T>,
+}
+
+impl<'c, T> Attr<'c, T> {
+ fn none(cx: &'c Ctxt, name: Symbol) -> Self {
+ Attr {
+ cx,
+ name,
+ tokens: TokenStream::new(),
+ value: None,
+ }
+ }
+
+ fn set<A: ToTokens>(&mut self, obj: A, value: T) {
+ let tokens = obj.into_token_stream();
+
+ if self.value.is_some() {
+ let msg = format!("duplicate serde attribute `{}`", self.name);
+ self.cx.error_spanned_by(tokens, msg);
+ } else {
+ self.tokens = tokens;
+ self.value = Some(value);
+ }
+ }
+
+ fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
+ if let Some(value) = value {
+ self.set(obj, value);
+ }
+ }
+
+ fn set_if_none(&mut self, value: T) {
+ if self.value.is_none() {
+ self.value = Some(value);
+ }
+ }
+
+ fn get(self) -> Option<T> {
+ self.value
+ }
+
+ fn get_with_tokens(self) -> Option<(TokenStream, T)> {
+ match self.value {
+ Some(v) => Some((self.tokens, v)),
+ None => None,
+ }
+ }
+}
+
+struct BoolAttr<'c>(Attr<'c, ()>);
+
+impl<'c> BoolAttr<'c> {
+ fn none(cx: &'c Ctxt, name: Symbol) -> Self {
+ BoolAttr(Attr::none(cx, name))
+ }
+
+ fn set_true<A: ToTokens>(&mut self, obj: A) {
+ self.0.set(obj, ());
+ }
+
+ fn get(&self) -> bool {
+ self.0.value.is_some()
+ }
+}
+
+struct VecAttr<'c, T> {
+ cx: &'c Ctxt,
+ name: Symbol,
+ first_dup_tokens: TokenStream,
+ values: Vec<T>,
+}
+
+impl<'c, T> VecAttr<'c, T> {
+ fn none(cx: &'c Ctxt, name: Symbol) -> Self {
+ VecAttr {
+ cx,
+ name,
+ first_dup_tokens: TokenStream::new(),
+ values: Vec::new(),
+ }
+ }
+
+ fn insert<A: ToTokens>(&mut self, obj: A, value: T) {
+ if self.values.len() == 1 {
+ self.first_dup_tokens = obj.into_token_stream();
+ }
+ self.values.push(value);
+ }
+
+ fn at_most_one(mut self) -> Option<T> {
+ if self.values.len() > 1 {
+ let dup_token = self.first_dup_tokens;
+ let msg = format!("duplicate serde attribute `{}`", self.name);
+ self.cx.error_spanned_by(dup_token, msg);
+ None
+ } else {
+ self.values.pop()
+ }
+ }
+
+ fn get(self) -> Vec<T> {
+ self.values
+ }
+}
+
+pub struct Name {
+ serialize: String,
+ serialize_renamed: bool,
+ deserialize: String,
+ deserialize_renamed: bool,
+ deserialize_aliases: BTreeSet<String>,
+}
+
+fn unraw(ident: &Ident) -> String {
+ ident.to_string().trim_start_matches("r#").to_owned()
+}
+
+impl Name {
+ fn from_attrs(
+ source_name: String,
+ ser_name: Attr<String>,
+ de_name: Attr<String>,
+ de_aliases: Option<VecAttr<String>>,
+ ) -> Name {
+ let mut alias_set = BTreeSet::new();
+ if let Some(de_aliases) = de_aliases {
+ for alias_name in de_aliases.get() {
+ alias_set.insert(alias_name);
+ }
+ }
+
+ let ser_name = ser_name.get();
+ let ser_renamed = ser_name.is_some();
+ let de_name = de_name.get();
+ let de_renamed = de_name.is_some();
+ Name {
+ serialize: ser_name.unwrap_or_else(|| source_name.clone()),
+ serialize_renamed: ser_renamed,
+ deserialize: de_name.unwrap_or(source_name),
+ deserialize_renamed: de_renamed,
+ deserialize_aliases: alias_set,
+ }
+ }
+
+ /// Return the container name for the container when serializing.
+ pub fn serialize_name(&self) -> &str {
+ &self.serialize
+ }
+
+ /// Return the container name for the container when deserializing.
+ pub fn deserialize_name(&self) -> &str {
+ &self.deserialize
+ }
+
+ fn deserialize_aliases(&self) -> &BTreeSet<String> {
+ &self.deserialize_aliases
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct RenameAllRules {
+ serialize: RenameRule,
+ deserialize: RenameRule,
+}
+
+impl RenameAllRules {
+ /// Returns a new `RenameAllRules` with the individual rules of `self` and
+ /// `other_rules` joined by `RenameRule::or`.
+ pub fn or(self, other_rules: Self) -> Self {
+ Self {
+ serialize: self.serialize.or(other_rules.serialize),
+ deserialize: self.deserialize.or(other_rules.deserialize),
+ }
+ }
+}
+
+/// Represents struct or enum attribute information.
+pub struct Container {
+ name: Name,
+ transparent: bool,
+ deny_unknown_fields: bool,
+ default: Default,
+ rename_all_rules: RenameAllRules,
+ rename_all_fields_rules: RenameAllRules,
+ ser_bound: Option<Vec<syn::WherePredicate>>,
+ de_bound: Option<Vec<syn::WherePredicate>>,
+ tag: TagType,
+ type_from: Option<syn::Type>,
+ type_try_from: Option<syn::Type>,
+ type_into: Option<syn::Type>,
+ remote: Option<syn::Path>,
+ identifier: Identifier,
+ has_flatten: bool,
+ serde_path: Option<syn::Path>,
+ is_packed: bool,
+ /// Error message generated when the type can't be deserialized
+ expecting: Option<String>,
+ non_exhaustive: bool,
+}
+
+/// Styles of representing an enum.
+pub enum TagType {
+ /// The default.
+ ///
+ /// ```json
+ /// {"variant1": {"key1": "value1", "key2": "value2"}}
+ /// ```
+ External,
+
+ /// `#[serde(tag = "type")]`
+ ///
+ /// ```json
+ /// {"type": "variant1", "key1": "value1", "key2": "value2"}
+ /// ```
+ Internal { tag: String },
+
+ /// `#[serde(tag = "t", content = "c")]`
+ ///
+ /// ```json
+ /// {"t": "variant1", "c": {"key1": "value1", "key2": "value2"}}
+ /// ```
+ Adjacent { tag: String, content: String },
+
+ /// `#[serde(untagged)]`
+ ///
+ /// ```json
+ /// {"key1": "value1", "key2": "value2"}
+ /// ```
+ None,
+}
+
+/// Whether this enum represents the fields of a struct or the variants of an
+/// enum.
+#[derive(Copy, Clone)]
+pub enum Identifier {
+ /// It does not.
+ No,
+
+ /// This enum represents the fields of a struct. All of the variants must be
+ /// unit variants, except possibly one which is annotated with
+ /// `#[serde(other)]` and is a newtype variant.
+ Field,
+
+ /// This enum represents the variants of an enum. All of the variants must
+ /// be unit variants.
+ Variant,
+}
+
+impl Identifier {
+ #[cfg(feature = "deserialize_in_place")]
+ pub fn is_some(self) -> bool {
+ match self {
+ Identifier::No => false,
+ Identifier::Field | Identifier::Variant => true,
+ }
+ }
+}
+
+impl Container {
+ /// Extract out the `#[serde(...)]` attributes from an item.
+ pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self {
+ let mut ser_name = Attr::none(cx, RENAME);
+ let mut de_name = Attr::none(cx, RENAME);
+ let mut transparent = BoolAttr::none(cx, TRANSPARENT);
+ let mut deny_unknown_fields = BoolAttr::none(cx, DENY_UNKNOWN_FIELDS);
+ let mut default = Attr::none(cx, DEFAULT);
+ let mut rename_all_ser_rule = Attr::none(cx, RENAME_ALL);
+ let mut rename_all_de_rule = Attr::none(cx, RENAME_ALL);
+ let mut rename_all_fields_ser_rule = Attr::none(cx, RENAME_ALL_FIELDS);
+ let mut rename_all_fields_de_rule = Attr::none(cx, RENAME_ALL_FIELDS);
+ let mut ser_bound = Attr::none(cx, BOUND);
+ let mut de_bound = Attr::none(cx, BOUND);
+ let mut untagged = BoolAttr::none(cx, UNTAGGED);
+ let mut internal_tag = Attr::none(cx, TAG);
+ let mut content = Attr::none(cx, CONTENT);
+ let mut type_from = Attr::none(cx, FROM);
+ let mut type_try_from = Attr::none(cx, TRY_FROM);
+ let mut type_into = Attr::none(cx, INTO);
+ let mut remote = Attr::none(cx, REMOTE);
+ let mut field_identifier = BoolAttr::none(cx, FIELD_IDENTIFIER);
+ let mut variant_identifier = BoolAttr::none(cx, VARIANT_IDENTIFIER);
+ let mut serde_path = Attr::none(cx, CRATE);
+ let mut expecting = Attr::none(cx, EXPECTING);
+ let mut non_exhaustive = false;
+
+ for attr in &item.attrs {
+ if attr.path() != SERDE {
+ non_exhaustive |=
+ matches!(&attr.meta, syn::Meta::Path(path) if path == NON_EXHAUSTIVE);
+ continue;
+ }
+
+ if let syn::Meta::List(meta) = &attr.meta {
+ if meta.tokens.is_empty() {
+ continue;
+ }
+ }
+
+ if let Err(err) = attr.parse_nested_meta(|meta| {
+ if meta.path == RENAME {
+ // #[serde(rename = "foo")]
+ // #[serde(rename(serialize = "foo", deserialize = "bar"))]
+ let (ser, de) = get_renames(cx, RENAME, &meta)?;
+ ser_name.set_opt(&meta.path, ser.as_ref().map(syn::LitStr::value));
+ de_name.set_opt(&meta.path, de.as_ref().map(syn::LitStr::value));
+ } else if meta.path == RENAME_ALL {
+ // #[serde(rename_all = "foo")]
+ // #[serde(rename_all(serialize = "foo", deserialize = "bar"))]
+ let one_name = meta.input.peek(Token![=]);
+ let (ser, de) = get_renames(cx, RENAME_ALL, &meta)?;
+ if let Some(ser) = ser {
+ match RenameRule::from_str(&ser.value()) {
+ Ok(rename_rule) => rename_all_ser_rule.set(&meta.path, rename_rule),
+ Err(err) => cx.error_spanned_by(ser, err),
+ }
+ }
+ if let Some(de) = de {
+ match RenameRule::from_str(&de.value()) {
+ Ok(rename_rule) => rename_all_de_rule.set(&meta.path, rename_rule),
+ Err(err) => {
+ if !one_name {
+ cx.error_spanned_by(de, err);
+ }
+ }
+ }
+ }
+ } else if meta.path == RENAME_ALL_FIELDS {
+ // #[serde(rename_all_fields = "foo")]
+ // #[serde(rename_all_fields(serialize = "foo", deserialize = "bar"))]
+ let one_name = meta.input.peek(Token![=]);
+ let (ser, de) = get_renames(cx, RENAME_ALL_FIELDS, &meta)?;
+
+ match item.data {
+ syn::Data::Enum(_) => {
+ if let Some(ser) = ser {
+ match RenameRule::from_str(&ser.value()) {
+ Ok(rename_rule) => {
+ rename_all_fields_ser_rule.set(&meta.path, rename_rule);
+ }
+ Err(err) => cx.error_spanned_by(ser, err),
+ }
+ }
+ if let Some(de) = de {
+ match RenameRule::from_str(&de.value()) {
+ Ok(rename_rule) => {
+ rename_all_fields_de_rule.set(&meta.path, rename_rule);
+ }
+ Err(err) => {
+ if !one_name {
+ cx.error_spanned_by(de, err);
+ }
+ }
+ }
+ }
+ }
+ syn::Data::Struct(_) => {
+ let msg = "#[serde(rename_all_fields)] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ syn::Data::Union(_) => {
+ let msg = "#[serde(rename_all_fields)] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ } else if meta.path == TRANSPARENT {
+ // #[serde(transparent)]
+ transparent.set_true(meta.path);
+ } else if meta.path == DENY_UNKNOWN_FIELDS {
+ // #[serde(deny_unknown_fields)]
+ deny_unknown_fields.set_true(meta.path);
+ } else if meta.path == DEFAULT {
+ if meta.input.peek(Token![=]) {
+ // #[serde(default = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, DEFAULT, &meta)? {
+ match &item.data {
+ syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields {
+ syn::Fields::Named(_) | syn::Fields::Unnamed(_) => {
+ default.set(&meta.path, Default::Path(path));
+ }
+ syn::Fields::Unit => {
+ let msg = "#[serde(default = \"...\")] can only be used on structs that have fields";
+ cx.syn_error(meta.error(msg));
+ }
+ },
+ syn::Data::Enum(_) => {
+ let msg = "#[serde(default = \"...\")] can only be used on structs";
+ cx.syn_error(meta.error(msg));
+ }
+ syn::Data::Union(_) => {
+ let msg = "#[serde(default = \"...\")] can only be used on structs";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ }
+ } else {
+ // #[serde(default)]
+ match &item.data {
+ syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields {
+ syn::Fields::Named(_) | syn::Fields::Unnamed(_) => {
+ default.set(meta.path, Default::Default);
+ }
+ syn::Fields::Unit => {
+ let msg = "#[serde(default)] can only be used on structs that have fields";
+ cx.error_spanned_by(fields, msg);
+ }
+ },
+ syn::Data::Enum(_) => {
+ let msg = "#[serde(default)] can only be used on structs";
+ cx.syn_error(meta.error(msg));
+ }
+ syn::Data::Union(_) => {
+ let msg = "#[serde(default)] can only be used on structs";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ }
+ } else if meta.path == BOUND {
+ // #[serde(bound = "T: SomeBound")]
+ // #[serde(bound(serialize = "...", deserialize = "..."))]
+ let (ser, de) = get_where_predicates(cx, &meta)?;
+ ser_bound.set_opt(&meta.path, ser);
+ de_bound.set_opt(&meta.path, de);
+ } else if meta.path == UNTAGGED {
+ // #[serde(untagged)]
+ match item.data {
+ syn::Data::Enum(_) => {
+ untagged.set_true(&meta.path);
+ }
+ syn::Data::Struct(_) => {
+ let msg = "#[serde(untagged)] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ syn::Data::Union(_) => {
+ let msg = "#[serde(untagged)] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ } else if meta.path == TAG {
+ // #[serde(tag = "type")]
+ if let Some(s) = get_lit_str(cx, TAG, &meta)? {
+ match &item.data {
+ syn::Data::Enum(_) => {
+ internal_tag.set(&meta.path, s.value());
+ }
+ syn::Data::Struct(syn::DataStruct { fields, .. }) => match fields {
+ syn::Fields::Named(_) => {
+ internal_tag.set(&meta.path, s.value());
+ }
+ syn::Fields::Unnamed(_) | syn::Fields::Unit => {
+ let msg = "#[serde(tag = \"...\")] can only be used on enums and structs with named fields";
+ cx.syn_error(meta.error(msg));
+ }
+ },
+ syn::Data::Union(_) => {
+ let msg = "#[serde(tag = \"...\")] can only be used on enums and structs with named fields";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ }
+ } else if meta.path == CONTENT {
+ // #[serde(content = "c")]
+ if let Some(s) = get_lit_str(cx, CONTENT, &meta)? {
+ match &item.data {
+ syn::Data::Enum(_) => {
+ content.set(&meta.path, s.value());
+ }
+ syn::Data::Struct(_) => {
+ let msg = "#[serde(content = \"...\")] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ syn::Data::Union(_) => {
+ let msg = "#[serde(content = \"...\")] can only be used on enums";
+ cx.syn_error(meta.error(msg));
+ }
+ }
+ }
+ } else if meta.path == FROM {
+ // #[serde(from = "Type")]
+ if let Some(from_ty) = parse_lit_into_ty(cx, FROM, &meta)? {
+ type_from.set_opt(&meta.path, Some(from_ty));
+ }
+ } else if meta.path == TRY_FROM {
+ // #[serde(try_from = "Type")]
+ if let Some(try_from_ty) = parse_lit_into_ty(cx, TRY_FROM, &meta)? {
+ type_try_from.set_opt(&meta.path, Some(try_from_ty));
+ }
+ } else if meta.path == INTO {
+ // #[serde(into = "Type")]
+ if let Some(into_ty) = parse_lit_into_ty(cx, INTO, &meta)? {
+ type_into.set_opt(&meta.path, Some(into_ty));
+ }
+ } else if meta.path == REMOTE {
+ // #[serde(remote = "...")]
+ if let Some(path) = parse_lit_into_path(cx, REMOTE, &meta)? {
+ if is_primitive_path(&path, "Self") {
+ remote.set(&meta.path, item.ident.clone().into());
+ } else {
+ remote.set(&meta.path, path);
+ }
+ }
+ } else if meta.path == FIELD_IDENTIFIER {
+ // #[serde(field_identifier)]
+ field_identifier.set_true(&meta.path);
+ } else if meta.path == VARIANT_IDENTIFIER {
+ // #[serde(variant_identifier)]
+ variant_identifier.set_true(&meta.path);
+ } else if meta.path == CRATE {
+ // #[serde(crate = "foo")]
+ if let Some(path) = parse_lit_into_path(cx, CRATE, &meta)? {
+ serde_path.set(&meta.path, path);
+ }
+ } else if meta.path == EXPECTING {
+ // #[serde(expecting = "a message")]
+ if let Some(s) = get_lit_str(cx, EXPECTING, &meta)? {
+ expecting.set(&meta.path, s.value());
+ }
+ } else {
+ let path = meta.path.to_token_stream().to_string().replace(' ', "");
+ return Err(
+ meta.error(format_args!("unknown serde container attribute `{}`", path))
+ );
+ }
+ Ok(())
+ }) {
+ cx.syn_error(err);
+ }
+ }
+
+ let mut is_packed = false;
+ for attr in &item.attrs {
+ if attr.path() == REPR {
+ let _ = attr.parse_args_with(|input: ParseStream| {
+ while let Some(token) = input.parse()? {
+ if let TokenTree::Ident(ident) = token {
+ is_packed |= ident == "packed";
+ }
+ }
+ Ok(())
+ });
+ }
+ }
+
+ Container {
+ name: Name::from_attrs(unraw(&item.ident), ser_name, de_name, None),
+ transparent: transparent.get(),
+ deny_unknown_fields: deny_unknown_fields.get(),
+ default: default.get().unwrap_or(Default::None),
+ rename_all_rules: RenameAllRules {
+ serialize: rename_all_ser_rule.get().unwrap_or(RenameRule::None),
+ deserialize: rename_all_de_rule.get().unwrap_or(RenameRule::None),
+ },
+ rename_all_fields_rules: RenameAllRules {
+ serialize: rename_all_fields_ser_rule.get().unwrap_or(RenameRule::None),
+ deserialize: rename_all_fields_de_rule.get().unwrap_or(RenameRule::None),
+ },
+ ser_bound: ser_bound.get(),
+ de_bound: de_bound.get(),
+ tag: decide_tag(cx, item, untagged, internal_tag, content),
+ type_from: type_from.get(),
+ type_try_from: type_try_from.get(),
+ type_into: type_into.get(),
+ remote: remote.get(),
+ identifier: decide_identifier(cx, item, field_identifier, variant_identifier),
+ has_flatten: false,
+ serde_path: serde_path.get(),
+ is_packed,
+ expecting: expecting.get(),
+ non_exhaustive,
+ }
+ }
+
+ pub fn name(&self) -> &Name {
+ &self.name
+ }
+
+ pub fn rename_all_rules(&self) -> RenameAllRules {
+ self.rename_all_rules
+ }
+
+ pub fn rename_all_fields_rules(&self) -> RenameAllRules {
+ self.rename_all_fields_rules
+ }
+
+ pub fn transparent(&self) -> bool {
+ self.transparent
+ }
+
+ pub fn deny_unknown_fields(&self) -> bool {
+ self.deny_unknown_fields
+ }
+
+ pub fn default(&self) -> &Default {
+ &self.default
+ }
+
+ pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.ser_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.de_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn tag(&self) -> &TagType {
+ &self.tag
+ }
+
+ pub fn type_from(&self) -> Option<&syn::Type> {
+ self.type_from.as_ref()
+ }
+
+ pub fn type_try_from(&self) -> Option<&syn::Type> {
+ self.type_try_from.as_ref()
+ }
+
+ pub fn type_into(&self) -> Option<&syn::Type> {
+ self.type_into.as_ref()
+ }
+
+ pub fn remote(&self) -> Option<&syn::Path> {
+ self.remote.as_ref()
+ }
+
+ pub fn is_packed(&self) -> bool {
+ self.is_packed
+ }
+
+ pub fn identifier(&self) -> Identifier {
+ self.identifier
+ }
+
+ pub fn has_flatten(&self) -> bool {
+ self.has_flatten
+ }
+
+ pub fn mark_has_flatten(&mut self) {
+ self.has_flatten = true;
+ }
+
+ pub fn custom_serde_path(&self) -> Option<&syn::Path> {
+ self.serde_path.as_ref()
+ }
+
+ pub fn serde_path(&self) -> Cow<syn::Path> {
+ self.custom_serde_path()
+ .map_or_else(|| Cow::Owned(parse_quote!(_serde)), Cow::Borrowed)
+ }
+
+ /// Error message generated when the type can't be deserialized.
+ /// If `None`, the default message will be used.
+ pub fn expecting(&self) -> Option<&str> {
+ self.expecting.as_ref().map(String::as_ref)
+ }
+
+ pub fn non_exhaustive(&self) -> bool {
+ self.non_exhaustive
+ }
+}
+
+fn decide_tag(
+ cx: &Ctxt,
+ item: &syn::DeriveInput,
+ untagged: BoolAttr,
+ internal_tag: Attr<String>,
+ content: Attr<String>,
+) -> TagType {
+ match (
+ untagged.0.get_with_tokens(),
+ internal_tag.get_with_tokens(),
+ content.get_with_tokens(),
+ ) {
+ (None, None, None) => TagType::External,
+ (Some(_), None, None) => TagType::None,
+ (None, Some((_, tag)), None) => {
+ // Check that there are no tuple variants.
+ if let syn::Data::Enum(data) = &item.data {
+ for variant in &data.variants {
+ match &variant.fields {
+ syn::Fields::Named(_) | syn::Fields::Unit => {}
+ syn::Fields::Unnamed(fields) => {
+ if fields.unnamed.len() != 1 {
+ let msg =
+ "#[serde(tag = \"...\")] cannot be used with tuple variants";
+ cx.error_spanned_by(variant, msg);
+ break;
+ }
+ }
+ }
+ }
+ }
+ TagType::Internal { tag }
+ }
+ (Some((untagged_tokens, ())), Some((tag_tokens, _)), None) => {
+ let msg = "enum cannot be both untagged and internally tagged";
+ cx.error_spanned_by(untagged_tokens, msg);
+ cx.error_spanned_by(tag_tokens, msg);
+ TagType::External // doesn't matter, will error
+ }
+ (None, None, Some((content_tokens, _))) => {
+ let msg = "#[serde(tag = \"...\", content = \"...\")] must be used together";
+ cx.error_spanned_by(content_tokens, msg);
+ TagType::External
+ }
+ (Some((untagged_tokens, ())), None, Some((content_tokens, _))) => {
+ let msg = "untagged enum cannot have #[serde(content = \"...\")]";
+ cx.error_spanned_by(untagged_tokens, msg);
+ cx.error_spanned_by(content_tokens, msg);
+ TagType::External
+ }
+ (None, Some((_, tag)), Some((_, content))) => TagType::Adjacent { tag, content },
+ (Some((untagged_tokens, ())), Some((tag_tokens, _)), Some((content_tokens, _))) => {
+ let msg = "untagged enum cannot have #[serde(tag = \"...\", content = \"...\")]";
+ cx.error_spanned_by(untagged_tokens, msg);
+ cx.error_spanned_by(tag_tokens, msg);
+ cx.error_spanned_by(content_tokens, msg);
+ TagType::External
+ }
+ }
+}
+
+fn decide_identifier(
+ cx: &Ctxt,
+ item: &syn::DeriveInput,
+ field_identifier: BoolAttr,
+ variant_identifier: BoolAttr,
+) -> Identifier {
+ match (
+ &item.data,
+ field_identifier.0.get_with_tokens(),
+ variant_identifier.0.get_with_tokens(),
+ ) {
+ (_, None, None) => Identifier::No,
+ (_, Some((field_identifier_tokens, ())), Some((variant_identifier_tokens, ()))) => {
+ let msg =
+ "#[serde(field_identifier)] and #[serde(variant_identifier)] cannot both be set";
+ cx.error_spanned_by(field_identifier_tokens, msg);
+ cx.error_spanned_by(variant_identifier_tokens, msg);
+ Identifier::No
+ }
+ (syn::Data::Enum(_), Some(_), None) => Identifier::Field,
+ (syn::Data::Enum(_), None, Some(_)) => Identifier::Variant,
+ (syn::Data::Struct(syn::DataStruct { struct_token, .. }), Some(_), None) => {
+ let msg = "#[serde(field_identifier)] can only be used on an enum";
+ cx.error_spanned_by(struct_token, msg);
+ Identifier::No
+ }
+ (syn::Data::Union(syn::DataUnion { union_token, .. }), Some(_), None) => {
+ let msg = "#[serde(field_identifier)] can only be used on an enum";
+ cx.error_spanned_by(union_token, msg);
+ Identifier::No
+ }
+ (syn::Data::Struct(syn::DataStruct { struct_token, .. }), None, Some(_)) => {
+ let msg = "#[serde(variant_identifier)] can only be used on an enum";
+ cx.error_spanned_by(struct_token, msg);
+ Identifier::No
+ }
+ (syn::Data::Union(syn::DataUnion { union_token, .. }), None, Some(_)) => {
+ let msg = "#[serde(variant_identifier)] can only be used on an enum";
+ cx.error_spanned_by(union_token, msg);
+ Identifier::No
+ }
+ }
+}
+
+/// Represents variant attribute information
+pub struct Variant {
+ name: Name,
+ rename_all_rules: RenameAllRules,
+ ser_bound: Option<Vec<syn::WherePredicate>>,
+ de_bound: Option<Vec<syn::WherePredicate>>,
+ skip_deserializing: bool,
+ skip_serializing: bool,
+ other: bool,
+ serialize_with: Option<syn::ExprPath>,
+ deserialize_with: Option<syn::ExprPath>,
+ borrow: Option<BorrowAttribute>,
+ untagged: bool,
+}
+
+struct BorrowAttribute {
+ path: syn::Path,
+ lifetimes: Option<BTreeSet<syn::Lifetime>>,
+}
+
+impl Variant {
+ pub fn from_ast(cx: &Ctxt, variant: &syn::Variant) -> Self {
+ let mut ser_name = Attr::none(cx, RENAME);
+ let mut de_name = Attr::none(cx, RENAME);
+ let mut de_aliases = VecAttr::none(cx, RENAME);
+ let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING);
+ let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING);
+ let mut rename_all_ser_rule = Attr::none(cx, RENAME_ALL);
+ let mut rename_all_de_rule = Attr::none(cx, RENAME_ALL);
+ let mut ser_bound = Attr::none(cx, BOUND);
+ let mut de_bound = Attr::none(cx, BOUND);
+ let mut other = BoolAttr::none(cx, OTHER);
+ let mut serialize_with = Attr::none(cx, SERIALIZE_WITH);
+ let mut deserialize_with = Attr::none(cx, DESERIALIZE_WITH);
+ let mut borrow = Attr::none(cx, BORROW);
+ let mut untagged = BoolAttr::none(cx, UNTAGGED);
+
+ for attr in &variant.attrs {
+ if attr.path() != SERDE {
+ continue;
+ }
+
+ if let syn::Meta::List(meta) = &attr.meta {
+ if meta.tokens.is_empty() {
+ continue;
+ }
+ }
+
+ if let Err(err) = attr.parse_nested_meta(|meta| {
+ if meta.path == RENAME {
+ // #[serde(rename = "foo")]
+ // #[serde(rename(serialize = "foo", deserialize = "bar"))]
+ let (ser, de) = get_multiple_renames(cx, &meta)?;
+ ser_name.set_opt(&meta.path, ser.as_ref().map(syn::LitStr::value));
+ for de_value in de {
+ de_name.set_if_none(de_value.value());
+ de_aliases.insert(&meta.path, de_value.value());
+ }
+ } else if meta.path == ALIAS {
+ // #[serde(alias = "foo")]
+ if let Some(s) = get_lit_str(cx, ALIAS, &meta)? {
+ de_aliases.insert(&meta.path, s.value());
+ }
+ } else if meta.path == RENAME_ALL {
+ // #[serde(rename_all = "foo")]
+ // #[serde(rename_all(serialize = "foo", deserialize = "bar"))]
+ let one_name = meta.input.peek(Token![=]);
+ let (ser, de) = get_renames(cx, RENAME_ALL, &meta)?;
+ if let Some(ser) = ser {
+ match RenameRule::from_str(&ser.value()) {
+ Ok(rename_rule) => rename_all_ser_rule.set(&meta.path, rename_rule),
+ Err(err) => cx.error_spanned_by(ser, err),
+ }
+ }
+ if let Some(de) = de {
+ match RenameRule::from_str(&de.value()) {
+ Ok(rename_rule) => rename_all_de_rule.set(&meta.path, rename_rule),
+ Err(err) => {
+ if !one_name {
+ cx.error_spanned_by(de, err);
+ }
+ }
+ }
+ }
+ } else if meta.path == SKIP {
+ // #[serde(skip)]
+ skip_serializing.set_true(&meta.path);
+ skip_deserializing.set_true(&meta.path);
+ } else if meta.path == SKIP_DESERIALIZING {
+ // #[serde(skip_deserializing)]
+ skip_deserializing.set_true(&meta.path);
+ } else if meta.path == SKIP_SERIALIZING {
+ // #[serde(skip_serializing)]
+ skip_serializing.set_true(&meta.path);
+ } else if meta.path == OTHER {
+ // #[serde(other)]
+ other.set_true(&meta.path);
+ } else if meta.path == BOUND {
+ // #[serde(bound = "T: SomeBound")]
+ // #[serde(bound(serialize = "...", deserialize = "..."))]
+ let (ser, de) = get_where_predicates(cx, &meta)?;
+ ser_bound.set_opt(&meta.path, ser);
+ de_bound.set_opt(&meta.path, de);
+ } else if meta.path == WITH {
+ // #[serde(with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, WITH, &meta)? {
+ let mut ser_path = path.clone();
+ ser_path
+ .path
+ .segments
+ .push(Ident::new("serialize", Span::call_site()).into());
+ serialize_with.set(&meta.path, ser_path);
+ let mut de_path = path;
+ de_path
+ .path
+ .segments
+ .push(Ident::new("deserialize", Span::call_site()).into());
+ deserialize_with.set(&meta.path, de_path);
+ }
+ } else if meta.path == SERIALIZE_WITH {
+ // #[serde(serialize_with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &meta)? {
+ serialize_with.set(&meta.path, path);
+ }
+ } else if meta.path == DESERIALIZE_WITH {
+ // #[serde(deserialize_with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &meta)? {
+ deserialize_with.set(&meta.path, path);
+ }
+ } else if meta.path == BORROW {
+ let borrow_attribute = if meta.input.peek(Token![=]) {
+ // #[serde(borrow = "'a + 'b")]
+ let lifetimes = parse_lit_into_lifetimes(cx, &meta)?;
+ BorrowAttribute {
+ path: meta.path.clone(),
+ lifetimes: Some(lifetimes),
+ }
+ } else {
+ // #[serde(borrow)]
+ BorrowAttribute {
+ path: meta.path.clone(),
+ lifetimes: None,
+ }
+ };
+ match &variant.fields {
+ syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
+ borrow.set(&meta.path, borrow_attribute);
+ }
+ _ => {
+ let msg = "#[serde(borrow)] may only be used on newtype variants";
+ cx.error_spanned_by(variant, msg);
+ }
+ }
+ } else if meta.path == UNTAGGED {
+ untagged.set_true(&meta.path);
+ } else {
+ let path = meta.path.to_token_stream().to_string().replace(' ', "");
+ return Err(
+ meta.error(format_args!("unknown serde variant attribute `{}`", path))
+ );
+ }
+ Ok(())
+ }) {
+ cx.syn_error(err);
+ }
+ }
+
+ Variant {
+ name: Name::from_attrs(unraw(&variant.ident), ser_name, de_name, Some(de_aliases)),
+ rename_all_rules: RenameAllRules {
+ serialize: rename_all_ser_rule.get().unwrap_or(RenameRule::None),
+ deserialize: rename_all_de_rule.get().unwrap_or(RenameRule::None),
+ },
+ ser_bound: ser_bound.get(),
+ de_bound: de_bound.get(),
+ skip_deserializing: skip_deserializing.get(),
+ skip_serializing: skip_serializing.get(),
+ other: other.get(),
+ serialize_with: serialize_with.get(),
+ deserialize_with: deserialize_with.get(),
+ borrow: borrow.get(),
+ untagged: untagged.get(),
+ }
+ }
+
+ pub fn name(&self) -> &Name {
+ &self.name
+ }
+
+ pub fn aliases(&self) -> &BTreeSet<String> {
+ self.name.deserialize_aliases()
+ }
+
+ pub fn rename_by_rules(&mut self, rules: RenameAllRules) {
+ if !self.name.serialize_renamed {
+ self.name.serialize = rules.serialize.apply_to_variant(&self.name.serialize);
+ }
+ if !self.name.deserialize_renamed {
+ self.name.deserialize = rules.deserialize.apply_to_variant(&self.name.deserialize);
+ }
+ self.name
+ .deserialize_aliases
+ .insert(self.name.deserialize.clone());
+ }
+
+ pub fn rename_all_rules(&self) -> RenameAllRules {
+ self.rename_all_rules
+ }
+
+ pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.ser_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.de_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn skip_deserializing(&self) -> bool {
+ self.skip_deserializing
+ }
+
+ pub fn skip_serializing(&self) -> bool {
+ self.skip_serializing
+ }
+
+ pub fn other(&self) -> bool {
+ self.other
+ }
+
+ pub fn serialize_with(&self) -> Option<&syn::ExprPath> {
+ self.serialize_with.as_ref()
+ }
+
+ pub fn deserialize_with(&self) -> Option<&syn::ExprPath> {
+ self.deserialize_with.as_ref()
+ }
+
+ pub fn untagged(&self) -> bool {
+ self.untagged
+ }
+}
+
+/// Represents field attribute information
+pub struct Field {
+ name: Name,
+ skip_serializing: bool,
+ skip_deserializing: bool,
+ skip_serializing_if: Option<syn::ExprPath>,
+ default: Default,
+ serialize_with: Option<syn::ExprPath>,
+ deserialize_with: Option<syn::ExprPath>,
+ ser_bound: Option<Vec<syn::WherePredicate>>,
+ de_bound: Option<Vec<syn::WherePredicate>>,
+ borrowed_lifetimes: BTreeSet<syn::Lifetime>,
+ getter: Option<syn::ExprPath>,
+ flatten: bool,
+ transparent: bool,
+}
+
+/// Represents the default to use for a field when deserializing.
+pub enum Default {
+ /// Field must always be specified because it does not have a default.
+ None,
+ /// The default is given by `std::default::Default::default()`.
+ Default,
+ /// The default is given by this function.
+ Path(syn::ExprPath),
+}
+
+impl Default {
+ pub fn is_none(&self) -> bool {
+ match self {
+ Default::None => true,
+ Default::Default | Default::Path(_) => false,
+ }
+ }
+}
+
+impl Field {
+ /// Extract out the `#[serde(...)]` attributes from a struct field.
+ pub fn from_ast(
+ cx: &Ctxt,
+ index: usize,
+ field: &syn::Field,
+ attrs: Option<&Variant>,
+ container_default: &Default,
+ ) -> Self {
+ let mut ser_name = Attr::none(cx, RENAME);
+ let mut de_name = Attr::none(cx, RENAME);
+ let mut de_aliases = VecAttr::none(cx, RENAME);
+ let mut skip_serializing = BoolAttr::none(cx, SKIP_SERIALIZING);
+ let mut skip_deserializing = BoolAttr::none(cx, SKIP_DESERIALIZING);
+ let mut skip_serializing_if = Attr::none(cx, SKIP_SERIALIZING_IF);
+ let mut default = Attr::none(cx, DEFAULT);
+ let mut serialize_with = Attr::none(cx, SERIALIZE_WITH);
+ let mut deserialize_with = Attr::none(cx, DESERIALIZE_WITH);
+ let mut ser_bound = Attr::none(cx, BOUND);
+ let mut de_bound = Attr::none(cx, BOUND);
+ let mut borrowed_lifetimes = Attr::none(cx, BORROW);
+ let mut getter = Attr::none(cx, GETTER);
+ let mut flatten = BoolAttr::none(cx, FLATTEN);
+
+ let ident = match &field.ident {
+ Some(ident) => unraw(ident),
+ None => index.to_string(),
+ };
+
+ if let Some(borrow_attribute) = attrs.and_then(|variant| variant.borrow.as_ref()) {
+ if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, field) {
+ if let Some(lifetimes) = &borrow_attribute.lifetimes {
+ for lifetime in lifetimes {
+ if !borrowable.contains(lifetime) {
+ let msg =
+ format!("field `{}` does not have lifetime {}", ident, lifetime);
+ cx.error_spanned_by(field, msg);
+ }
+ }
+ borrowed_lifetimes.set(&borrow_attribute.path, lifetimes.clone());
+ } else {
+ borrowed_lifetimes.set(&borrow_attribute.path, borrowable);
+ }
+ }
+ }
+
+ for attr in &field.attrs {
+ if attr.path() != SERDE {
+ continue;
+ }
+
+ if let syn::Meta::List(meta) = &attr.meta {
+ if meta.tokens.is_empty() {
+ continue;
+ }
+ }
+
+ if let Err(err) = attr.parse_nested_meta(|meta| {
+ if meta.path == RENAME {
+ // #[serde(rename = "foo")]
+ // #[serde(rename(serialize = "foo", deserialize = "bar"))]
+ let (ser, de) = get_multiple_renames(cx, &meta)?;
+ ser_name.set_opt(&meta.path, ser.as_ref().map(syn::LitStr::value));
+ for de_value in de {
+ de_name.set_if_none(de_value.value());
+ de_aliases.insert(&meta.path, de_value.value());
+ }
+ } else if meta.path == ALIAS {
+ // #[serde(alias = "foo")]
+ if let Some(s) = get_lit_str(cx, ALIAS, &meta)? {
+ de_aliases.insert(&meta.path, s.value());
+ }
+ } else if meta.path == DEFAULT {
+ if meta.input.peek(Token![=]) {
+ // #[serde(default = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, DEFAULT, &meta)? {
+ default.set(&meta.path, Default::Path(path));
+ }
+ } else {
+ // #[serde(default)]
+ default.set(&meta.path, Default::Default);
+ }
+ } else if meta.path == SKIP_SERIALIZING {
+ // #[serde(skip_serializing)]
+ skip_serializing.set_true(&meta.path);
+ } else if meta.path == SKIP_DESERIALIZING {
+ // #[serde(skip_deserializing)]
+ skip_deserializing.set_true(&meta.path);
+ } else if meta.path == SKIP {
+ // #[serde(skip)]
+ skip_serializing.set_true(&meta.path);
+ skip_deserializing.set_true(&meta.path);
+ } else if meta.path == SKIP_SERIALIZING_IF {
+ // #[serde(skip_serializing_if = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, SKIP_SERIALIZING_IF, &meta)? {
+ skip_serializing_if.set(&meta.path, path);
+ }
+ } else if meta.path == SERIALIZE_WITH {
+ // #[serde(serialize_with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, SERIALIZE_WITH, &meta)? {
+ serialize_with.set(&meta.path, path);
+ }
+ } else if meta.path == DESERIALIZE_WITH {
+ // #[serde(deserialize_with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, DESERIALIZE_WITH, &meta)? {
+ deserialize_with.set(&meta.path, path);
+ }
+ } else if meta.path == WITH {
+ // #[serde(with = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, WITH, &meta)? {
+ let mut ser_path = path.clone();
+ ser_path
+ .path
+ .segments
+ .push(Ident::new("serialize", Span::call_site()).into());
+ serialize_with.set(&meta.path, ser_path);
+ let mut de_path = path;
+ de_path
+ .path
+ .segments
+ .push(Ident::new("deserialize", Span::call_site()).into());
+ deserialize_with.set(&meta.path, de_path);
+ }
+ } else if meta.path == BOUND {
+ // #[serde(bound = "T: SomeBound")]
+ // #[serde(bound(serialize = "...", deserialize = "..."))]
+ let (ser, de) = get_where_predicates(cx, &meta)?;
+ ser_bound.set_opt(&meta.path, ser);
+ de_bound.set_opt(&meta.path, de);
+ } else if meta.path == BORROW {
+ if meta.input.peek(Token![=]) {
+ // #[serde(borrow = "'a + 'b")]
+ let lifetimes = parse_lit_into_lifetimes(cx, &meta)?;
+ if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, field) {
+ for lifetime in &lifetimes {
+ if !borrowable.contains(lifetime) {
+ let msg = format!(
+ "field `{}` does not have lifetime {}",
+ ident, lifetime,
+ );
+ cx.error_spanned_by(field, msg);
+ }
+ }
+ borrowed_lifetimes.set(&meta.path, lifetimes);
+ }
+ } else {
+ // #[serde(borrow)]
+ if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, field) {
+ borrowed_lifetimes.set(&meta.path, borrowable);
+ }
+ }
+ } else if meta.path == GETTER {
+ // #[serde(getter = "...")]
+ if let Some(path) = parse_lit_into_expr_path(cx, GETTER, &meta)? {
+ getter.set(&meta.path, path);
+ }
+ } else if meta.path == FLATTEN {
+ // #[serde(flatten)]
+ flatten.set_true(&meta.path);
+ } else {
+ let path = meta.path.to_token_stream().to_string().replace(' ', "");
+ return Err(
+ meta.error(format_args!("unknown serde field attribute `{}`", path))
+ );
+ }
+ Ok(())
+ }) {
+ cx.syn_error(err);
+ }
+ }
+
+ // If the field is marked skip_deserializing, initialize it to
+ // Default::default() unless a different default is specified by
+ // `#[serde(default = "...")]` on the field itself or on our container
+ // (e.g. the struct we are in).
+ if let Default::None = *container_default {
+ if skip_deserializing.0.value.is_some() {
+ default.set_if_none(Default::Default);
+ }
+ }
+
+ let mut borrowed_lifetimes = borrowed_lifetimes.get().unwrap_or_default();
+ if !borrowed_lifetimes.is_empty() {
+ // Cow<str> and Cow<[u8]> never borrow by default:
+ //
+ // impl<'de, 'a, T: ?Sized> Deserialize<'de> for Cow<'a, T>
+ //
+ // A #[serde(borrow)] attribute enables borrowing that corresponds
+ // roughly to these impls:
+ //
+ // impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, str>
+ // impl<'de: 'a, 'a> Deserialize<'de> for Cow<'a, [u8]>
+ if is_cow(&field.ty, is_str) {
+ let mut path = syn::Path {
+ leading_colon: None,
+ segments: Punctuated::new(),
+ };
+ let span = Span::call_site();
+ path.segments.push(Ident::new("_serde", span).into());
+ path.segments.push(Ident::new("__private", span).into());
+ path.segments.push(Ident::new("de", span).into());
+ path.segments
+ .push(Ident::new("borrow_cow_str", span).into());
+ let expr = syn::ExprPath {
+ attrs: Vec::new(),
+ qself: None,
+ path,
+ };
+ deserialize_with.set_if_none(expr);
+ } else if is_cow(&field.ty, is_slice_u8) {
+ let mut path = syn::Path {
+ leading_colon: None,
+ segments: Punctuated::new(),
+ };
+ let span = Span::call_site();
+ path.segments.push(Ident::new("_serde", span).into());
+ path.segments.push(Ident::new("__private", span).into());
+ path.segments.push(Ident::new("de", span).into());
+ path.segments
+ .push(Ident::new("borrow_cow_bytes", span).into());
+ let expr = syn::ExprPath {
+ attrs: Vec::new(),
+ qself: None,
+ path,
+ };
+ deserialize_with.set_if_none(expr);
+ }
+ } else if is_implicitly_borrowed(&field.ty) {
+ // Types &str and &[u8] are always implicitly borrowed. No need for
+ // a #[serde(borrow)].
+ collect_lifetimes(&field.ty, &mut borrowed_lifetimes);
+ }
+
+ Field {
+ name: Name::from_attrs(ident, ser_name, de_name, Some(de_aliases)),
+ skip_serializing: skip_serializing.get(),
+ skip_deserializing: skip_deserializing.get(),
+ skip_serializing_if: skip_serializing_if.get(),
+ default: default.get().unwrap_or(Default::None),
+ serialize_with: serialize_with.get(),
+ deserialize_with: deserialize_with.get(),
+ ser_bound: ser_bound.get(),
+ de_bound: de_bound.get(),
+ borrowed_lifetimes,
+ getter: getter.get(),
+ flatten: flatten.get(),
+ transparent: false,
+ }
+ }
+
+ pub fn name(&self) -> &Name {
+ &self.name
+ }
+
+ pub fn aliases(&self) -> &BTreeSet<String> {
+ self.name.deserialize_aliases()
+ }
+
+ pub fn rename_by_rules(&mut self, rules: RenameAllRules) {
+ if !self.name.serialize_renamed {
+ self.name.serialize = rules.serialize.apply_to_field(&self.name.serialize);
+ }
+ if !self.name.deserialize_renamed {
+ self.name.deserialize = rules.deserialize.apply_to_field(&self.name.deserialize);
+ }
+ self.name
+ .deserialize_aliases
+ .insert(self.name.deserialize.clone());
+ }
+
+ pub fn skip_serializing(&self) -> bool {
+ self.skip_serializing
+ }
+
+ pub fn skip_deserializing(&self) -> bool {
+ self.skip_deserializing
+ }
+
+ pub fn skip_serializing_if(&self) -> Option<&syn::ExprPath> {
+ self.skip_serializing_if.as_ref()
+ }
+
+ pub fn default(&self) -> &Default {
+ &self.default
+ }
+
+ pub fn serialize_with(&self) -> Option<&syn::ExprPath> {
+ self.serialize_with.as_ref()
+ }
+
+ pub fn deserialize_with(&self) -> Option<&syn::ExprPath> {
+ self.deserialize_with.as_ref()
+ }
+
+ pub fn ser_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.ser_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn de_bound(&self) -> Option<&[syn::WherePredicate]> {
+ self.de_bound.as_ref().map(|vec| &vec[..])
+ }
+
+ pub fn borrowed_lifetimes(&self) -> &BTreeSet<syn::Lifetime> {
+ &self.borrowed_lifetimes
+ }
+
+ pub fn getter(&self) -> Option<&syn::ExprPath> {
+ self.getter.as_ref()
+ }
+
+ pub fn flatten(&self) -> bool {
+ self.flatten
+ }
+
+ pub fn transparent(&self) -> bool {
+ self.transparent
+ }
+
+ pub fn mark_transparent(&mut self) {
+ self.transparent = true;
+ }
+}
+
+type SerAndDe<T> = (Option<T>, Option<T>);
+
+fn get_ser_and_de<'c, T, F, R>(
+ cx: &'c Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+ f: F,
+) -> syn::Result<(VecAttr<'c, T>, VecAttr<'c, T>)>
+where
+ T: Clone,
+ F: Fn(&Ctxt, Symbol, Symbol, &ParseNestedMeta) -> syn::Result<R>,
+ R: Into<Option<T>>,
+{
+ let mut ser_meta = VecAttr::none(cx, attr_name);
+ let mut de_meta = VecAttr::none(cx, attr_name);
+
+ let lookahead = meta.input.lookahead1();
+ if lookahead.peek(Token![=]) {
+ if let Some(both) = f(cx, attr_name, attr_name, meta)?.into() {
+ ser_meta.insert(&meta.path, both.clone());
+ de_meta.insert(&meta.path, both);
+ }
+ } else if lookahead.peek(token::Paren) {
+ meta.parse_nested_meta(|meta| {
+ if meta.path == SERIALIZE {
+ if let Some(v) = f(cx, attr_name, SERIALIZE, &meta)?.into() {
+ ser_meta.insert(&meta.path, v);
+ }
+ } else if meta.path == DESERIALIZE {
+ if let Some(v) = f(cx, attr_name, DESERIALIZE, &meta)?.into() {
+ de_meta.insert(&meta.path, v);
+ }
+ } else {
+ return Err(meta.error(format_args!(
+ "malformed {0} attribute, expected `{0}(serialize = ..., deserialize = ...)`",
+ attr_name,
+ )));
+ }
+ Ok(())
+ })?;
+ } else {
+ return Err(lookahead.error());
+ }
+
+ Ok((ser_meta, de_meta))
+}
+
+fn get_renames(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<SerAndDe<syn::LitStr>> {
+ let (ser, de) = get_ser_and_de(cx, attr_name, meta, get_lit_str2)?;
+ Ok((ser.at_most_one(), de.at_most_one()))
+}
+
+fn get_multiple_renames(
+ cx: &Ctxt,
+ meta: &ParseNestedMeta,
+) -> syn::Result<(Option<syn::LitStr>, Vec<syn::LitStr>)> {
+ let (ser, de) = get_ser_and_de(cx, RENAME, meta, get_lit_str2)?;
+ Ok((ser.at_most_one(), de.get()))
+}
+
+fn get_where_predicates(
+ cx: &Ctxt,
+ meta: &ParseNestedMeta,
+) -> syn::Result<SerAndDe<Vec<syn::WherePredicate>>> {
+ let (ser, de) = get_ser_and_de(cx, BOUND, meta, parse_lit_into_where)?;
+ Ok((ser.at_most_one(), de.at_most_one()))
+}
+
+fn get_lit_str(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Option<syn::LitStr>> {
+ get_lit_str2(cx, attr_name, attr_name, meta)
+}
+
+fn get_lit_str2(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta_item_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Option<syn::LitStr>> {
+ let expr: syn::Expr = meta.value()?.parse()?;
+ let mut value = &expr;
+ while let syn::Expr::Group(e) = value {
+ value = &e.expr;
+ }
+ if let syn::Expr::Lit(syn::ExprLit {
+ lit: syn::Lit::Str(lit),
+ ..
+ }) = value
+ {
+ let suffix = lit.suffix();
+ if !suffix.is_empty() {
+ cx.error_spanned_by(
+ lit,
+ format!("unexpected suffix `{}` on string literal", suffix),
+ );
+ }
+ Ok(Some(lit.clone()))
+ } else {
+ cx.error_spanned_by(
+ expr,
+ format!(
+ "expected serde {} attribute to be a string: `{} = \"...\"`",
+ attr_name, meta_item_name
+ ),
+ );
+ Ok(None)
+ }
+}
+
+fn parse_lit_into_path(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Option<syn::Path>> {
+ let string = match get_lit_str(cx, attr_name, meta)? {
+ Some(string) => string,
+ None => return Ok(None),
+ };
+
+ Ok(match string.parse() {
+ Ok(path) => Some(path),
+ Err(_) => {
+ cx.error_spanned_by(
+ &string,
+ format!("failed to parse path: {:?}", string.value()),
+ );
+ None
+ }
+ })
+}
+
+fn parse_lit_into_expr_path(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Option<syn::ExprPath>> {
+ let string = match get_lit_str(cx, attr_name, meta)? {
+ Some(string) => string,
+ None => return Ok(None),
+ };
+
+ Ok(match string.parse() {
+ Ok(expr) => Some(expr),
+ Err(_) => {
+ cx.error_spanned_by(
+ &string,
+ format!("failed to parse path: {:?}", string.value()),
+ );
+ None
+ }
+ })
+}
+
+fn parse_lit_into_where(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta_item_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Vec<syn::WherePredicate>> {
+ let string = match get_lit_str2(cx, attr_name, meta_item_name, meta)? {
+ Some(string) => string,
+ None => return Ok(Vec::new()),
+ };
+
+ Ok(
+ match string.parse_with(Punctuated::<syn::WherePredicate, Token![,]>::parse_terminated) {
+ Ok(predicates) => Vec::from_iter(predicates),
+ Err(err) => {
+ cx.error_spanned_by(string, err);
+ Vec::new()
+ }
+ },
+ )
+}
+
+fn parse_lit_into_ty(
+ cx: &Ctxt,
+ attr_name: Symbol,
+ meta: &ParseNestedMeta,
+) -> syn::Result<Option<syn::Type>> {
+ let string = match get_lit_str(cx, attr_name, meta)? {
+ Some(string) => string,
+ None => return Ok(None),
+ };
+
+ Ok(match string.parse() {
+ Ok(ty) => Some(ty),
+ Err(_) => {
+ cx.error_spanned_by(
+ &string,
+ format!("failed to parse type: {} = {:?}", attr_name, string.value()),
+ );
+ None
+ }
+ })
+}
+
+// Parses a string literal like "'a + 'b + 'c" containing a nonempty list of
+// lifetimes separated by `+`.
+fn parse_lit_into_lifetimes(
+ cx: &Ctxt,
+ meta: &ParseNestedMeta,
+) -> syn::Result<BTreeSet<syn::Lifetime>> {
+ let string = match get_lit_str(cx, BORROW, meta)? {
+ Some(string) => string,
+ None => return Ok(BTreeSet::new()),
+ };
+
+ if let Ok(lifetimes) = string.parse_with(|input: ParseStream| {
+ let mut set = BTreeSet::new();
+ while !input.is_empty() {
+ let lifetime: Lifetime = input.parse()?;
+ if !set.insert(lifetime.clone()) {
+ cx.error_spanned_by(
+ &string,
+ format!("duplicate borrowed lifetime `{}`", lifetime),
+ );
+ }
+ if input.is_empty() {
+ break;
+ }
+ input.parse::<Token![+]>()?;
+ }
+ Ok(set)
+ }) {
+ if lifetimes.is_empty() {
+ cx.error_spanned_by(string, "at least one lifetime must be borrowed");
+ }
+ return Ok(lifetimes);
+ }
+
+ cx.error_spanned_by(
+ &string,
+ format!("failed to parse borrowed lifetimes: {:?}", string.value()),
+ );
+ Ok(BTreeSet::new())
+}
+
+fn is_implicitly_borrowed(ty: &syn::Type) -> bool {
+ is_implicitly_borrowed_reference(ty) || is_option(ty, is_implicitly_borrowed_reference)
+}
+
+fn is_implicitly_borrowed_reference(ty: &syn::Type) -> bool {
+ is_reference(ty, is_str) || is_reference(ty, is_slice_u8)
+}
+
+// Whether the type looks like it might be `std::borrow::Cow<T>` where elem="T".
+// This can have false negatives and false positives.
+//
+// False negative:
+//
+// use std::borrow::Cow as Pig;
+//
+// #[derive(Deserialize)]
+// struct S<'a> {
+// #[serde(borrow)]
+// pig: Pig<'a, str>,
+// }
+//
+// False positive:
+//
+// type str = [i16];
+//
+// #[derive(Deserialize)]
+// struct S<'a> {
+// #[serde(borrow)]
+// cow: Cow<'a, str>,
+// }
+fn is_cow(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
+ let path = match ungroup(ty) {
+ syn::Type::Path(ty) => &ty.path,
+ _ => {
+ return false;
+ }
+ };
+ let seg = match path.segments.last() {
+ Some(seg) => seg,
+ None => {
+ return false;
+ }
+ };
+ let args = match &seg.arguments {
+ syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
+ _ => {
+ return false;
+ }
+ };
+ seg.ident == "Cow"
+ && args.len() == 2
+ && match (&args[0], &args[1]) {
+ (syn::GenericArgument::Lifetime(_), syn::GenericArgument::Type(arg)) => elem(arg),
+ _ => false,
+ }
+}
+
+fn is_option(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
+ let path = match ungroup(ty) {
+ syn::Type::Path(ty) => &ty.path,
+ _ => {
+ return false;
+ }
+ };
+ let seg = match path.segments.last() {
+ Some(seg) => seg,
+ None => {
+ return false;
+ }
+ };
+ let args = match &seg.arguments {
+ syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
+ _ => {
+ return false;
+ }
+ };
+ seg.ident == "Option"
+ && args.len() == 1
+ && match &args[0] {
+ syn::GenericArgument::Type(arg) => elem(arg),
+ _ => false,
+ }
+}
+
+// Whether the type looks like it might be `&T` where elem="T". This can have
+// false negatives and false positives.
+//
+// False negative:
+//
+// type Yarn = str;
+//
+// #[derive(Deserialize)]
+// struct S<'a> {
+// r: &'a Yarn,
+// }
+//
+// False positive:
+//
+// type str = [i16];
+//
+// #[derive(Deserialize)]
+// struct S<'a> {
+// r: &'a str,
+// }
+fn is_reference(ty: &syn::Type, elem: fn(&syn::Type) -> bool) -> bool {
+ match ungroup(ty) {
+ syn::Type::Reference(ty) => ty.mutability.is_none() && elem(&ty.elem),
+ _ => false,
+ }
+}
+
+fn is_str(ty: &syn::Type) -> bool {
+ is_primitive_type(ty, "str")
+}
+
+fn is_slice_u8(ty: &syn::Type) -> bool {
+ match ungroup(ty) {
+ syn::Type::Slice(ty) => is_primitive_type(&ty.elem, "u8"),
+ _ => false,
+ }
+}
+
+fn is_primitive_type(ty: &syn::Type, primitive: &str) -> bool {
+ match ungroup(ty) {
+ syn::Type::Path(ty) => ty.qself.is_none() && is_primitive_path(&ty.path, primitive),
+ _ => false,
+ }
+}
+
+fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool {
+ path.leading_colon.is_none()
+ && path.segments.len() == 1
+ && path.segments[0].ident == primitive
+ && path.segments[0].arguments.is_empty()
+}
+
+// All lifetimes that this type could borrow from a Deserializer.
+//
+// For example a type `S<'a, 'b>` could borrow `'a` and `'b`. On the other hand
+// a type `for<'a> fn(&'a str)` could not borrow `'a` from the Deserializer.
+//
+// This is used when there is an explicit or implicit `#[serde(borrow)]`
+// attribute on the field so there must be at least one borrowable lifetime.
+fn borrowable_lifetimes(
+ cx: &Ctxt,
+ name: &str,
+ field: &syn::Field,
+) -> Result<BTreeSet<syn::Lifetime>, ()> {
+ let mut lifetimes = BTreeSet::new();
+ collect_lifetimes(&field.ty, &mut lifetimes);
+ if lifetimes.is_empty() {
+ let msg = format!("field `{}` has no lifetimes to borrow", name);
+ cx.error_spanned_by(field, msg);
+ Err(())
+ } else {
+ Ok(lifetimes)
+ }
+}
+
+fn collect_lifetimes(ty: &syn::Type, out: &mut BTreeSet<syn::Lifetime>) {
+ match ty {
+ #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
+ syn::Type::Slice(ty) => {
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Array(ty) => {
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Ptr(ty) => {
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Reference(ty) => {
+ out.extend(ty.lifetime.iter().cloned());
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Tuple(ty) => {
+ for elem in &ty.elems {
+ collect_lifetimes(elem, out);
+ }
+ }
+ syn::Type::Path(ty) => {
+ if let Some(qself) = &ty.qself {
+ collect_lifetimes(&qself.ty, out);
+ }
+ for seg in &ty.path.segments {
+ if let syn::PathArguments::AngleBracketed(bracketed) = &seg.arguments {
+ for arg in &bracketed.args {
+ match arg {
+ syn::GenericArgument::Lifetime(lifetime) => {
+ out.insert(lifetime.clone());
+ }
+ syn::GenericArgument::Type(ty) => {
+ collect_lifetimes(ty, out);
+ }
+ syn::GenericArgument::AssocType(binding) => {
+ collect_lifetimes(&binding.ty, out);
+ }
+ syn::GenericArgument::Const(_)
+ | syn::GenericArgument::AssocConst(_)
+ | syn::GenericArgument::Constraint(_)
+ | _ => {}
+ }
+ }
+ }
+ }
+ }
+ syn::Type::Paren(ty) => {
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Group(ty) => {
+ collect_lifetimes(&ty.elem, out);
+ }
+ syn::Type::Macro(ty) => {
+ collect_lifetimes_from_tokens(ty.mac.tokens.clone(), out);
+ }
+ syn::Type::BareFn(_)
+ | syn::Type::Never(_)
+ | syn::Type::TraitObject(_)
+ | syn::Type::ImplTrait(_)
+ | syn::Type::Infer(_)
+ | syn::Type::Verbatim(_) => {}
+
+ _ => {}
+ }
+}
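+
+// A minimal sketch (not part of the upstream source) of how the helper above
+// behaves: for a type such as `&'a Cow<'b, str>`, both `'a` and `'b` count as
+// borrowable lifetimes.
+#[test]
+fn collect_lifetimes_sketch() {
+ let ty: syn::Type = syn::parse_quote!(&'a Cow<'b, str>);
+ let mut out = BTreeSet::new();
+ collect_lifetimes(&ty, &mut out);
+ let names: Vec<String> = out.iter().map(|lt| lt.ident.to_string()).collect();
+ assert_eq!(names, ["a", "b"]);
+}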
+
+fn collect_lifetimes_from_tokens(tokens: TokenStream, out: &mut BTreeSet<syn::Lifetime>) {
+ let mut iter = tokens.into_iter();
+ while let Some(tt) = iter.next() {
+ match &tt {
+ TokenTree::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
+ if let Some(TokenTree::Ident(ident)) = iter.next() {
+ out.insert(syn::Lifetime {
+ apostrophe: op.span(),
+ ident,
+ });
+ }
+ }
+ TokenTree::Group(group) => {
+ let tokens = group.stream();
+ collect_lifetimes_from_tokens(tokens, out);
+ }
+ _ => {}
+ }
+ }
+}
diff --git a/vendor/serde_derive/src/internals/case.rs b/vendor/serde_derive/src/internals/case.rs
new file mode 100644
index 0000000..8c8c02e
--- /dev/null
+++ b/vendor/serde_derive/src/internals/case.rs
@@ -0,0 +1,200 @@
+//! Code to convert the Rust-styled field/variant (e.g. `my_field`, `MyType`) to the
+//! case of the source (e.g. `my-field`, `MY_FIELD`).
+
+use self::RenameRule::*;
+use std::fmt::{self, Debug, Display};
+
+/// The different possible ways to change case of fields in a struct, or variants in an enum.
+#[derive(Copy, Clone, PartialEq)]
+pub enum RenameRule {
+ /// Don't apply a default rename rule.
+ None,
+ /// Rename direct children to "lowercase" style.
+ LowerCase,
+ /// Rename direct children to "UPPERCASE" style.
+ UpperCase,
+ /// Rename direct children to "PascalCase" style, as typically used for
+ /// enum variants.
+ PascalCase,
+ /// Rename direct children to "camelCase" style.
+ CamelCase,
+ /// Rename direct children to "snake_case" style, as commonly used for
+ /// fields.
+ SnakeCase,
+ /// Rename direct children to "SCREAMING_SNAKE_CASE" style, as commonly
+ /// used for constants.
+ ScreamingSnakeCase,
+ /// Rename direct children to "kebab-case" style.
+ KebabCase,
+ /// Rename direct children to "SCREAMING-KEBAB-CASE" style.
+ ScreamingKebabCase,
+}
+
+static RENAME_RULES: &[(&str, RenameRule)] = &[
+ ("lowercase", LowerCase),
+ ("UPPERCASE", UpperCase),
+ ("PascalCase", PascalCase),
+ ("camelCase", CamelCase),
+ ("snake_case", SnakeCase),
+ ("SCREAMING_SNAKE_CASE", ScreamingSnakeCase),
+ ("kebab-case", KebabCase),
+ ("SCREAMING-KEBAB-CASE", ScreamingKebabCase),
+];
+
+impl RenameRule {
+ pub fn from_str(rename_all_str: &str) -> Result<Self, ParseError> {
+ for (name, rule) in RENAME_RULES {
+ if rename_all_str == *name {
+ return Ok(*rule);
+ }
+ }
+ Err(ParseError {
+ unknown: rename_all_str,
+ })
+ }
+
+ /// Apply a renaming rule to an enum variant, returning the version expected in the source.
+ pub fn apply_to_variant(self, variant: &str) -> String {
+ match self {
+ None | PascalCase => variant.to_owned(),
+ LowerCase => variant.to_ascii_lowercase(),
+ UpperCase => variant.to_ascii_uppercase(),
+ CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..],
+ SnakeCase => {
+ let mut snake = String::new();
+ for (i, ch) in variant.char_indices() {
+ if i > 0 && ch.is_uppercase() {
+ snake.push('_');
+ }
+ snake.push(ch.to_ascii_lowercase());
+ }
+ snake
+ }
+ ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(),
+ KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"),
+ ScreamingKebabCase => ScreamingSnakeCase
+ .apply_to_variant(variant)
+ .replace('_', "-"),
+ }
+ }
+
+ /// Apply a renaming rule to a struct field, returning the version expected in the source.
+ pub fn apply_to_field(self, field: &str) -> String {
+ match self {
+ None | LowerCase | SnakeCase => field.to_owned(),
+ UpperCase => field.to_ascii_uppercase(),
+ PascalCase => {
+ let mut pascal = String::new();
+ let mut capitalize = true;
+ for ch in field.chars() {
+ if ch == '_' {
+ capitalize = true;
+ } else if capitalize {
+ pascal.push(ch.to_ascii_uppercase());
+ capitalize = false;
+ } else {
+ pascal.push(ch);
+ }
+ }
+ pascal
+ }
+ CamelCase => {
+ let pascal = PascalCase.apply_to_field(field);
+ pascal[..1].to_ascii_lowercase() + &pascal[1..]
+ }
+ ScreamingSnakeCase => field.to_ascii_uppercase(),
+ KebabCase => field.replace('_', "-"),
+ ScreamingKebabCase => ScreamingSnakeCase.apply_to_field(field).replace('_', "-"),
+ }
+ }
+
+ /// Returns `self` if it is not `None`, otherwise returns `rule_b`.
+ pub fn or(self, rule_b: Self) -> Self {
+ match self {
+ None => rule_b,
+ _ => self,
+ }
+ }
+}
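+
+// A small illustrative test (a sketch, not taken from upstream): `from_str`
+// resolves exactly the strings accepted by #[serde(rename_all = "...")], and
+// anything else becomes a ParseError.
+#[test]
+fn rename_rule_from_str_sketch() {
+ assert!(matches!(RenameRule::from_str("kebab-case"), Ok(KebabCase)));
+ assert!(RenameRule::from_str("Kebab-Case").is_err());
+}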
+
+pub struct ParseError<'a> {
+ unknown: &'a str,
+}
+
+impl<'a> Display for ParseError<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("unknown rename rule `rename_all = ")?;
+ Debug::fmt(self.unknown, f)?;
+ f.write_str("`, expected one of ")?;
+ for (i, (name, _rule)) in RENAME_RULES.iter().enumerate() {
+ if i > 0 {
+ f.write_str(", ")?;
+ }
+ Debug::fmt(name, f)?;
+ }
+ Ok(())
+ }
+}
+
+#[test]
+fn rename_variants() {
+ for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
+ (
+ "Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
+ ),
+ (
+ "VeryTasty",
+ "verytasty",
+ "VERYTASTY",
+ "veryTasty",
+ "very_tasty",
+ "VERY_TASTY",
+ "very-tasty",
+ "VERY-TASTY",
+ ),
+ ("A", "a", "A", "a", "a", "A", "a", "A"),
+ ("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42", "Z42"),
+ ] {
+ assert_eq!(None.apply_to_variant(original), original);
+ assert_eq!(LowerCase.apply_to_variant(original), lower);
+ assert_eq!(UpperCase.apply_to_variant(original), upper);
+ assert_eq!(PascalCase.apply_to_variant(original), original);
+ assert_eq!(CamelCase.apply_to_variant(original), camel);
+ assert_eq!(SnakeCase.apply_to_variant(original), snake);
+ assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming);
+ assert_eq!(KebabCase.apply_to_variant(original), kebab);
+ assert_eq!(
+ ScreamingKebabCase.apply_to_variant(original),
+ screaming_kebab
+ );
+ }
+}
+
+#[test]
+fn rename_fields() {
+ for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
+ (
+ "outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
+ ),
+ (
+ "very_tasty",
+ "VERY_TASTY",
+ "VeryTasty",
+ "veryTasty",
+ "VERY_TASTY",
+ "very-tasty",
+ "VERY-TASTY",
+ ),
+ ("a", "A", "A", "a", "A", "a", "A"),
+ ("z42", "Z42", "Z42", "z42", "Z42", "z42", "Z42"),
+ ] {
+ assert_eq!(None.apply_to_field(original), original);
+ assert_eq!(UpperCase.apply_to_field(original), upper);
+ assert_eq!(PascalCase.apply_to_field(original), pascal);
+ assert_eq!(CamelCase.apply_to_field(original), camel);
+ assert_eq!(SnakeCase.apply_to_field(original), original);
+ assert_eq!(ScreamingSnakeCase.apply_to_field(original), screaming);
+ assert_eq!(KebabCase.apply_to_field(original), kebab);
+ assert_eq!(ScreamingKebabCase.apply_to_field(original), screaming_kebab);
+ }
+}
diff --git a/vendor/serde_derive/src/internals/check.rs b/vendor/serde_derive/src/internals/check.rs
new file mode 100644
index 0000000..52b0f37
--- /dev/null
+++ b/vendor/serde_derive/src/internals/check.rs
@@ -0,0 +1,477 @@
+use crate::internals::ast::{Container, Data, Field, Style};
+use crate::internals::attr::{Default, Identifier, TagType};
+use crate::internals::{ungroup, Ctxt, Derive};
+use syn::{Member, Type};
+
+// Cross-cutting checks that require looking at more than a single attrs object.
+// Simpler checks should happen when parsing and building the attrs.
+pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
+ check_default_on_tuple(cx, cont);
+ check_remote_generic(cx, cont);
+ check_getter(cx, cont);
+ check_flatten(cx, cont);
+ check_identifier(cx, cont);
+ check_variant_skip_attrs(cx, cont);
+ check_internal_tag_field_name_conflict(cx, cont);
+ check_adjacent_tag_conflict(cx, cont);
+ check_transparent(cx, cont, derive);
+ check_from_and_try_from(cx, cont);
+}
+
+// If some field of a tuple struct is marked #[serde(default)] then all fields
+// after it must also be marked with that attribute, or the struct must have a
+// container-level serde(default) attribute. A field's default value is only
+// used for tuple fields if the sequence is exhausted at that point; that means
+// all subsequent fields will fail to deserialize if they don't have their own
+// default.
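+//
+// For illustration (a hedged example, not taken from upstream docs), a tuple
+// struct like
+//
+//     #[derive(Deserialize)]
+//     struct Pair(#[serde(default)] u8, u8);
+//
+// is rejected here: once the sequence stops after the first element, the
+// second field has no default to fall back on.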
+fn check_default_on_tuple(cx: &Ctxt, cont: &Container) {
+ if let Default::None = cont.attrs.default() {
+ if let Data::Struct(Style::Tuple, fields) = &cont.data {
+ let mut first_default_index = None;
+ for (i, field) in fields.iter().enumerate() {
+ // Skipped fields automatically get the #[serde(default)]
+ // attribute. We are interested only in non-skipped fields here.
+ if field.attrs.skip_deserializing() {
+ continue;
+ }
+ if let Default::None = field.attrs.default() {
+ if let Some(first) = first_default_index {
+ cx.error_spanned_by(
+ field.ty,
+ format!("field must have #[serde(default)] because previous field {} has #[serde(default)]", first),
+ );
+ }
+ continue;
+ }
+ if first_default_index.is_none() {
+ first_default_index = Some(i);
+ }
+ }
+ }
+ }
+}
+
+// Remote derive definition type must have either all of the generics of the
+// remote type:
+//
+// #[serde(remote = "Generic")]
+// struct Generic<T> {…}
+//
+// or none of them, i.e. defining impls for one concrete instantiation of the
+// remote type only:
+//
+// #[serde(remote = "Generic<T>")]
+// struct ConcreteDef {…}
+//
+fn check_remote_generic(cx: &Ctxt, cont: &Container) {
+ if let Some(remote) = cont.attrs.remote() {
+ let local_has_generic = !cont.generics.params.is_empty();
+ let remote_has_generic = !remote.segments.last().unwrap().arguments.is_none();
+ if local_has_generic && remote_has_generic {
+ cx.error_spanned_by(remote, "remove generic parameters from this path");
+ }
+ }
+}
+
+// Getters are only allowed inside structs (not enums) with the `remote`
+// attribute.
+fn check_getter(cx: &Ctxt, cont: &Container) {
+ match cont.data {
+ Data::Enum(_) => {
+ if cont.data.has_getter() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(getter = \"...\")] is not allowed in an enum",
+ );
+ }
+ }
+ Data::Struct(_, _) => {
+ if cont.data.has_getter() && cont.attrs.remote().is_none() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(getter = \"...\")] can only be used in structs that have #[serde(remote = \"...\")]",
+ );
+ }
+ }
+ }
+}
+
+// Flattening has some restrictions we can test.
+fn check_flatten(cx: &Ctxt, cont: &Container) {
+ match &cont.data {
+ Data::Enum(variants) => {
+ for variant in variants {
+ for field in &variant.fields {
+ check_flatten_field(cx, variant.style, field);
+ }
+ }
+ }
+ Data::Struct(style, fields) => {
+ for field in fields {
+ check_flatten_field(cx, *style, field);
+ }
+ }
+ }
+}
+
+fn check_flatten_field(cx: &Ctxt, style: Style, field: &Field) {
+ if !field.attrs.flatten() {
+ return;
+ }
+ match style {
+ Style::Tuple => {
+ cx.error_spanned_by(
+ field.original,
+ "#[serde(flatten)] cannot be used on tuple structs",
+ );
+ }
+ Style::Newtype => {
+ cx.error_spanned_by(
+ field.original,
+ "#[serde(flatten)] cannot be used on newtype structs",
+ );
+ }
+ _ => {}
+ }
+}
+
+// The `other` attribute may be used at most once, and the variant that carries
+// it must be the last variant of the enum.
+//
+// Inside a `variant_identifier` all variants must be unit variants. Inside a
+// `field_identifier` all but possibly one variant must be unit variants. The
+// last variant may be a newtype variant which is an implicit "other" case.
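+//
+// For example (an illustrative sketch), a field identifier with a trailing
+// catch-all looks like:
+//
+//     #[derive(Deserialize)]
+//     #[serde(field_identifier, rename_all = "lowercase")]
+//     enum FieldName { Width, Height, #[serde(other)] Unknown }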
+fn check_identifier(cx: &Ctxt, cont: &Container) {
+ let variants = match &cont.data {
+ Data::Enum(variants) => variants,
+ Data::Struct(_, _) => return,
+ };
+
+ for (i, variant) in variants.iter().enumerate() {
+ match (
+ variant.style,
+ cont.attrs.identifier(),
+ variant.attrs.other(),
+ cont.attrs.tag(),
+ ) {
+ // The `other` attribute may not be used in a variant_identifier.
+ (_, Identifier::Variant, true, _) => {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(other)] may not be used on a variant identifier",
+ );
+ }
+
+ // Variant with `other` attribute cannot appear in untagged enum
+ (_, Identifier::No, true, &TagType::None) => {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(other)] cannot appear on untagged enum",
+ );
+ }
+
+ // Variant with `other` attribute must be the last one.
+ (Style::Unit, Identifier::Field, true, _) | (Style::Unit, Identifier::No, true, _) => {
+ if i < variants.len() - 1 {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(other)] must be on the last variant",
+ );
+ }
+ }
+
+ // Variant with `other` attribute must be a unit variant.
+ (_, Identifier::Field, true, _) | (_, Identifier::No, true, _) => {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(other)] must be on a unit variant",
+ );
+ }
+
+ // Any sort of variant is allowed if this is not an identifier.
+ (_, Identifier::No, false, _) => {}
+
+ // Unit variant without `other` attribute is always fine.
+ (Style::Unit, _, false, _) => {}
+
+ // The last field is allowed to be a newtype catch-all.
+ (Style::Newtype, Identifier::Field, false, _) => {
+ if i < variants.len() - 1 {
+ cx.error_spanned_by(
+ variant.original,
+ format!("`{}` must be the last variant", variant.ident),
+ );
+ }
+ }
+
+ (_, Identifier::Field, false, _) => {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(field_identifier)] may only contain unit variants",
+ );
+ }
+
+ (_, Identifier::Variant, false, _) => {
+ cx.error_spanned_by(
+ variant.original,
+ "#[serde(variant_identifier)] may only contain unit variants",
+ );
+ }
+ }
+ }
+}
+
+// Skip-(de)serializing attributes are not allowed on variants marked
+// (de)serialize_with.
+fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
+ let variants = match &cont.data {
+ Data::Enum(variants) => variants,
+ Data::Struct(_, _) => return,
+ };
+
+ for variant in variants {
+ if variant.attrs.serialize_with().is_some() {
+ if variant.attrs.skip_serializing() {
+ cx.error_spanned_by(
+ variant.original,
+ format!(
+ "variant `{}` cannot have both #[serde(serialize_with)] and #[serde(skip_serializing)]",
+ variant.ident
+ ),
+ );
+ }
+
+ for field in &variant.fields {
+ let member = member_message(&field.member);
+
+ if field.attrs.skip_serializing() {
+ cx.error_spanned_by(
+ variant.original,
+ format!(
+ "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing)]",
+ variant.ident, member
+ ),
+ );
+ }
+
+ if field.attrs.skip_serializing_if().is_some() {
+ cx.error_spanned_by(
+ variant.original,
+ format!(
+ "variant `{}` cannot have both #[serde(serialize_with)] and a field {} marked with #[serde(skip_serializing_if)]",
+ variant.ident, member
+ ),
+ );
+ }
+ }
+ }
+
+ if variant.attrs.deserialize_with().is_some() {
+ if variant.attrs.skip_deserializing() {
+ cx.error_spanned_by(
+ variant.original,
+ format!(
+ "variant `{}` cannot have both #[serde(deserialize_with)] and #[serde(skip_deserializing)]",
+ variant.ident
+ ),
+ );
+ }
+
+ for field in &variant.fields {
+ if field.attrs.skip_deserializing() {
+ let member = member_message(&field.member);
+
+ cx.error_spanned_by(
+ variant.original,
+ format!(
+ "variant `{}` cannot have both #[serde(deserialize_with)] and a field {} marked with #[serde(skip_deserializing)]",
+ variant.ident, member
+ ),
+ );
+ }
+ }
+ }
+ }
+}
+
+// The tag of an internally-tagged struct variant must not be the same as any
+// of its field names, as that would produce duplicate keys in the serialized
+// output and/or ambiguity in the to-be-deserialized input.
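+//
+// For example (hedged, illustrative only), this is rejected because both the
+// tag and the field would serialize under the key "type":
+//
+//     #[derive(Serialize)]
+//     #[serde(tag = "type")]
+//     enum Node { Element { r#type: String } }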
+fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
+ let variants = match &cont.data {
+ Data::Enum(variants) => variants,
+ Data::Struct(_, _) => return,
+ };
+
+ let tag = match cont.attrs.tag() {
+ TagType::Internal { tag } => tag.as_str(),
+ TagType::External | TagType::Adjacent { .. } | TagType::None => return,
+ };
+
+ let diagnose_conflict = || {
+ cx.error_spanned_by(
+ cont.original,
+ format!("variant field name `{}` conflicts with internal tag", tag),
+ );
+ };
+
+ for variant in variants {
+ match variant.style {
+ Style::Struct => {
+ if variant.attrs.untagged() {
+ continue;
+ }
+ for field in &variant.fields {
+ let check_ser =
+ !(field.attrs.skip_serializing() || variant.attrs.skip_serializing());
+ let check_de =
+ !(field.attrs.skip_deserializing() || variant.attrs.skip_deserializing());
+ let name = field.attrs.name();
+ let ser_name = name.serialize_name();
+
+ if check_ser && ser_name == tag {
+ diagnose_conflict();
+ return;
+ }
+
+ for de_name in field.attrs.aliases() {
+ if check_de && de_name == tag {
+ diagnose_conflict();
+ return;
+ }
+ }
+ }
+ }
+ Style::Unit | Style::Newtype | Style::Tuple => {}
+ }
+ }
+}
+
+// In the case of adjacently-tagged enums, the type tag and the content tag
+// must differ, for the same reason.
+fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
+ let (type_tag, content_tag) = match cont.attrs.tag() {
+ TagType::Adjacent { tag, content } => (tag, content),
+ TagType::Internal { .. } | TagType::External | TagType::None => return,
+ };
+
+ if type_tag == content_tag {
+ cx.error_spanned_by(
+ cont.original,
+ format!(
+ "enum tags `{}` for type and content conflict with each other",
+ type_tag
+ ),
+ );
+ }
+}
+
+// Enums and unit structs cannot be transparent.
+fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
+ if !cont.attrs.transparent() {
+ return;
+ }
+
+ if cont.attrs.type_from().is_some() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] is not allowed with #[serde(from = \"...\")]",
+ );
+ }
+
+ if cont.attrs.type_try_from().is_some() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] is not allowed with #[serde(try_from = \"...\")]",
+ );
+ }
+
+ if cont.attrs.type_into().is_some() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] is not allowed with #[serde(into = \"...\")]",
+ );
+ }
+
+ let fields = match &mut cont.data {
+ Data::Enum(_) => {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] is not allowed on an enum",
+ );
+ return;
+ }
+ Data::Struct(Style::Unit, _) => {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] is not allowed on a unit struct",
+ );
+ return;
+ }
+ Data::Struct(_, fields) => fields,
+ };
+
+ let mut transparent_field = None;
+
+ for field in fields {
+ if allow_transparent(field, derive) {
+ if transparent_field.is_some() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] requires struct to have at most one transparent field",
+ );
+ return;
+ }
+ transparent_field = Some(field);
+ }
+ }
+
+ match transparent_field {
+ Some(transparent_field) => transparent_field.attrs.mark_transparent(),
+ None => match derive {
+ Derive::Serialize => {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] requires at least one field that is not skipped",
+ );
+ }
+ Derive::Deserialize => {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(transparent)] requires at least one field that is neither skipped nor has a default",
+ );
+ }
+ },
+ }
+}
+
+fn member_message(member: &Member) -> String {
+ match member {
+ Member::Named(ident) => format!("`{}`", ident),
+ Member::Unnamed(i) => format!("#{}", i.index),
+ }
+}
+
+fn allow_transparent(field: &Field, derive: Derive) -> bool {
+ if let Type::Path(ty) = ungroup(field.ty) {
+ if let Some(seg) = ty.path.segments.last() {
+ if seg.ident == "PhantomData" {
+ return false;
+ }
+ }
+ }
+
+ match derive {
+ Derive::Serialize => !field.attrs.skip_serializing(),
+ Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(),
+ }
+}
+
+fn check_from_and_try_from(cx: &Ctxt, cont: &mut Container) {
+ if cont.attrs.type_from().is_some() && cont.attrs.type_try_from().is_some() {
+ cx.error_spanned_by(
+ cont.original,
+ "#[serde(from = \"...\")] and #[serde(try_from = \"...\")] conflict with each other",
+ );
+ }
+}
diff --git a/vendor/serde_derive/src/internals/ctxt.rs b/vendor/serde_derive/src/internals/ctxt.rs
new file mode 100644
index 0000000..a47bfa4
--- /dev/null
+++ b/vendor/serde_derive/src/internals/ctxt.rs
@@ -0,0 +1,68 @@
+use quote::ToTokens;
+use std::cell::RefCell;
+use std::fmt::Display;
+use std::thread;
+
+/// A type to collect errors together and format them.
+///
+/// Dropping this object without consuming it via `check` will cause a panic.
+///
+/// References can be shared since this type uses run-time exclusive mut checking.
+#[derive(Default)]
+pub struct Ctxt {
+ // The contents will be set to `None` during checking. This is so that checking can be
+ // enforced.
+ errors: RefCell<Option<Vec<syn::Error>>>,
+}
+
+impl Ctxt {
+ /// Create a new context object.
+ ///
+ /// This object contains no errors, but will still trigger a panic if it is not `check`ed.
+ pub fn new() -> Self {
+ Ctxt {
+ errors: RefCell::new(Some(Vec::new())),
+ }
+ }
+
+ /// Add an error to the context object with a tokenizable object.
+ ///
+ /// The object is used for spanning in error messages.
+ pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
+ self.errors
+ .borrow_mut()
+ .as_mut()
+ .unwrap()
+ // Curb monomorphization from generating too many identical methods.
+ .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
+ }
+
+ /// Add one of Syn's parse errors.
+ pub fn syn_error(&self, err: syn::Error) {
+ self.errors.borrow_mut().as_mut().unwrap().push(err);
+ }
+
+ /// Consume this object, producing a formatted error string if there are errors.
+ pub fn check(self) -> syn::Result<()> {
+ let mut errors = self.errors.borrow_mut().take().unwrap().into_iter();
+
+ let mut combined = match errors.next() {
+ Some(first) => first,
+ None => return Ok(()),
+ };
+
+ for rest in errors {
+ combined.combine(rest);
+ }
+
+ Err(combined)
+ }
+}
+
+impl Drop for Ctxt {
+ fn drop(&mut self) {
+ if !thread::panicking() && self.errors.borrow().is_some() {
+ panic!("forgot to check for errors");
+ }
+ }
+}
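+
+// A minimal usage sketch (not part of upstream serde_derive): errors
+// accumulate in the context and `check` either passes or combines them into a
+// single syn::Error.
+#[test]
+fn ctxt_sketch() {
+ let cx = Ctxt::new();
+ assert!(cx.check().is_ok());
+
+ let cx = Ctxt::new();
+ cx.syn_error(syn::Error::new(proc_macro2::Span::call_site(), "first"));
+ cx.syn_error(syn::Error::new(proc_macro2::Span::call_site(), "second"));
+ assert!(cx.check().is_err());
+}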
diff --git a/vendor/serde_derive/src/internals/mod.rs b/vendor/serde_derive/src/internals/mod.rs
new file mode 100644
index 0000000..f98ef08
--- /dev/null
+++ b/vendor/serde_derive/src/internals/mod.rs
@@ -0,0 +1,27 @@
+pub mod ast;
+pub mod attr;
+
+mod case;
+mod check;
+mod ctxt;
+mod receiver;
+mod respan;
+mod symbol;
+
+use syn::Type;
+
+pub use self::ctxt::Ctxt;
+pub use self::receiver::replace_receiver;
+
+#[derive(Copy, Clone)]
+pub enum Derive {
+ Serialize,
+ Deserialize,
+}
+
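+// Peel off any `Type::Group` wrappers that syn inserts around types passed
+// through `macro_rules!` metavariables, so callers can match on the
+// underlying type directly.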
+pub fn ungroup(mut ty: &Type) -> &Type {
+ while let Type::Group(group) = ty {
+ ty = &group.elem;
+ }
+ ty
+}
diff --git a/vendor/serde_derive/src/internals/receiver.rs b/vendor/serde_derive/src/internals/receiver.rs
new file mode 100644
index 0000000..fa2a77d
--- /dev/null
+++ b/vendor/serde_derive/src/internals/receiver.rs
@@ -0,0 +1,292 @@
+use crate::internals::respan::respan;
+use proc_macro2::Span;
+use quote::ToTokens;
+use std::mem;
+use syn::punctuated::Punctuated;
+use syn::{
+ parse_quote, Data, DeriveInput, Expr, ExprPath, GenericArgument, GenericParam, Generics, Macro,
+ Path, PathArguments, QSelf, ReturnType, Token, Type, TypeParamBound, TypePath, WherePredicate,
+};
+
+pub fn replace_receiver(input: &mut DeriveInput) {
+ let self_ty = {
+ let ident = &input.ident;
+ let ty_generics = input.generics.split_for_impl().1;
+ parse_quote!(#ident #ty_generics)
+ };
+ let mut visitor = ReplaceReceiver(&self_ty);
+ visitor.visit_generics_mut(&mut input.generics);
+ visitor.visit_data_mut(&mut input.data);
+}
+
+struct ReplaceReceiver<'a>(&'a TypePath);
+
+impl ReplaceReceiver<'_> {
+ fn self_ty(&self, span: Span) -> TypePath {
+ let tokens = self.0.to_token_stream();
+ let respanned = respan(tokens, span);
+ syn::parse2(respanned).unwrap()
+ }
+
+ fn self_to_qself(&self, qself: &mut Option<QSelf>, path: &mut Path) {
+ if path.leading_colon.is_some() || path.segments[0].ident != "Self" {
+ return;
+ }
+
+ if path.segments.len() == 1 {
+ self.self_to_expr_path(path);
+ return;
+ }
+
+ let span = path.segments[0].ident.span();
+ *qself = Some(QSelf {
+ lt_token: Token![<](span),
+ ty: Box::new(Type::Path(self.self_ty(span))),
+ position: 0,
+ as_token: None,
+ gt_token: Token![>](span),
+ });
+
+ path.leading_colon = Some(**path.segments.pairs().next().unwrap().punct().unwrap());
+
+ let segments = mem::replace(&mut path.segments, Punctuated::new());
+ path.segments = segments.into_pairs().skip(1).collect();
+ }
+
+ fn self_to_expr_path(&self, path: &mut Path) {
+ let self_ty = self.self_ty(path.segments[0].ident.span());
+ let variant = mem::replace(path, self_ty.path);
+ for segment in &mut path.segments {
+ if let PathArguments::AngleBracketed(bracketed) = &mut segment.arguments {
+ if bracketed.colon2_token.is_none() && !bracketed.args.is_empty() {
+ bracketed.colon2_token = Some(<Token![::]>::default());
+ }
+ }
+ }
+ if variant.segments.len() > 1 {
+ path.segments.push_punct(<Token![::]>::default());
+ path.segments.extend(variant.segments.into_pairs().skip(1));
+ }
+ }
+}
+
+impl ReplaceReceiver<'_> {
+ // `Self` -> `Receiver`
+ fn visit_type_mut(&mut self, ty: &mut Type) {
+ let span = if let Type::Path(node) = ty {
+ if node.qself.is_none() && node.path.is_ident("Self") {
+ node.path.segments[0].ident.span()
+ } else {
+ self.visit_type_path_mut(node);
+ return;
+ }
+ } else {
+ self.visit_type_mut_impl(ty);
+ return;
+ };
+ *ty = self.self_ty(span).into();
+ }
+
+ // `Self::Assoc` -> `<Receiver>::Assoc`
+ fn visit_type_path_mut(&mut self, ty: &mut TypePath) {
+ if ty.qself.is_none() {
+ self.self_to_qself(&mut ty.qself, &mut ty.path);
+ }
+ self.visit_type_path_mut_impl(ty);
+ }
+
+ // `Self::method` -> `<Receiver>::method`
+ fn visit_expr_path_mut(&mut self, expr: &mut ExprPath) {
+ if expr.qself.is_none() {
+ self.self_to_qself(&mut expr.qself, &mut expr.path);
+ }
+ self.visit_expr_path_mut_impl(expr);
+ }
+
+ // Everything below is simply traversing the syntax tree.
+
+ fn visit_type_mut_impl(&mut self, ty: &mut Type) {
+ match ty {
+ #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
+ Type::Array(ty) => {
+ self.visit_type_mut(&mut ty.elem);
+ self.visit_expr_mut(&mut ty.len);
+ }
+ Type::BareFn(ty) => {
+ for arg in &mut ty.inputs {
+ self.visit_type_mut(&mut arg.ty);
+ }
+ self.visit_return_type_mut(&mut ty.output);
+ }
+ Type::Group(ty) => self.visit_type_mut(&mut ty.elem),
+ Type::ImplTrait(ty) => {
+ for bound in &mut ty.bounds {
+ self.visit_type_param_bound_mut(bound);
+ }
+ }
+ Type::Macro(ty) => self.visit_macro_mut(&mut ty.mac),
+ Type::Paren(ty) => self.visit_type_mut(&mut ty.elem),
+ Type::Path(ty) => {
+ if let Some(qself) = &mut ty.qself {
+ self.visit_type_mut(&mut qself.ty);
+ }
+ self.visit_path_mut(&mut ty.path);
+ }
+ Type::Ptr(ty) => self.visit_type_mut(&mut ty.elem),
+ Type::Reference(ty) => self.visit_type_mut(&mut ty.elem),
+ Type::Slice(ty) => self.visit_type_mut(&mut ty.elem),
+ Type::TraitObject(ty) => {
+ for bound in &mut ty.bounds {
+ self.visit_type_param_bound_mut(bound);
+ }
+ }
+ Type::Tuple(ty) => {
+ for elem in &mut ty.elems {
+ self.visit_type_mut(elem);
+ }
+ }
+
+ Type::Infer(_) | Type::Never(_) | Type::Verbatim(_) => {}
+
+ _ => {}
+ }
+ }
+
+ fn visit_type_path_mut_impl(&mut self, ty: &mut TypePath) {
+ if let Some(qself) = &mut ty.qself {
+ self.visit_type_mut(&mut qself.ty);
+ }
+ self.visit_path_mut(&mut ty.path);
+ }
+
+ fn visit_expr_path_mut_impl(&mut self, expr: &mut ExprPath) {
+ if let Some(qself) = &mut expr.qself {
+ self.visit_type_mut(&mut qself.ty);
+ }
+ self.visit_path_mut(&mut expr.path);
+ }
+
+ fn visit_path_mut(&mut self, path: &mut Path) {
+ for segment in &mut path.segments {
+ self.visit_path_arguments_mut(&mut segment.arguments);
+ }
+ }
+
+ fn visit_path_arguments_mut(&mut self, arguments: &mut PathArguments) {
+ match arguments {
+ PathArguments::None => {}
+ PathArguments::AngleBracketed(arguments) => {
+ for arg in &mut arguments.args {
+ match arg {
+ #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
+ GenericArgument::Type(arg) => self.visit_type_mut(arg),
+ GenericArgument::AssocType(arg) => self.visit_type_mut(&mut arg.ty),
+ GenericArgument::Lifetime(_)
+ | GenericArgument::Const(_)
+ | GenericArgument::AssocConst(_)
+ | GenericArgument::Constraint(_) => {}
+ _ => {}
+ }
+ }
+ }
+ PathArguments::Parenthesized(arguments) => {
+ for argument in &mut arguments.inputs {
+ self.visit_type_mut(argument);
+ }
+ self.visit_return_type_mut(&mut arguments.output);
+ }
+ }
+ }
+
+ fn visit_return_type_mut(&mut self, return_type: &mut ReturnType) {
+ match return_type {
+ ReturnType::Default => {}
+ ReturnType::Type(_, output) => self.visit_type_mut(output),
+ }
+ }
+
+ fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) {
+ match bound {
+ #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
+ TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path),
+ TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => {}
+ _ => {}
+ }
+ }
+
+ fn visit_generics_mut(&mut self, generics: &mut Generics) {
+ for param in &mut generics.params {
+ match param {
+ GenericParam::Type(param) => {
+ for bound in &mut param.bounds {
+ self.visit_type_param_bound_mut(bound);
+ }
+ }
+ GenericParam::Lifetime(_) | GenericParam::Const(_) => {}
+ }
+ }
+ if let Some(where_clause) = &mut generics.where_clause {
+ for predicate in &mut where_clause.predicates {
+ match predicate {
+ #![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
+ WherePredicate::Type(predicate) => {
+ self.visit_type_mut(&mut predicate.bounded_ty);
+ for bound in &mut predicate.bounds {
+ self.visit_type_param_bound_mut(bound);
+ }
+ }
+ WherePredicate::Lifetime(_) => {}
+ _ => {}
+ }
+ }
+ }
+ }
+
+ fn visit_data_mut(&mut self, data: &mut Data) {
+ match data {
+ Data::Struct(data) => {
+ for field in &mut data.fields {
+ self.visit_type_mut(&mut field.ty);
+ }
+ }
+ Data::Enum(data) => {
+ for variant in &mut data.variants {
+ for field in &mut variant.fields {
+ self.visit_type_mut(&mut field.ty);
+ }
+ }
+ }
+ Data::Union(_) => {}
+ }
+ }
+
+ fn visit_expr_mut(&mut self, expr: &mut Expr) {
+ match expr {
+ Expr::Binary(expr) => {
+ self.visit_expr_mut(&mut expr.left);
+ self.visit_expr_mut(&mut expr.right);
+ }
+ Expr::Call(expr) => {
+ self.visit_expr_mut(&mut expr.func);
+ for arg in &mut expr.args {
+ self.visit_expr_mut(arg);
+ }
+ }
+ Expr::Cast(expr) => {
+ self.visit_expr_mut(&mut expr.expr);
+ self.visit_type_mut(&mut expr.ty);
+ }
+ Expr::Field(expr) => self.visit_expr_mut(&mut expr.base),
+ Expr::Index(expr) => {
+ self.visit_expr_mut(&mut expr.expr);
+ self.visit_expr_mut(&mut expr.index);
+ }
+ Expr::Paren(expr) => self.visit_expr_mut(&mut expr.expr),
+ Expr::Path(expr) => self.visit_expr_path_mut(expr),
+ Expr::Unary(expr) => self.visit_expr_mut(&mut expr.expr),
+ _ => {}
+ }
+ }
+
+ fn visit_macro_mut(&mut self, _mac: &mut Macro) {}
+}
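+
+// A hedged sketch (not in the upstream file) of what `replace_receiver` does:
+// every `Self` mention in the input's field types is rewritten to the concrete
+// type being derived, so `Vec<Self>` inside `struct Tree<T>` becomes
+// `Vec<Tree<T>>`.
+#[test]
+fn replace_receiver_sketch() {
+ let mut input: DeriveInput = parse_quote! {
+ struct Tree<T> {
+ children: Vec<Self>,
+ }
+ };
+ replace_receiver(&mut input);
+ let tokens = input.to_token_stream().to_string();
+ assert!(!tokens.contains("Self"));
+}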
diff --git a/vendor/serde_derive/src/internals/respan.rs b/vendor/serde_derive/src/internals/respan.rs
new file mode 100644
index 0000000..dcec701
--- /dev/null
+++ b/vendor/serde_derive/src/internals/respan.rs
@@ -0,0 +1,16 @@
+use proc_macro2::{Group, Span, TokenStream, TokenTree};
+
+pub(crate) fn respan(stream: TokenStream, span: Span) -> TokenStream {
+ stream
+ .into_iter()
+ .map(|token| respan_token(token, span))
+ .collect()
+}
+
+fn respan_token(mut token: TokenTree, span: Span) -> TokenTree {
+ if let TokenTree::Group(g) = &mut token {
+ *g = Group::new(g.delimiter(), respan(g.stream(), span));
+ }
+ token.set_span(span);
+ token
+}
diff --git a/vendor/serde_derive/src/internals/symbol.rs b/vendor/serde_derive/src/internals/symbol.rs
new file mode 100644
index 0000000..572391a
--- /dev/null
+++ b/vendor/serde_derive/src/internals/symbol.rs
@@ -0,0 +1,71 @@
+use std::fmt::{self, Display};
+use syn::{Ident, Path};
+
+#[derive(Copy, Clone)]
+pub struct Symbol(&'static str);
+
+pub const ALIAS: Symbol = Symbol("alias");
+pub const BORROW: Symbol = Symbol("borrow");
+pub const BOUND: Symbol = Symbol("bound");
+pub const CONTENT: Symbol = Symbol("content");
+pub const CRATE: Symbol = Symbol("crate");
+pub const DEFAULT: Symbol = Symbol("default");
+pub const DENY_UNKNOWN_FIELDS: Symbol = Symbol("deny_unknown_fields");
+pub const DESERIALIZE: Symbol = Symbol("deserialize");
+pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with");
+pub const EXPECTING: Symbol = Symbol("expecting");
+pub const FIELD_IDENTIFIER: Symbol = Symbol("field_identifier");
+pub const FLATTEN: Symbol = Symbol("flatten");
+pub const FROM: Symbol = Symbol("from");
+pub const GETTER: Symbol = Symbol("getter");
+pub const INTO: Symbol = Symbol("into");
+pub const NON_EXHAUSTIVE: Symbol = Symbol("non_exhaustive");
+pub const OTHER: Symbol = Symbol("other");
+pub const REMOTE: Symbol = Symbol("remote");
+pub const RENAME: Symbol = Symbol("rename");
+pub const RENAME_ALL: Symbol = Symbol("rename_all");
+pub const RENAME_ALL_FIELDS: Symbol = Symbol("rename_all_fields");
+pub const REPR: Symbol = Symbol("repr");
+pub const SERDE: Symbol = Symbol("serde");
+pub const SERIALIZE: Symbol = Symbol("serialize");
+pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with");
+pub const SKIP: Symbol = Symbol("skip");
+pub const SKIP_DESERIALIZING: Symbol = Symbol("skip_deserializing");
+pub const SKIP_SERIALIZING: Symbol = Symbol("skip_serializing");
+pub const SKIP_SERIALIZING_IF: Symbol = Symbol("skip_serializing_if");
+pub const TAG: Symbol = Symbol("tag");
+pub const TRANSPARENT: Symbol = Symbol("transparent");
+pub const TRY_FROM: Symbol = Symbol("try_from");
+pub const UNTAGGED: Symbol = Symbol("untagged");
+pub const VARIANT_IDENTIFIER: Symbol = Symbol("variant_identifier");
+pub const WITH: Symbol = Symbol("with");
+
+impl PartialEq<Symbol> for Ident {
+ fn eq(&self, word: &Symbol) -> bool {
+ self == word.0
+ }
+}
+
+impl<'a> PartialEq<Symbol> for &'a Ident {
+ fn eq(&self, word: &Symbol) -> bool {
+ *self == word.0
+ }
+}
+
+impl PartialEq<Symbol> for Path {
+ fn eq(&self, word: &Symbol) -> bool {
+ self.is_ident(word.0)
+ }
+}
+
+impl<'a> PartialEq<Symbol> for &'a Path {
+ fn eq(&self, word: &Symbol) -> bool {
+ self.is_ident(word.0)
+ }
+}
+
+impl Display for Symbol {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(self.0)
+ }
+}
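+
+// A short illustrative test (a sketch, not from the upstream file): a Symbol
+// compares equal to the matching identifier or single-segment path, which is
+// how attribute names such as `rename` are recognized.
+#[test]
+fn symbol_comparison_sketch() {
+ let path: Path = syn::parse_quote!(rename);
+ assert!(path == RENAME);
+ assert!(path != ALIAS);
+}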