From 1b6a04ca5504955c571d1c97504fb45ea0befee4 Mon Sep 17 00:00:00 2001 From: Valentin Popov Date: Mon, 8 Jan 2024 01:21:28 +0400 Subject: Initial vendor packages Signed-off-by: Valentin Popov --- vendor/syn/src/attr.rs | 776 +++++++ vendor/syn/src/bigint.rs | 66 + vendor/syn/src/buffer.rs | 432 ++++ vendor/syn/src/custom_keyword.rs | 259 +++ vendor/syn/src/custom_punctuation.rs | 302 +++ vendor/syn/src/data.rs | 404 ++++ vendor/syn/src/derive.rs | 245 +++ vendor/syn/src/discouraged.rs | 219 ++ vendor/syn/src/drops.rs | 58 + vendor/syn/src/error.rs | 467 +++++ vendor/syn/src/export.rs | 73 + vendor/syn/src/expr.rs | 3506 +++++++++++++++++++++++++++++++ vendor/syn/src/ext.rs | 135 ++ vendor/syn/src/file.rs | 125 ++ vendor/syn/src/gen/clone.rs | 2181 +++++++++++++++++++ vendor/syn/src/gen/debug.rs | 3052 +++++++++++++++++++++++++++ vendor/syn/src/gen/eq.rs | 2148 +++++++++++++++++++ vendor/syn/src/gen/fold.rs | 3459 ++++++++++++++++++++++++++++++ vendor/syn/src/gen/hash.rs | 2804 +++++++++++++++++++++++++ vendor/syn/src/gen/visit.rs | 3844 +++++++++++++++++++++++++++++++++ vendor/syn/src/gen/visit_mut.rs | 3847 ++++++++++++++++++++++++++++++++++ vendor/syn/src/gen_helper.rs | 34 + vendor/syn/src/generics.rs | 1227 +++++++++++ vendor/syn/src/group.rs | 291 +++ vendor/syn/src/ident.rs | 107 + vendor/syn/src/item.rs | 3404 ++++++++++++++++++++++++++++++ vendor/syn/src/lib.rs | 1010 +++++++++ vendor/syn/src/lifetime.rs | 156 ++ vendor/syn/src/lit.rs | 1651 +++++++++++++++ vendor/syn/src/lookahead.rs | 169 ++ vendor/syn/src/mac.rs | 211 ++ vendor/syn/src/macros.rs | 176 ++ vendor/syn/src/meta.rs | 426 ++++ vendor/syn/src/op.rs | 218 ++ vendor/syn/src/parse.rs | 1386 ++++++++++++ vendor/syn/src/parse_macro_input.rs | 128 ++ vendor/syn/src/parse_quote.rs | 209 ++ vendor/syn/src/pat.rs | 917 ++++++++ vendor/syn/src/path.rs | 877 ++++++++ vendor/syn/src/print.rs | 16 + vendor/syn/src/punctuated.rs | 1108 ++++++++++ vendor/syn/src/restriction.rs | 171 ++ vendor/syn/src/sealed.rs | 4 + vendor/syn/src/span.rs | 63 + vendor/syn/src/spanned.rs | 118 ++ vendor/syn/src/stmt.rs | 452 ++++ vendor/syn/src/thread.rs | 60 + vendor/syn/src/token.rs | 1138 ++++++++++ vendor/syn/src/tt.rs | 107 + vendor/syn/src/ty.rs | 1189 +++++++++++ vendor/syn/src/verbatim.rs | 33 + vendor/syn/src/whitespace.rs | 65 + 52 files changed, 45523 insertions(+) create mode 100644 vendor/syn/src/attr.rs create mode 100644 vendor/syn/src/bigint.rs create mode 100644 vendor/syn/src/buffer.rs create mode 100644 vendor/syn/src/custom_keyword.rs create mode 100644 vendor/syn/src/custom_punctuation.rs create mode 100644 vendor/syn/src/data.rs create mode 100644 vendor/syn/src/derive.rs create mode 100644 vendor/syn/src/discouraged.rs create mode 100644 vendor/syn/src/drops.rs create mode 100644 vendor/syn/src/error.rs create mode 100644 vendor/syn/src/export.rs create mode 100644 vendor/syn/src/expr.rs create mode 100644 vendor/syn/src/ext.rs create mode 100644 vendor/syn/src/file.rs create mode 100644 vendor/syn/src/gen/clone.rs create mode 100644 vendor/syn/src/gen/debug.rs create mode 100644 vendor/syn/src/gen/eq.rs create mode 100644 vendor/syn/src/gen/fold.rs create mode 100644 vendor/syn/src/gen/hash.rs create mode 100644 vendor/syn/src/gen/visit.rs create mode 100644 vendor/syn/src/gen/visit_mut.rs create mode 100644 vendor/syn/src/gen_helper.rs create mode 100644 vendor/syn/src/generics.rs create mode 100644 vendor/syn/src/group.rs create mode 100644 vendor/syn/src/ident.rs create mode 100644 vendor/syn/src/item.rs create mode 100644 
vendor/syn/src/lib.rs create mode 100644 vendor/syn/src/lifetime.rs create mode 100644 vendor/syn/src/lit.rs create mode 100644 vendor/syn/src/lookahead.rs create mode 100644 vendor/syn/src/mac.rs create mode 100644 vendor/syn/src/macros.rs create mode 100644 vendor/syn/src/meta.rs create mode 100644 vendor/syn/src/op.rs create mode 100644 vendor/syn/src/parse.rs create mode 100644 vendor/syn/src/parse_macro_input.rs create mode 100644 vendor/syn/src/parse_quote.rs create mode 100644 vendor/syn/src/pat.rs create mode 100644 vendor/syn/src/path.rs create mode 100644 vendor/syn/src/print.rs create mode 100644 vendor/syn/src/punctuated.rs create mode 100644 vendor/syn/src/restriction.rs create mode 100644 vendor/syn/src/sealed.rs create mode 100644 vendor/syn/src/span.rs create mode 100644 vendor/syn/src/spanned.rs create mode 100644 vendor/syn/src/stmt.rs create mode 100644 vendor/syn/src/thread.rs create mode 100644 vendor/syn/src/token.rs create mode 100644 vendor/syn/src/tt.rs create mode 100644 vendor/syn/src/ty.rs create mode 100644 vendor/syn/src/verbatim.rs create mode 100644 vendor/syn/src/whitespace.rs (limited to 'vendor/syn/src') diff --git a/vendor/syn/src/attr.rs b/vendor/syn/src/attr.rs new file mode 100644 index 0000000..b6c4675 --- /dev/null +++ b/vendor/syn/src/attr.rs @@ -0,0 +1,776 @@ +use super::*; +use proc_macro2::TokenStream; +use std::iter; +use std::slice; + +#[cfg(feature = "parsing")] +use crate::meta::{self, ParseNestedMeta}; +#[cfg(feature = "parsing")] +use crate::parse::{Parse, ParseStream, Parser, Result}; + +ast_struct! { + /// An attribute, like `#[repr(transparent)]`. + /// + ///
+ /// + /// # Syntax + /// + /// Rust has six types of attributes. + /// + /// - Outer attributes like `#[repr(transparent)]`. These appear outside or + /// in front of the item they describe. + /// + /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside + /// of the item they describe, usually a module. + /// + /// - Outer one-line doc comments like `/// Example`. + /// + /// - Inner one-line doc comments like `//! Please file an issue`. + /// + /// - Outer documentation blocks `/** Example */`. + /// + /// - Inner documentation blocks `/*! Please file an issue */`. + /// + /// The `style` field of type `AttrStyle` distinguishes whether an attribute + /// is outer or inner. + /// + /// Every attribute has a `path` that indicates the intended interpretation + /// of the rest of the attribute's contents. The path and the optional + /// additional contents are represented together in the `meta` field of the + /// attribute in three possible varieties: + /// + /// - Meta::Path — attributes whose information content conveys just a + /// path, for example the `#[test]` attribute. + /// + /// - Meta::List — attributes that carry arbitrary tokens after the + /// path, surrounded by a delimiter (parenthesis, bracket, or brace). For + /// example `#[derive(Copy)]` or `#[precondition(x < 5)]`. + /// + /// - Meta::NameValue — attributes with an `=` sign after the path, + /// followed by a Rust expression. For example `#[path = + /// "sys/windows.rs"]`. + /// + /// All doc comments are represented in the NameValue style with a path of + /// "doc", as this is how they are processed by the compiler and by + /// `macro_rules!` macros. + /// + /// ```text + /// #[derive(Copy, Clone)] + /// ~~~~~~Path + /// ^^^^^^^^^^^^^^^^^^^Meta::List + /// + /// #[path = "sys/windows.rs"] + /// ~~~~Path + /// ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue + /// + /// #[test] + /// ^^^^Meta::Path + /// ``` + /// + ///
+ /// + /// # Parsing from tokens to Attribute + /// + /// This type does not implement the [`Parse`] trait and thus cannot be + /// parsed directly by [`ParseStream::parse`]. Instead use + /// [`ParseStream::call`] with one of the two parser functions + /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on + /// which you intend to parse. + /// + /// [`Parse`]: parse::Parse + /// [`ParseStream::parse`]: parse::ParseBuffer::parse + /// [`ParseStream::call`]: parse::ParseBuffer::call + /// + /// ``` + /// use syn::{Attribute, Ident, Result, Token}; + /// use syn::parse::{Parse, ParseStream}; + /// + /// // Parses a unit struct with attributes. + /// // + /// // #[path = "s.tmpl"] + /// // struct S; + /// struct UnitStruct { + /// attrs: Vec, + /// struct_token: Token![struct], + /// name: Ident, + /// semi_token: Token![;], + /// } + /// + /// impl Parse for UnitStruct { + /// fn parse(input: ParseStream) -> Result { + /// Ok(UnitStruct { + /// attrs: input.call(Attribute::parse_outer)?, + /// struct_token: input.parse()?, + /// name: input.parse()?, + /// semi_token: input.parse()?, + /// }) + /// } + /// } + /// ``` + /// + ///


+ /// + /// # Parsing from Attribute to structured arguments + /// + /// The grammar of attributes in Rust is very flexible, which makes the + /// syntax tree not that useful on its own. In particular, arguments of the + /// `Meta::List` variety of attribute are held in an arbitrary `tokens: + /// TokenStream`. Macros are expected to check the `path` of the attribute, + /// decide whether they recognize it, and then parse the remaining tokens + /// according to whatever grammar they wish to require for that kind of + /// attribute. Use [`parse_args()`] to parse those tokens into the expected + /// data structure. + /// + /// [`parse_args()`]: Attribute::parse_args + /// + ///


+ /// + /// # Doc comments + /// + /// The compiler transforms doc comments, such as `/// comment` and `/*! + /// comment */`, into attributes before macros are expanded. Each comment is + /// expanded into an attribute of the form `#[doc = r"comment"]`. + /// + /// As an example, the following `mod` items are expanded identically: + /// + /// ``` + /// # use syn::{ItemMod, parse_quote}; + /// let doc: ItemMod = parse_quote! { + /// /// Single line doc comments + /// /// We write so many! + /// /** + /// * Multi-line comments... + /// * May span many lines + /// */ + /// mod example { + /// //! Of course, they can be inner too + /// /*! And fit in a single line */ + /// } + /// }; + /// let attr: ItemMod = parse_quote! { + /// #[doc = r" Single line doc comments"] + /// #[doc = r" We write so many!"] + /// #[doc = r" + /// * Multi-line comments... + /// * May span many lines + /// "] + /// mod example { + /// #![doc = r" Of course, they can be inner too"] + /// #![doc = r" And fit in a single line "] + /// } + /// }; + /// assert_eq!(doc, attr); + /// ``` + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Attribute { + pub pound_token: Token![#], + pub style: AttrStyle, + pub bracket_token: token::Bracket, + pub meta: Meta, + } +} + +impl Attribute { + /// Returns the path that identifies the interpretation of this attribute. + /// + /// For example this would return the `test` in `#[test]`, the `derive` in + /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. + pub fn path(&self) -> &Path { + self.meta.path() + } + + /// Parse the arguments to the attribute as a syntax tree. + /// + /// This is similar to pulling out the `TokenStream` from `Meta::List` and + /// doing `syn::parse2::(meta_list.tokens)`, except that using + /// `parse_args` the error message has a more useful span when `tokens` is + /// empty. + /// + /// The surrounding delimiters are *not* included in the input to the + /// parser. + /// + /// ```text + /// #[my_attr(value < 5)] + /// ^^^^^^^^^ what gets parsed + /// ``` + /// + /// # Example + /// + /// ``` + /// use syn::{parse_quote, Attribute, Expr}; + /// + /// let attr: Attribute = parse_quote! { + /// #[precondition(value < 5)] + /// }; + /// + /// if attr.path().is_ident("precondition") { + /// let precondition: Expr = attr.parse_args()?; + /// // ... + /// } + /// # anyhow::Ok(()) + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args(&self) -> Result { + self.parse_args_with(T::parse) + } + + /// Parse the arguments to the attribute using the given parser. + /// + /// # Example + /// + /// ``` + /// use syn::{parse_quote, Attribute}; + /// + /// let attr: Attribute = parse_quote! 
{ + /// #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }] + /// }; + /// + /// let bwom = attr.parse_args_with(Attribute::parse_outer)?; + /// + /// // Attribute does not have a Parse impl, so we couldn't directly do: + /// // let bwom: Attribute = attr.parse_args()?; + /// # anyhow::Ok(()) + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args_with(&self, parser: F) -> Result { + match &self.meta { + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected attribute arguments in parentheses: {}[{}(...)]", + parsing::DisplayAttrStyle(&self.style), + parsing::DisplayPath(path), + ), + )), + Meta::NameValue(meta) => Err(Error::new( + meta.eq_token.span, + format_args!( + "expected parentheses: {}[{}(...)]", + parsing::DisplayAttrStyle(&self.style), + parsing::DisplayPath(&meta.path), + ), + )), + Meta::List(meta) => meta.parse_args_with(parser), + } + } + + /// Parse the arguments to the attribute, expecting it to follow the + /// conventional structure used by most of Rust's built-in attributes. + /// + /// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference + /// explains the convention in more detail. Not all attributes follow this + /// convention, so [`parse_args()`][Self::parse_args] is available if you + /// need to parse arbitrarily goofy attribute syntax. + /// + /// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax + /// + /// # Example + /// + /// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute + /// syntax. + /// + /// ``` + /// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt}; + /// + /// let input: ItemStruct = parse_quote! { + /// #[repr(C, align(4))] + /// pub struct MyStruct(u16, u32); + /// }; + /// + /// let mut repr_c = false; + /// let mut repr_transparent = false; + /// let mut repr_align = None::; + /// let mut repr_packed = None::; + /// for attr in &input.attrs { + /// if attr.path().is_ident("repr") { + /// attr.parse_nested_meta(|meta| { + /// // #[repr(C)] + /// if meta.path.is_ident("C") { + /// repr_c = true; + /// return Ok(()); + /// } + /// + /// // #[repr(transparent)] + /// if meta.path.is_ident("transparent") { + /// repr_transparent = true; + /// return Ok(()); + /// } + /// + /// // #[repr(align(N))] + /// if meta.path.is_ident("align") { + /// let content; + /// parenthesized!(content in meta.input); + /// let lit: LitInt = content.parse()?; + /// let n: usize = lit.base10_parse()?; + /// repr_align = Some(n); + /// return Ok(()); + /// } + /// + /// // #[repr(packed)] or #[repr(packed(N))], omitted N means 1 + /// if meta.path.is_ident("packed") { + /// if meta.input.peek(token::Paren) { + /// let content; + /// parenthesized!(content in meta.input); + /// let lit: LitInt = content.parse()?; + /// let n: usize = lit.base10_parse()?; + /// repr_packed = Some(n); + /// } else { + /// repr_packed = Some(1); + /// } + /// return Ok(()); + /// } + /// + /// Err(meta.error("unrecognized repr")) + /// })?; + /// } + /// } + /// # anyhow::Ok(()) + /// ``` + /// + /// # Alternatives + /// + /// In some cases, for attributes which have nested layers of structured + /// content, the following less flexible approach might be more convenient: + /// + /// ``` + /// # use syn::{parse_quote, ItemStruct}; + /// # + /// # let input: ItemStruct = parse_quote! 
{ + /// # #[repr(C, align(4))] + /// # pub struct MyStruct(u16, u32); + /// # }; + /// # + /// use syn::punctuated::Punctuated; + /// use syn::{parenthesized, token, Error, LitInt, Meta, Token}; + /// + /// let mut repr_c = false; + /// let mut repr_transparent = false; + /// let mut repr_align = None::; + /// let mut repr_packed = None::; + /// for attr in &input.attrs { + /// if attr.path().is_ident("repr") { + /// let nested = attr.parse_args_with(Punctuated::::parse_terminated)?; + /// for meta in nested { + /// match meta { + /// // #[repr(C)] + /// Meta::Path(path) if path.is_ident("C") => { + /// repr_c = true; + /// } + /// + /// // #[repr(align(N))] + /// Meta::List(meta) if meta.path.is_ident("align") => { + /// let lit: LitInt = meta.parse_args()?; + /// let n: usize = lit.base10_parse()?; + /// repr_align = Some(n); + /// } + /// + /// /* ... */ + /// + /// _ => { + /// return Err(Error::new_spanned(meta, "unrecognized repr")); + /// } + /// } + /// } + /// } + /// } + /// # Ok(()) + /// ``` + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_nested_meta( + &self, + logic: impl FnMut(ParseNestedMeta) -> Result<()>, + ) -> Result<()> { + self.parse_args_with(meta::parser(logic)) + } + + /// Parses zero or more outer attributes from the stream. + /// + /// # Example + /// + /// See + /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_outer(input: ParseStream) -> Result> { + let mut attrs = Vec::new(); + while input.peek(Token![#]) { + attrs.push(input.call(parsing::single_parse_outer)?); + } + Ok(attrs) + } + + /// Parses zero or more inner attributes from the stream. + /// + /// # Example + /// + /// See + /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_inner(input: ParseStream) -> Result> { + let mut attrs = Vec::new(); + parsing::parse_inner(input, &mut attrs)?; + Ok(attrs) + } +} + +ast_enum! { + /// Distinguishes between attributes that decorate an item and attributes + /// that are contained within an item. + /// + /// # Outer attributes + /// + /// - `#[repr(transparent)]` + /// - `/// # Example` + /// - `/** Please file an issue */` + /// + /// # Inner attributes + /// + /// - `#![feature(proc_macro)]` + /// - `//! # Example` + /// - `/*! Please file an issue */` + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum AttrStyle { + Outer, + Inner(Token![!]), + } +} + +ast_enum_of_structs! { + /// Content of a compile-time structured attribute. + /// + /// ## Path + /// + /// A meta path is like the `test` in `#[test]`. + /// + /// ## List + /// + /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`. + /// + /// ## NameValue + /// + /// A name-value meta is like the `path = "..."` in `#[path = + /// "sys/windows.rs"]`. + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Meta { + Path(Path), + + /// A structured list within an attribute, like `derive(Copy, Clone)`. + List(MetaList), + + /// A name-value pair within an attribute, like `feature = "nightly"`. + NameValue(MetaNameValue), + } +} + +ast_struct! 
{ + /// A structured list within an attribute, like `derive(Copy, Clone)`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct MetaList { + pub path: Path, + pub delimiter: MacroDelimiter, + pub tokens: TokenStream, + } +} + +ast_struct! { + /// A name-value pair within an attribute, like `feature = "nightly"`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct MetaNameValue { + pub path: Path, + pub eq_token: Token![=], + pub value: Expr, + } +} + +impl Meta { + /// Returns the path that begins this structured meta item. + /// + /// For example this would return the `test` in `#[test]`, the `derive` in + /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. + pub fn path(&self) -> &Path { + match self { + Meta::Path(path) => path, + Meta::List(meta) => &meta.path, + Meta::NameValue(meta) => &meta.path, + } + } + + /// Error if this is a `Meta::List` or `Meta::NameValue`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_path_only(&self) -> Result<&Path> { + let error_span = match self { + Meta::Path(path) => return Ok(path), + Meta::List(meta) => meta.delimiter.span().open(), + Meta::NameValue(meta) => meta.eq_token.span, + }; + Err(Error::new(error_span, "unexpected token in attribute")) + } + + /// Error if this is a `Meta::Path` or `Meta::NameValue`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_list(&self) -> Result<&MetaList> { + match self { + Meta::List(meta) => Ok(meta), + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected attribute arguments in parentheses: `{}(...)`", + parsing::DisplayPath(path), + ), + )), + Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")), + } + } + + /// Error if this is a `Meta::Path` or `Meta::List`. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn require_name_value(&self) -> Result<&MetaNameValue> { + match self { + Meta::NameValue(meta) => Ok(meta), + Meta::Path(path) => Err(crate::error::new2( + path.segments.first().unwrap().ident.span(), + path.segments.last().unwrap().ident.span(), + format!( + "expected a value for this attribute: `{} = ...`", + parsing::DisplayPath(path), + ), + )), + Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")), + } + } +} + +impl MetaList { + /// See [`Attribute::parse_args`]. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args(&self) -> Result { + self.parse_args_with(T::parse) + } + + /// See [`Attribute::parse_args_with`]. + #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_args_with(&self, parser: F) -> Result { + let scope = self.delimiter.span().close(); + crate::parse::parse_scoped(parser, scope, self.tokens.clone()) + } + + /// See [`Attribute::parse_nested_meta`]. 
+ #[cfg(feature = "parsing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_nested_meta( + &self, + logic: impl FnMut(ParseNestedMeta) -> Result<()>, + ) -> Result<()> { + self.parse_args_with(meta::parser(logic)) + } +} + +pub(crate) trait FilterAttrs<'a> { + type Ret: Iterator; + + fn outer(self) -> Self::Ret; + fn inner(self) -> Self::Ret; +} + +impl<'a> FilterAttrs<'a> for &'a [Attribute] { + type Ret = iter::Filter, fn(&&Attribute) -> bool>; + + fn outer(self) -> Self::Ret { + fn is_outer(attr: &&Attribute) -> bool { + match attr.style { + AttrStyle::Outer => true, + AttrStyle::Inner(_) => false, + } + } + self.iter().filter(is_outer) + } + + fn inner(self) -> Self::Ret { + fn is_inner(attr: &&Attribute) -> bool { + match attr.style { + AttrStyle::Inner(_) => true, + AttrStyle::Outer => false, + } + } + self.iter().filter(is_inner) + } +} + +#[cfg(feature = "parsing")] +pub(crate) mod parsing { + use super::*; + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream, Result}; + use std::fmt::{self, Display}; + + pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec) -> Result<()> { + while input.peek(Token![#]) && input.peek2(Token![!]) { + attrs.push(input.call(parsing::single_parse_inner)?); + } + Ok(()) + } + + pub(crate) fn single_parse_inner(input: ParseStream) -> Result { + let content; + Ok(Attribute { + pound_token: input.parse()?, + style: AttrStyle::Inner(input.parse()?), + bracket_token: bracketed!(content in input), + meta: content.parse()?, + }) + } + + pub(crate) fn single_parse_outer(input: ParseStream) -> Result { + let content; + Ok(Attribute { + pound_token: input.parse()?, + style: AttrStyle::Outer, + bracket_token: bracketed!(content in input), + meta: content.parse()?, + }) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Meta { + fn parse(input: ParseStream) -> Result { + let path = input.call(Path::parse_mod_style)?; + parse_meta_after_path(path, input) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for MetaList { + fn parse(input: ParseStream) -> Result { + let path = input.call(Path::parse_mod_style)?; + parse_meta_list_after_path(path, input) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for MetaNameValue { + fn parse(input: ParseStream) -> Result { + let path = input.call(Path::parse_mod_style)?; + parse_meta_name_value_after_path(path, input) + } + } + + pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result { + if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) { + parse_meta_list_after_path(path, input).map(Meta::List) + } else if input.peek(Token![=]) { + parse_meta_name_value_after_path(path, input).map(Meta::NameValue) + } else { + Ok(Meta::Path(path)) + } + } + + fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result { + let (delimiter, tokens) = mac::parse_delimiter(input)?; + Ok(MetaList { + path, + delimiter, + tokens, + }) + } + + fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result { + let eq_token: Token![=] = input.parse()?; + let ahead = input.fork(); + let lit: Option = ahead.parse()?; + let value = if let (Some(lit), true) = (lit, ahead.is_empty()) { + input.advance_to(&ahead); + Expr::Lit(ExprLit { + attrs: Vec::new(), + lit, + }) + } else if input.peek(Token![#]) && input.peek2(token::Bracket) { + return Err(input.error("unexpected attribute inside of attribute")); + } else { + 
input.parse()? + }; + Ok(MetaNameValue { + path, + eq_token, + value, + }) + } + + pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle); + + impl<'a> Display for DisplayAttrStyle<'a> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str(match self.0 { + AttrStyle::Outer => "#", + AttrStyle::Inner(_) => "#!", + }) + } + } + + pub(super) struct DisplayPath<'a>(pub &'a Path); + + impl<'a> Display for DisplayPath<'a> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + for (i, segment) in self.0.segments.iter().enumerate() { + if i > 0 || self.0.leading_colon.is_some() { + formatter.write_str("::")?; + } + write!(formatter, "{}", segment.ident)?; + } + Ok(()) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Attribute { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.pound_token.to_tokens(tokens); + if let AttrStyle::Inner(b) = &self.style { + b.to_tokens(tokens); + } + self.bracket_token.surround(tokens, |tokens| { + self.meta.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for MetaList { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + self.delimiter.surround(tokens, self.tokens.clone()); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for MetaNameValue { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + self.eq_token.to_tokens(tokens); + self.value.to_tokens(tokens); + } + } +} diff --git a/vendor/syn/src/bigint.rs b/vendor/syn/src/bigint.rs new file mode 100644 index 0000000..66aaa93 --- /dev/null +++ b/vendor/syn/src/bigint.rs @@ -0,0 +1,66 @@ +use std::ops::{AddAssign, MulAssign}; + +// For implementing base10_digits() accessor on LitInt. +pub(crate) struct BigInt { + digits: Vec, +} + +impl BigInt { + pub(crate) fn new() -> Self { + BigInt { digits: Vec::new() } + } + + pub(crate) fn to_string(&self) -> String { + let mut repr = String::with_capacity(self.digits.len()); + + let mut has_nonzero = false; + for digit in self.digits.iter().rev() { + has_nonzero |= *digit != 0; + if has_nonzero { + repr.push((*digit + b'0') as char); + } + } + + if repr.is_empty() { + repr.push('0'); + } + + repr + } + + fn reserve_two_digits(&mut self) { + let len = self.digits.len(); + let desired = + len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize; + self.digits.resize(desired, 0); + } +} + +impl AddAssign for BigInt { + // Assumes increment <16. + fn add_assign(&mut self, mut increment: u8) { + self.reserve_two_digits(); + + let mut i = 0; + while increment > 0 { + let sum = self.digits[i] + increment; + self.digits[i] = sum % 10; + increment = sum / 10; + i += 1; + } + } +} + +impl MulAssign for BigInt { + // Assumes base <=16. + fn mul_assign(&mut self, base: u8) { + self.reserve_two_digits(); + + let mut carry = 0; + for digit in &mut self.digits { + let prod = *digit * base + carry; + *digit = prod % 10; + carry = prod / 10; + } + } +} diff --git a/vendor/syn/src/buffer.rs b/vendor/syn/src/buffer.rs new file mode 100644 index 0000000..86dec46 --- /dev/null +++ b/vendor/syn/src/buffer.rs @@ -0,0 +1,432 @@ +//! A stably addressed token buffer supporting efficient traversal based on a +//! cheaply copyable cursor. 
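// Editorial sketch (not part of the upstream syn source): a minimal example of
// how the types defined below in this module are meant to be used. A
// `TokenBuffer` is built once from a `proc_macro2::TokenStream`, after which any
// number of cheaply copied `Cursor`s can traverse it without re-tokenizing.
// Only public API from this module is assumed; the helper name is illustrative.
fn count_top_level_tokens(stream: proc_macro2::TokenStream) -> usize {
    let buffer = TokenBuffer::new2(stream);
    let mut cursor = buffer.begin();
    let mut count = 0;
    // `token_tree` returns the current token tree plus a cursor past it, and
    // `None` once the cursor reaches the end of its scope.
    while let Some((_tree, rest)) = cursor.token_tree() {
        count += 1;
        cursor = rest;
    }
    count
}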
+ +// This module is heavily commented as it contains most of the unsafe code in +// Syn, and caution should be used when editing it. The public-facing interface +// is 100% safe but the implementation is fragile internally. + +use crate::Lifetime; +use proc_macro2::extra::DelimSpan; +use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; +use std::cmp::Ordering; +use std::marker::PhantomData; + +/// Internal type which is used instead of `TokenTree` to represent a token tree +/// within a `TokenBuffer`. +enum Entry { + // Mimicking types from proc-macro. + // Group entries contain the offset to the matching End entry. + Group(Group, usize), + Ident(Ident), + Punct(Punct), + Literal(Literal), + // End entries contain the offset (negative) to the start of the buffer. + End(isize), +} + +/// A buffer that can be efficiently traversed multiple times, unlike +/// `TokenStream` which requires a deep copy in order to traverse more than +/// once. +pub struct TokenBuffer { + // NOTE: Do not implement clone on this - while the current design could be + // cloned, other designs which could be desirable may not be cloneable. + entries: Box<[Entry]>, +} + +impl TokenBuffer { + fn recursive_new(entries: &mut Vec, stream: TokenStream) { + for tt in stream { + match tt { + TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)), + TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)), + TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)), + TokenTree::Group(group) => { + let group_start_index = entries.len(); + entries.push(Entry::End(0)); // we replace this below + Self::recursive_new(entries, group.stream()); + let group_end_index = entries.len(); + entries.push(Entry::End(-(group_end_index as isize))); + let group_end_offset = group_end_index - group_start_index; + entries[group_start_index] = Entry::Group(group, group_end_offset); + } + } + } + } + + /// Creates a `TokenBuffer` containing all the tokens from the input + /// `proc_macro::TokenStream`. + #[cfg(feature = "proc-macro")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] + pub fn new(stream: proc_macro::TokenStream) -> Self { + Self::new2(stream.into()) + } + + /// Creates a `TokenBuffer` containing all the tokens from the input + /// `proc_macro2::TokenStream`. + pub fn new2(stream: TokenStream) -> Self { + let mut entries = Vec::new(); + Self::recursive_new(&mut entries, stream); + entries.push(Entry::End(-(entries.len() as isize))); + Self { + entries: entries.into_boxed_slice(), + } + } + + /// Creates a cursor referencing the first token in the buffer and able to + /// traverse until the end of the buffer. + pub fn begin(&self) -> Cursor { + let ptr = self.entries.as_ptr(); + unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) } + } +} + +/// A cheaply copyable cursor into a `TokenBuffer`. +/// +/// This cursor holds a shared reference into the immutable data which is used +/// internally to represent a `TokenStream`, and can be efficiently manipulated +/// and copied around. +/// +/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` +/// object and get a cursor to its first token with `begin()`. +pub struct Cursor<'a> { + // The current entry which the `Cursor` is pointing at. + ptr: *const Entry, + // This is the only `Entry::End` object which this cursor is allowed to + // point at. All other `End` objects are skipped over in `Cursor::create`. + scope: *const Entry, + // Cursor is covariant in 'a. 
This field ensures that our pointers are still + // valid. + marker: PhantomData<&'a Entry>, +} + +impl<'a> Cursor<'a> { + /// Creates a cursor referencing a static empty TokenStream. + pub fn empty() -> Self { + // It's safe in this situation for us to put an `Entry` object in global + // storage, despite it not actually being safe to send across threads + // (`Ident` is a reference into a thread-local table). This is because + // this entry never includes a `Ident` object. + // + // This wrapper struct allows us to break the rules and put a `Sync` + // object in global storage. + struct UnsafeSyncEntry(Entry); + unsafe impl Sync for UnsafeSyncEntry {} + static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0)); + + Cursor { + ptr: &EMPTY_ENTRY.0, + scope: &EMPTY_ENTRY.0, + marker: PhantomData, + } + } + + /// This create method intelligently exits non-explicitly-entered + /// `None`-delimited scopes when the cursor reaches the end of them, + /// allowing for them to be treated transparently. + unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { + // NOTE: If we're looking at a `End`, we want to advance the cursor + // past it, unless `ptr == scope`, which means that we're at the edge of + // our cursor's scope. We should only have `ptr != scope` at the exit + // from None-delimited groups entered with `ignore_none`. + while let Entry::End(_) = unsafe { &*ptr } { + if ptr == scope { + break; + } + ptr = unsafe { ptr.add(1) }; + } + + Cursor { + ptr, + scope, + marker: PhantomData, + } + } + + /// Get the current entry. + fn entry(self) -> &'a Entry { + unsafe { &*self.ptr } + } + + /// Bump the cursor to point at the next token after the current one. This + /// is undefined behavior if the cursor is currently looking at an + /// `Entry::End`. + /// + /// If the cursor is looking at an `Entry::Group`, the bumped cursor will + /// point at the first token in the group (with the same scope end). + unsafe fn bump_ignore_group(self) -> Cursor<'a> { + unsafe { Cursor::create(self.ptr.offset(1), self.scope) } + } + + /// While the cursor is looking at a `None`-delimited group, move it to look + /// at the first token inside instead. If the group is empty, this will move + /// the cursor past the `None`-delimited group. + /// + /// WARNING: This mutates its argument. + fn ignore_none(&mut self) { + while let Entry::Group(group, _) = self.entry() { + if group.delimiter() == Delimiter::None { + unsafe { *self = self.bump_ignore_group() }; + } else { + break; + } + } + } + + /// Checks whether the cursor is currently pointing at the end of its valid + /// scope. + pub fn eof(self) -> bool { + // We're at eof if we're at the end of our scope. + self.ptr == self.scope + } + + /// If the cursor is pointing at a `Group` with the given delimiter, returns + /// a cursor into that group and one pointing to the next `TokenTree`. + pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> { + // If we're not trying to enter a none-delimited group, we want to + // ignore them. We have to make sure to _not_ ignore them when we want + // to enter them, of course. For obvious reasons. 
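        // (Editorial note, not in the upstream source: `None`-delimited groups
        // are the invisible delimiters the compiler wraps around expanded macro
        // metavariables. Skipping them here lets callers match tokens "through"
        // those invisible groups, unless they explicitly ask for
        // `Delimiter::None`.)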
+ if delim != Delimiter::None { + self.ignore_none(); + } + + if let Entry::Group(group, end_offset) = self.entry() { + if group.delimiter() == delim { + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, span, after_group)); + } + } + + None + } + + pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let delimiter = group.delimiter(); + let span = group.delim_span(); + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((inside_of_group, delimiter, span, after_group)); + } + + None + } + + pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> { + if let Entry::Group(group, end_offset) = self.entry() { + let end_of_group = unsafe { self.ptr.add(*end_offset) }; + let after_group = unsafe { Cursor::create(end_of_group, self.scope) }; + return Some((group.clone(), after_group)); + } + + None + } + + /// If the cursor is pointing at a `Ident`, returns it along with a cursor + /// pointing at the next `TokenTree`. + pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })), + _ => None, + } + } + + /// If the cursor is pointing at a `Punct`, returns it along with a cursor + /// pointing at the next `TokenTree`. + pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Punct(punct) if punct.as_char() != '\'' => { + Some((punct.clone(), unsafe { self.bump_ignore_group() })) + } + _ => None, + } + } + + /// If the cursor is pointing at a `Literal`, return it along with a cursor + /// pointing at the next `TokenTree`. + pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })), + _ => None, + } + } + + /// If the cursor is pointing at a `Lifetime`, returns it along with a + /// cursor pointing at the next `TokenTree`. + pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> { + self.ignore_none(); + match self.entry() { + Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + let next = unsafe { self.bump_ignore_group() }; + let (ident, rest) = next.ident()?; + let lifetime = Lifetime { + apostrophe: punct.span(), + ident, + }; + Some((lifetime, rest)) + } + _ => None, + } + } + + /// Copies all remaining tokens visible from this cursor into a + /// `TokenStream`. + pub fn token_stream(self) -> TokenStream { + let mut tts = Vec::new(); + let mut cursor = self; + while let Some((tt, rest)) = cursor.token_tree() { + tts.push(tt); + cursor = rest; + } + tts.into_iter().collect() + } + + /// If the cursor is pointing at a `TokenTree`, returns it along with a + /// cursor pointing at the next `TokenTree`. + /// + /// Returns `None` if the cursor has reached the end of its stream. + /// + /// This method does not treat `None`-delimited groups as transparent, and + /// will return a `Group(None, ..)` if the cursor is looking at one. 
+ pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { + let (tree, len) = match self.entry() { + Entry::Group(group, end_offset) => (group.clone().into(), *end_offset), + Entry::Literal(literal) => (literal.clone().into(), 1), + Entry::Ident(ident) => (ident.clone().into(), 1), + Entry::Punct(punct) => (punct.clone().into(), 1), + Entry::End(_) => return None, + }; + + let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) }; + Some((tree, rest)) + } + + /// Returns the `Span` of the current token, or `Span::call_site()` if this + /// cursor points to eof. + pub fn span(self) -> Span { + match self.entry() { + Entry::Group(group, _) => group.span(), + Entry::Literal(literal) => literal.span(), + Entry::Ident(ident) => ident.span(), + Entry::Punct(punct) => punct.span(), + Entry::End(_) => Span::call_site(), + } + } + + /// Returns the `Span` of the token immediately prior to the position of + /// this cursor, or of the current token if there is no previous one. + #[cfg(any(feature = "full", feature = "derive"))] + pub(crate) fn prev_span(mut self) -> Span { + if start_of_buffer(self) < self.ptr { + self.ptr = unsafe { self.ptr.offset(-1) }; + if let Entry::End(_) = self.entry() { + // Locate the matching Group begin token. + let mut depth = 1; + loop { + self.ptr = unsafe { self.ptr.offset(-1) }; + match self.entry() { + Entry::Group(group, _) => { + depth -= 1; + if depth == 0 { + return group.span(); + } + } + Entry::End(_) => depth += 1, + Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {} + } + } + } + } + self.span() + } + + /// Skip over the next token without cloning it. Returns `None` if this + /// cursor points to eof. + /// + /// This method treats `'lifetimes` as a single token. + pub(crate) fn skip(self) -> Option> { + let len = match self.entry() { + Entry::End(_) => return None, + + // Treat lifetimes as a single tt for the purposes of 'skip'. 
+ Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => { + match unsafe { &*self.ptr.add(1) } { + Entry::Ident(_) => 2, + _ => 1, + } + } + + Entry::Group(_, end_offset) => *end_offset, + _ => 1, + }; + + Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) }) + } +} + +impl<'a> Copy for Cursor<'a> {} + +impl<'a> Clone for Cursor<'a> { + fn clone(&self) -> Self { + *self + } +} + +impl<'a> Eq for Cursor<'a> {} + +impl<'a> PartialEq for Cursor<'a> { + fn eq(&self, other: &Self) -> bool { + self.ptr == other.ptr + } +} + +impl<'a> PartialOrd for Cursor<'a> { + fn partial_cmp(&self, other: &Self) -> Option { + if same_buffer(*self, *other) { + Some(cmp_assuming_same_buffer(*self, *other)) + } else { + None + } + } +} + +pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool { + a.scope == b.scope +} + +pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool { + start_of_buffer(a) == start_of_buffer(b) +} + +fn start_of_buffer(cursor: Cursor) -> *const Entry { + unsafe { + match &*cursor.scope { + Entry::End(offset) => cursor.scope.offset(*offset), + _ => unreachable!(), + } + } +} + +pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering { + a.ptr.cmp(&b.ptr) +} + +pub(crate) fn open_span_of_group(cursor: Cursor) -> Span { + match cursor.entry() { + Entry::Group(group, _) => group.span_open(), + _ => cursor.span(), + } +} + +pub(crate) fn close_span_of_group(cursor: Cursor) -> Span { + match cursor.entry() { + Entry::Group(group, _) => group.span_close(), + _ => cursor.span(), + } +} diff --git a/vendor/syn/src/custom_keyword.rs b/vendor/syn/src/custom_keyword.rs new file mode 100644 index 0000000..6ce23db --- /dev/null +++ b/vendor/syn/src/custom_keyword.rs @@ -0,0 +1,259 @@ +/// Define a type that supports parsing and printing a given identifier as if it +/// were a keyword. +/// +/// # Usage +/// +/// As a convention, it is recommended that this macro be invoked within a +/// module called `kw` or `keyword` and that the resulting parser be invoked +/// with a `kw::` or `keyword::` prefix. +/// +/// ``` +/// mod kw { +/// syn::custom_keyword!(whatever); +/// } +/// ``` +/// +/// The generated syntax tree node supports the following operations just like +/// any built-in keyword token. +/// +/// - [Peeking] — `input.peek(kw::whatever)` +/// +/// - [Parsing] — `input.parse::()?` +/// +/// - [Printing] — `quote!( ... #whatever_token ... )` +/// +/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)` +/// +/// - Field access to its span — `let sp = whatever_token.span` +/// +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse +/// [Printing]: quote::ToTokens +/// [`Span`]: proc_macro2::Span +/// +/// # Example +/// +/// This example parses input that looks like `bool = true` or `str = "value"`. +/// The key must be either the identifier `bool` or the identifier `str`. If +/// `bool`, the value may be either `true` or `false`. If `str`, the value may +/// be any string literal. +/// +/// The symbols `bool` and `str` are not reserved keywords in Rust so these are +/// not considered keywords in the `syn::token` module. Like any other +/// identifier that is not a keyword, these can be declared as custom keywords +/// by crates that need to use them as such. 
+/// +/// ``` +/// use syn::{LitBool, LitStr, Result, Token}; +/// use syn::parse::{Parse, ParseStream}; +/// +/// mod kw { +/// syn::custom_keyword!(bool); +/// syn::custom_keyword!(str); +/// } +/// +/// enum Argument { +/// Bool { +/// bool_token: kw::bool, +/// eq_token: Token![=], +/// value: LitBool, +/// }, +/// Str { +/// str_token: kw::str, +/// eq_token: Token![=], +/// value: LitStr, +/// }, +/// } +/// +/// impl Parse for Argument { +/// fn parse(input: ParseStream) -> Result { +/// let lookahead = input.lookahead1(); +/// if lookahead.peek(kw::bool) { +/// Ok(Argument::Bool { +/// bool_token: input.parse::()?, +/// eq_token: input.parse()?, +/// value: input.parse()?, +/// }) +/// } else if lookahead.peek(kw::str) { +/// Ok(Argument::Str { +/// str_token: input.parse::()?, +/// eq_token: input.parse()?, +/// value: input.parse()?, +/// }) +/// } else { +/// Err(lookahead.error()) +/// } +/// } +/// } +/// ``` +#[macro_export] +macro_rules! custom_keyword { + ($ident:ident) => { + #[allow(non_camel_case_types)] + pub struct $ident { + pub span: $crate::__private::Span, + } + + #[doc(hidden)] + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>( + span: __S, + ) -> $ident { + $ident { + span: $crate::__private::IntoSpans::into_spans(span), + } + } + + const _: () = { + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident { + span: $crate::__private::Span::call_site(), + } + } + } + + $crate::impl_parse_for_custom_keyword!($ident); + $crate::impl_to_tokens_for_custom_keyword!($ident); + $crate::impl_clone_for_custom_keyword!($ident); + $crate::impl_extra_traits_for_custom_keyword!($ident); + }; + }; +} + +// Not public API. +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_keyword { + ($ident:ident) => { + // For peek. + impl $crate::__private::CustomToken for $ident { + fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { + if let $crate::__private::Some((ident, _rest)) = cursor.ident() { + ident == $crate::__private::stringify!($ident) + } else { + false + } + } + + fn display() -> &'static $crate::__private::str { + $crate::__private::concat!("`", $crate::__private::stringify!($ident), "`") + } + } + + impl $crate::parse::Parse for $ident { + fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { + input.step(|cursor| { + if let $crate::__private::Some((ident, rest)) = cursor.ident() { + if ident == $crate::__private::stringify!($ident) { + return $crate::__private::Ok(($ident { span: ident.span() }, rest)); + } + } + $crate::__private::Err(cursor.error($crate::__private::concat!( + "expected `", + $crate::__private::stringify!($ident), + "`", + ))) + }) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "parsing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "printing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span); + $crate::__private::TokenStreamExt::append(tokens, ident); + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "printing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! 
impl_to_tokens_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "clone-impls")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_clone_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::Copy for $ident {} + + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { + fn clone(&self) -> Self { + *self + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "clone-impls"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_clone_for_custom_keyword { + ($ident:ident) => {}; +} + +// Not public API. +#[cfg(feature = "extra-traits")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_keyword { + ($ident:ident) => { + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult { + $crate::__private::Formatter::write_str( + f, + $crate::__private::concat!( + "Keyword [", + $crate::__private::stringify!($ident), + "]", + ), + ) + } + } + + impl $crate::__private::Eq for $ident {} + + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { + true + } + } + + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} + } + }; +} + +// Not public API. +#[cfg(not(feature = "extra-traits"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_keyword { + ($ident:ident) => {}; +} diff --git a/vendor/syn/src/custom_punctuation.rs b/vendor/syn/src/custom_punctuation.rs new file mode 100644 index 0000000..1b2c768 --- /dev/null +++ b/vendor/syn/src/custom_punctuation.rs @@ -0,0 +1,302 @@ +/// Define a type that supports parsing and printing a multi-character symbol +/// as if it were a punctuation token. +/// +/// # Usage +/// +/// ``` +/// syn::custom_punctuation!(LeftRightArrow, <=>); +/// ``` +/// +/// The generated syntax tree node supports the following operations just like +/// any built-in punctuation token. +/// +/// - [Peeking] — `input.peek(LeftRightArrow)` +/// +/// - [Parsing] — `input.parse::()?` +/// +/// - [Printing] — `quote!( ... #lrarrow ... )` +/// +/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)` +/// +/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])` +/// +/// - Field access to its spans — `let spans = lrarrow.spans` +/// +/// [Peeking]: crate::parse::ParseBuffer::peek +/// [Parsing]: crate::parse::ParseBuffer::parse +/// [Printing]: quote::ToTokens +/// [`Span`]: proc_macro2::Span +/// +/// # Example +/// +/// ``` +/// use proc_macro2::{TokenStream, TokenTree}; +/// use syn::parse::{Parse, ParseStream, Peek, Result}; +/// use syn::punctuated::Punctuated; +/// use syn::Expr; +/// +/// syn::custom_punctuation!(PathSeparator, ); +/// +/// // expr expr expr ... 
+/// struct PathSegments { +/// segments: Punctuated, +/// } +/// +/// impl Parse for PathSegments { +/// fn parse(input: ParseStream) -> Result { +/// let mut segments = Punctuated::new(); +/// +/// let first = parse_until(input, PathSeparator)?; +/// segments.push_value(syn::parse2(first)?); +/// +/// while input.peek(PathSeparator) { +/// segments.push_punct(input.parse()?); +/// +/// let next = parse_until(input, PathSeparator)?; +/// segments.push_value(syn::parse2(next)?); +/// } +/// +/// Ok(PathSegments { segments }) +/// } +/// } +/// +/// fn parse_until(input: ParseStream, end: E) -> Result { +/// let mut tokens = TokenStream::new(); +/// while !input.is_empty() && !input.peek(end) { +/// let next: TokenTree = input.parse()?; +/// tokens.extend(Some(next)); +/// } +/// Ok(tokens) +/// } +/// +/// fn main() { +/// let input = r#" a::b c::d::e "#; +/// let _: PathSegments = syn::parse_str(input).unwrap(); +/// } +/// ``` +#[macro_export] +macro_rules! custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + pub struct $ident { + pub spans: $crate::custom_punctuation_repr!($($tt)+), + } + + #[doc(hidden)] + #[allow(dead_code, non_snake_case)] + pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>( + spans: __S, + ) -> $ident { + let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*; + $ident { + spans: $crate::__private::IntoSpans::into_spans(spans) + } + } + + const _: () = { + impl $crate::__private::Default for $ident { + fn default() -> Self { + $ident($crate::__private::Span::call_site()) + } + } + + $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+); + $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+); + }; + }; +} + +// Not public API. +#[cfg(feature = "parsing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::CustomToken for $ident { + fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool { + $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+)) + } + + fn display() -> &'static $crate::__private::str { + $crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`") + } + } + + impl $crate::parse::Parse for $ident { + fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> { + let spans: $crate::custom_punctuation_repr!($($tt)+) = + $crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?; + Ok($ident(spans)) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "parsing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_parse_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "printing")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::ToTokens for $ident { + fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) { + $crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens) + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "printing"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_to_tokens_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "clone-impls")] +#[doc(hidden)] +#[macro_export] +macro_rules! 
impl_clone_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::Copy for $ident {} + + #[allow(clippy::expl_impl_clone_on_copy)] + impl $crate::__private::Clone for $ident { + fn clone(&self) -> Self { + *self + } + } + }; +} + +// Not public API. +#[cfg(not(feature = "clone-impls"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_clone_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[cfg(feature = "extra-traits")] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => { + impl $crate::__private::Debug for $ident { + fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult { + $crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident)) + } + } + + impl $crate::__private::Eq for $ident {} + + impl $crate::__private::PartialEq for $ident { + fn eq(&self, _other: &Self) -> $crate::__private::bool { + true + } + } + + impl $crate::__private::Hash for $ident { + fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {} + } + }; +} + +// Not public API. +#[cfg(not(feature = "extra-traits"))] +#[doc(hidden)] +#[macro_export] +macro_rules! impl_extra_traits_for_custom_punctuation { + ($ident:ident, $($tt:tt)+) => {}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! custom_punctuation_repr { + ($($tt:tt)+) => { + [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+] + }; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +#[rustfmt::skip] +macro_rules! custom_punctuation_len { + ($mode:ident, +) => { 1 }; + ($mode:ident, +=) => { 2 }; + ($mode:ident, &) => { 1 }; + ($mode:ident, &&) => { 2 }; + ($mode:ident, &=) => { 2 }; + ($mode:ident, @) => { 1 }; + ($mode:ident, !) => { 1 }; + ($mode:ident, ^) => { 1 }; + ($mode:ident, ^=) => { 2 }; + ($mode:ident, :) => { 1 }; + ($mode:ident, ::) => { 2 }; + ($mode:ident, ,) => { 1 }; + ($mode:ident, /) => { 1 }; + ($mode:ident, /=) => { 2 }; + ($mode:ident, .) => { 1 }; + ($mode:ident, ..) => { 2 }; + ($mode:ident, ...) => { 3 }; + ($mode:ident, ..=) => { 3 }; + ($mode:ident, =) => { 1 }; + ($mode:ident, ==) => { 2 }; + ($mode:ident, >=) => { 2 }; + ($mode:ident, >) => { 1 }; + ($mode:ident, <=) => { 2 }; + ($mode:ident, <) => { 1 }; + ($mode:ident, *=) => { 2 }; + ($mode:ident, !=) => { 2 }; + ($mode:ident, |) => { 1 }; + ($mode:ident, |=) => { 2 }; + ($mode:ident, ||) => { 2 }; + ($mode:ident, #) => { 1 }; + ($mode:ident, ?) => { 1 }; + ($mode:ident, ->) => { 2 }; + ($mode:ident, <-) => { 2 }; + ($mode:ident, %) => { 1 }; + ($mode:ident, %=) => { 2 }; + ($mode:ident, =>) => { 2 }; + ($mode:ident, ;) => { 1 }; + ($mode:ident, <<) => { 2 }; + ($mode:ident, <<=) => { 3 }; + ($mode:ident, >>) => { 2 }; + ($mode:ident, >>=) => { 3 }; + ($mode:ident, *) => { 1 }; + ($mode:ident, -) => { 1 }; + ($mode:ident, -=) => { 2 }; + ($mode:ident, ~) => { 1 }; + (lenient, $tt:tt) => { 0 }; + (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! custom_punctuation_unexpected { + () => {}; +} + +// Not public API. +#[doc(hidden)] +#[macro_export] +macro_rules! 
stringify_punct { + ($($tt:tt)+) => { + $crate::__private::concat!($($crate::__private::stringify!($tt)),+) + }; +} diff --git a/vendor/syn/src/data.rs b/vendor/syn/src/data.rs new file mode 100644 index 0000000..134b76b --- /dev/null +++ b/vendor/syn/src/data.rs @@ -0,0 +1,404 @@ +use super::*; +use crate::punctuated::Punctuated; + +ast_struct! { + /// An enum variant. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Variant { + pub attrs: Vec, + + /// Name of the variant. + pub ident: Ident, + + /// Content stored in the variant. + pub fields: Fields, + + /// Explicit discriminant: `Variant = 1` + pub discriminant: Option<(Token![=], Expr)>, + } +} + +ast_enum_of_structs! { + /// Data stored within an enum variant or struct. + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub enum Fields { + /// Named fields of a struct or struct variant such as `Point { x: f64, + /// y: f64 }`. + Named(FieldsNamed), + + /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. + Unnamed(FieldsUnnamed), + + /// Unit struct or unit variant such as `None`. + Unit, + } +} + +ast_struct! { + /// Named fields of a struct or struct variant such as `Point { x: f64, + /// y: f64 }`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct FieldsNamed { + pub brace_token: token::Brace, + pub named: Punctuated, + } +} + +ast_struct! { + /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct FieldsUnnamed { + pub paren_token: token::Paren, + pub unnamed: Punctuated, + } +} + +impl Fields { + /// Get an iterator over the borrowed [`Field`] items in this object. This + /// iterator can be used to iterate over a named or unnamed struct or + /// variant's fields uniformly. + pub fn iter(&self) -> punctuated::Iter { + match self { + Fields::Unit => crate::punctuated::empty_punctuated_iter(), + Fields::Named(f) => f.named.iter(), + Fields::Unnamed(f) => f.unnamed.iter(), + } + } + + /// Get an iterator over the mutably borrowed [`Field`] items in this + /// object. This iterator can be used to iterate over a named or unnamed + /// struct or variant's fields uniformly. + pub fn iter_mut(&mut self) -> punctuated::IterMut { + match self { + Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(), + Fields::Named(f) => f.named.iter_mut(), + Fields::Unnamed(f) => f.unnamed.iter_mut(), + } + } + + /// Returns the number of fields. + pub fn len(&self) -> usize { + match self { + Fields::Unit => 0, + Fields::Named(f) => f.named.len(), + Fields::Unnamed(f) => f.unnamed.len(), + } + } + + /// Returns `true` if there are zero fields. 
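+    ///
+    /// A minimal sketch of checking this on a parsed input; the struct name
+    /// here is purely illustrative.
+    ///
+    /// ```
+    /// use syn::{parse_quote, Data, DeriveInput};
+    ///
+    /// let input: DeriveInput = parse_quote!(struct Unit;);
+    /// if let Data::Struct(data) = input.data {
+    ///     // A unit struct has no fields, so `is_empty` and `len` agree.
+    ///     assert!(data.fields.is_empty());
+    ///     assert_eq!(data.fields.len(), 0);
+    ///     assert!(data.fields.iter().next().is_none());
+    /// } else {
+    ///     unreachable!("`struct Unit;` always parses as a struct");
+    /// }
+    /// ```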
+ pub fn is_empty(&self) -> bool { + match self { + Fields::Unit => true, + Fields::Named(f) => f.named.is_empty(), + Fields::Unnamed(f) => f.unnamed.is_empty(), + } + } +} + +impl IntoIterator for Fields { + type Item = Field; + type IntoIter = punctuated::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + match self { + Fields::Unit => Punctuated::::new().into_iter(), + Fields::Named(f) => f.named.into_iter(), + Fields::Unnamed(f) => f.unnamed.into_iter(), + } + } +} + +impl<'a> IntoIterator for &'a Fields { + type Item = &'a Field; + type IntoIter = punctuated::Iter<'a, Field>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut Fields { + type Item = &'a mut Field; + type IntoIter = punctuated::IterMut<'a, Field>; + + fn into_iter(self) -> Self::IntoIter { + self.iter_mut() + } +} + +ast_struct! { + /// A field of a struct or enum variant. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct Field { + pub attrs: Vec, + + pub vis: Visibility, + + pub mutability: FieldMutability, + + /// Name of the field, if any. + /// + /// Fields of tuple structs have no names. + pub ident: Option, + + pub colon_token: Option, + + pub ty: Type, + } +} + +#[cfg(feature = "parsing")] +pub(crate) mod parsing { + use super::*; + use crate::ext::IdentExt as _; + #[cfg(not(feature = "full"))] + use crate::parse::discouraged::Speculative as _; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for Variant { + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + let _visibility: Visibility = input.parse()?; + let ident: Ident = input.parse()?; + let fields = if input.peek(token::Brace) { + Fields::Named(input.parse()?) + } else if input.peek(token::Paren) { + Fields::Unnamed(input.parse()?) + } else { + Fields::Unit + }; + let discriminant = if input.peek(Token![=]) { + let eq_token: Token![=] = input.parse()?; + #[cfg(feature = "full")] + let discriminant: Expr = input.parse()?; + #[cfg(not(feature = "full"))] + let discriminant = { + let begin = input.fork(); + let ahead = input.fork(); + let mut discriminant: Result = ahead.parse(); + if discriminant.is_ok() { + input.advance_to(&ahead); + } else if scan_lenient_discriminant(input).is_ok() { + discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input))); + } + discriminant? + }; + Some((eq_token, discriminant)) + } else { + None + }; + Ok(Variant { + attrs, + ident, + fields, + discriminant, + }) + } + } + + #[cfg(not(feature = "full"))] + pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> { + use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis}; + + let consume = |delimiter: Delimiter| { + Result::unwrap(input.step(|cursor| match cursor.group(delimiter) { + Some((_inside, _span, rest)) => Ok((true, rest)), + None => Ok((false, *cursor)), + })) + }; + + macro_rules! 
consume { + [$token:tt] => { + input.parse::>().unwrap().is_some() + }; + } + + let mut initial = true; + let mut depth = 0usize; + loop { + if initial { + if consume![&] { + input.parse::>()?; + } else if consume![if] || consume![match] || consume![while] { + depth += 1; + } else if input.parse::>()?.is_some() + || (consume(Brace) || consume(Bracket) || consume(Parenthesis)) + || (consume![async] || consume![const] || consume![loop] || consume![unsafe]) + && (consume(Brace) || break) + { + initial = false; + } else if consume![let] { + while !consume![=] { + if !((consume![|] || consume![ref] || consume![mut] || consume![@]) + || (consume![!] || input.parse::>()?.is_some()) + || (consume![..=] || consume![..] || consume![&] || consume![_]) + || (consume(Brace) || consume(Bracket) || consume(Parenthesis))) + { + path::parsing::qpath(input, true)?; + } + } + } else if input.parse::>()?.is_some() && !consume![:] { + break; + } else if input.parse::().is_err() { + path::parsing::qpath(input, true)?; + initial = consume![!] || depth == 0 && input.peek(token::Brace); + } + } else if input.is_empty() || input.peek(Token![,]) { + return Ok(()); + } else if depth > 0 && consume(Brace) { + if consume![else] && !consume(Brace) { + initial = consume![if] || break; + } else { + depth -= 1; + } + } else if input.parse::().is_ok() || (consume![..] | consume![=]) { + initial = true; + } else if consume![.] { + if input.parse::>()?.is_none() + && (input.parse::()?.is_named() && consume![::]) + { + AngleBracketedGenericArguments::do_parse(None, input)?; + } + } else if consume![as] { + input.parse::()?; + } else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) { + break; + } + } + + Err(input.error("unsupported expression")) + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FieldsNamed { + fn parse(input: ParseStream) -> Result { + let content; + Ok(FieldsNamed { + brace_token: braced!(content in input), + named: content.parse_terminated(Field::parse_named, Token![,])?, + }) + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for FieldsUnnamed { + fn parse(input: ParseStream) -> Result { + let content; + Ok(FieldsUnnamed { + paren_token: parenthesized!(content in input), + unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?, + }) + } + } + + impl Field { + /// Parses a named (braced struct) field. + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_named(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + let vis: Visibility = input.parse()?; + + let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]); + let ident = if unnamed_field { + input.call(Ident::parse_any) + } else { + input.parse() + }?; + + let colon_token: Token![:] = input.parse()?; + + let ty: Type = if unnamed_field + && (input.peek(Token![struct]) + || input.peek(Token![union]) && input.peek2(token::Brace)) + { + let begin = input.fork(); + input.call(Ident::parse_any)?; + input.parse::()?; + Type::Verbatim(verbatim::between(&begin, input)) + } else { + input.parse()? + }; + + Ok(Field { + attrs, + vis, + mutability: FieldMutability::None, + ident: Some(ident), + colon_token: Some(colon_token), + ty, + }) + } + + /// Parses an unnamed (tuple struct) field. 
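+        ///
+        /// A small sketch of driving this parser by hand; the field types in
+        /// the input string are purely illustrative.
+        ///
+        /// ```
+        /// use syn::parse::{ParseStream, Parser, Result};
+        /// use syn::punctuated::Punctuated;
+        /// use syn::{Field, Token};
+        ///
+        /// // Parse a comma-separated list of unnamed fields, as found
+        /// // between the parentheses of a tuple struct.
+        /// fn tuple_fields(input: ParseStream) -> Result<Punctuated<Field, Token![,]>> {
+        ///     input.parse_terminated(Field::parse_unnamed, Token![,])
+        /// }
+        ///
+        /// let fields = tuple_fields.parse_str("u8, String, Vec<u8>").unwrap();
+        /// assert_eq!(fields.len(), 3);
+        /// assert!(fields.iter().all(|field| field.ident.is_none()));
+        /// ```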
+ #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + pub fn parse_unnamed(input: ParseStream) -> Result { + Ok(Field { + attrs: input.call(Attribute::parse_outer)?, + vis: input.parse()?, + mutability: FieldMutability::None, + ident: None, + colon_token: None, + ty: input.parse()?, + }) + } + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::{ToTokens, TokenStreamExt}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Variant { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(&self.attrs); + self.ident.to_tokens(tokens); + self.fields.to_tokens(tokens); + if let Some((eq_token, disc)) = &self.discriminant { + eq_token.to_tokens(tokens); + disc.to_tokens(tokens); + } + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldsNamed { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.brace_token.surround(tokens, |tokens| { + self.named.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for FieldsUnnamed { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.paren_token.surround(tokens, |tokens| { + self.unnamed.to_tokens(tokens); + }); + } + } + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for Field { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.append_all(&self.attrs); + self.vis.to_tokens(tokens); + if let Some(ident) = &self.ident { + ident.to_tokens(tokens); + TokensOrDefault(&self.colon_token).to_tokens(tokens); + } + self.ty.to_tokens(tokens); + } + } +} diff --git a/vendor/syn/src/derive.rs b/vendor/syn/src/derive.rs new file mode 100644 index 0000000..25fa4c9 --- /dev/null +++ b/vendor/syn/src/derive.rs @@ -0,0 +1,245 @@ +use super::*; +use crate::punctuated::Punctuated; + +ast_struct! { + /// Data structure sent to a `proc_macro_derive` macro. + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DeriveInput { + pub attrs: Vec, + pub vis: Visibility, + pub ident: Ident, + pub generics: Generics, + pub data: Data, + } +} + +ast_enum! { + /// The storage of a struct, enum or union data structure. + /// + /// # Syntax tree enum + /// + /// This type is a [syntax tree enum]. + /// + /// [syntax tree enum]: Expr#syntax-tree-enums + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub enum Data { + Struct(DataStruct), + Enum(DataEnum), + Union(DataUnion), + } +} + +ast_struct! { + /// A struct input to a `proc_macro_derive` macro. + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataStruct { + pub struct_token: Token![struct], + pub fields: Fields, + pub semi_token: Option, + } +} + +ast_struct! { + /// An enum input to a `proc_macro_derive` macro. + #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataEnum { + pub enum_token: Token![enum], + pub brace_token: token::Brace, + pub variants: Punctuated, + } +} + +ast_struct! { + /// An untagged union input to a `proc_macro_derive` macro. 
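+    ///
+    /// A brief sketch of reaching this node from a parsed [`DeriveInput`];
+    /// the union itself is purely illustrative.
+    ///
+    /// ```
+    /// use syn::{parse_quote, Data, DeriveInput};
+    ///
+    /// let input: DeriveInput = parse_quote! {
+    ///     union Value {
+    ///         int: u64,
+    ///         float: f64,
+    ///     }
+    /// };
+    ///
+    /// if let Data::Union(data) = &input.data {
+    ///     // A union always carries braced, named fields.
+    ///     assert_eq!(data.fields.named.len(), 2);
+    /// } else {
+    ///     unreachable!("a `union` item always parses as `Data::Union`");
+    /// }
+    /// ```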
+ #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))] + pub struct DataUnion { + pub union_token: Token![union], + pub fields: FieldsNamed, + } +} + +#[cfg(feature = "parsing")] +pub(crate) mod parsing { + use super::*; + use crate::parse::{Parse, ParseStream, Result}; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))] + impl Parse for DeriveInput { + fn parse(input: ParseStream) -> Result { + let attrs = input.call(Attribute::parse_outer)?; + let vis = input.parse::()?; + + let lookahead = input.lookahead1(); + if lookahead.peek(Token![struct]) { + let struct_token = input.parse::()?; + let ident = input.parse::()?; + let generics = input.parse::()?; + let (where_clause, fields, semi) = data_struct(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Struct(DataStruct { + struct_token, + fields, + semi_token: semi, + }), + }) + } else if lookahead.peek(Token![enum]) { + let enum_token = input.parse::()?; + let ident = input.parse::()?; + let generics = input.parse::()?; + let (where_clause, brace, variants) = data_enum(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Enum(DataEnum { + enum_token, + brace_token: brace, + variants, + }), + }) + } else if lookahead.peek(Token![union]) { + let union_token = input.parse::()?; + let ident = input.parse::()?; + let generics = input.parse::()?; + let (where_clause, fields) = data_union(input)?; + Ok(DeriveInput { + attrs, + vis, + ident, + generics: Generics { + where_clause, + ..generics + }, + data: Data::Union(DataUnion { + union_token, + fields, + }), + }) + } else { + Err(lookahead.error()) + } + } + } + + pub(crate) fn data_struct( + input: ParseStream, + ) -> Result<(Option, Fields, Option)> { + let mut lookahead = input.lookahead1(); + let mut where_clause = None; + if lookahead.peek(Token![where]) { + where_clause = Some(input.parse()?); + lookahead = input.lookahead1(); + } + + if where_clause.is_none() && lookahead.peek(token::Paren) { + let fields = input.parse()?; + + lookahead = input.lookahead1(); + if lookahead.peek(Token![where]) { + where_clause = Some(input.parse()?); + lookahead = input.lookahead1(); + } + + if lookahead.peek(Token![;]) { + let semi = input.parse()?; + Ok((where_clause, Fields::Unnamed(fields), Some(semi))) + } else { + Err(lookahead.error()) + } + } else if lookahead.peek(token::Brace) { + let fields = input.parse()?; + Ok((where_clause, Fields::Named(fields), None)) + } else if lookahead.peek(Token![;]) { + let semi = input.parse()?; + Ok((where_clause, Fields::Unit, Some(semi))) + } else { + Err(lookahead.error()) + } + } + + pub(crate) fn data_enum( + input: ParseStream, + ) -> Result<( + Option, + token::Brace, + Punctuated, + )> { + let where_clause = input.parse()?; + + let content; + let brace = braced!(content in input); + let variants = content.parse_terminated(Variant::parse, Token![,])?; + + Ok((where_clause, brace, variants)) + } + + pub(crate) fn data_union(input: ParseStream) -> Result<(Option, FieldsNamed)> { + let where_clause = input.parse()?; + let fields = input.parse()?; + Ok((where_clause, fields)) + } +} + +#[cfg(feature = "printing")] +mod printing { + use super::*; + use crate::attr::FilterAttrs; + use crate::print::TokensOrDefault; + use proc_macro2::TokenStream; + use quote::ToTokens; + + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + impl ToTokens for DeriveInput { + fn to_tokens(&self, tokens: &mut TokenStream) { + for attr in 
self.attrs.outer() { + attr.to_tokens(tokens); + } + self.vis.to_tokens(tokens); + match &self.data { + Data::Struct(d) => d.struct_token.to_tokens(tokens), + Data::Enum(d) => d.enum_token.to_tokens(tokens), + Data::Union(d) => d.union_token.to_tokens(tokens), + } + self.ident.to_tokens(tokens); + self.generics.to_tokens(tokens); + match &self.data { + Data::Struct(data) => match &data.fields { + Fields::Named(fields) => { + self.generics.where_clause.to_tokens(tokens); + fields.to_tokens(tokens); + } + Fields::Unnamed(fields) => { + fields.to_tokens(tokens); + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&data.semi_token).to_tokens(tokens); + } + Fields::Unit => { + self.generics.where_clause.to_tokens(tokens); + TokensOrDefault(&data.semi_token).to_tokens(tokens); + } + }, + Data::Enum(data) => { + self.generics.where_clause.to_tokens(tokens); + data.brace_token.surround(tokens, |tokens| { + data.variants.to_tokens(tokens); + }); + } + Data::Union(data) => { + self.generics.where_clause.to_tokens(tokens); + data.fields.to_tokens(tokens); + } + } + } + } +} diff --git a/vendor/syn/src/discouraged.rs b/vendor/syn/src/discouraged.rs new file mode 100644 index 0000000..fb98d63 --- /dev/null +++ b/vendor/syn/src/discouraged.rs @@ -0,0 +1,219 @@ +//! Extensions to the parsing API with niche applicability. + +use super::*; +use proc_macro2::extra::DelimSpan; + +/// Extensions to the `ParseStream` API to support speculative parsing. +pub trait Speculative { + /// Advance this parse stream to the position of a forked parse stream. + /// + /// This is the opposite operation to [`ParseStream::fork`]. You can fork a + /// parse stream, perform some speculative parsing, then join the original + /// stream to the fork to "commit" the parsing from the fork to the main + /// stream. + /// + /// If you can avoid doing this, you should, as it limits the ability to + /// generate useful errors. That said, it is often the only way to parse + /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem + /// is that when the fork fails to parse an `A`, it's impossible to tell + /// whether that was because of a syntax error and the user meant to provide + /// an `A`, or that the `A`s are finished and it's time to start parsing + /// `B`s. Use with care. + /// + /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by + /// parsing `B*` and removing the leading members of `A` from the + /// repetition, bypassing the need to involve the downsides associated with + /// speculative parsing. + /// + /// [`ParseStream::fork`]: ParseBuffer::fork + /// + /// # Example + /// + /// There has been chatter about the possibility of making the colons in the + /// turbofish syntax like `path::to::` no longer required by accepting + /// `path::to` in expression position. Specifically, according to [RFC + /// 2544], [`PathSegment`] parsing should always try to consume a following + /// `<` token as the start of generic arguments, and reset to the `<` if + /// that fails (e.g. the token is acting as a less-than operator). + /// + /// This is the exact kind of parsing behavior which requires the "fork, + /// try, commit" behavior that [`ParseStream::fork`] discourages. With + /// `advance_to`, we can avoid having to parse the speculatively parsed + /// content a second time. 
+ /// + /// This change in behavior can be implemented in syn by replacing just the + /// `Parse` implementation for `PathSegment`: + /// + /// ``` + /// # use syn::ext::IdentExt; + /// use syn::parse::discouraged::Speculative; + /// # use syn::parse::{Parse, ParseStream}; + /// # use syn::{Ident, PathArguments, Result, Token}; + /// + /// pub struct PathSegment { + /// pub ident: Ident, + /// pub arguments: PathArguments, + /// } + /// # + /// # impl From for PathSegment + /// # where + /// # T: Into, + /// # { + /// # fn from(ident: T) -> Self { + /// # PathSegment { + /// # ident: ident.into(), + /// # arguments: PathArguments::None, + /// # } + /// # } + /// # } + /// + /// impl Parse for PathSegment { + /// fn parse(input: ParseStream) -> Result { + /// if input.peek(Token![super]) + /// || input.peek(Token![self]) + /// || input.peek(Token![Self]) + /// || input.peek(Token![crate]) + /// { + /// let ident = input.call(Ident::parse_any)?; + /// return Ok(PathSegment::from(ident)); + /// } + /// + /// let ident = input.parse()?; + /// if input.peek(Token![::]) && input.peek3(Token![<]) { + /// return Ok(PathSegment { + /// ident, + /// arguments: PathArguments::AngleBracketed(input.parse()?), + /// }); + /// } + /// if input.peek(Token![<]) && !input.peek(Token![<=]) { + /// let fork = input.fork(); + /// if let Ok(arguments) = fork.parse() { + /// input.advance_to(&fork); + /// return Ok(PathSegment { + /// ident, + /// arguments: PathArguments::AngleBracketed(arguments), + /// }); + /// } + /// } + /// Ok(PathSegment::from(ident)) + /// } + /// } + /// + /// # syn::parse_str::("a").unwrap(); + /// ``` + /// + /// # Drawbacks + /// + /// The main drawback of this style of speculative parsing is in error + /// presentation. Even if the lookahead is the "correct" parse, the error + /// that is shown is that of the "fallback" parse. To use the same example + /// as the turbofish above, take the following unfinished "turbofish": + /// + /// ```text + /// let _ = f<&'a fn(), for<'a> serde::>(); + /// ``` + /// + /// If this is parsed as generic arguments, we can provide the error message + /// + /// ```text + /// error: expected identifier + /// --> src.rs:L:C + /// | + /// L | let _ = f<&'a fn(), for<'a> serde::>(); + /// | ^ + /// ``` + /// + /// but if parsed using the above speculative parsing, it falls back to + /// assuming that the `<` is a less-than when it fails to parse the generic + /// arguments, and tries to interpret the `&'a` as the start of a labelled + /// loop, resulting in the much less helpful error + /// + /// ```text + /// error: expected `:` + /// --> src.rs:L:C + /// | + /// L | let _ = f<&'a fn(), for<'a> serde::>(); + /// | ^^ + /// ``` + /// + /// This can be mitigated with various heuristics (two examples: show both + /// forks' parse errors, or show the one that consumed more tokens), but + /// when you can control the grammar, sticking to something that can be + /// parsed LL(3) and without the LL(*) speculative parsing this makes + /// possible, displaying reasonable errors becomes much more simple. + /// + /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544 + /// [`PathSegment`]: crate::PathSegment + /// + /// # Performance + /// + /// This method performs a cheap fixed amount of work that does not depend + /// on how far apart the two streams are positioned. + /// + /// # Panics + /// + /// The forked stream in the argument of `advance_to` must have been + /// obtained by forking `self`. 
Attempting to advance to any other stream + /// will cause a panic. + fn advance_to(&self, fork: &Self); +} + +impl<'a> Speculative for ParseBuffer<'a> { + fn advance_to(&self, fork: &Self) { + if !crate::buffer::same_scope(self.cursor(), fork.cursor()) { + panic!("Fork was not derived from the advancing parse stream"); + } + + let (self_unexp, self_sp) = inner_unexpected(self); + let (fork_unexp, fork_sp) = inner_unexpected(fork); + if !Rc::ptr_eq(&self_unexp, &fork_unexp) { + match (fork_sp, self_sp) { + // Unexpected set on the fork, but not on `self`, copy it over. + (Some(span), None) => { + self_unexp.set(Unexpected::Some(span)); + } + // Unexpected unset. Use chain to propagate errors from fork. + (None, None) => { + fork_unexp.set(Unexpected::Chain(self_unexp)); + + // Ensure toplevel 'unexpected' tokens from the fork don't + // bubble up the chain by replacing the root `unexpected` + // pointer, only 'unexpected' tokens from existing group + // parsers should bubble. + fork.unexpected + .set(Some(Rc::new(Cell::new(Unexpected::None)))); + } + // Unexpected has been set on `self`. No changes needed. + (_, Some(_)) => {} + } + } + + // See comment on `cell` in the struct definition. + self.cell + .set(unsafe { mem::transmute::>(fork.cursor()) }); + } +} + +/// Extensions to the `ParseStream` API to support manipulating invisible +/// delimiters the same as if they were visible. +pub trait AnyDelimiter { + /// Returns the delimiter, the span of the delimiter token, and the nested + /// contents for further parsing. + fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>; +} + +impl<'a> AnyDelimiter for ParseBuffer<'a> { + fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> { + self.step(|cursor| { + if let Some((content, delimiter, span, rest)) = cursor.any_group() { + let scope = crate::buffer::close_span_of_group(*cursor); + let nested = crate::parse::advance_step_cursor(cursor, content); + let unexpected = crate::parse::get_unexpected(self); + let content = crate::parse::new_parse_buffer(scope, nested, unexpected); + Ok(((delimiter, span, content), rest)) + } else { + Err(cursor.error("expected any delimiter")) + } + }) + } +} diff --git a/vendor/syn/src/drops.rs b/vendor/syn/src/drops.rs new file mode 100644 index 0000000..89b42d8 --- /dev/null +++ b/vendor/syn/src/drops.rs @@ -0,0 +1,58 @@ +use std::iter; +use std::mem::ManuallyDrop; +use std::ops::{Deref, DerefMut}; +use std::option; +use std::slice; + +#[repr(transparent)] +pub(crate) struct NoDrop(ManuallyDrop); + +impl NoDrop { + pub(crate) fn new(value: T) -> Self + where + T: TrivialDrop, + { + NoDrop(ManuallyDrop::new(value)) + } +} + +impl Deref for NoDrop { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for NoDrop { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +pub(crate) trait TrivialDrop {} + +impl TrivialDrop for iter::Empty {} +impl<'a, T> TrivialDrop for slice::Iter<'a, T> {} +impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {} +impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {} + +#[test] +fn test_needs_drop() { + use std::mem::needs_drop; + + struct NeedsDrop; + + impl Drop for NeedsDrop { + fn drop(&mut self) {} + } + + assert!(needs_drop::()); + + // Test each of the types with a handwritten TrivialDrop impl above. 
+ assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); + assert!(!needs_drop::>()); +} diff --git a/vendor/syn/src/error.rs b/vendor/syn/src/error.rs new file mode 100644 index 0000000..71247cd --- /dev/null +++ b/vendor/syn/src/error.rs @@ -0,0 +1,467 @@ +#[cfg(feature = "parsing")] +use crate::buffer::Cursor; +use crate::thread::ThreadBound; +use proc_macro2::{ + Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, +}; +#[cfg(feature = "printing")] +use quote::ToTokens; +use std::fmt::{self, Debug, Display}; +use std::slice; +use std::vec; + +/// The result of a Syn parser. +pub type Result = std::result::Result; + +/// Error returned when a Syn parser cannot parse the input tokens. +/// +/// # Error reporting in proc macros +/// +/// The correct way to report errors back to the compiler from a procedural +/// macro is by emitting an appropriately spanned invocation of +/// [`compile_error!`] in the generated code. This produces a better diagnostic +/// message than simply panicking the macro. +/// +/// [`compile_error!`]: std::compile_error! +/// +/// When parsing macro input, the [`parse_macro_input!`] macro handles the +/// conversion to `compile_error!` automatically. +/// +/// [`parse_macro_input!`]: crate::parse_macro_input! +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// use proc_macro::TokenStream; +/// use syn::parse::{Parse, ParseStream, Result}; +/// use syn::{parse_macro_input, ItemFn}; +/// +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_attribute] +/// # }; +/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream { +/// let args = parse_macro_input!(args as MyAttrArgs); +/// let input = parse_macro_input!(input as ItemFn); +/// +/// /* ... */ +/// # TokenStream::new() +/// } +/// +/// struct MyAttrArgs { +/// # _k: [(); { stringify! { +/// ... +/// # }; 0 }] +/// } +/// +/// impl Parse for MyAttrArgs { +/// fn parse(input: ParseStream) -> Result { +/// # stringify! { +/// ... +/// # }; +/// # unimplemented!() +/// } +/// } +/// ``` +/// +/// For errors that arise later than the initial parsing stage, the +/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to +/// perform an explicit conversion to `compile_error!`. +/// +/// [`.to_compile_error()`]: Error::to_compile_error +/// [`.into_compile_error()`]: Error::into_compile_error +/// +/// ``` +/// # extern crate proc_macro; +/// # +/// # use proc_macro::TokenStream; +/// # use syn::{parse_macro_input, DeriveInput}; +/// # +/// # const IGNORE: &str = stringify! { +/// #[proc_macro_derive(MyDerive)] +/// # }; +/// pub fn my_derive(input: TokenStream) -> TokenStream { +/// let input = parse_macro_input!(input as DeriveInput); +/// +/// // fn(DeriveInput) -> syn::Result +/// expand::my_derive(input) +/// .unwrap_or_else(syn::Error::into_compile_error) +/// .into() +/// } +/// # +/// # mod expand { +/// # use proc_macro2::TokenStream; +/// # use syn::{DeriveInput, Result}; +/// # +/// # pub fn my_derive(input: DeriveInput) -> Result { +/// # unimplemented!() +/// # } +/// # } +/// ``` +pub struct Error { + messages: Vec, +} + +struct ErrorMessage { + // Span is implemented as an index into a thread-local interner to keep the + // size small. It is not safe to access from a different thread. 
We want + // errors to be Send and Sync to play nicely with ecosystem crates for error + // handling, so pin the span we're given to its original thread and assume + // it is Span::call_site if accessed from any other thread. + span: ThreadBound, + message: String, +} + +// Cannot use std::ops::Range because that does not implement Copy, +// whereas ThreadBound requires a Copy impl as a way to ensure no Drop impls +// are involved. +struct SpanRange { + start: Span, + end: Span, +} + +#[cfg(test)] +struct _Test +where + Error: Send + Sync; + +impl Error { + /// Usually the [`ParseStream::error`] method will be used instead, which + /// automatically uses the correct span from the current position of the + /// parse stream. + /// + /// Use `Error::new` when the error needs to be triggered on some span other + /// than where the parse stream is currently positioned. + /// + /// [`ParseStream::error`]: crate::parse::ParseBuffer::error + /// + /// # Example + /// + /// ``` + /// use syn::{Error, Ident, LitStr, Result, Token}; + /// use syn::parse::ParseStream; + /// + /// // Parses input that looks like `name = "string"` where the key must be + /// // the identifier `name` and the value may be any string literal. + /// // Returns the string literal. + /// fn parse_name(input: ParseStream) -> Result { + /// let name_token: Ident = input.parse()?; + /// if name_token != "name" { + /// // Trigger an error not on the current position of the stream, + /// // but on the position of the unexpected identifier. + /// return Err(Error::new(name_token.span(), "expected `name`")); + /// } + /// input.parse::()?; + /// let s: LitStr = input.parse()?; + /// Ok(s) + /// } + /// ``` + pub fn new(span: Span, message: T) -> Self { + return new(span, message.to_string()); + + fn new(span: Span, message: String) -> Error { + Error { + messages: vec![ErrorMessage { + span: ThreadBound::new(SpanRange { + start: span, + end: span, + }), + message, + }], + } + } + } + + /// Creates an error with the specified message spanning the given syntax + /// tree node. + /// + /// Unlike the `Error::new` constructor, this constructor takes an argument + /// `tokens` which is a syntax tree node. This allows the resulting `Error` + /// to attempt to span all tokens inside of `tokens`. While you would + /// typically be able to use the `Spanned` trait with the above `Error::new` + /// constructor, implementation limitations today mean that + /// `Error::new_spanned` may provide a higher-quality error message on + /// stable Rust. + /// + /// When in doubt it's recommended to stick to `Error::new` (or + /// `ParseStream::error`)! + #[cfg(feature = "printing")] + #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))] + pub fn new_spanned(tokens: T, message: U) -> Self { + return new_spanned(tokens.into_token_stream(), message.to_string()); + + fn new_spanned(tokens: TokenStream, message: String) -> Error { + let mut iter = tokens.into_iter(); + let start = iter.next().map_or_else(Span::call_site, |t| t.span()); + let end = iter.last().map_or(start, |t| t.span()); + Error { + messages: vec![ErrorMessage { + span: ThreadBound::new(SpanRange { start, end }), + message, + }], + } + } + } + + /// The source location of the error. + /// + /// Spans are not thread-safe so this function returns `Span::call_site()` + /// if called from a different thread than the one on which the `Error` was + /// originally created. 
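+    ///
+    /// A minimal sketch; the span and message here are arbitrary.
+    ///
+    /// ```
+    /// use proc_macro2::Span;
+    /// use syn::Error;
+    ///
+    /// let error = Error::new(Span::call_site(), "unsupported input");
+    ///
+    /// // Inspected on the same thread that created it, so this is the span
+    /// // that was passed to `Error::new` (here, the call site).
+    /// let _span: Span = error.span();
+    /// assert_eq!(error.to_string(), "unsupported input");
+    /// ```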
+ pub fn span(&self) -> Span { + let SpanRange { start, end } = match self.messages[0].span.get() { + Some(span) => *span, + None => return Span::call_site(), + }; + start.join(end).unwrap_or(start) + } + + /// Render the error as an invocation of [`compile_error!`]. + /// + /// The [`parse_macro_input!`] macro provides a convenient way to invoke + /// this method correctly in a procedural macro. + /// + /// [`compile_error!`]: std::compile_error! + /// [`parse_macro_input!`]: crate::parse_macro_input! + pub fn to_compile_error(&self) -> TokenStream { + self.messages + .iter() + .map(ErrorMessage::to_compile_error) + .collect() + } + + /// Render the error as an invocation of [`compile_error!`]. + /// + /// [`compile_error!`]: std::compile_error! + /// + /// # Example + /// + /// ``` + /// # extern crate proc_macro; + /// # + /// use proc_macro::TokenStream; + /// use syn::{parse_macro_input, DeriveInput, Error}; + /// + /// # const _: &str = stringify! { + /// #[proc_macro_derive(MyTrait)] + /// # }; + /// pub fn derive_my_trait(input: TokenStream) -> TokenStream { + /// let input = parse_macro_input!(input as DeriveInput); + /// my_trait::expand(input) + /// .unwrap_or_else(Error::into_compile_error) + /// .into() + /// } + /// + /// mod my_trait { + /// use proc_macro2::TokenStream; + /// use syn::{DeriveInput, Result}; + /// + /// pub(crate) fn expand(input: DeriveInput) -> Result { + /// /* ... */ + /// # unimplemented!() + /// } + /// } + /// ``` + pub fn into_compile_error(self) -> TokenStream { + self.to_compile_error() + } + + /// Add another error message to self such that when `to_compile_error()` is + /// called, both errors will be emitted together. + pub fn combine(&mut self, another: Error) { + self.messages.extend(another.messages); + } +} + +impl ErrorMessage { + fn to_compile_error(&self) -> TokenStream { + let (start, end) = match self.span.get() { + Some(range) => (range.start, range.end), + None => (Span::call_site(), Span::call_site()), + }; + + // ::core::compile_error!($message) + TokenStream::from_iter(vec![ + TokenTree::Punct({ + let mut punct = Punct::new(':', Spacing::Joint); + punct.set_span(start); + punct + }), + TokenTree::Punct({ + let mut punct = Punct::new(':', Spacing::Alone); + punct.set_span(start); + punct + }), + TokenTree::Ident(Ident::new("core", start)), + TokenTree::Punct({ + let mut punct = Punct::new(':', Spacing::Joint); + punct.set_span(start); + punct + }), + TokenTree::Punct({ + let mut punct = Punct::new(':', Spacing::Alone); + punct.set_span(start); + punct + }), + TokenTree::Ident(Ident::new("compile_error", start)), + TokenTree::Punct({ + let mut punct = Punct::new('!', Spacing::Alone); + punct.set_span(start); + punct + }), + TokenTree::Group({ + let mut group = Group::new(Delimiter::Brace, { + TokenStream::from_iter(vec![TokenTree::Literal({ + let mut string = Literal::string(&self.message); + string.set_span(end); + string + })]) + }); + group.set_span(end); + group + }), + ]) + } +} + +#[cfg(feature = "parsing")] +pub(crate) fn new_at(scope: Span, cursor: Cursor, message: T) -> Error { + if cursor.eof() { + Error::new(scope, format!("unexpected end of input, {}", message)) + } else { + let span = crate::buffer::open_span_of_group(cursor); + Error::new(span, message) + } +} + +#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))] +pub(crate) fn new2(start: Span, end: Span, message: T) -> Error { + return new2(start, end, message.to_string()); + + fn new2(start: Span, end: Span, message: String) -> Error { + 
Error { + messages: vec![ErrorMessage { + span: ThreadBound::new(SpanRange { start, end }), + message, + }], + } + } +} + +impl Debug for Error { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + if self.messages.len() == 1 { + formatter + .debug_tuple("Error") + .field(&self.messages[0]) + .finish() + } else { + formatter + .debug_tuple("Error") + .field(&self.messages) + .finish() + } + } +} + +impl Debug for ErrorMessage { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(&self.message, formatter) + } +} + +impl Display for Error { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str(&self.messages[0].message) + } +} + +impl Clone for Error { + fn clone(&self) -> Self { + Error { + messages: self.messages.clone(), + } + } +} + +impl Clone for ErrorMessage { + fn clone(&self) -> Self { + ErrorMessage { + span: self.span, + message: self.message.clone(), + } + } +} + +impl Clone for SpanRange { + fn clone(&self) -> Self { + *self + } +} + +impl Copy for SpanRange {} + +impl std::error::Error for Error {} + +impl From for Error { + fn from(err: LexError) -> Self { + Error::new(err.span(), err) + } +} + +impl IntoIterator for Error { + type Item = Error; + type IntoIter = IntoIter; + + fn into_iter(self) -> Self::IntoIter { + IntoIter { + messages: self.messages.into_iter(), + } + } +} + +pub struct IntoIter { + messages: vec::IntoIter, +} + +impl Iterator for IntoIter { + type Item = Error; + + fn next(&mut self) -> Option { + Some(Error { + messages: vec![self.messages.next()?], + }) + } +} + +impl<'a> IntoIterator for &'a Error { + type Item = Error; + type IntoIter = Iter<'a>; + + fn into_iter(self) -> Self::IntoIter { + Iter { + messages: self.messages.iter(), + } + } +} + +pub struct Iter<'a> { + messages: slice::Iter<'a, ErrorMessage>, +} + +impl<'a> Iterator for Iter<'a> { + type Item = Error; + + fn next(&mut self) -> Option { + Some(Error { + messages: vec![self.messages.next()?.clone()], + }) + } +} + +impl Extend for Error { + fn extend>(&mut self, iter: T) { + for err in iter { + self.combine(err); + } + } +} diff --git a/vendor/syn/src/export.rs b/vendor/syn/src/export.rs new file mode 100644 index 0000000..b9ea5c7 --- /dev/null +++ b/vendor/syn/src/export.rs @@ -0,0 +1,73 @@ +#[doc(hidden)] +pub use std::clone::Clone; +#[doc(hidden)] +pub use std::cmp::{Eq, PartialEq}; +#[doc(hidden)] +pub use std::concat; +#[doc(hidden)] +pub use std::default::Default; +#[doc(hidden)] +pub use std::fmt::Debug; +#[doc(hidden)] +pub use std::hash::{Hash, Hasher}; +#[doc(hidden)] +pub use std::marker::Copy; +#[doc(hidden)] +pub use std::option::Option::{None, Some}; +#[doc(hidden)] +pub use std::result::Result::{Err, Ok}; +#[doc(hidden)] +pub use std::stringify; + +#[doc(hidden)] +pub type Formatter<'a> = std::fmt::Formatter<'a>; +#[doc(hidden)] +pub type FmtResult = std::fmt::Result; + +#[doc(hidden)] +pub type bool = std::primitive::bool; +#[doc(hidden)] +pub type str = std::primitive::str; + +#[cfg(feature = "printing")] +#[doc(hidden)] +pub use quote; + +#[doc(hidden)] +pub type Span = proc_macro2::Span; +#[doc(hidden)] +pub type TokenStream2 = proc_macro2::TokenStream; + +#[cfg(feature = "parsing")] +#[doc(hidden)] +pub use crate::group::{parse_braces, parse_brackets, parse_parens}; + +#[doc(hidden)] +pub use crate::span::IntoSpans; + +#[cfg(all(feature = "parsing", feature = "printing"))] +#[doc(hidden)] +pub use crate::parse_quote::parse as parse_quote; + +#[cfg(feature = "parsing")] +#[doc(hidden)] +pub use 
crate::token::parsing::{peek_punct, punct as parse_punct}; + +#[cfg(feature = "printing")] +#[doc(hidden)] +pub use crate::token::printing::punct as print_punct; + +#[cfg(feature = "parsing")] +#[doc(hidden)] +pub use crate::token::private::CustomToken; + +#[cfg(feature = "proc-macro")] +#[doc(hidden)] +pub type TokenStream = proc_macro::TokenStream; + +#[cfg(feature = "printing")] +#[doc(hidden)] +pub use quote::{ToTokens, TokenStreamExt}; + +#[doc(hidden)] +pub struct private(pub(crate) ()); diff --git a/vendor/syn/src/expr.rs b/vendor/syn/src/expr.rs new file mode 100644 index 0000000..7fb0f7b --- /dev/null +++ b/vendor/syn/src/expr.rs @@ -0,0 +1,3506 @@ +use super::*; +use crate::punctuated::Punctuated; +use proc_macro2::{Span, TokenStream}; +#[cfg(feature = "printing")] +use quote::IdentFragment; +#[cfg(feature = "printing")] +use std::fmt::{self, Display}; +use std::hash::{Hash, Hasher}; +#[cfg(feature = "parsing")] +use std::mem; + +ast_enum_of_structs! { + /// A Rust expression. + /// + /// *This type is available only if Syn is built with the `"derive"` or `"full"` + /// feature, but most of the variants are not available unless "full" is enabled.* + /// + /// # Syntax tree enums + /// + /// This type is a syntax tree enum. In Syn this and other syntax tree enums + /// are designed to be traversed using the following rebinding idiom. + /// + /// ``` + /// # use syn::Expr; + /// # + /// # fn example(expr: Expr) { + /// # const IGNORE: &str = stringify! { + /// let expr: Expr = /* ... */; + /// # }; + /// match expr { + /// Expr::MethodCall(expr) => { + /// /* ... */ + /// } + /// Expr::Cast(expr) => { + /// /* ... */ + /// } + /// Expr::If(expr) => { + /// /* ... */ + /// } + /// + /// /* ... */ + /// # _ => {} + /// # } + /// # } + /// ``` + /// + /// We begin with a variable `expr` of type `Expr` that has no fields + /// (because it is an enum), and by matching on it and rebinding a variable + /// with the same name `expr` we effectively imbue our variable with all of + /// the data fields provided by the variant that it turned out to be. So for + /// example above if we ended up in the `MethodCall` case then we get to use + /// `expr.receiver`, `expr.args` etc; if we ended up in the `If` case we get + /// to use `expr.cond`, `expr.then_branch`, `expr.else_branch`. + /// + /// This approach avoids repeating the variant names twice on every line. + /// + /// ``` + /// # use syn::{Expr, ExprMethodCall}; + /// # + /// # fn example(expr: Expr) { + /// // Repetitive; recommend not doing this. + /// match expr { + /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { + /// # } + /// # _ => {} + /// # } + /// # } + /// ``` + /// + /// In general, the name to which a syntax tree enum variant is bound should + /// be a suitable name for the complete syntax tree enum type. + /// + /// ``` + /// # use syn::{Expr, ExprField}; + /// # + /// # fn example(discriminant: ExprField) { + /// // Binding is called `base` which is the name I would use if I were + /// // assigning `*discriminant.base` without an `if let`. + /// if let Expr::Tuple(base) = *discriminant.base { + /// # } + /// # } + /// ``` + /// + /// A sign that you may not be choosing the right variable names is if you + /// see names getting repeated in your code, like accessing + /// `receiver.receiver` or `pat.pat` or `cond.cond`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + #[non_exhaustive] + pub enum Expr { + /// A slice literal expression: `[a, b, c, d]`. 
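+        ///
+        /// A short sketch of matching this variant; the expression being
+        /// parsed is purely illustrative and requires the `"full"` feature.
+        ///
+        /// ```
+        /// use syn::{Expr, ExprArray};
+        ///
+        /// let expr: Expr = syn::parse_str("[a, b, c, d]").unwrap();
+        /// if let Expr::Array(ExprArray { elems, .. }) = expr {
+        ///     assert_eq!(elems.len(), 4);
+        /// } else {
+        ///     unreachable!("a bracketed list parses as `Expr::Array`");
+        /// }
+        /// ```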
+ Array(ExprArray), + + /// An assignment expression: `a = compute()`. + Assign(ExprAssign), + + /// An async block: `async { ... }`. + Async(ExprAsync), + + /// An await expression: `fut.await`. + Await(ExprAwait), + + /// A binary operation: `a + b`, `a += b`. + Binary(ExprBinary), + + /// A blocked scope: `{ ... }`. + Block(ExprBlock), + + /// A `break`, with an optional label to break and an optional + /// expression. + Break(ExprBreak), + + /// A function call expression: `invoke(a, b)`. + Call(ExprCall), + + /// A cast expression: `foo as f64`. + Cast(ExprCast), + + /// A closure expression: `|a, b| a + b`. + Closure(ExprClosure), + + /// A const block: `const { ... }`. + Const(ExprConst), + + /// A `continue`, with an optional label. + Continue(ExprContinue), + + /// Access of a named struct field (`obj.k`) or unnamed tuple struct + /// field (`obj.0`). + Field(ExprField), + + /// A for loop: `for pat in expr { ... }`. + ForLoop(ExprForLoop), + + /// An expression contained within invisible delimiters. + /// + /// This variant is important for faithfully representing the precedence + /// of expressions and is related to `None`-delimited spans in a + /// `TokenStream`. + Group(ExprGroup), + + /// An `if` expression with an optional `else` block: `if expr { ... } + /// else { ... }`. + /// + /// The `else` branch expression may only be an `If` or `Block` + /// expression, not any of the other types of expression. + If(ExprIf), + + /// A square bracketed indexing expression: `vector[2]`. + Index(ExprIndex), + + /// The inferred value of a const generic argument, denoted `_`. + Infer(ExprInfer), + + /// A `let` guard: `let Some(x) = opt`. + Let(ExprLet), + + /// A literal in place of an expression: `1`, `"foo"`. + Lit(ExprLit), + + /// Conditionless loop: `loop { ... }`. + Loop(ExprLoop), + + /// A macro invocation expression: `format!("{}", q)`. + Macro(ExprMacro), + + /// A `match` expression: `match n { Some(n) => {}, None => {} }`. + Match(ExprMatch), + + /// A method call expression: `x.foo::(a, b)`. + MethodCall(ExprMethodCall), + + /// A parenthesized expression: `(a + b)`. + Paren(ExprParen), + + /// A path like `std::mem::replace` possibly containing generic + /// parameters and a qualified self-type. + /// + /// A plain identifier like `x` is a path of length 1. + Path(ExprPath), + + /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`. + Range(ExprRange), + + /// A referencing operation: `&a` or `&mut a`. + Reference(ExprReference), + + /// An array literal constructed from one repeated element: `[0u8; N]`. + Repeat(ExprRepeat), + + /// A `return`, with an optional value to be returned. + Return(ExprReturn), + + /// A struct literal expression: `Point { x: 1, y: 1 }`. + /// + /// The `rest` provides the value of the remaining fields as in `S { a: + /// 1, b: 1, ..rest }`. + Struct(ExprStruct), + + /// A try-expression: `expr?`. + Try(ExprTry), + + /// A try block: `try { ... }`. + TryBlock(ExprTryBlock), + + /// A tuple expression: `(a, b, c, d)`. + Tuple(ExprTuple), + + /// A unary operation: `!x`, `*x`. + Unary(ExprUnary), + + /// An unsafe block: `unsafe { ... }`. + Unsafe(ExprUnsafe), + + /// Tokens in expression position not interpreted by Syn. + Verbatim(TokenStream), + + /// A while loop: `while expr { ... }`. + While(ExprWhile), + + /// A yield expression: `yield expr`. 
+ Yield(ExprYield), + + // For testing exhaustiveness in downstream code, use the following idiom: + // + // match expr { + // #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))] + // + // Expr::Array(expr) => {...} + // Expr::Assign(expr) => {...} + // ... + // Expr::Yield(expr) => {...} + // + // _ => { /* some sane fallback */ } + // } + // + // This way we fail your tests but don't break your library when adding + // a variant. You will be notified by a test failure when a variant is + // added, so that you can add code to handle it, but your library will + // continue to compile and work for downstream users in the interim. + } +} + +ast_struct! { + /// A slice literal expression: `[a, b, c, d]`. + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprArray #full { + pub attrs: Vec, + pub bracket_token: token::Bracket, + pub elems: Punctuated, + } +} + +ast_struct! { + /// An assignment expression: `a = compute()`. + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAssign #full { + pub attrs: Vec, + pub left: Box, + pub eq_token: Token![=], + pub right: Box, + } +} + +ast_struct! { + /// An async block: `async { ... }`. + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAsync #full { + pub attrs: Vec, + pub async_token: Token![async], + pub capture: Option, + pub block: Block, + } +} + +ast_struct! { + /// An await expression: `fut.await`. + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprAwait #full { + pub attrs: Vec, + pub base: Box, + pub dot_token: Token![.], + pub await_token: Token![await], + } +} + +ast_struct! { + /// A binary operation: `a + b`, `a += b`. + #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))] + pub struct ExprBinary { + pub attrs: Vec, + pub left: Box, + pub op: BinOp, + pub right: Box, + } +} + +ast_struct! { + /// A blocked scope: `{ ... }`. + #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))] + pub struct ExprBlock #full { + pub attrs: Vec, + pub label: Option