path: root/vendor/proc-macro2
Diffstat (limited to 'vendor/proc-macro2')
-rw-r--r--  vendor/proc-macro2/.cargo-checksum.json     1
-rw-r--r--  vendor/proc-macro2/Cargo.toml               67
-rw-r--r--  vendor/proc-macro2/LICENSE-APACHE          176
-rw-r--r--  vendor/proc-macro2/LICENSE-MIT              23
-rw-r--r--  vendor/proc-macro2/README.md                94
-rw-r--r--  vendor/proc-macro2/build.rs                202
-rw-r--r--  vendor/proc-macro2/build/probe.rs           21
-rw-r--r--  vendor/proc-macro2/rust-toolchain.toml       2
-rw-r--r--  vendor/proc-macro2/src/detection.rs         75
-rw-r--r--  vendor/proc-macro2/src/extra.rs             84
-rw-r--r--  vendor/proc-macro2/src/fallback.rs        1143
-rw-r--r--  vendor/proc-macro2/src/lib.rs             1328
-rw-r--r--  vendor/proc-macro2/src/location.rs          29
-rw-r--r--  vendor/proc-macro2/src/marker.rs            21
-rw-r--r--  vendor/proc-macro2/src/parse.rs            996
-rw-r--r--  vendor/proc-macro2/src/rcvec.rs            145
-rw-r--r--  vendor/proc-macro2/src/wrapper.rs          930
-rw-r--r--  vendor/proc-macro2/tests/comments.rs       105
-rw-r--r--  vendor/proc-macro2/tests/features.rs         8
-rw-r--r--  vendor/proc-macro2/tests/marker.rs          99
-rw-r--r--  vendor/proc-macro2/tests/test.rs           759
-rw-r--r--  vendor/proc-macro2/tests/test_fmt.rs        28
-rw-r--r--  vendor/proc-macro2/tests/test_size.rs       42
23 files changed, 0 insertions, 6378 deletions
diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json
deleted file mode 100644
index 744053b..0000000
--- a/vendor/proc-macro2/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"4a85db50c7866e4eba8673358705982eaf715207ae4b8e16d3f485671fa9f473","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c609b6865476d6c35879784e9155367a97a0da496aa5c3c61488440a20f59883","build.rs":"8b4facae0d125ca3b437b4f5ebcd6ea3da3fcc65fcfc2cf357ae544423aa4568","build/probe.rs":"827da142d033f027d9f2a52ffdc0a619c7c34a2a280635e38c64fcd46cf7b635","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"d378a9e799e5c49933b067cd38f5364d16a152ef337eef86ce42fdc86005ddf3","src/fallback.rs":"35e46d4fa73175dcf857084e12f5bb7e094481738dcf59a98b1c584552d076bc","src/lib.rs":"d0f6c5e918b827df600cf5e73cf92ca52c16584b4ac7dd96eb63562947d36bd5","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"c8c90351b8ebcf5b11520831b199add628bc613b0f5559260b51a3c4f6406d8a","src/parse.rs":"4b77cddbc2752bc4d38a65acd8b96b6786c5220d19b1e1b37810257b5d24132d","src/rcvec.rs":"1c3c48c4f819927cc445ae15ca3bb06775feff2fd1cb21901ae4c40c7e6b4e82","src/wrapper.rs":"46ae8c6bc87edb04c3b3fc3506234bcca050c309c209b93bc61b2d21235c8362","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"3190ee07dae510251f360db701ce257030f94a479b6689c3a9ef804bd5d8d099","tests/test.rs":"7511be57e097b15403cf36feb858b4aabdc832fac7024571059a559a7e2ed2a0","tests/test_fmt.rs":"b7743b612af65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"acf05963c1e62052d769d237b50844a2c59b4182b491231b099a4f74e5456ab0"},"package":"95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"} \ No newline at end of file
diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml
deleted file mode 100644
index 280bbdf..0000000
--- a/vendor/proc-macro2/Cargo.toml
+++ /dev/null
@@ -1,67 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.56"
-name = "proc-macro2"
-version = "1.0.76"
-authors = [
- "David Tolnay <dtolnay@gmail.com>",
- "Alex Crichton <alex@alexcrichton.com>",
-]
-autobenches = false
-description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case."
-documentation = "https://docs.rs/proc-macro2"
-readme = "README.md"
-keywords = [
- "macros",
- "syn",
-]
-categories = ["development-tools::procedural-macro-helpers"]
-license = "MIT OR Apache-2.0"
-repository = "https://github.com/dtolnay/proc-macro2"
-
-[package.metadata.docs.rs]
-rustc-args = [
- "--cfg",
- "procmacro2_semver_exempt",
-]
-rustdoc-args = [
- "--cfg",
- "procmacro2_semver_exempt",
- "--cfg",
- "doc_cfg",
- "--generate-link-to-definition",
-]
-targets = ["x86_64-unknown-linux-gnu"]
-
-[package.metadata.playground]
-features = ["span-locations"]
-
-[lib]
-doc-scrape-examples = false
-
-[dependencies.unicode-ident]
-version = "1.0"
-
-[dev-dependencies.quote]
-version = "1.0"
-default_features = false
-
-[dev-dependencies.rustversion]
-version = "1"
-
-[features]
-default = ["proc-macro"]
-nightly = []
-proc-macro = []
-span-locations = []
diff --git a/vendor/proc-macro2/LICENSE-APACHE b/vendor/proc-macro2/LICENSE-APACHE
deleted file mode 100644
index 1b5ec8b..0000000
--- a/vendor/proc-macro2/LICENSE-APACHE
+++ /dev/null
@@ -1,176 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
diff --git a/vendor/proc-macro2/LICENSE-MIT b/vendor/proc-macro2/LICENSE-MIT
deleted file mode 100644
index 31aa793..0000000
--- a/vendor/proc-macro2/LICENSE-MIT
+++ /dev/null
@@ -1,23 +0,0 @@
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/vendor/proc-macro2/README.md b/vendor/proc-macro2/README.md
deleted file mode 100644
index 3a29ce8..0000000
--- a/vendor/proc-macro2/README.md
+++ /dev/null
@@ -1,94 +0,0 @@
-# proc-macro2
-
-[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/proc-macro2)
-[<img alt="crates.io" src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/proc-macro2)
-[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/proc-macro2)
-[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster)
-
-A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
-This library serves two purposes:
-
-- **Bring proc-macro-like functionality to other contexts like build.rs and
- main.rs.** Types from `proc_macro` are entirely specific to procedural macros
- and cannot ever exist in code outside of a procedural macro. Meanwhile
- `proc_macro2` types may exist anywhere including non-macro code. By developing
- foundational libraries like [syn] and [quote] against `proc_macro2` rather
- than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
- many other use cases and we avoid reimplementing non-macro equivalents of
- those libraries.
-
-- **Make procedural macros unit testable.** As a consequence of being specific
- to procedural macros, nothing that uses `proc_macro` can be executed from a
- unit test. In order for helper libraries or components of a macro to be
- testable in isolation, they must be implemented using `proc_macro2`.
-
-[syn]: https://github.com/dtolnay/syn
-[quote]: https://github.com/dtolnay/quote
-
-## Usage
-
-```toml
-[dependencies]
-proc-macro2 = "1.0"
-```
-
-The skeleton of a typical procedural macro typically looks like this:
-
-```rust
-extern crate proc_macro;
-
-#[proc_macro_derive(MyDerive)]
-pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
- let input = proc_macro2::TokenStream::from(input);
-
- let output: proc_macro2::TokenStream = {
- /* transform input */
- };
-
- proc_macro::TokenStream::from(output)
-}
-```
-
-If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
-parse errors correctly back to the compiler when parsing fails.
-
-[`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html
-
-## Unstable features
-
-The default feature set of proc-macro2 tracks the most recent stable compiler
-API. Functionality in `proc_macro` that is not yet stable is not exposed by
-proc-macro2 by default.
-
-To opt into the additional APIs available in the most recent nightly compiler,
-the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
-polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs
-that track the nightly compiler, minor versions of proc-macro2 may make breaking
-changes to them at any time.
-
-```
-RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
-```
-
-Note that this must not only be done for your crate, but for any crate that
-depends on your crate. This infectious nature is intentional, as it serves as a
-reminder that you are outside of the normal semver guarantees.
-
-Semver exempt methods are marked as such in the proc-macro2 documentation.
-
-<br>
-
-#### License
-
-<sup>
-Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
-2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
-</sup>
-
-<br>
-
-<sub>
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
-be dual licensed as above, without any additional terms or conditions.
-</sub>
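
Editor's note: the README above stresses that helpers built on `proc_macro2` types are unit testable because, unlike `proc_macro`, they work outside a compiler-provided macro context. A minimal sketch of that point, assuming `proc-macro2` is available as a dependency (the helper name is hypothetical, not part of the crate):

```rust
use proc_macro2::{Ident, Span, TokenStream, TokenTree};

// Count identifiers in a token stream, recursing into delimited groups.
fn count_idents(stream: TokenStream) -> usize {
    stream
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Ident(_) => 1,
            TokenTree::Group(group) => count_idents(group.stream()),
            _ => 0,
        })
        .sum()
}

#[test]
fn runs_outside_a_macro_context() {
    // `FromStr` and `Ident::new` are served by the fallback implementation
    // when no compiler proc_macro is available, so this is an ordinary test.
    let stream: TokenStream = "fn demo(x: u32) -> u32 { x }".parse().unwrap();
    assert_eq!(count_idents(stream), 6); // fn, demo, x, u32, u32, x
    let _ = Ident::new("demo", Span::call_site());
}
```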
diff --git a/vendor/proc-macro2/build.rs b/vendor/proc-macro2/build.rs
deleted file mode 100644
index 3347f87..0000000
--- a/vendor/proc-macro2/build.rs
+++ /dev/null
@@ -1,202 +0,0 @@
-// rustc-cfg emitted by the build script:
-//
-// "wrap_proc_macro"
-// Wrap types from libproc_macro rather than polyfilling the whole API.
-// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
-// because we can't emulate the unstable API without emulating everything
-// else. Also enabled unconditionally on nightly, in which case the
-// procmacro2_semver_exempt surface area is implemented by using the
-// nightly-only proc_macro API.
-//
-// "hygiene"
-// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
-// and Span::located_at. Enabled on Rust 1.45+.
-//
-// "proc_macro_span"
-// Enable non-dummy behavior of Span::start and Span::end methods which
-// requires an unstable compiler feature. Enabled when building with
-// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
-// features.
-//
-// "super_unstable"
-// Implement the semver exempt API in terms of the nightly-only proc_macro
-// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
-//
-// "span_locations"
-// Provide methods Span::start and Span::end which give the line/column
-// location of a token. Enabled by procmacro2_semver_exempt or the
-// "span-locations" Cargo cfg. This is behind a cfg because tracking
-// location inside spans is a performance hit.
-//
-// "is_available"
-// Use proc_macro::is_available() to detect if the proc macro API is
-// available or needs to be polyfilled instead of trying to use the proc
-// macro API and catching a panic if it isn't available. Enabled on Rust
-// 1.57+.
-
-use std::env;
-use std::ffi::OsString;
-use std::path::Path;
-use std::process::{self, Command, Stdio};
-use std::str;
-use std::u32;
-
-fn main() {
- let rustc = rustc_minor_version().unwrap_or(u32::MAX);
-
- let docs_rs = env::var_os("DOCS_RS").is_some();
- let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs;
- if semver_exempt {
- // https://github.com/dtolnay/proc-macro2/issues/147
- println!("cargo:rustc-cfg=procmacro2_semver_exempt");
- }
-
- if semver_exempt || cfg!(feature = "span-locations") {
- println!("cargo:rustc-cfg=span_locations");
- }
-
- if rustc < 57 {
- println!("cargo:rustc-cfg=no_is_available");
- }
-
- if rustc < 66 {
- println!("cargo:rustc-cfg=no_source_text");
- }
-
- if !cfg!(feature = "proc-macro") {
- println!("cargo:rerun-if-changed=build.rs");
- return;
- }
-
- println!("cargo:rerun-if-changed=build/probe.rs");
-
- let proc_macro_span;
- let consider_rustc_bootstrap;
- if compile_probe(false) {
- // This is a nightly or dev compiler, so it supports unstable features
- // regardless of RUSTC_BOOTSTRAP. No need to rerun build script if
- // RUSTC_BOOTSTRAP is changed.
- proc_macro_span = true;
- consider_rustc_bootstrap = false;
- } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") {
- if compile_probe(true) {
- // This is a stable or beta compiler for which the user has set
- // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script
- // if they change it.
- proc_macro_span = true;
- consider_rustc_bootstrap = true;
- } else if rustc_bootstrap == "1" {
- // This compiler does not support the proc macro Span API in the
- // form that proc-macro2 expects. No need to pay attention to
- // RUSTC_BOOTSTRAP.
- proc_macro_span = false;
- consider_rustc_bootstrap = false;
- } else {
- // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is
- // set to restrict the use of unstable features by this crate.
- proc_macro_span = false;
- consider_rustc_bootstrap = true;
- }
- } else {
- // Without RUSTC_BOOTSTRAP, this compiler does not support the proc
- // macro Span API in the form that proc-macro2 expects, but try again if
- // the user turns on unstable features.
- proc_macro_span = false;
- consider_rustc_bootstrap = true;
- }
-
- if proc_macro_span || !semver_exempt {
- println!("cargo:rustc-cfg=wrap_proc_macro");
- }
-
- if proc_macro_span {
- println!("cargo:rustc-cfg=proc_macro_span");
- }
-
- if semver_exempt && proc_macro_span {
- println!("cargo:rustc-cfg=super_unstable");
- }
-
- if consider_rustc_bootstrap {
- println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP");
- }
-}
-
-fn compile_probe(rustc_bootstrap: bool) -> bool {
- if env::var_os("RUSTC_STAGE").is_some() {
- // We are running inside rustc bootstrap. This is a highly non-standard
- // environment with issues such as:
- //
- // https://github.com/rust-lang/cargo/issues/11138
- // https://github.com/rust-lang/rust/issues/114839
- //
- // Let's just not use nightly features here.
- return false;
- }
-
- let rustc = cargo_env_var("RUSTC");
- let out_dir = cargo_env_var("OUT_DIR");
- let probefile = Path::new("build").join("probe.rs");
-
- // Make sure to pick up Cargo rustc configuration.
- let mut cmd = if let Some(wrapper) = env::var_os("RUSTC_WRAPPER") {
- let mut cmd = Command::new(wrapper);
- // The wrapper's first argument is supposed to be the path to rustc.
- cmd.arg(rustc);
- cmd
- } else {
- Command::new(rustc)
- };
-
- if !rustc_bootstrap {
- cmd.env_remove("RUSTC_BOOTSTRAP");
- }
-
- cmd.stderr(Stdio::null())
- .arg("--edition=2021")
- .arg("--crate-name=proc_macro2")
- .arg("--crate-type=lib")
- .arg("--emit=dep-info,metadata")
- .arg("--out-dir")
- .arg(out_dir)
- .arg(probefile);
-
- if let Some(target) = env::var_os("TARGET") {
- cmd.arg("--target").arg(target);
- }
-
- // If Cargo wants to set RUSTFLAGS, use that.
- if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") {
- if !rustflags.is_empty() {
- for arg in rustflags.split('\x1f') {
- cmd.arg(arg);
- }
- }
- }
-
- match cmd.status() {
- Ok(status) => status.success(),
- Err(_) => false,
- }
-}
-
-fn rustc_minor_version() -> Option<u32> {
- let rustc = cargo_env_var("RUSTC");
- let output = Command::new(rustc).arg("--version").output().ok()?;
- let version = str::from_utf8(&output.stdout).ok()?;
- let mut pieces = version.split('.');
- if pieces.next() != Some("rustc 1") {
- return None;
- }
- pieces.next()?.parse().ok()
-}
-
-fn cargo_env_var(key: &str) -> OsString {
- env::var_os(key).unwrap_or_else(|| {
- eprintln!(
- "Environment variable ${} is not set during execution of build script",
- key,
- );
- process::exit(1);
- })
-}
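
Editor's note: build.rs above gates the `span_locations` cfg behind the `span-locations` Cargo feature (or `procmacro2_semver_exempt`) because tracking locations inside spans costs performance. A hedged sketch of what that cfg unlocks for a downstream user, assuming the dependency is declared with `features = ["span-locations"]`:

```rust
use proc_macro2::TokenStream;

fn print_first_token_location(src: &str) {
    let stream: TokenStream = src.parse().expect("source should tokenize");
    if let Some(tt) = stream.into_iter().next() {
        // With the span-locations feature, `start()`/`end()` return
        // `LineColumn { line, column }` (1-based line, 0-based column),
        // matching the fallback implementation later in this diff.
        let start = tt.span().start();
        let end = tt.span().end();
        println!(
            "first token at {}:{} .. {}:{}",
            start.line, start.column, end.line, end.column
        );
    }
}
```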
diff --git a/vendor/proc-macro2/build/probe.rs b/vendor/proc-macro2/build/probe.rs
deleted file mode 100644
index 5afa13a..0000000
--- a/vendor/proc-macro2/build/probe.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-// This code exercises the surface area that we expect of Span's unstable API.
-// If the current toolchain is able to compile it, then proc-macro2 is able to
-// offer these APIs too.
-
-#![feature(proc_macro_span)]
-
-extern crate proc_macro;
-
-use core::ops::RangeBounds;
-use proc_macro::{Literal, Span};
-
-pub fn join(this: &Span, other: Span) -> Option<Span> {
- this.join(other)
-}
-
-pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> {
- this.subspan(range)
-}
-
-// Include in sccache cache key.
-const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP");
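
Editor's note: probe.rs above is compiled by build.rs to test whether the toolchain accepts `Span::join` and `Literal::subspan`; when it does, proc-macro2 exposes the richer behavior. A small sketch of calling the probed API from user code (a sketch only; where the probe fails, `join` may return `None` or a lossy span, so the `Option` must be handled):

```rust
use proc_macro2::{Span, TokenStream};

// Try to produce one span covering a whole token stream, falling back to the
// first token's span when joining is not supported by the current toolchain.
fn whole_stream_span(stream: TokenStream) -> Option<Span> {
    let mut iter = stream.into_iter();
    let first = iter.next()?.span();
    let last = iter.last().map_or(first, |tt| tt.span());
    Some(first.join(last).unwrap_or(first))
}
```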
diff --git a/vendor/proc-macro2/rust-toolchain.toml b/vendor/proc-macro2/rust-toolchain.toml
deleted file mode 100644
index 20fe888..0000000
--- a/vendor/proc-macro2/rust-toolchain.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[toolchain]
-components = ["rust-src"]
diff --git a/vendor/proc-macro2/src/detection.rs b/vendor/proc-macro2/src/detection.rs
deleted file mode 100644
index beba7b2..0000000
--- a/vendor/proc-macro2/src/detection.rs
+++ /dev/null
@@ -1,75 +0,0 @@
-use core::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::Once;
-
-static WORKS: AtomicUsize = AtomicUsize::new(0);
-static INIT: Once = Once::new();
-
-pub(crate) fn inside_proc_macro() -> bool {
- match WORKS.load(Ordering::Relaxed) {
- 1 => return false,
- 2 => return true,
- _ => {}
- }
-
- INIT.call_once(initialize);
- inside_proc_macro()
-}
-
-pub(crate) fn force_fallback() {
- WORKS.store(1, Ordering::Relaxed);
-}
-
-pub(crate) fn unforce_fallback() {
- initialize();
-}
-
-#[cfg(not(no_is_available))]
-fn initialize() {
- let available = proc_macro::is_available();
- WORKS.store(available as usize + 1, Ordering::Relaxed);
-}
-
-// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
-// then use catch_unwind to determine whether the compiler's proc_macro is
-// working. When proc-macro2 is used from outside of a procedural macro all
-// of the proc_macro crate's APIs currently panic.
-//
-// The Once is to prevent the possibility of this ordering:
-//
-// thread 1 calls take_hook, gets the user's original hook
-// thread 1 calls set_hook with the null hook
-// thread 2 calls take_hook, thinks null hook is the original hook
-// thread 2 calls set_hook with the null hook
-// thread 1 calls set_hook with the actual original hook
-// thread 2 calls set_hook with what it thinks is the original hook
-//
-// in which the user's hook has been lost.
-//
-// There is still a race condition where a panic in a different thread can
-// happen during the interval that the user's original panic hook is
-// unregistered such that their hook is incorrectly not called. This is
-// sufficiently unlikely and less bad than printing panic messages to stderr
-// on correct use of this crate. Maybe there is a libstd feature request
-// here. For now, if a user needs to guarantee that this failure mode does
-// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
-// the main thread before launching any other threads.
-#[cfg(no_is_available)]
-fn initialize() {
- use std::panic::{self, PanicInfo};
-
- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
-
- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
- let sanity_check = &*null_hook as *const PanicHook;
- let original_hook = panic::take_hook();
- panic::set_hook(null_hook);
-
- let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
- WORKS.store(works as usize + 1, Ordering::Relaxed);
-
- let hopefully_null_hook = panic::take_hook();
- panic::set_hook(original_hook);
- if sanity_check != &*hopefully_null_hook {
- panic!("observed race condition in proc_macro2::inside_proc_macro");
- }
-}
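
Editor's note: the comment above describes the pre-1.57 detection strategy: silence the panic hook, call a `proc_macro` API under `catch_unwind`, and restore the hook. A standalone sketch of that probe pattern (not part of this crate), with the same caveat that panics on other threads during the window go unreported:

```rust
use std::panic;

// Returns true if `probe` runs without panicking, without letting the panic
// message reach stderr while the probe is in flight.
fn capability_probe(probe: impl Fn() + panic::UnwindSafe) -> bool {
    let original_hook = panic::take_hook();
    panic::set_hook(Box::new(|_info| { /* stay silent during the probe */ }));
    let works = panic::catch_unwind(probe).is_ok();
    panic::set_hook(original_hook);
    works
}
```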
diff --git a/vendor/proc-macro2/src/extra.rs b/vendor/proc-macro2/src/extra.rs
deleted file mode 100644
index 4a69d46..0000000
--- a/vendor/proc-macro2/src/extra.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-//! Items which do not have a correspondence to any API in the proc_macro crate,
-//! but are necessary to include in proc-macro2.
-
-use crate::fallback;
-use crate::imp;
-use crate::marker::Marker;
-use crate::Span;
-use core::fmt::{self, Debug};
-
-/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
-/// (in a more compact representation than holding those 2 spans individually.
-///
-/// [`Group`]: crate::Group
-#[derive(Copy, Clone)]
-pub struct DelimSpan {
- inner: DelimSpanEnum,
- _marker: Marker,
-}
-
-#[derive(Copy, Clone)]
-enum DelimSpanEnum {
- #[cfg(wrap_proc_macro)]
- Compiler {
- join: proc_macro::Span,
- open: proc_macro::Span,
- close: proc_macro::Span,
- },
- Fallback(fallback::Span),
-}
-
-impl DelimSpan {
- pub(crate) fn new(group: &imp::Group) -> Self {
- #[cfg(wrap_proc_macro)]
- let inner = match group {
- imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
- join: group.span(),
- open: group.span_open(),
- close: group.span_close(),
- },
- imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
- };
-
- #[cfg(not(wrap_proc_macro))]
- let inner = DelimSpanEnum::Fallback(group.span());
-
- DelimSpan {
- inner,
- _marker: Marker,
- }
- }
-
- /// Returns a span covering the entire delimited group.
- pub fn join(&self) -> Span {
- match &self.inner {
- #[cfg(wrap_proc_macro)]
- DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
- DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
- }
- }
-
- /// Returns a span for the opening punctuation of the group only.
- pub fn open(&self) -> Span {
- match &self.inner {
- #[cfg(wrap_proc_macro)]
- DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)),
- DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
- }
- }
-
- /// Returns a span for the closing punctuation of the group only.
- pub fn close(&self) -> Span {
- match &self.inner {
- #[cfg(wrap_proc_macro)]
- DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)),
- DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
- }
- }
-}
-
-impl Debug for DelimSpan {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.join(), f)
- }
-}
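
Editor's note: `DelimSpan` above is reached through the public `Group::delim_span()` accessor, which is defined outside this excerpt; a hedged usage sketch under that assumption:

```rust
use proc_macro2::{TokenStream, TokenTree};

fn inspect_group_delimiters(src: &str) {
    let stream: TokenStream = src.parse().expect("source should tokenize");
    for tt in stream {
        if let TokenTree::Group(group) = tt {
            // One compact object holding the joined, opening, and closing
            // delimiter spans of the group.
            let spans = group.delim_span();
            println!(
                "group: {:?} open={:?} close={:?}",
                spans.join(),
                spans.open(),
                spans.close()
            );
        }
    }
}
```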
diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs
deleted file mode 100644
index 7b40427..0000000
--- a/vendor/proc-macro2/src/fallback.rs
+++ /dev/null
@@ -1,1143 +0,0 @@
-#[cfg(span_locations)]
-use crate::location::LineColumn;
-use crate::parse::{self, Cursor};
-use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
-use crate::{Delimiter, Spacing, TokenTree};
-#[cfg(all(span_locations, not(fuzzing)))]
-use alloc::collections::BTreeMap;
-#[cfg(all(span_locations, not(fuzzing)))]
-use core::cell::RefCell;
-#[cfg(span_locations)]
-use core::cmp;
-use core::fmt::{self, Debug, Display, Write};
-use core::mem::ManuallyDrop;
-use core::ops::RangeBounds;
-use core::ptr;
-use core::str::FromStr;
-use std::path::PathBuf;
-
-/// Force use of proc-macro2's fallback implementation of the API for now, even
-/// if the compiler's implementation is available.
-pub fn force() {
- #[cfg(wrap_proc_macro)]
- crate::detection::force_fallback();
-}
-
-/// Resume using the compiler's implementation of the proc macro API if it is
-/// available.
-pub fn unforce() {
- #[cfg(wrap_proc_macro)]
- crate::detection::unforce_fallback();
-}
-
-#[derive(Clone)]
-pub(crate) struct TokenStream {
- inner: RcVec<TokenTree>,
-}
-
-#[derive(Debug)]
-pub(crate) struct LexError {
- pub(crate) span: Span,
-}
-
-impl LexError {
- pub(crate) fn span(&self) -> Span {
- self.span
- }
-
- pub(crate) fn call_site() -> Self {
- LexError {
- span: Span::call_site(),
- }
- }
-}
-
-impl TokenStream {
- pub fn new() -> Self {
- TokenStream {
- inner: RcVecBuilder::new().build(),
- }
- }
-
- pub fn is_empty(&self) -> bool {
- self.inner.len() == 0
- }
-
- fn take_inner(self) -> RcVecBuilder<TokenTree> {
- let nodrop = ManuallyDrop::new(self);
- unsafe { ptr::read(&nodrop.inner) }.make_owned()
- }
-}
-
-fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
- // https://github.com/dtolnay/proc-macro2/issues/235
- match token {
- TokenTree::Literal(crate::Literal {
- #[cfg(wrap_proc_macro)]
- inner: crate::imp::Literal::Fallback(literal),
- #[cfg(not(wrap_proc_macro))]
- inner: literal,
- ..
- }) if literal.repr.starts_with('-') => {
- push_negative_literal(vec, literal);
- }
- _ => vec.push(token),
- }
-
- #[cold]
- fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) {
- literal.repr.remove(0);
- let mut punct = crate::Punct::new('-', Spacing::Alone);
- punct.set_span(crate::Span::_new_fallback(literal.span));
- vec.push(TokenTree::Punct(punct));
- vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
- }
-}
-
-// Nonrecursive to prevent stack overflow.
-impl Drop for TokenStream {
- fn drop(&mut self) {
- let mut inner = match self.inner.get_mut() {
- Some(inner) => inner,
- None => return,
- };
- while let Some(token) = inner.pop() {
- let group = match token {
- TokenTree::Group(group) => group.inner,
- _ => continue,
- };
- #[cfg(wrap_proc_macro)]
- let group = match group {
- crate::imp::Group::Fallback(group) => group,
- crate::imp::Group::Compiler(_) => continue,
- };
- inner.extend(group.stream.take_inner());
- }
- }
-}
-
-pub(crate) struct TokenStreamBuilder {
- inner: RcVecBuilder<TokenTree>,
-}
-
-impl TokenStreamBuilder {
- pub fn new() -> Self {
- TokenStreamBuilder {
- inner: RcVecBuilder::new(),
- }
- }
-
- pub fn with_capacity(cap: usize) -> Self {
- TokenStreamBuilder {
- inner: RcVecBuilder::with_capacity(cap),
- }
- }
-
- pub fn push_token_from_parser(&mut self, tt: TokenTree) {
- self.inner.push(tt);
- }
-
- pub fn build(self) -> TokenStream {
- TokenStream {
- inner: self.inner.build(),
- }
- }
-}
-
-#[cfg(span_locations)]
-fn get_cursor(src: &str) -> Cursor {
- #[cfg(fuzzing)]
- return Cursor { rest: src, off: 1 };
-
- // Create a dummy file & add it to the source map
- #[cfg(not(fuzzing))]
- SOURCE_MAP.with(|cm| {
- let mut cm = cm.borrow_mut();
- let span = cm.add_file(src);
- Cursor {
- rest: src,
- off: span.lo,
- }
- })
-}
-
-#[cfg(not(span_locations))]
-fn get_cursor(src: &str) -> Cursor {
- Cursor { rest: src }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- // Create a dummy file & add it to the source map
- let mut cursor = get_cursor(src);
-
- // Strip a byte order mark if present
- const BYTE_ORDER_MARK: &str = "\u{feff}";
- if cursor.starts_with(BYTE_ORDER_MARK) {
- cursor = cursor.advance(BYTE_ORDER_MARK.len());
- }
-
- parse::token_stream(cursor)
- }
-}
-
-impl Display for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str("cannot parse string into token stream")
- }
-}
-
-impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut joint = false;
- for (i, tt) in self.inner.iter().enumerate() {
- if i != 0 && !joint {
- write!(f, " ")?;
- }
- joint = false;
- match tt {
- TokenTree::Group(tt) => Display::fmt(tt, f),
- TokenTree::Ident(tt) => Display::fmt(tt, f),
- TokenTree::Punct(tt) => {
- joint = tt.spacing() == Spacing::Joint;
- Display::fmt(tt, f)
- }
- TokenTree::Literal(tt) => Display::fmt(tt, f),
- }?;
- }
-
- Ok(())
- }
-}
-
-impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str("TokenStream ")?;
- f.debug_list().entries(self.clone()).finish()
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> Self {
- inner
- .to_string()
- .parse()
- .expect("compiler token stream parse failed")
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> Self {
- inner
- .to_string()
- .parse()
- .expect("failed to parse to compiler tokens")
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> Self {
- let mut stream = RcVecBuilder::new();
- push_token_from_proc_macro(stream.as_mut(), tree);
- TokenStream {
- inner: stream.build(),
- }
- }
-}
-
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
- let mut stream = TokenStream::new();
- stream.extend(tokens);
- stream
- }
-}
-
-impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut v = RcVecBuilder::new();
-
- for stream in streams {
- v.extend(stream.take_inner());
- }
-
- TokenStream { inner: v.build() }
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
- let mut vec = self.inner.make_mut();
- tokens
- .into_iter()
- .for_each(|token| push_token_from_proc_macro(vec.as_mut(), token));
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner.make_mut().extend(streams.into_iter().flatten());
- }
-}
-
-pub(crate) type TokenTreeIter = RcVecIntoIter<TokenTree>;
-
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
- fn into_iter(self) -> TokenTreeIter {
- self.take_inner().into_iter()
- }
-}
-
-#[derive(Clone, PartialEq, Eq)]
-pub(crate) struct SourceFile {
- path: PathBuf,
-}
-
-impl SourceFile {
- /// Get the path to this source file as a string.
- pub fn path(&self) -> PathBuf {
- self.path.clone()
- }
-
- pub fn is_real(&self) -> bool {
- false
- }
-}
-
-impl Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("SourceFile")
- .field("path", &self.path())
- .field("is_real", &self.is_real())
- .finish()
- }
-}
-
-#[cfg(all(span_locations, not(fuzzing)))]
-thread_local! {
- static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
- // Start with a single dummy file which all call_site() and def_site()
- // spans reference.
- files: vec![FileInfo {
- source_text: String::new(),
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- char_index_to_byte_offset: BTreeMap::new(),
- }],
- });
-}
-
-#[cfg(all(span_locations, not(fuzzing)))]
-struct FileInfo {
- source_text: String,
- span: Span,
- lines: Vec<usize>,
- char_index_to_byte_offset: BTreeMap<usize, usize>,
-}
-
-#[cfg(all(span_locations, not(fuzzing)))]
-impl FileInfo {
- fn offset_line_column(&self, offset: usize) -> LineColumn {
- assert!(self.span_within(Span {
- lo: offset as u32,
- hi: offset as u32,
- }));
- let offset = offset - self.span.lo as usize;
- match self.lines.binary_search(&offset) {
- Ok(found) => LineColumn {
- line: found + 1,
- column: 0,
- },
- Err(idx) => LineColumn {
- line: idx,
- column: offset - self.lines[idx - 1],
- },
- }
- }
-
- fn span_within(&self, span: Span) -> bool {
- span.lo >= self.span.lo && span.hi <= self.span.hi
- }
-
- fn source_text(&mut self, span: Span) -> String {
- let lo_char = (span.lo - self.span.lo) as usize;
-
- // Look up offset of the largest already-computed char index that is
- // less than or equal to the current requested one. We resume counting
- // chars from that point.
- let (&last_char_index, &last_byte_offset) = self
- .char_index_to_byte_offset
- .range(..=lo_char)
- .next_back()
- .unwrap_or((&0, &0));
-
- let lo_byte = if last_char_index == lo_char {
- last_byte_offset
- } else {
- let total_byte_offset = match self.source_text[last_byte_offset..]
- .char_indices()
- .nth(lo_char - last_char_index)
- {
- Some((additional_offset, _ch)) => last_byte_offset + additional_offset,
- None => self.source_text.len(),
- };
- self.char_index_to_byte_offset
- .insert(lo_char, total_byte_offset);
- total_byte_offset
- };
-
- let trunc_lo = &self.source_text[lo_byte..];
- let char_len = (span.hi - span.lo) as usize;
- let source_text = match trunc_lo.char_indices().nth(char_len) {
- Some((offset, _ch)) => &trunc_lo[..offset],
- None => trunc_lo,
- };
- source_text.to_owned()
- }
-}
-
-/// Computes the offsets of each line in the given source string
-/// and the total number of characters
-#[cfg(all(span_locations, not(fuzzing)))]
-fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
- let mut lines = vec![0];
- let mut total = 0;
-
- for ch in s.chars() {
- total += 1;
- if ch == '\n' {
- lines.push(total);
- }
- }
-
- (total, lines)
-}
-
-#[cfg(all(span_locations, not(fuzzing)))]
-struct SourceMap {
- files: Vec<FileInfo>,
-}
-
-#[cfg(all(span_locations, not(fuzzing)))]
-impl SourceMap {
- fn next_start_pos(&self) -> u32 {
- // Add 1 so there's always space between files.
- //
- // We'll always have at least 1 file, as we initialize our files list
- // with a dummy file.
- self.files.last().unwrap().span.hi + 1
- }
-
- fn add_file(&mut self, src: &str) -> Span {
- let (len, lines) = lines_offsets(src);
- let lo = self.next_start_pos();
- let span = Span {
- lo,
- hi: lo + (len as u32),
- };
-
- self.files.push(FileInfo {
- source_text: src.to_owned(),
- span,
- lines,
- // Populated lazily by source_text().
- char_index_to_byte_offset: BTreeMap::new(),
- });
-
- span
- }
-
- #[cfg(procmacro2_semver_exempt)]
- fn filepath(&self, span: Span) -> PathBuf {
- for (i, file) in self.files.iter().enumerate() {
- if file.span_within(span) {
- return PathBuf::from(if i == 0 {
- "<unspecified>".to_owned()
- } else {
- format!("<parsed string {}>", i)
- });
- }
- }
- unreachable!("Invalid span with no related FileInfo!");
- }
-
- fn fileinfo(&self, span: Span) -> &FileInfo {
- for file in &self.files {
- if file.span_within(span) {
- return file;
- }
- }
- unreachable!("Invalid span with no related FileInfo!");
- }
-
- fn fileinfo_mut(&mut self, span: Span) -> &mut FileInfo {
- for file in &mut self.files {
- if file.span_within(span) {
- return file;
- }
- }
- unreachable!("Invalid span with no related FileInfo!");
- }
-}
-
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub(crate) struct Span {
- #[cfg(span_locations)]
- pub(crate) lo: u32,
- #[cfg(span_locations)]
- pub(crate) hi: u32,
-}
-
-impl Span {
- #[cfg(not(span_locations))]
- pub fn call_site() -> Self {
- Span {}
- }
-
- #[cfg(span_locations)]
- pub fn call_site() -> Self {
- Span { lo: 0, hi: 0 }
- }
-
- pub fn mixed_site() -> Self {
- Span::call_site()
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Self {
- Span::call_site()
- }
-
- pub fn resolved_at(&self, _other: Span) -> Span {
- // Stable spans consist only of line/column information, so
- // `resolved_at` and `located_at` only select which span the
- // caller wants line/column information from.
- *self
- }
-
- pub fn located_at(&self, other: Span) -> Span {
- other
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- #[cfg(fuzzing)]
- return SourceFile {
- path: PathBuf::from("<unspecified>"),
- };
-
- #[cfg(not(fuzzing))]
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let path = cm.filepath(*self);
- SourceFile { path }
- })
- }
-
- #[cfg(span_locations)]
- pub fn start(&self) -> LineColumn {
- #[cfg(fuzzing)]
- return LineColumn { line: 0, column: 0 };
-
- #[cfg(not(fuzzing))]
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.lo as usize)
- })
- }
-
- #[cfg(span_locations)]
- pub fn end(&self) -> LineColumn {
- #[cfg(fuzzing)]
- return LineColumn { line: 0, column: 0 };
-
- #[cfg(not(fuzzing))]
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.hi as usize)
- })
- }
-
- #[cfg(not(span_locations))]
- pub fn join(&self, _other: Span) -> Option<Span> {
- Some(Span {})
- }
-
- #[cfg(span_locations)]
- pub fn join(&self, other: Span) -> Option<Span> {
- #[cfg(fuzzing)]
- return {
- let _ = other;
- None
- };
-
- #[cfg(not(fuzzing))]
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
- // If `other` is not within the same FileInfo as us, return None.
- if !cm.fileinfo(*self).span_within(other) {
- return None;
- }
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- })
- })
- }
-
- #[cfg(not(span_locations))]
- pub fn source_text(&self) -> Option<String> {
- None
- }
-
- #[cfg(span_locations)]
- pub fn source_text(&self) -> Option<String> {
- #[cfg(fuzzing)]
- return None;
-
- #[cfg(not(fuzzing))]
- {
- if self.is_call_site() {
- None
- } else {
- Some(SOURCE_MAP.with(|cm| cm.borrow_mut().fileinfo_mut(*self).source_text(*self)))
- }
- }
- }
-
- #[cfg(not(span_locations))]
- pub(crate) fn first_byte(self) -> Self {
- self
- }
-
- #[cfg(span_locations)]
- pub(crate) fn first_byte(self) -> Self {
- Span {
- lo: self.lo,
- hi: cmp::min(self.lo.saturating_add(1), self.hi),
- }
- }
-
- #[cfg(not(span_locations))]
- pub(crate) fn last_byte(self) -> Self {
- self
- }
-
- #[cfg(span_locations)]
- pub(crate) fn last_byte(self) -> Self {
- Span {
- lo: cmp::max(self.hi.saturating_sub(1), self.lo),
- hi: self.hi,
- }
- }
-
- #[cfg(span_locations)]
- fn is_call_site(&self) -> bool {
- self.lo == 0 && self.hi == 0
- }
-}
-
-impl Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- #[cfg(span_locations)]
- return write!(f, "bytes({}..{})", self.lo, self.hi);
-
- #[cfg(not(span_locations))]
- write!(f, "Span")
- }
-}
-
-pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
- #[cfg(span_locations)]
- {
- if span.is_call_site() {
- return;
- }
- }
-
- if cfg!(span_locations) {
- debug.field("span", &span);
- }
-}
-
-#[derive(Clone)]
-pub(crate) struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: Span,
-}
-
-impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
- Group {
- delimiter,
- stream,
- span: Span::call_site(),
- }
- }
-
- pub fn delimiter(&self) -> Delimiter {
- self.delimiter
- }
-
- pub fn stream(&self) -> TokenStream {
- self.stream.clone()
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn span_open(&self) -> Span {
- self.span.first_byte()
- }
-
- pub fn span_close(&self) -> Span {
- self.span.last_byte()
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-impl Display for Group {
- // We attempt to match libproc_macro's formatting.
- // Empty parens: ()
- // Nonempty parens: (...)
- // Empty brackets: []
- // Nonempty brackets: [...]
- // Empty braces: { }
- // Nonempty braces: { ... }
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let (open, close) = match self.delimiter {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{ ", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
-
- f.write_str(open)?;
- Display::fmt(&self.stream, f)?;
- if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
- f.write_str(" ")?;
- }
- f.write_str(close)?;
-
- Ok(())
- }
-}
-
-impl Debug for Group {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Group");
- debug.field("delimiter", &self.delimiter);
- debug.field("stream", &self.stream);
- debug_span_field_if_nontrivial(&mut debug, self.span);
- debug.finish()
- }
-}
-
-#[derive(Clone)]
-pub(crate) struct Ident {
- sym: String,
- span: Span,
- raw: bool,
-}
-
-impl Ident {
- #[track_caller]
- pub fn new_checked(string: &str, span: Span) -> Self {
- validate_ident(string);
- Ident::new_unchecked(string, span)
- }
-
- pub fn new_unchecked(string: &str, span: Span) -> Self {
- Ident {
- sym: string.to_owned(),
- span,
- raw: false,
- }
- }
-
- #[track_caller]
- pub fn new_raw_checked(string: &str, span: Span) -> Self {
- validate_ident_raw(string);
- Ident::new_raw_unchecked(string, span)
- }
-
- pub fn new_raw_unchecked(string: &str, span: Span) -> Self {
- Ident {
- sym: string.to_owned(),
- span,
- raw: true,
- }
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-pub(crate) fn is_ident_start(c: char) -> bool {
- c == '_' || unicode_ident::is_xid_start(c)
-}
-
-pub(crate) fn is_ident_continue(c: char) -> bool {
- unicode_ident::is_xid_continue(c)
-}
-
-#[track_caller]
-fn validate_ident(string: &str) {
- if string.is_empty() {
- panic!("Ident is not allowed to be empty; use Option<Ident>");
- }
-
- if string.bytes().all(|digit| b'0' <= digit && digit <= b'9') {
- panic!("Ident cannot be a number; use Literal instead");
- }
-
- fn ident_ok(string: &str) -> bool {
- let mut chars = string.chars();
- let first = chars.next().unwrap();
- if !is_ident_start(first) {
- return false;
- }
- for ch in chars {
- if !is_ident_continue(ch) {
- return false;
- }
- }
- true
- }
-
- if !ident_ok(string) {
- panic!("{:?} is not a valid Ident", string);
- }
-}
-
-#[track_caller]
-fn validate_ident_raw(string: &str) {
- validate_ident(string);
-
- match string {
- "_" | "super" | "self" | "Self" | "crate" => {
- panic!("`r#{}` cannot be a raw identifier", string);
- }
- _ => {}
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- self.sym == other.sym && self.raw == other.raw
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- let other = other.as_ref();
- if self.raw {
- other.starts_with("r#") && self.sym == other[2..]
- } else {
- self.sym == other
- }
- }
-}
-
-impl Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- if self.raw {
- f.write_str("r#")?;
- }
- Display::fmt(&self.sym, f)
- }
-}
-
-#[allow(clippy::missing_fields_in_debug)]
-impl Debug for Ident {
- // Ident(proc_macro), Ident(r#union)
- #[cfg(not(span_locations))]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_tuple("Ident");
- debug.field(&format_args!("{}", self));
- debug.finish()
- }
-
- // Ident {
- // sym: proc_macro,
- // span: bytes(128..138)
- // }
- #[cfg(span_locations)]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", self));
- debug_span_field_if_nontrivial(&mut debug, self.span);
- debug.finish()
- }
-}
-
-#[derive(Clone)]
-pub(crate) struct Literal {
- repr: String,
- span: Span,
-}
-
-macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(format!(concat!("{}", stringify!($kind)), n))
- }
- )*)
-}
-
-macro_rules! unsuffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(n.to_string())
- }
- )*)
-}
-
-impl Literal {
- pub(crate) fn _new(repr: String) -> Self {
- Literal {
- repr,
- span: Span::call_site(),
- }
- }
-
- pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self {
- Literal::_new(repr.to_owned())
- }
-
- suffixed_numbers! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
-
- f32_suffixed => f32,
- f64_suffixed => f64,
- }
-
- unsuffixed_numbers! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- let mut s = f.to_string();
- if !s.contains('.') {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- let mut s = f.to_string();
- if !s.contains('.') {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn string(t: &str) -> Literal {
- let mut repr = String::with_capacity(t.len() + 2);
- repr.push('"');
- let mut chars = t.chars();
- while let Some(ch) = chars.next() {
- if ch == '\0' {
- repr.push_str(
- if chars
- .as_str()
- .starts_with(|next| '0' <= next && next <= '7')
- {
- // circumvent clippy::octal_escapes lint
- "\\x00"
- } else {
- "\\0"
- },
- );
- } else if ch == '\'' {
- // escape_debug turns this into "\'" which is unnecessary.
- repr.push(ch);
- } else {
- repr.extend(ch.escape_debug());
- }
- }
- repr.push('"');
- Literal::_new(repr)
- }
-
- pub fn character(t: char) -> Literal {
- let mut repr = String::new();
- repr.push('\'');
- if t == '"' {
- // escape_debug turns this into '\"' which is unnecessary.
- repr.push(t);
- } else {
- repr.extend(t.escape_debug());
- }
- repr.push('\'');
- Literal::_new(repr)
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- let mut escaped = "b\"".to_string();
- let mut bytes = bytes.iter();
- while let Some(&b) = bytes.next() {
- #[allow(clippy::match_overlapping_arm)]
- match b {
- b'\0' => escaped.push_str(match bytes.as_slice().first() {
- // circumvent clippy::octal_escapes lint
- Some(b'0'..=b'7') => r"\x00",
- _ => r"\0",
- }),
- b'\t' => escaped.push_str(r"\t"),
- b'\n' => escaped.push_str(r"\n"),
- b'\r' => escaped.push_str(r"\r"),
- b'"' => escaped.push_str("\\\""),
- b'\\' => escaped.push_str("\\\\"),
- b'\x20'..=b'\x7E' => escaped.push(b as char),
- _ => {
- let _ = write!(escaped, "\\x{:02X}", b);
- }
- }
- }
- escaped.push('"');
- Literal::_new(escaped)
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- #[cfg(not(span_locations))]
- {
- let _ = range;
- None
- }
-
- #[cfg(span_locations)]
- {
- use core::ops::Bound;
-
- let lo = match range.start_bound() {
- Bound::Included(start) => {
- let start = u32::try_from(*start).ok()?;
- self.span.lo.checked_add(start)?
- }
- Bound::Excluded(start) => {
- let start = u32::try_from(*start).ok()?;
- self.span.lo.checked_add(start)?.checked_add(1)?
- }
- Bound::Unbounded => self.span.lo,
- };
- let hi = match range.end_bound() {
- Bound::Included(end) => {
- let end = u32::try_from(*end).ok()?;
- self.span.lo.checked_add(end)?.checked_add(1)?
- }
- Bound::Excluded(end) => {
- let end = u32::try_from(*end).ok()?;
- self.span.lo.checked_add(end)?
- }
- Bound::Unbounded => self.span.hi,
- };
- if lo <= hi && hi <= self.span.hi {
- Some(Span { lo, hi })
- } else {
- None
- }
- }
- }
-}
-
-impl FromStr for Literal {
- type Err = LexError;
-
- fn from_str(repr: &str) -> Result<Self, Self::Err> {
- let mut cursor = get_cursor(repr);
- #[cfg(span_locations)]
- let lo = cursor.off;
-
- let negative = cursor.starts_with_char('-');
- if negative {
- cursor = cursor.advance(1);
- if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) {
- return Err(LexError::call_site());
- }
- }
-
- if let Ok((rest, mut literal)) = parse::literal(cursor) {
- if rest.is_empty() {
- if negative {
- literal.repr.insert(0, '-');
- }
- literal.span = Span {
- #[cfg(span_locations)]
- lo,
- #[cfg(span_locations)]
- hi: rest.off,
- };
- return Ok(literal);
- }
- }
- Err(LexError::call_site())
- }
-}
-
-impl Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.repr, f)
- }
-}
-
-impl Debug for Literal {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Literal");
- debug.field("lit", &format_args!("{}", self.repr));
- debug_span_field_if_nontrivial(&mut debug, self.span);
- debug.finish()
- }
-}
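
For orientation while reviewing this removal, here is a small illustrative sketch (not part of the diff) that exercises the public `Literal` constructors whose fallback implementations appear above; it assumes proc-macro2 is available as an ordinary dependency.

```rust
// Illustrative only: the exact escaping of the output is produced by the
// removed logic in `Literal::string` / `Literal::byte_string` above.
fn main() {
    let s = proc_macro2::Literal::string("hello\tworld");
    let b = proc_macro2::Literal::byte_string(b"\x00\xFF");
    // The fallback pads a missing fractional part, so this displays as `1.0`.
    let f = proc_macro2::Literal::f32_unsuffixed(1.0);
    println!("{} {} {}", s, b, f);
}
```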
diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs
deleted file mode 100644
index 7e8f543..0000000
--- a/vendor/proc-macro2/src/lib.rs
+++ /dev/null
@@ -1,1328 +0,0 @@
-//! [![github]](https://github.com/dtolnay/proc-macro2)&ensp;[![crates-io]](https://crates.io/crates/proc-macro2)&ensp;[![docs-rs]](crate)
-//!
-//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
-//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
-//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
-//!
-//! <br>
-//!
-//! A wrapper around the procedural macro API of the compiler's [`proc_macro`]
-//! crate. This library serves two purposes:
-//!
-//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
-//!
-//! - **Bring proc-macro-like functionality to other contexts like build.rs and
-//! main.rs.** Types from `proc_macro` are entirely specific to procedural
-//! macros and cannot ever exist in code outside of a procedural macro.
-//! Meanwhile `proc_macro2` types may exist anywhere including non-macro code.
-//! By developing foundational libraries like [syn] and [quote] against
-//! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem
-//! becomes easily applicable to many other use cases and we avoid
-//! reimplementing non-macro equivalents of those libraries.
-//!
-//! - **Make procedural macros unit testable.** As a consequence of being
-//! specific to procedural macros, nothing that uses `proc_macro` can be
-//! executed from a unit test. In order for helper libraries or components of
-//! a macro to be testable in isolation, they must be implemented using
-//! `proc_macro2`.
-//!
-//! [syn]: https://github.com/dtolnay/syn
-//! [quote]: https://github.com/dtolnay/quote
-//!
-//! # Usage
-//!
-//! The skeleton of a typical procedural macro looks like this:
-//!
-//! ```
-//! extern crate proc_macro;
-//!
-//! # const IGNORE: &str = stringify! {
-//! #[proc_macro_derive(MyDerive)]
-//! # };
-//! # #[cfg(wrap_proc_macro)]
-//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-//! let input = proc_macro2::TokenStream::from(input);
-//!
-//! let output: proc_macro2::TokenStream = {
-//! /* transform input */
-//! # input
-//! };
-//!
-//! proc_macro::TokenStream::from(output)
-//! }
-//! ```
-//!
-//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to
-//! propagate parse errors correctly back to the compiler when parsing fails.
-//!
-//! [`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html
-//!
-//! # Unstable features
-//!
-//! The default feature set of proc-macro2 tracks the most recent stable
-//! compiler API. Functionality in `proc_macro` that is not yet stable is not
-//! exposed by proc-macro2 by default.
-//!
-//! To opt into the additional APIs available in the most recent nightly
-//! compiler, the `procmacro2_semver_exempt` config flag must be passed to
-//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As
-//! these are unstable APIs that track the nightly compiler, minor versions of
-//! proc-macro2 may make breaking changes to them at any time.
-//!
-//! ```sh
-//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
-//! ```
-//!
-//! Note that this must not only be done for your crate, but for any crate that
-//! depends on your crate. This infectious nature is intentional, as it serves
-//! as a reminder that you are outside of the normal semver guarantees.
-//!
-//! Semver exempt methods are marked as such in the proc-macro2 documentation.
-//!
-//! # Thread-Safety
-//!
-//! Most types in this crate are `!Sync` because the underlying compiler
-//! types make use of thread-local memory, meaning they cannot be accessed from
-//! a different thread.
-
-// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.76")]
-#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
-#![cfg_attr(super_unstable, feature(proc_macro_def_site))]
-#![cfg_attr(doc_cfg, feature(doc_cfg))]
-#![deny(unsafe_op_in_unsafe_fn)]
-#![allow(
- clippy::cast_lossless,
- clippy::cast_possible_truncation,
- clippy::checked_conversions,
- clippy::doc_markdown,
- clippy::items_after_statements,
- clippy::iter_without_into_iter,
- clippy::let_underscore_untyped,
- clippy::manual_assert,
- clippy::manual_range_contains,
- clippy::missing_safety_doc,
- clippy::must_use_candidate,
- clippy::needless_doctest_main,
- clippy::new_without_default,
- clippy::return_self_not_must_use,
- clippy::shadow_unrelated,
- clippy::trivially_copy_pass_by_ref,
- clippy::unnecessary_wraps,
- clippy::unused_self,
- clippy::used_underscore_binding,
- clippy::vec_init_then_push
-)]
-
-#[cfg(all(procmacro2_semver_exempt, wrap_proc_macro, not(super_unstable)))]
-compile_error! {"\
- Something is not right. If you've tried to turn on \
- procmacro2_semver_exempt, you need to ensure that it \
- is turned on for the compilation of the proc-macro2 \
- build script as well.
-"}
-
-#[cfg(all(
- procmacro2_nightly_testing,
- feature = "proc-macro",
- not(proc_macro_span)
-))]
-compile_error! {"\
- Build script probe failed to compile.
-"}
-
-extern crate alloc;
-
-#[cfg(feature = "proc-macro")]
-extern crate proc_macro;
-
-mod marker;
-mod parse;
-mod rcvec;
-
-#[cfg(wrap_proc_macro)]
-mod detection;
-
-// Public for proc_macro2::fallback::force() and unforce(), but those are quite
-// a niche use case so we omit it from rustdoc.
-#[doc(hidden)]
-pub mod fallback;
-
-pub mod extra;
-
-#[cfg(not(wrap_proc_macro))]
-use crate::fallback as imp;
-#[path = "wrapper.rs"]
-#[cfg(wrap_proc_macro)]
-mod imp;
-
-#[cfg(span_locations)]
-mod location;
-
-use crate::extra::DelimSpan;
-use crate::marker::Marker;
-use core::cmp::Ordering;
-use core::fmt::{self, Debug, Display};
-use core::hash::{Hash, Hasher};
-use core::ops::RangeBounds;
-use core::str::FromStr;
-use std::error::Error;
-#[cfg(procmacro2_semver_exempt)]
-use std::path::PathBuf;
-
-#[cfg(span_locations)]
-#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
-pub use crate::location::LineColumn;
-
-/// An abstract stream of tokens, or more concretely a sequence of token trees.
-///
-/// This type provides interfaces for iterating over token trees and for
-/// collecting token trees into one stream.
-///
-/// Token stream is both the input and output of `#[proc_macro]`,
-/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions.
-#[derive(Clone)]
-pub struct TokenStream {
- inner: imp::TokenStream,
- _marker: Marker,
-}
-
-/// Error returned from `TokenStream::from_str`.
-pub struct LexError {
- inner: imp::LexError,
- _marker: Marker,
-}
-
-impl TokenStream {
- fn _new(inner: imp::TokenStream) -> Self {
- TokenStream {
- inner,
- _marker: Marker,
- }
- }
-
- fn _new_fallback(inner: fallback::TokenStream) -> Self {
- TokenStream {
- inner: inner.into(),
- _marker: Marker,
- }
- }
-
- /// Returns an empty `TokenStream` containing no token trees.
- pub fn new() -> Self {
- TokenStream::_new(imp::TokenStream::new())
- }
-
- /// Checks if this `TokenStream` is empty.
- pub fn is_empty(&self) -> bool {
- self.inner.is_empty()
- }
-}
-
-/// `TokenStream::default()` returns an empty stream,
-/// i.e. this is equivalent to `TokenStream::new()`.
-impl Default for TokenStream {
- fn default() -> Self {
- TokenStream::new()
- }
-}
-
-/// Attempts to break the string into tokens and parse those tokens into a token
-/// stream.
-///
-/// May fail for a number of reasons, for example, if the string contains
-/// unbalanced delimiters or characters not existing in the language.
-///
-/// NOTE: Some errors may cause panics instead of returning `LexError`. We
-/// reserve the right to change these errors into `LexError`s later.
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let e = src.parse().map_err(|e| LexError {
- inner: e,
- _marker: Marker,
- })?;
- Ok(TokenStream::_new(e))
- }
-}
-
-#[cfg(feature = "proc-macro")]
-#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> Self {
- TokenStream::_new(inner.into())
- }
-}
-
-#[cfg(feature = "proc-macro")]
-#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> Self {
- inner.inner.into()
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> Self {
- TokenStream::_new(imp::TokenStream::from(token))
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
- self.inner.extend(streams);
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- self.inner
- .extend(streams.into_iter().map(|stream| stream.inner));
- }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- TokenStream::_new(streams.into_iter().collect())
- }
-}
-impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
- }
-}
-
-/// Prints the token stream as a string that is supposed to be losslessly
-/// convertible back into the same token stream (modulo spans), except for
-/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
-/// numeric literals.
-impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.inner, f)
- }
-}
-
-/// Prints token in a form convenient for debugging.
-impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
-
-impl LexError {
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-}
-
-impl Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
-
-impl Display for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.inner, f)
- }
-}
-
-impl Error for LexError {}
-
-/// The source file of a given `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
-#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
- inner: imp::SourceFile,
- _marker: Marker,
-}
-
-#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
-impl SourceFile {
- fn _new(inner: imp::SourceFile) -> Self {
- SourceFile {
- inner,
- _marker: Marker,
- }
- }
-
- /// Get the path to this source file.
- ///
- /// ### Note
- ///
- /// If the code span associated with this `SourceFile` was generated by an
- /// external macro, this may not be an actual path on the filesystem. Use
- /// [`is_real`] to check.
- ///
- /// Also note that even if `is_real` returns `true`, if
- /// `--remap-path-prefix` was passed on the command line, the path as given
- /// may not actually be valid.
- ///
- /// [`is_real`]: #method.is_real
- pub fn path(&self) -> PathBuf {
- self.inner.path()
- }
-
- /// Returns `true` if this source file is a real source file, and not
- /// generated by an external macro's expansion.
- pub fn is_real(&self) -> bool {
- self.inner.is_real()
- }
-}
-
-#[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
-impl Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
-
-/// A region of source code, along with macro expansion information.
-#[derive(Copy, Clone)]
-pub struct Span {
- inner: imp::Span,
- _marker: Marker,
-}
-
-impl Span {
- fn _new(inner: imp::Span) -> Self {
- Span {
- inner,
- _marker: Marker,
- }
- }
-
- fn _new_fallback(inner: fallback::Span) -> Self {
- Span {
- inner: inner.into(),
- _marker: Marker,
- }
- }
-
- /// The span of the invocation of the current procedural macro.
- ///
- /// Identifiers created with this span will be resolved as if they were
- /// written directly at the macro call location (call-site hygiene) and
- /// other code at the macro call site will be able to refer to them as well.
- pub fn call_site() -> Self {
- Span::_new(imp::Span::call_site())
- }
-
- /// The span located at the invocation of the procedural macro, but with
- /// local variables, labels, and `$crate` resolved at the definition site
- /// of the macro. This is the same hygiene behavior as `macro_rules`.
- pub fn mixed_site() -> Self {
- Span::_new(imp::Span::mixed_site())
- }
-
- /// A span that resolves at the macro definition site.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
- pub fn def_site() -> Self {
- Span::_new(imp::Span::def_site())
- }
-
- /// Creates a new span with the same line/column information as `self` but
- /// that resolves symbols as though it were at `other`.
- pub fn resolved_at(&self, other: Span) -> Span {
- Span::_new(self.inner.resolved_at(other.inner))
- }
-
- /// Creates a new span with the same name resolution behavior as `self` but
- /// with the line/column information of `other`.
- pub fn located_at(&self, other: Span) -> Span {
- Span::_new(self.inner.located_at(other.inner))
- }
-
- /// Convert `proc_macro2::Span` to `proc_macro::Span`.
- ///
- /// This method is available when building with a nightly compiler, or when
- /// building with rustc 1.29+ *without* semver exempt features.
- ///
- /// # Panics
- ///
- /// Panics if called from outside of a procedural macro. Unlike
- /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within
- /// the context of a procedural macro invocation.
- #[cfg(wrap_proc_macro)]
- pub fn unwrap(self) -> proc_macro::Span {
- self.inner.unwrap()
- }
-
- // Soft deprecated. Please use Span::unwrap.
- #[cfg(wrap_proc_macro)]
- #[doc(hidden)]
- pub fn unstable(self) -> proc_macro::Span {
- self.unwrap()
- }
-
- /// The original source file into which this span points.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))]
- #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
- pub fn source_file(&self) -> SourceFile {
- SourceFile::_new(self.inner.source_file())
- }
-
- /// Get the starting line/column in the source file for this span.
- ///
- /// This method requires the `"span-locations"` feature to be enabled.
- ///
- /// When executing in a procedural macro context, the returned line/column
- /// are only meaningful if compiled with a nightly toolchain. The stable
- /// toolchain does not have this information available. When executing
- /// outside of a procedural macro, such as main.rs or build.rs, the
- /// line/column are always meaningful regardless of toolchain.
- #[cfg(span_locations)]
- #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
- pub fn start(&self) -> LineColumn {
- self.inner.start()
- }
-
- /// Get the ending line/column in the source file for this span.
- ///
- /// This method requires the `"span-locations"` feature to be enabled.
- ///
- /// When executing in a procedural macro context, the returned line/column
- /// are only meaningful if compiled with a nightly toolchain. The stable
- /// toolchain does not have this information available. When executing
- /// outside of a procedural macro, such as main.rs or build.rs, the
- /// line/column are always meaningful regardless of toolchain.
- #[cfg(span_locations)]
- #[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
- pub fn end(&self) -> LineColumn {
- self.inner.end()
- }
-
- /// Create a new span encompassing `self` and `other`.
- ///
- /// Returns `None` if `self` and `other` are from different files.
- ///
- /// Warning: the underlying [`proc_macro::Span::join`] method is
- /// nightly-only. When called from within a procedural macro not using a
- /// nightly compiler, this method will always return `None`.
- ///
- /// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join
- pub fn join(&self, other: Span) -> Option<Span> {
- self.inner.join(other.inner).map(Span::_new)
- }
-
- /// Compares two spans to see if they're equal.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
- pub fn eq(&self, other: &Span) -> bool {
- self.inner.eq(&other.inner)
- }
-
- /// Returns the source text behind a span. This preserves the original
- /// source code, including spaces and comments. It only returns a result if
- /// the span corresponds to real source code.
- ///
- /// Note: The observable result of a macro should only rely on the tokens
- /// and not on this source text. The result of this function is a best
- /// effort to be used for diagnostics only.
- pub fn source_text(&self) -> Option<String> {
- self.inner.source_text()
- }
-}
-
-/// Prints a span in a form convenient for debugging.
-impl Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
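
As a quick illustration of the hygiene-related `Span` methods documented above, the following hypothetical snippet (not part of the diff) combines the resolution behavior of one span with the location of another; it is a sketch only and asserts nothing about span equality.

```rust
use proc_macro2::Span;

fn main() {
    // Both constructors are plain values when used outside a macro context.
    let call = Span::call_site();
    let mixed = Span::mixed_site();
    // Keep `call`'s location but resolve names as `mixed` would, and vice versa.
    let resolved = call.resolved_at(mixed);
    let located = call.located_at(mixed);
    let _ = (resolved, located);
}
```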
-
-/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
-#[derive(Clone)]
-pub enum TokenTree {
- /// A token stream surrounded by bracket delimiters.
- Group(Group),
- /// An identifier.
- Ident(Ident),
- /// A single punctuation character (`+`, `,`, `$`, etc.).
- Punct(Punct),
- /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
- Literal(Literal),
-}
-
-impl TokenTree {
- /// Returns the span of this tree, delegating to the `span` method of
- /// the contained token or a delimited stream.
- pub fn span(&self) -> Span {
- match self {
- TokenTree::Group(t) => t.span(),
- TokenTree::Ident(t) => t.span(),
- TokenTree::Punct(t) => t.span(),
- TokenTree::Literal(t) => t.span(),
- }
- }
-
- /// Configures the span for *only this token*.
- ///
- /// Note that if this token is a `Group` then this method will not configure
- /// the span of each of the internal tokens; it will simply delegate to
- /// the `set_span` method of each variant.
- pub fn set_span(&mut self, span: Span) {
- match self {
- TokenTree::Group(t) => t.set_span(span),
- TokenTree::Ident(t) => t.set_span(span),
- TokenTree::Punct(t) => t.set_span(span),
- TokenTree::Literal(t) => t.set_span(span),
- }
- }
-}
-
-impl From<Group> for TokenTree {
- fn from(g: Group) -> Self {
- TokenTree::Group(g)
- }
-}
-
-impl From<Ident> for TokenTree {
- fn from(g: Ident) -> Self {
- TokenTree::Ident(g)
- }
-}
-
-impl From<Punct> for TokenTree {
- fn from(g: Punct) -> Self {
- TokenTree::Punct(g)
- }
-}
-
-impl From<Literal> for TokenTree {
- fn from(g: Literal) -> Self {
- TokenTree::Literal(g)
- }
-}
-
-/// Prints the token tree as a string that is supposed to be losslessly
-/// convertible back into the same token tree (modulo spans), except for
-/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
-/// numeric literals.
-impl Display for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- TokenTree::Group(t) => Display::fmt(t, f),
- TokenTree::Ident(t) => Display::fmt(t, f),
- TokenTree::Punct(t) => Display::fmt(t, f),
- TokenTree::Literal(t) => Display::fmt(t, f),
- }
- }
-}
-
-/// Prints token tree in a form convenient for debugging.
-impl Debug for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // Each of these has the name in the struct type in the derived debug,
- // so don't bother with an extra layer of indirection
- match self {
- TokenTree::Group(t) => Debug::fmt(t, f),
- TokenTree::Ident(t) => {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", t));
- imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
- debug.finish()
- }
- TokenTree::Punct(t) => Debug::fmt(t, f),
- TokenTree::Literal(t) => Debug::fmt(t, f),
- }
- }
-}
-
-/// A delimited token stream.
-///
-/// A `Group` internally contains a `TokenStream` which is surrounded by
-/// `Delimiter`s.
-#[derive(Clone)]
-pub struct Group {
- inner: imp::Group,
-}
-
-/// Describes how a sequence of token trees is delimited.
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Delimiter {
- /// `( ... )`
- Parenthesis,
- /// `{ ... }`
- Brace,
- /// `[ ... ]`
- Bracket,
- /// `Ø ... Ø`
- ///
- /// An implicit delimiter, that may, for example, appear around tokens
- /// coming from a "macro variable" `$var`. It is important to preserve
- /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
- /// Implicit delimiters may not survive roundtrip of a token stream through
- /// a string.
- None,
-}
-
-impl Group {
- fn _new(inner: imp::Group) -> Self {
- Group { inner }
- }
-
- fn _new_fallback(inner: fallback::Group) -> Self {
- Group {
- inner: inner.into(),
- }
- }
-
- /// Creates a new `Group` with the given delimiter and token stream.
- ///
- /// This constructor will set the span for this group to
- /// `Span::call_site()`. To change the span you can use the `set_span`
- /// method below.
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
- Group {
- inner: imp::Group::new(delimiter, stream.inner),
- }
- }
-
- /// Returns the punctuation used as the delimiter for this group: a set of
- /// parentheses, square brackets, or curly braces.
- pub fn delimiter(&self) -> Delimiter {
- self.inner.delimiter()
- }
-
- /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
- ///
- /// Note that the returned token stream does not include the delimiter
- /// returned above.
- pub fn stream(&self) -> TokenStream {
- TokenStream::_new(self.inner.stream())
- }
-
- /// Returns the span for the delimiters of this token stream, spanning the
- /// entire `Group`.
- ///
- /// ```text
- /// pub fn span(&self) -> Span {
- /// ^^^^^^^
- /// ```
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- /// Returns the span pointing to the opening delimiter of this group.
- ///
- /// ```text
- /// pub fn span_open(&self) -> Span {
- /// ^
- /// ```
- pub fn span_open(&self) -> Span {
- Span::_new(self.inner.span_open())
- }
-
- /// Returns the span pointing to the closing delimiter of this group.
- ///
- /// ```text
- /// pub fn span_close(&self) -> Span {
- /// ^
- /// ```
- pub fn span_close(&self) -> Span {
- Span::_new(self.inner.span_close())
- }
-
- /// Returns an object that holds this group's `span_open()` and
- /// `span_close()` together (in a more compact representation than holding
- /// those 2 spans individually).
- pub fn delim_span(&self) -> DelimSpan {
- DelimSpan::new(&self.inner)
- }
-
- /// Configures the span for this `Group`'s delimiters, but not its internal
- /// tokens.
- ///
- /// This method will **not** set the span of all the internal tokens spanned
- /// by this group, but rather it will only set the span of the delimiter
- /// tokens at the level of the `Group`.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-}
-
-/// Prints the group as a string that should be losslessly convertible back
-/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
-/// with `Delimiter::None` delimiters.
-impl Display for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.inner, formatter)
- }
-}
-
-impl Debug for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, formatter)
- }
-}
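
A minimal sketch (not part of the diff) of the `Group` API described above, assuming proc-macro2 as a dependency; the printed spacing between tokens is implementation-defined.

```rust
use proc_macro2::{Delimiter, Group, TokenStream};

fn main() {
    let inner: TokenStream = "1, 2, 3".parse().expect("valid tokens");
    let group = Group::new(Delimiter::Bracket, inner);
    assert_eq!(group.delimiter(), Delimiter::Bracket);
    // Display round-trips the tokens, e.g. something like `[1 , 2 , 3]`.
    println!("{}", group);
}
```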
-
-/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
-///
-/// Multicharacter operators like `+=` are represented as two instances of
-/// `Punct` with different forms of `Spacing` returned.
-#[derive(Clone)]
-pub struct Punct {
- ch: char,
- spacing: Spacing,
- span: Span,
-}
-
-/// Whether a `Punct` is followed immediately by another `Punct` or followed by
-/// another token or whitespace.
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Spacing {
- /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
- Alone,
- /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
- ///
- /// Additionally, single quote `'` can join with identifiers to form
- /// lifetimes `'ident`.
- Joint,
-}
-
-impl Punct {
- /// Creates a new `Punct` from the given character and spacing.
- ///
- /// The `ch` argument must be a valid punctuation character permitted by the
- /// language, otherwise the function will panic.
- ///
- /// The returned `Punct` will have the default span of `Span::call_site()`
- /// which can be further configured with the `set_span` method below.
- pub fn new(ch: char, spacing: Spacing) -> Self {
- Punct {
- ch,
- spacing,
- span: Span::call_site(),
- }
- }
-
- /// Returns the value of this punctuation character as `char`.
- pub fn as_char(&self) -> char {
- self.ch
- }
-
- /// Returns the spacing of this punctuation character, indicating whether
- /// it's immediately followed by another `Punct` in the token stream, so
- /// they can potentially be combined into a multicharacter operator
- /// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
- /// so the operator has certainly ended.
- pub fn spacing(&self) -> Spacing {
- self.spacing
- }
-
- /// Returns the span for this punctuation character.
- pub fn span(&self) -> Span {
- self.span
- }
-
- /// Configure the span for this punctuation character.
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-/// Prints the punctuation character as a string that should be losslessly
-/// convertible back into the same character.
-impl Display for Punct {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.ch, f)
- }
-}
-
-impl Debug for Punct {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Punct");
- debug.field("char", &self.ch);
- debug.field("spacing", &self.spacing);
- imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
- debug.finish()
- }
-}
-
-/// A word of Rust code, which may be a keyword or legal variable name.
-///
-/// An identifier consists of at least one Unicode code point, the first of
-/// which has the XID_Start property and the rest of which have the XID_Continue
-/// property.
-///
-/// - The empty string is not an identifier. Use `Option<Ident>`.
-/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
-///
-/// An identifier constructed with `Ident::new` is permitted to be a Rust
-/// keyword, though parsing one through its [`Parse`] implementation rejects
-/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
-/// behaviour of `Ident::new`.
-///
-/// [`Parse`]: https://docs.rs/syn/2.0/syn/parse/trait.Parse.html
-///
-/// # Examples
-///
-/// A new ident can be created from a string using the `Ident::new` function.
-/// A span must be provided explicitly which governs the name resolution
-/// behavior of the resulting identifier.
-///
-/// ```
-/// use proc_macro2::{Ident, Span};
-///
-/// fn main() {
-/// let call_ident = Ident::new("calligraphy", Span::call_site());
-///
-/// println!("{}", call_ident);
-/// }
-/// ```
-///
-/// An ident can be interpolated into a token stream using the `quote!` macro.
-///
-/// ```
-/// use proc_macro2::{Ident, Span};
-/// use quote::quote;
-///
-/// fn main() {
-/// let ident = Ident::new("demo", Span::call_site());
-///
-/// // Create a variable binding whose name is this ident.
-/// let expanded = quote! { let #ident = 10; };
-///
-/// // Create a variable binding with a slightly different name.
-/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
-/// let expanded = quote! { let #temp_ident = 10; };
-/// }
-/// ```
-///
-/// A string representation of the ident is available through the `to_string()`
-/// method.
-///
-/// ```
-/// # use proc_macro2::{Ident, Span};
-/// #
-/// # let ident = Ident::new("another_identifier", Span::call_site());
-/// #
-/// // Examine the ident as a string.
-/// let ident_string = ident.to_string();
-/// if ident_string.len() > 60 {
-/// println!("Very long identifier: {}", ident_string)
-/// }
-/// ```
-#[derive(Clone)]
-pub struct Ident {
- inner: imp::Ident,
- _marker: Marker,
-}
-
-impl Ident {
- fn _new(inner: imp::Ident) -> Self {
- Ident {
- inner,
- _marker: Marker,
- }
- }
-
- /// Creates a new `Ident` with the given `string` as well as the specified
- /// `span`.
- ///
- /// The `string` argument must be a valid identifier permitted by the
- /// language, otherwise the function will panic.
- ///
- /// Note that `span`, currently in rustc, configures the hygiene information
- /// for this identifier.
- ///
- /// As of this time `Span::call_site()` explicitly opts-in to "call-site"
- /// hygiene meaning that identifiers created with this span will be resolved
- /// as if they were written directly at the location of the macro call, and
- /// other code at the macro call site will be able to refer to them as well.
- ///
- /// Later spans like `Span::def_site()` will allow opting in to
- /// "definition-site" hygiene, meaning that identifiers created with this
- /// span will be resolved at the location of the macro definition and other
- /// code at the macro call site will not be able to refer to them.
- ///
- /// Due to the current importance of hygiene, this constructor, unlike other
- /// tokens, requires a `Span` to be specified at construction.
- ///
- /// # Panics
- ///
- /// Panics if the input string is neither a keyword nor a legal variable
- /// name. If you are not sure whether the string contains an identifier and
- /// need to handle an error case, use
- /// <a href="https://docs.rs/syn/2.0/syn/fn.parse_str.html"><code
- /// style="padding-right:0;">syn::parse_str</code></a><code
- /// style="padding-left:0;">::&lt;Ident&gt;</code>
- /// rather than `Ident::new`.
- #[track_caller]
- pub fn new(string: &str, span: Span) -> Self {
- Ident::_new(imp::Ident::new_checked(string, span.inner))
- }
-
- /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The
- /// `string` argument must be a valid identifier permitted by the language
- /// (including keywords, e.g. `fn`). Keywords which are usable in path
- /// segments (e.g. `self`, `super`) are not supported, and will cause a
- /// panic.
- #[track_caller]
- pub fn new_raw(string: &str, span: Span) -> Self {
- Ident::_new(imp::Ident::new_raw_checked(string, span.inner))
- }
-
- /// Returns the span of this `Ident`.
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- /// Configures the span of this `Ident`, possibly changing its hygiene
- /// context.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- self.inner == other.inner
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- self.inner == other
- }
-}
-
-impl Eq for Ident {}
-
-impl PartialOrd for Ident {
- fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for Ident {
- fn cmp(&self, other: &Ident) -> Ordering {
- self.to_string().cmp(&other.to_string())
- }
-}
-
-impl Hash for Ident {
- fn hash<H: Hasher>(&self, hasher: &mut H) {
- self.to_string().hash(hasher);
- }
-}
-
-/// Prints the identifier as a string that should be losslessly convertible back
-/// into the same identifier.
-impl Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.inner, f)
- }
-}
-
-impl Debug for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
-
-/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
-/// byte character (`b'a'`), an integer or floating point number with or without
-/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
-///
-/// Boolean literals like `true` and `false` do not belong here; they are
-/// `Ident`s.
-#[derive(Clone)]
-pub struct Literal {
- inner: imp::Literal,
- _marker: Marker,
-}
-
-macro_rules! suffixed_int_literals {
- ($($name:ident => $kind:ident,)*) => ($(
- /// Creates a new suffixed integer literal with the specified value.
- ///
- /// This function will create an integer like `1u32` where the integer
- /// value specified is the first part of the token and the integer type
- /// is appended as the suffix. Literals created from negative numbers may
- /// not survive roundtrips through `TokenStream` or strings and may be
- /// broken into two tokens (`-` and positive literal).
- ///
- /// Literals created through this method have the `Span::call_site()`
- /// span by default, which can be configured with the `set_span` method
- /// below.
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(imp::Literal::$name(n))
- }
- )*)
-}
-
-macro_rules! unsuffixed_int_literals {
- ($($name:ident => $kind:ident,)*) => ($(
- /// Creates a new unsuffixed integer literal with the specified value.
- ///
- /// This function will create an integer like `1` where the integer
- /// value specified is the first part of the token. No suffix is
- /// specified on this token, meaning that invocations like
- /// `Literal::i8_unsuffixed(1)` are equivalent to
- /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
- /// may not survive roundtrips through `TokenStream` or strings and may
- /// be broken into two tokens (`-` and positive literal).
- ///
- /// Literals created through this method have the `Span::call_site()`
- /// span by default, which can be configured with the `set_span` method
- /// below.
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(imp::Literal::$name(n))
- }
- )*)
-}
-
-impl Literal {
- fn _new(inner: imp::Literal) -> Self {
- Literal {
- inner,
- _marker: Marker,
- }
- }
-
- fn _new_fallback(inner: fallback::Literal) -> Self {
- Literal {
- inner: inner.into(),
- _marker: Marker,
- }
- }
-
- suffixed_int_literals! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
- }
-
- unsuffixed_int_literals! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- /// Creates a new unsuffixed floating-point literal.
- ///
- /// This constructor is similar to those like `Literal::i8_unsuffixed` where
- /// the float's value is emitted directly into the token but no suffix is
- /// used, so it may be inferred to be a `f64` later in the compiler.
- /// Literals created from negative numbers may not survive round-trips
- /// through `TokenStream` or strings and may be broken into two tokens (`-`
- /// and positive literal).
- ///
- /// # Panics
- ///
- /// This function requires that the specified float is finite, for example
- /// if it is infinity or NaN this function will panic.
- pub fn f64_unsuffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_unsuffixed(f))
- }
-
- /// Creates a new suffixed floating-point literal.
- ///
- /// This constructor will create a literal like `1.0f64` where the value
- /// specified is the preceding part of the token and `f64` is the suffix of
- /// the token. This token will always be inferred to be an `f64` in the
- /// compiler. Literals created from negative numbers may not survive
- /// round-trips through `TokenStream` or strings and may be broken into two
- /// tokens (`-` and positive literal).
- ///
- /// # Panics
- ///
- /// This function requires that the specified float is finite, for example
- /// if it is infinity or NaN this function will panic.
- pub fn f64_suffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_suffixed(f))
- }
-
- /// Creates a new unsuffixed floating-point literal.
- ///
- /// This constructor is similar to those like `Literal::i8_unsuffixed` where
- /// the float's value is emitted directly into the token but no suffix is
- /// used, so it may be inferred to be a `f64` later in the compiler.
- /// Literals created from negative numbers may not survive round-trips
- /// through `TokenStream` or strings and may be broken into two tokens (`-`
- /// and positive literal).
- ///
- /// # Panics
- ///
- /// This function requires that the specified float is finite, for example
- /// if it is infinity or NaN this function will panic.
- pub fn f32_unsuffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_unsuffixed(f))
- }
-
- /// Creates a new suffixed floating-point literal.
- ///
- /// This constructor will create a literal like `1.0f32` where the value
- /// specified is the preceding part of the token and `f32` is the suffix of
- /// the token. This token will always be inferred to be an `f32` in the
- /// compiler. Literals created from negative numbers may not survive
- /// round-trips through `TokenStream` or strings and may be broken into two
- /// tokens (`-` and positive literal).
- ///
- /// # Panics
- ///
- /// This function requires that the specified float is finite, for example
- /// if it is infinity or NaN this function will panic.
- pub fn f32_suffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_suffixed(f))
- }
-
- /// String literal.
- pub fn string(string: &str) -> Literal {
- Literal::_new(imp::Literal::string(string))
- }
-
- /// Character literal.
- pub fn character(ch: char) -> Literal {
- Literal::_new(imp::Literal::character(ch))
- }
-
- /// Byte string literal.
- pub fn byte_string(s: &[u8]) -> Literal {
- Literal::_new(imp::Literal::byte_string(s))
- }
-
- /// Returns the span encompassing this literal.
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- /// Configures the span associated with this literal.
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-
- /// Returns a `Span` that is a subset of `self.span()` containing only
- /// the source bytes in range `range`. Returns `None` if the would-be
- /// trimmed span is outside the bounds of `self`.
- ///
- /// Warning: the underlying [`proc_macro::Literal::subspan`] method is
- /// nightly-only. When called from within a procedural macro not using a
- /// nightly compiler, this method will always return `None`.
- ///
- /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- self.inner.subspan(range).map(Span::_new)
- }
-
- // Intended for the `quote!` macro to use when constructing a proc-macro2
- // token out of a macro_rules $:literal token, which is already known to be
- // a valid literal. This avoids reparsing/validating the literal's string
- // representation. This is not public API other than for quote.
- #[doc(hidden)]
- pub unsafe fn from_str_unchecked(repr: &str) -> Self {
- Literal::_new(unsafe { imp::Literal::from_str_unchecked(repr) })
- }
-}
-
-impl FromStr for Literal {
- type Err = LexError;
-
- fn from_str(repr: &str) -> Result<Self, LexError> {
- repr.parse().map(Literal::_new).map_err(|inner| LexError {
- inner,
- _marker: Marker,
- })
- }
-}
-
-impl Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Debug::fmt(&self.inner, f)
- }
-}
-
-impl Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- Display::fmt(&self.inner, f)
- }
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
- use crate::marker::Marker;
- use crate::{imp, TokenTree};
- use core::fmt::{self, Debug};
-
- pub use crate::TokenStream;
-
- /// An iterator over `TokenStream`'s `TokenTree`s.
- ///
- /// The iteration is "shallow", i.e. the iterator doesn't recurse into
- /// delimited groups, and returns whole groups as token trees.
- #[derive(Clone)]
- pub struct IntoIter {
- inner: imp::TokenTreeIter,
- _marker: Marker,
- }
-
- impl Iterator for IntoIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- self.inner.next()
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
- }
-
- impl Debug for IntoIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str("TokenStream ")?;
- f.debug_list().entries(self.clone()).finish()
- }
- }
-
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = IntoIter;
-
- fn into_iter(self) -> IntoIter {
- IntoIter {
- inner: self.inner.into_iter(),
- _marker: Marker,
- }
- }
- }
-}
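
To summarize the public surface removed with lib.rs, here is a short, hypothetical example (not part of the diff) that parses a `TokenStream` outside of a macro and walks its token trees, as the `FromStr` impl and `token_stream::IntoIter` above allow.

```rust
use proc_macro2::{TokenStream, TokenTree};

fn main() {
    // Outside a procedural macro the fallback lexer defined in this crate is used.
    let ts: TokenStream = "fn answer() -> u8 { 42 }".parse().expect("valid Rust tokens");
    for tt in ts {
        match tt {
            TokenTree::Group(g) => println!("group {:?}", g.delimiter()),
            TokenTree::Ident(i) => println!("ident {}", i),
            TokenTree::Punct(p) => println!("punct {}", p.as_char()),
            TokenTree::Literal(l) => println!("literal {}", l),
        }
    }
}
```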
diff --git a/vendor/proc-macro2/src/location.rs b/vendor/proc-macro2/src/location.rs
deleted file mode 100644
index 463026c..0000000
--- a/vendor/proc-macro2/src/location.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-use core::cmp::Ordering;
-
-/// A line-column pair representing the start or end of a `Span`.
-///
-/// This type is semver exempt and not exposed by default.
-#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub struct LineColumn {
- /// The 1-indexed line in the source file on which the span starts or ends
- /// (inclusive).
- pub line: usize,
- /// The 0-indexed column (in UTF-8 characters) in the source file on which
- /// the span starts or ends (inclusive).
- pub column: usize,
-}
-
-impl Ord for LineColumn {
- fn cmp(&self, other: &Self) -> Ordering {
- self.line
- .cmp(&other.line)
- .then(self.column.cmp(&other.column))
- }
-}
-
-impl PartialOrd for LineColumn {
- fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
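
`LineColumn` is only reachable through spans when the optional `span-locations` feature is enabled; a hypothetical sketch (not part of the diff):

```rust
// Requires proc-macro2 with the "span-locations" feature enabled.
use proc_macro2::TokenStream;

fn main() {
    let ts: TokenStream = "let x = 1;".parse().expect("valid tokens");
    for tt in ts {
        let start = tt.span().start(); // 1-indexed line, 0-indexed column
        println!("{} starts at {}:{}", tt, start.line, start.column);
    }
}
```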
diff --git a/vendor/proc-macro2/src/marker.rs b/vendor/proc-macro2/src/marker.rs
deleted file mode 100644
index e8874bd..0000000
--- a/vendor/proc-macro2/src/marker.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use alloc::rc::Rc;
-use core::marker::PhantomData;
-use core::panic::{RefUnwindSafe, UnwindSafe};
-
-// Zero sized marker with the correct set of autotrait impls we want all proc
-// macro types to have.
-pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
-
-pub(crate) use self::value::*;
-
-mod value {
- pub(crate) use core::marker::PhantomData as Marker;
-}
-
-pub(crate) struct ProcMacroAutoTraits(
- #[allow(dead_code)] // https://github.com/rust-lang/rust/issues/119645
- Rc<()>,
-);
-
-impl UnwindSafe for ProcMacroAutoTraits {}
-impl RefUnwindSafe for ProcMacroAutoTraits {}
diff --git a/vendor/proc-macro2/src/parse.rs b/vendor/proc-macro2/src/parse.rs
deleted file mode 100644
index 07239bc..0000000
--- a/vendor/proc-macro2/src/parse.rs
+++ /dev/null
@@ -1,996 +0,0 @@
-use crate::fallback::{
- self, is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
- TokenStreamBuilder,
-};
-use crate::{Delimiter, Punct, Spacing, TokenTree};
-use core::char;
-use core::str::{Bytes, CharIndices, Chars};
-
-#[derive(Copy, Clone, Eq, PartialEq)]
-pub(crate) struct Cursor<'a> {
- pub rest: &'a str,
- #[cfg(span_locations)]
- pub off: u32,
-}
-
-impl<'a> Cursor<'a> {
- pub fn advance(&self, bytes: usize) -> Cursor<'a> {
- let (_front, rest) = self.rest.split_at(bytes);
- Cursor {
- rest,
- #[cfg(span_locations)]
- off: self.off + _front.chars().count() as u32,
- }
- }
-
- pub fn starts_with(&self, s: &str) -> bool {
- self.rest.starts_with(s)
- }
-
- pub fn starts_with_char(&self, ch: char) -> bool {
- self.rest.starts_with(ch)
- }
-
- pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
- where
- Pattern: FnMut(char) -> bool,
- {
- self.rest.starts_with(f)
- }
-
- pub fn is_empty(&self) -> bool {
- self.rest.is_empty()
- }
-
- fn len(&self) -> usize {
- self.rest.len()
- }
-
- fn as_bytes(&self) -> &'a [u8] {
- self.rest.as_bytes()
- }
-
- fn bytes(&self) -> Bytes<'a> {
- self.rest.bytes()
- }
-
- fn chars(&self) -> Chars<'a> {
- self.rest.chars()
- }
-
- fn char_indices(&self) -> CharIndices<'a> {
- self.rest.char_indices()
- }
-
- fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
- if self.starts_with(tag) {
- Ok(self.advance(tag.len()))
- } else {
- Err(Reject)
- }
- }
-}
-
-pub(crate) struct Reject;
-type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
-
-fn skip_whitespace(input: Cursor) -> Cursor {
- let mut s = input;
-
- while !s.is_empty() {
- let byte = s.as_bytes()[0];
- if byte == b'/' {
- if s.starts_with("//")
- && (!s.starts_with("///") || s.starts_with("////"))
- && !s.starts_with("//!")
- {
- let (cursor, _) = take_until_newline_or_eof(s);
- s = cursor;
- continue;
- } else if s.starts_with("/**/") {
- s = s.advance(4);
- continue;
- } else if s.starts_with("/*")
- && (!s.starts_with("/**") || s.starts_with("/***"))
- && !s.starts_with("/*!")
- {
- match block_comment(s) {
- Ok((rest, _)) => {
- s = rest;
- continue;
- }
- Err(Reject) => return s,
- }
- }
- }
- match byte {
- b' ' | 0x09..=0x0d => {
- s = s.advance(1);
- continue;
- }
- b if b.is_ascii() => {}
- _ => {
- let ch = s.chars().next().unwrap();
- if is_whitespace(ch) {
- s = s.advance(ch.len_utf8());
- continue;
- }
- }
- }
- return s;
- }
- s
-}
-
-fn block_comment(input: Cursor) -> PResult<&str> {
- if !input.starts_with("/*") {
- return Err(Reject);
- }
-
- let mut depth = 0usize;
- let bytes = input.as_bytes();
- let mut i = 0usize;
- let upper = bytes.len() - 1;
-
- while i < upper {
- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
- depth += 1;
- i += 1; // eat '*'
- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
- depth -= 1;
- if depth == 0 {
- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
- }
- i += 1; // eat '/'
- }
- i += 1;
- }
-
- Err(Reject)
-}
-
-fn is_whitespace(ch: char) -> bool {
- // Rust treats left-to-right mark and right-to-left mark as whitespace
- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
-}
-
-fn word_break(input: Cursor) -> Result<Cursor, Reject> {
- match input.chars().next() {
- Some(ch) if is_ident_continue(ch) => Err(Reject),
- Some(_) | None => Ok(input),
- }
-}
-
-// Rustc's representation of a macro expansion error in expression position or
-// type position.
-const ERROR: &str = "(/*ERROR*/)";
-
-pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
- let mut trees = TokenStreamBuilder::new();
- let mut stack = Vec::new();
-
- loop {
- input = skip_whitespace(input);
-
- if let Ok((rest, ())) = doc_comment(input, &mut trees) {
- input = rest;
- continue;
- }
-
- #[cfg(span_locations)]
- let lo = input.off;
-
- let first = match input.bytes().next() {
- Some(first) => first,
- None => match stack.last() {
- None => return Ok(trees.build()),
- #[cfg(span_locations)]
- Some((lo, _frame)) => {
- return Err(LexError {
- span: Span { lo: *lo, hi: *lo },
- })
- }
- #[cfg(not(span_locations))]
- Some(_frame) => return Err(LexError { span: Span {} }),
- },
- };
-
- if let Some(open_delimiter) = match first {
- b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis),
- b'[' => Some(Delimiter::Bracket),
- b'{' => Some(Delimiter::Brace),
- _ => None,
- } {
- input = input.advance(1);
- let frame = (open_delimiter, trees);
- #[cfg(span_locations)]
- let frame = (lo, frame);
- stack.push(frame);
- trees = TokenStreamBuilder::new();
- } else if let Some(close_delimiter) = match first {
- b')' => Some(Delimiter::Parenthesis),
- b']' => Some(Delimiter::Bracket),
- b'}' => Some(Delimiter::Brace),
- _ => None,
- } {
- let frame = match stack.pop() {
- Some(frame) => frame,
- None => return Err(lex_error(input)),
- };
- #[cfg(span_locations)]
- let (lo, frame) = frame;
- let (open_delimiter, outer) = frame;
- if open_delimiter != close_delimiter {
- return Err(lex_error(input));
- }
- input = input.advance(1);
- let mut g = Group::new(open_delimiter, trees.build());
- g.set_span(Span {
- #[cfg(span_locations)]
- lo,
- #[cfg(span_locations)]
- hi: input.off,
- });
- trees = outer;
- trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
- } else {
- let (rest, mut tt) = match leaf_token(input) {
- Ok((rest, tt)) => (rest, tt),
- Err(Reject) => return Err(lex_error(input)),
- };
- tt.set_span(crate::Span::_new_fallback(Span {
- #[cfg(span_locations)]
- lo,
- #[cfg(span_locations)]
- hi: rest.off,
- }));
- trees.push_token_from_parser(tt);
- input = rest;
- }
- }
-}
-
-fn lex_error(cursor: Cursor) -> LexError {
- #[cfg(not(span_locations))]
- let _ = cursor;
- LexError {
- span: Span {
- #[cfg(span_locations)]
- lo: cursor.off,
- #[cfg(span_locations)]
- hi: cursor.off,
- },
- }
-}
-
-fn leaf_token(input: Cursor) -> PResult<TokenTree> {
- if let Ok((input, l)) = literal(input) {
- // must be parsed before ident
- Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
- } else if let Ok((input, p)) = punct(input) {
- Ok((input, TokenTree::Punct(p)))
- } else if let Ok((input, i)) = ident(input) {
- Ok((input, TokenTree::Ident(i)))
- } else if input.starts_with(ERROR) {
- let rest = input.advance(ERROR.len());
- let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned()));
- Ok((rest, TokenTree::Literal(repr)))
- } else {
- Err(Reject)
- }
-}
-
-fn ident(input: Cursor) -> PResult<crate::Ident> {
- if [
- "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#",
- ]
- .iter()
- .any(|prefix| input.starts_with(prefix))
- {
- Err(Reject)
- } else {
- ident_any(input)
- }
-}
-
-fn ident_any(input: Cursor) -> PResult<crate::Ident> {
- let raw = input.starts_with("r#");
- let rest = input.advance((raw as usize) << 1);
-
- let (rest, sym) = ident_not_raw(rest)?;
-
- if !raw {
- let ident = crate::Ident::_new(crate::imp::Ident::new_unchecked(
- sym,
- fallback::Span::call_site(),
- ));
- return Ok((rest, ident));
- }
-
- match sym {
- "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
- _ => {}
- }
-
- let ident = crate::Ident::_new(crate::imp::Ident::new_raw_unchecked(
- sym,
- fallback::Span::call_site(),
- ));
- Ok((rest, ident))
-}
-
-fn ident_not_raw(input: Cursor) -> PResult<&str> {
- let mut chars = input.char_indices();
-
- match chars.next() {
- Some((_, ch)) if is_ident_start(ch) => {}
- _ => return Err(Reject),
- }
-
- let mut end = input.len();
- for (i, ch) in chars {
- if !is_ident_continue(ch) {
- end = i;
- break;
- }
- }
-
- Ok((input.advance(end), &input.rest[..end]))
-}
-
-pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
- let rest = literal_nocapture(input)?;
- let end = input.len() - rest.len();
- Ok((rest, Literal::_new(input.rest[..end].to_string())))
-}
-
-fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
- if let Ok(ok) = string(input) {
- Ok(ok)
- } else if let Ok(ok) = byte_string(input) {
- Ok(ok)
- } else if let Ok(ok) = c_string(input) {
- Ok(ok)
- } else if let Ok(ok) = byte(input) {
- Ok(ok)
- } else if let Ok(ok) = character(input) {
- Ok(ok)
- } else if let Ok(ok) = float(input) {
- Ok(ok)
- } else if let Ok(ok) = int(input) {
- Ok(ok)
- } else {
- Err(Reject)
- }
-}
-
-fn literal_suffix(input: Cursor) -> Cursor {
- match ident_not_raw(input) {
- Ok((input, _)) => input,
- Err(Reject) => input,
- }
-}
-
-fn string(input: Cursor) -> Result<Cursor, Reject> {
- if let Ok(input) = input.parse("\"") {
- cooked_string(input)
- } else if let Ok(input) = input.parse("r") {
- raw_string(input)
- } else {
- Err(Reject)
- }
-}
-
-fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> {
- let mut chars = input.char_indices();
-
- while let Some((i, ch)) = chars.next() {
- match ch {
- '"' => {
- let input = input.advance(i + 1);
- return Ok(literal_suffix(input));
- }
- '\r' => match chars.next() {
- Some((_, '\n')) => {}
- _ => break,
- },
- '\\' => match chars.next() {
- Some((_, 'x')) => {
- backslash_x_char(&mut chars)?;
- }
- Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {}
- Some((_, 'u')) => {
- backslash_u(&mut chars)?;
- }
- Some((newline, ch @ ('\n' | '\r'))) => {
- input = input.advance(newline + 1);
- trailing_backslash(&mut input, ch as u8)?;
- chars = input.char_indices();
- }
- _ => break,
- },
- _ch => {}
- }
- }
- Err(Reject)
-}
-
-fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
- let (input, delimiter) = delimiter_of_raw_string(input)?;
- let mut bytes = input.bytes().enumerate();
- while let Some((i, byte)) = bytes.next() {
- match byte {
- b'"' if input.rest[i + 1..].starts_with(delimiter) => {
- let rest = input.advance(i + 1 + delimiter.len());
- return Ok(literal_suffix(rest));
- }
- b'\r' => match bytes.next() {
- Some((_, b'\n')) => {}
- _ => break,
- },
- _ => {}
- }
- }
- Err(Reject)
-}
-
-fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
- if let Ok(input) = input.parse("b\"") {
- cooked_byte_string(input)
- } else if let Ok(input) = input.parse("br") {
- raw_byte_string(input)
- } else {
- Err(Reject)
- }
-}
-
-fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
- let mut bytes = input.bytes().enumerate();
- while let Some((offset, b)) = bytes.next() {
- match b {
- b'"' => {
- let input = input.advance(offset + 1);
- return Ok(literal_suffix(input));
- }
- b'\r' => match bytes.next() {
- Some((_, b'\n')) => {}
- _ => break,
- },
- b'\\' => match bytes.next() {
- Some((_, b'x')) => {
- backslash_x_byte(&mut bytes)?;
- }
- Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {}
- Some((newline, b @ (b'\n' | b'\r'))) => {
- input = input.advance(newline + 1);
- trailing_backslash(&mut input, b)?;
- bytes = input.bytes().enumerate();
- }
- _ => break,
- },
- b if b.is_ascii() => {}
- _ => break,
- }
- }
- Err(Reject)
-}
-
-fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> {
- for (i, byte) in input.bytes().enumerate() {
- match byte {
- b'"' => {
- if i > 255 {
- // https://github.com/rust-lang/rust/pull/95251
- return Err(Reject);
- }
- return Ok((input.advance(i + 1), &input.rest[..i]));
- }
- b'#' => {}
- _ => break,
- }
- }
- Err(Reject)
-}
-
-fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> {
- let (input, delimiter) = delimiter_of_raw_string(input)?;
- let mut bytes = input.bytes().enumerate();
- while let Some((i, byte)) = bytes.next() {
- match byte {
- b'"' if input.rest[i + 1..].starts_with(delimiter) => {
- let rest = input.advance(i + 1 + delimiter.len());
- return Ok(literal_suffix(rest));
- }
- b'\r' => match bytes.next() {
- Some((_, b'\n')) => {}
- _ => break,
- },
- other => {
- if !other.is_ascii() {
- break;
- }
- }
- }
- }
- Err(Reject)
-}
-
-fn c_string(input: Cursor) -> Result<Cursor, Reject> {
- if let Ok(input) = input.parse("c\"") {
- cooked_c_string(input)
- } else if let Ok(input) = input.parse("cr") {
- raw_c_string(input)
- } else {
- Err(Reject)
- }
-}
-
-fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> {
- let (input, delimiter) = delimiter_of_raw_string(input)?;
- let mut bytes = input.bytes().enumerate();
- while let Some((i, byte)) = bytes.next() {
- match byte {
- b'"' if input.rest[i + 1..].starts_with(delimiter) => {
- let rest = input.advance(i + 1 + delimiter.len());
- return Ok(literal_suffix(rest));
- }
- b'\r' => match bytes.next() {
- Some((_, b'\n')) => {}
- _ => break,
- },
- b'\0' => break,
- _ => {}
- }
- }
- Err(Reject)
-}
-
-fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> {
- let mut chars = input.char_indices();
-
- while let Some((i, ch)) = chars.next() {
- match ch {
- '"' => {
- let input = input.advance(i + 1);
- return Ok(literal_suffix(input));
- }
- '\r' => match chars.next() {
- Some((_, '\n')) => {}
- _ => break,
- },
- '\\' => match chars.next() {
- Some((_, 'x')) => {
- backslash_x_nonzero(&mut chars)?;
- }
- Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {}
- Some((_, 'u')) => {
- if backslash_u(&mut chars)? == '\0' {
- break;
- }
- }
- Some((newline, ch @ ('\n' | '\r'))) => {
- input = input.advance(newline + 1);
- trailing_backslash(&mut input, ch as u8)?;
- chars = input.char_indices();
- }
- _ => break,
- },
- '\0' => break,
- _ch => {}
- }
- }
- Err(Reject)
-}
-
-fn byte(input: Cursor) -> Result<Cursor, Reject> {
- let input = input.parse("b'")?;
- let mut bytes = input.bytes().enumerate();
- let ok = match bytes.next().map(|(_, b)| b) {
- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
- Some(b'x') => backslash_x_byte(&mut bytes).is_ok(),
- Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true,
- _ => false,
- },
- b => b.is_some(),
- };
- if !ok {
- return Err(Reject);
- }
- let (offset, _) = bytes.next().ok_or(Reject)?;
- if !input.chars().as_str().is_char_boundary(offset) {
- return Err(Reject);
- }
- let input = input.advance(offset).parse("'")?;
- Ok(literal_suffix(input))
-}
-
-fn character(input: Cursor) -> Result<Cursor, Reject> {
- let input = input.parse("'")?;
- let mut chars = input.char_indices();
- let ok = match chars.next().map(|(_, ch)| ch) {
- Some('\\') => match chars.next().map(|(_, ch)| ch) {
- Some('x') => backslash_x_char(&mut chars).is_ok(),
- Some('u') => backslash_u(&mut chars).is_ok(),
- Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true,
- _ => false,
- },
- ch => ch.is_some(),
- };
- if !ok {
- return Err(Reject);
- }
- let (idx, _) = chars.next().ok_or(Reject)?;
- let input = input.advance(idx).parse("'")?;
- Ok(literal_suffix(input))
-}
-
-macro_rules! next_ch {
- ($chars:ident @ $pat:pat) => {
- match $chars.next() {
- Some((_, ch)) => match ch {
- $pat => ch,
- _ => return Err(Reject),
- },
- None => return Err(Reject),
- }
- };
-}
-
-fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject>
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '0'..='7');
- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- Ok(())
-}
-
-fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject>
-where
- I: Iterator<Item = (usize, u8)>,
-{
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
- Ok(())
-}
-
-fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject>
-where
- I: Iterator<Item = (usize, char)>,
-{
- let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
- if first == '0' && second == '0' {
- Err(Reject)
- } else {
- Ok(())
- }
-}
-
-fn backslash_u<I>(chars: &mut I) -> Result<char, Reject>
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '{');
- let mut value = 0;
- let mut len = 0;
- for (_, ch) in chars {
- let digit = match ch {
- '0'..='9' => ch as u8 - b'0',
- 'a'..='f' => 10 + ch as u8 - b'a',
- 'A'..='F' => 10 + ch as u8 - b'A',
- '_' if len > 0 => continue,
- '}' if len > 0 => return char::from_u32(value).ok_or(Reject),
- _ => break,
- };
- if len == 6 {
- break;
- }
- value *= 0x10;
- value += u32::from(digit);
- len += 1;
- }
- Err(Reject)
-}
-
-fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> {
- let mut whitespace = input.bytes().enumerate();
- loop {
- if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') {
- return Err(Reject);
- }
- match whitespace.next() {
- Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => {
- last = b;
- }
- Some((offset, _)) => {
- *input = input.advance(offset);
- return Ok(());
- }
- None => return Err(Reject),
- }
- }
-}
-
-fn float(input: Cursor) -> Result<Cursor, Reject> {
- let mut rest = float_digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = ident_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
- let mut chars = input.chars().peekable();
- match chars.next() {
- Some(ch) if '0' <= ch && ch <= '9' => {}
- _ => return Err(Reject),
- }
-
- let mut len = 1;
- let mut has_dot = false;
- let mut has_exp = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '0'..='9' | '_' => {
- chars.next();
- len += 1;
- }
- '.' => {
- if has_dot {
- break;
- }
- chars.next();
- if chars
- .peek()
- .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
- {
- return Err(Reject);
- }
- len += 1;
- has_dot = true;
- }
- 'e' | 'E' => {
- chars.next();
- len += 1;
- has_exp = true;
- break;
- }
- _ => break,
- }
- }
-
- if !(has_dot || has_exp) {
- return Err(Reject);
- }
-
- if has_exp {
- let token_before_exp = if has_dot {
- Ok(input.advance(len - 1))
- } else {
- Err(Reject)
- };
- let mut has_sign = false;
- let mut has_exp_value = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '+' | '-' => {
- if has_exp_value {
- break;
- }
- if has_sign {
- return token_before_exp;
- }
- chars.next();
- len += 1;
- has_sign = true;
- }
- '0'..='9' => {
- chars.next();
- len += 1;
- has_exp_value = true;
- }
- '_' => {
- chars.next();
- len += 1;
- }
- _ => break,
- }
- }
- if !has_exp_value {
- return token_before_exp;
- }
- }
-
- Ok(input.advance(len))
-}
-
-fn int(input: Cursor) -> Result<Cursor, Reject> {
- let mut rest = digits(input)?;
- if let Some(ch) = rest.chars().next() {
- if is_ident_start(ch) {
- rest = ident_not_raw(rest)?.0;
- }
- }
- word_break(rest)
-}
-
-fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
- let base = if input.starts_with("0x") {
- input = input.advance(2);
- 16
- } else if input.starts_with("0o") {
- input = input.advance(2);
- 8
- } else if input.starts_with("0b") {
- input = input.advance(2);
- 2
- } else {
- 10
- };
-
- let mut len = 0;
- let mut empty = true;
- for b in input.bytes() {
- match b {
- b'0'..=b'9' => {
- let digit = (b - b'0') as u64;
- if digit >= base {
- return Err(Reject);
- }
- }
- b'a'..=b'f' => {
- let digit = 10 + (b - b'a') as u64;
- if digit >= base {
- break;
- }
- }
- b'A'..=b'F' => {
- let digit = 10 + (b - b'A') as u64;
- if digit >= base {
- break;
- }
- }
- b'_' => {
- if empty && base == 10 {
- return Err(Reject);
- }
- len += 1;
- continue;
- }
- _ => break,
- };
- len += 1;
- empty = false;
- }
- if empty {
- Err(Reject)
- } else {
- Ok(input.advance(len))
- }
-}
-
-fn punct(input: Cursor) -> PResult<Punct> {
- let (rest, ch) = punct_char(input)?;
- if ch == '\'' {
- if ident_any(rest)?.0.starts_with_char('\'') {
- Err(Reject)
- } else {
- Ok((rest, Punct::new('\'', Spacing::Joint)))
- }
- } else {
- let kind = match punct_char(rest) {
- Ok(_) => Spacing::Joint,
- Err(Reject) => Spacing::Alone,
- };
- Ok((rest, Punct::new(ch, kind)))
- }
-}
-
-fn punct_char(input: Cursor) -> PResult<char> {
- if input.starts_with("//") || input.starts_with("/*") {
- // Do not accept `/` of a comment as a punct.
- return Err(Reject);
- }
-
- let mut chars = input.chars();
- let first = match chars.next() {
- Some(ch) => ch,
- None => {
- return Err(Reject);
- }
- };
- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
- if recognized.contains(first) {
- Ok((input.advance(first.len_utf8()), first))
- } else {
- Err(Reject)
- }
-}
-
-fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
- #[cfg(span_locations)]
- let lo = input.off;
- let (rest, (comment, inner)) = doc_comment_contents(input)?;
- let fallback_span = Span {
- #[cfg(span_locations)]
- lo,
- #[cfg(span_locations)]
- hi: rest.off,
- };
- let span = crate::Span::_new_fallback(fallback_span);
-
- let mut scan_for_bare_cr = comment;
- while let Some(cr) = scan_for_bare_cr.find('\r') {
- let rest = &scan_for_bare_cr[cr + 1..];
- if !rest.starts_with('\n') {
- return Err(Reject);
- }
- scan_for_bare_cr = rest;
- }
-
- let mut pound = Punct::new('#', Spacing::Alone);
- pound.set_span(span);
- trees.push_token_from_parser(TokenTree::Punct(pound));
-
- if inner {
- let mut bang = Punct::new('!', Spacing::Alone);
- bang.set_span(span);
- trees.push_token_from_parser(TokenTree::Punct(bang));
- }
-
- let doc_ident = crate::Ident::_new(crate::imp::Ident::new_unchecked("doc", fallback_span));
- let mut equal = Punct::new('=', Spacing::Alone);
- equal.set_span(span);
- let mut literal = crate::Literal::string(comment);
- literal.set_span(span);
- let mut bracketed = TokenStreamBuilder::with_capacity(3);
- bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
- bracketed.push_token_from_parser(TokenTree::Punct(equal));
- bracketed.push_token_from_parser(TokenTree::Literal(literal));
- let group = Group::new(Delimiter::Bracket, bracketed.build());
- let mut group = crate::Group::_new_fallback(group);
- group.set_span(span);
- trees.push_token_from_parser(TokenTree::Group(group));
-
- Ok((rest, ()))
-}
-
-fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
- if input.starts_with("//!") {
- let input = input.advance(3);
- let (input, s) = take_until_newline_or_eof(input);
- Ok((input, (s, true)))
- } else if input.starts_with("/*!") {
- let (input, s) = block_comment(input)?;
- Ok((input, (&s[3..s.len() - 2], true)))
- } else if input.starts_with("///") {
- let input = input.advance(3);
- if input.starts_with_char('/') {
- return Err(Reject);
- }
- let (input, s) = take_until_newline_or_eof(input);
- Ok((input, (s, false)))
- } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
- let (input, s) = block_comment(input)?;
- Ok((input, (&s[3..s.len() - 2], false)))
- } else {
- Err(Reject)
- }
-}
-
-fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
- let chars = input.char_indices();
-
- for (i, ch) in chars {
- if ch == '\n' {
- return (input.advance(i), &input.rest[..i]);
- } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
- return (input.advance(i + 1), &input.rest[..i]);
- }
- }
-
- (input.advance(input.len()), input.rest)
-}
diff --git a/vendor/proc-macro2/src/rcvec.rs b/vendor/proc-macro2/src/rcvec.rs
deleted file mode 100644
index 37955af..0000000
--- a/vendor/proc-macro2/src/rcvec.rs
+++ /dev/null
@@ -1,145 +0,0 @@
-use alloc::rc::Rc;
-use alloc::vec;
-use core::mem;
-use core::panic::RefUnwindSafe;
-use core::slice;
-
-pub(crate) struct RcVec<T> {
- inner: Rc<Vec<T>>,
-}
-
-pub(crate) struct RcVecBuilder<T> {
- inner: Vec<T>,
-}
-
-pub(crate) struct RcVecMut<'a, T> {
- inner: &'a mut Vec<T>,
-}
-
-#[derive(Clone)]
-pub(crate) struct RcVecIntoIter<T> {
- inner: vec::IntoIter<T>,
-}
-
-impl<T> RcVec<T> {
- pub fn is_empty(&self) -> bool {
- self.inner.is_empty()
- }
-
- pub fn len(&self) -> usize {
- self.inner.len()
- }
-
- pub fn iter(&self) -> slice::Iter<T> {
- self.inner.iter()
- }
-
- pub fn make_mut(&mut self) -> RcVecMut<T>
- where
- T: Clone,
- {
- RcVecMut {
- inner: Rc::make_mut(&mut self.inner),
- }
- }
-
- pub fn get_mut(&mut self) -> Option<RcVecMut<T>> {
- let inner = Rc::get_mut(&mut self.inner)?;
- Some(RcVecMut { inner })
- }
-
- pub fn make_owned(mut self) -> RcVecBuilder<T>
- where
- T: Clone,
- {
- let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
- mem::take(owned)
- } else {
- Vec::clone(&self.inner)
- };
- RcVecBuilder { inner: vec }
- }
-}
-
-impl<T> RcVecBuilder<T> {
- pub fn new() -> Self {
- RcVecBuilder { inner: Vec::new() }
- }
-
- pub fn with_capacity(cap: usize) -> Self {
- RcVecBuilder {
- inner: Vec::with_capacity(cap),
- }
- }
-
- pub fn push(&mut self, element: T) {
- self.inner.push(element);
- }
-
- pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
- self.inner.extend(iter);
- }
-
- pub fn as_mut(&mut self) -> RcVecMut<T> {
- RcVecMut {
- inner: &mut self.inner,
- }
- }
-
- pub fn build(self) -> RcVec<T> {
- RcVec {
- inner: Rc::new(self.inner),
- }
- }
-}
-
-impl<'a, T> RcVecMut<'a, T> {
- pub fn push(&mut self, element: T) {
- self.inner.push(element);
- }
-
- pub fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
- self.inner.extend(iter);
- }
-
- pub fn pop(&mut self) -> Option<T> {
- self.inner.pop()
- }
-
- pub fn as_mut(&mut self) -> RcVecMut<T> {
- RcVecMut { inner: self.inner }
- }
-}
-
-impl<T> Clone for RcVec<T> {
- fn clone(&self) -> Self {
- RcVec {
- inner: Rc::clone(&self.inner),
- }
- }
-}
-
-impl<T> IntoIterator for RcVecBuilder<T> {
- type Item = T;
- type IntoIter = RcVecIntoIter<T>;
-
- fn into_iter(self) -> Self::IntoIter {
- RcVecIntoIter {
- inner: self.inner.into_iter(),
- }
- }
-}
-
-impl<T> Iterator for RcVecIntoIter<T> {
- type Item = T;
-
- fn next(&mut self) -> Option<Self::Item> {
- self.inner.next()
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs
deleted file mode 100644
index f5eb826..0000000
--- a/vendor/proc-macro2/src/wrapper.rs
+++ /dev/null
@@ -1,930 +0,0 @@
-use crate::detection::inside_proc_macro;
-#[cfg(span_locations)]
-use crate::location::LineColumn;
-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-use core::fmt::{self, Debug, Display};
-use core::ops::RangeBounds;
-use core::str::FromStr;
-use std::panic;
-#[cfg(super_unstable)]
-use std::path::PathBuf;
-
-#[derive(Clone)]
-pub(crate) enum TokenStream {
- Compiler(DeferredTokenStream),
- Fallback(fallback::TokenStream),
-}
-
-// Work around https://github.com/rust-lang/rust/issues/65080.
-// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
-// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
-// late as possible to batch together consecutive uses of the Extend impl.
-#[derive(Clone)]
-pub(crate) struct DeferredTokenStream {
- stream: proc_macro::TokenStream,
- extra: Vec<proc_macro::TokenTree>,
-}
-
-pub(crate) enum LexError {
- Compiler(proc_macro::LexError),
- Fallback(fallback::LexError),
-
- // Rustc was supposed to return a LexError, but it panicked instead.
- // https://github.com/rust-lang/rust/issues/58736
- CompilerPanic,
-}
-
-#[cold]
-fn mismatch(line: u32) -> ! {
- #[cfg(procmacro2_backtrace)]
- {
- let backtrace = std::backtrace::Backtrace::force_capture();
- panic!("compiler/fallback mismatch #{}\n\n{}", line, backtrace)
- }
- #[cfg(not(procmacro2_backtrace))]
- {
- panic!("compiler/fallback mismatch #{}", line)
- }
-}
-
-impl DeferredTokenStream {
- fn new(stream: proc_macro::TokenStream) -> Self {
- DeferredTokenStream {
- stream,
- extra: Vec::new(),
- }
- }
-
- fn is_empty(&self) -> bool {
- self.stream.is_empty() && self.extra.is_empty()
- }
-
- fn evaluate_now(&mut self) {
- // If-check provides a fast short circuit for the common case of `extra`
- // being empty, which saves a round trip over the proc macro bridge.
- // Improves macro expansion time in winrt by 6% in debug mode.
- if !self.extra.is_empty() {
- self.stream.extend(self.extra.drain(..));
- }
- }
-
- fn into_token_stream(mut self) -> proc_macro::TokenStream {
- self.evaluate_now();
- self.stream
- }
-}
-
-impl TokenStream {
- pub fn new() -> Self {
- if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
- } else {
- TokenStream::Fallback(fallback::TokenStream::new())
- }
- }
-
- pub fn is_empty(&self) -> bool {
- match self {
- TokenStream::Compiler(tts) => tts.is_empty(),
- TokenStream::Fallback(tts) => tts.is_empty(),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::TokenStream {
- match self {
- TokenStream::Compiler(s) => s.into_token_stream(),
- TokenStream::Fallback(_) => mismatch(line!()),
- }
- }
-
- fn unwrap_stable(self) -> fallback::TokenStream {
- match self {
- TokenStream::Compiler(_) => mismatch(line!()),
- TokenStream::Fallback(s) => s,
- }
- }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- if inside_proc_macro() {
- Ok(TokenStream::Compiler(DeferredTokenStream::new(
- proc_macro_parse(src)?,
- )))
- } else {
- Ok(TokenStream::Fallback(src.parse()?))
- }
- }
-}
-
-// Work around https://github.com/rust-lang/rust/issues/58736.
-fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
- let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
- result.unwrap_or_else(|_| Err(LexError::CompilerPanic))
-}
-
-impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
- TokenStream::Fallback(tts) => Display::fmt(tts, f),
- }
- }
-}
-
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> Self {
- TokenStream::Compiler(DeferredTokenStream::new(inner))
- }
-}
-
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> Self {
- match inner {
- TokenStream::Compiler(inner) => inner.into_token_stream(),
- TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
- }
- }
-}
-
-impl From<fallback::TokenStream> for TokenStream {
- fn from(inner: fallback::TokenStream) -> Self {
- TokenStream::Fallback(inner)
- }
-}
-
-// Assumes inside_proc_macro().
-fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
- match token {
- TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- Spacing::Joint => proc_macro::Spacing::Joint,
- Spacing::Alone => proc_macro::Spacing::Alone,
- };
- let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
- punct.set_span(tt.span().inner.unwrap_nightly());
- punct.into()
- }
- TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> Self {
- if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
- } else {
- TokenStream::Fallback(token.into())
- }
- }
-}
-
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
- if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(
- trees.into_iter().map(into_compiler_token).collect(),
- ))
- } else {
- TokenStream::Fallback(trees.into_iter().collect())
- }
- }
-}
-
-impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut streams = streams.into_iter();
- match streams.next() {
- Some(TokenStream::Compiler(mut first)) => {
- first.evaluate_now();
- first.stream.extend(streams.map(|s| match s {
- TokenStream::Compiler(s) => s.into_token_stream(),
- TokenStream::Fallback(_) => mismatch(line!()),
- }));
- TokenStream::Compiler(first)
- }
- Some(TokenStream::Fallback(mut first)) => {
- first.extend(streams.map(|s| match s {
- TokenStream::Fallback(s) => s,
- TokenStream::Compiler(_) => mismatch(line!()),
- }));
- TokenStream::Fallback(first)
- }
- None => TokenStream::new(),
- }
- }
-}
-
-impl Extend<TokenTree> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
- match self {
- TokenStream::Compiler(tts) => {
- // Here is the reason for DeferredTokenStream.
- for token in stream {
- tts.extra.push(into_compiler_token(token));
- }
- }
- TokenStream::Fallback(tts) => tts.extend(stream),
- }
- }
-}
-
-impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- match self {
- TokenStream::Compiler(tts) => {
- tts.evaluate_now();
- tts.stream
- .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
- }
- TokenStream::Fallback(tts) => {
- tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
- }
- }
- }
-}
-
-impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
- TokenStream::Fallback(tts) => Debug::fmt(tts, f),
- }
- }
-}
-
-impl LexError {
- pub(crate) fn span(&self) -> Span {
- match self {
- LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(),
- LexError::Fallback(e) => Span::Fallback(e.span()),
- }
- }
-}
-
-impl From<proc_macro::LexError> for LexError {
- fn from(e: proc_macro::LexError) -> Self {
- LexError::Compiler(e)
- }
-}
-
-impl From<fallback::LexError> for LexError {
- fn from(e: fallback::LexError) -> Self {
- LexError::Fallback(e)
- }
-}
-
-impl Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- LexError::Compiler(e) => Debug::fmt(e, f),
- LexError::Fallback(e) => Debug::fmt(e, f),
- LexError::CompilerPanic => {
- let fallback = fallback::LexError::call_site();
- Debug::fmt(&fallback, f)
- }
- }
- }
-}
-
-impl Display for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- LexError::Compiler(e) => Display::fmt(e, f),
- LexError::Fallback(e) => Display::fmt(e, f),
- LexError::CompilerPanic => {
- let fallback = fallback::LexError::call_site();
- Display::fmt(&fallback, f)
- }
- }
- }
-}
-
-#[derive(Clone)]
-pub(crate) enum TokenTreeIter {
- Compiler(proc_macro::token_stream::IntoIter),
- Fallback(fallback::TokenTreeIter),
-}
-
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
- fn into_iter(self) -> TokenTreeIter {
- match self {
- TokenStream::Compiler(tts) => {
- TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
- }
- TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
- }
- }
-}
-
-impl Iterator for TokenTreeIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- let token = match self {
- TokenTreeIter::Compiler(iter) => iter.next()?,
- TokenTreeIter::Fallback(iter) => return iter.next(),
- };
- Some(match token {
- proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
- proc_macro::TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- proc_macro::Spacing::Joint => Spacing::Joint,
- proc_macro::Spacing::Alone => Spacing::Alone,
- };
- let mut o = Punct::new(tt.as_char(), spacing);
- o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
- o.into()
- }
- proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
- proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
- })
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- match self {
- TokenTreeIter::Compiler(tts) => tts.size_hint(),
- TokenTreeIter::Fallback(tts) => tts.size_hint(),
- }
- }
-}
-
-#[derive(Clone, PartialEq, Eq)]
-#[cfg(super_unstable)]
-pub(crate) enum SourceFile {
- Compiler(proc_macro::SourceFile),
- Fallback(fallback::SourceFile),
-}
-
-#[cfg(super_unstable)]
-impl SourceFile {
- fn nightly(sf: proc_macro::SourceFile) -> Self {
- SourceFile::Compiler(sf)
- }
-
- /// Get the path to this source file as a string.
- pub fn path(&self) -> PathBuf {
- match self {
- SourceFile::Compiler(a) => a.path(),
- SourceFile::Fallback(a) => a.path(),
- }
- }
-
- pub fn is_real(&self) -> bool {
- match self {
- SourceFile::Compiler(a) => a.is_real(),
- SourceFile::Fallback(a) => a.is_real(),
- }
- }
-}
-
-#[cfg(super_unstable)]
-impl Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- SourceFile::Compiler(a) => Debug::fmt(a, f),
- SourceFile::Fallback(a) => Debug::fmt(a, f),
- }
- }
-}
-
-#[derive(Copy, Clone)]
-pub(crate) enum Span {
- Compiler(proc_macro::Span),
- Fallback(fallback::Span),
-}
-
-impl Span {
- pub fn call_site() -> Self {
- if inside_proc_macro() {
- Span::Compiler(proc_macro::Span::call_site())
- } else {
- Span::Fallback(fallback::Span::call_site())
- }
- }
-
- pub fn mixed_site() -> Self {
- if inside_proc_macro() {
- Span::Compiler(proc_macro::Span::mixed_site())
- } else {
- Span::Fallback(fallback::Span::mixed_site())
- }
- }
-
- #[cfg(super_unstable)]
- pub fn def_site() -> Self {
- if inside_proc_macro() {
- Span::Compiler(proc_macro::Span::def_site())
- } else {
- Span::Fallback(fallback::Span::def_site())
- }
- }
-
- pub fn resolved_at(&self, other: Span) -> Span {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
- (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
- (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
- }
- }
-
- pub fn located_at(&self, other: Span) -> Span {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
- (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
- (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
- }
- }
-
- pub fn unwrap(self) -> proc_macro::Span {
- match self {
- Span::Compiler(s) => s,
- Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
- }
- }
-
- #[cfg(super_unstable)]
- pub fn source_file(&self) -> SourceFile {
- match self {
- Span::Compiler(s) => SourceFile::nightly(s.source_file()),
- Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
- }
- }
-
- #[cfg(span_locations)]
- pub fn start(&self) -> LineColumn {
- match self {
- Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => s.start(),
- }
- }
-
- #[cfg(span_locations)]
- pub fn end(&self) -> LineColumn {
- match self {
- Span::Compiler(_) => LineColumn { line: 0, column: 0 },
- Span::Fallback(s) => s.end(),
- }
- }
-
- pub fn join(&self, other: Span) -> Option<Span> {
- let ret = match (self, other) {
- #[cfg(proc_macro_span)]
- (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
- (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
- _ => return None,
- };
- Some(ret)
- }
-
- #[cfg(super_unstable)]
- pub fn eq(&self, other: &Span) -> bool {
- match (self, other) {
- (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
- (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
- _ => false,
- }
- }
-
- pub fn source_text(&self) -> Option<String> {
- match self {
- #[cfg(not(no_source_text))]
- Span::Compiler(s) => s.source_text(),
- #[cfg(no_source_text)]
- Span::Compiler(_) => None,
- Span::Fallback(s) => s.source_text(),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Span {
- match self {
- Span::Compiler(s) => s,
- Span::Fallback(_) => mismatch(line!()),
- }
- }
-}
-
-impl From<proc_macro::Span> for crate::Span {
- fn from(proc_span: proc_macro::Span) -> Self {
- crate::Span::_new(Span::Compiler(proc_span))
- }
-}
-
-impl From<fallback::Span> for Span {
- fn from(inner: fallback::Span) -> Self {
- Span::Fallback(inner)
- }
-}
-
-impl Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Span::Compiler(s) => Debug::fmt(s, f),
- Span::Fallback(s) => Debug::fmt(s, f),
- }
- }
-}
-
-pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
- match span {
- Span::Compiler(s) => {
- debug.field("span", &s);
- }
- Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
- }
-}
-
-#[derive(Clone)]
-pub(crate) enum Group {
- Compiler(proc_macro::Group),
- Fallback(fallback::Group),
-}
-
-impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
- match stream {
- TokenStream::Compiler(tts) => {
- let delimiter = match delimiter {
- Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
- Delimiter::Bracket => proc_macro::Delimiter::Bracket,
- Delimiter::Brace => proc_macro::Delimiter::Brace,
- Delimiter::None => proc_macro::Delimiter::None,
- };
- Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
- }
- TokenStream::Fallback(stream) => {
- Group::Fallback(fallback::Group::new(delimiter, stream))
- }
- }
- }
-
- pub fn delimiter(&self) -> Delimiter {
- match self {
- Group::Compiler(g) => match g.delimiter() {
- proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
- proc_macro::Delimiter::Bracket => Delimiter::Bracket,
- proc_macro::Delimiter::Brace => Delimiter::Brace,
- proc_macro::Delimiter::None => Delimiter::None,
- },
- Group::Fallback(g) => g.delimiter(),
- }
- }
-
- pub fn stream(&self) -> TokenStream {
- match self {
- Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
- Group::Fallback(g) => TokenStream::Fallback(g.stream()),
- }
- }
-
- pub fn span(&self) -> Span {
- match self {
- Group::Compiler(g) => Span::Compiler(g.span()),
- Group::Fallback(g) => Span::Fallback(g.span()),
- }
- }
-
- pub fn span_open(&self) -> Span {
- match self {
- Group::Compiler(g) => Span::Compiler(g.span_open()),
- Group::Fallback(g) => Span::Fallback(g.span_open()),
- }
- }
-
- pub fn span_close(&self) -> Span {
- match self {
- Group::Compiler(g) => Span::Compiler(g.span_close()),
- Group::Fallback(g) => Span::Fallback(g.span_close()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
- (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
- (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
- (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Group {
- match self {
- Group::Compiler(g) => g,
- Group::Fallback(_) => mismatch(line!()),
- }
- }
-}
-
-impl From<fallback::Group> for Group {
- fn from(g: fallback::Group) -> Self {
- Group::Fallback(g)
- }
-}
-
-impl Display for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Group::Compiler(group) => Display::fmt(group, formatter),
- Group::Fallback(group) => Display::fmt(group, formatter),
- }
- }
-}
-
-impl Debug for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Group::Compiler(group) => Debug::fmt(group, formatter),
- Group::Fallback(group) => Debug::fmt(group, formatter),
- }
- }
-}
-
-#[derive(Clone)]
-pub(crate) enum Ident {
- Compiler(proc_macro::Ident),
- Fallback(fallback::Ident),
-}
-
-impl Ident {
- #[track_caller]
- pub fn new_checked(string: &str, span: Span) -> Self {
- match span {
- Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
- Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)),
- }
- }
-
- pub fn new_unchecked(string: &str, span: fallback::Span) -> Self {
- Ident::Fallback(fallback::Ident::new_unchecked(string, span))
- }
-
- #[track_caller]
- pub fn new_raw_checked(string: &str, span: Span) -> Self {
- match span {
- Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)),
- Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)),
- }
- }
-
- pub fn new_raw_unchecked(string: &str, span: fallback::Span) -> Self {
- Ident::Fallback(fallback::Ident::new_raw_unchecked(string, span))
- }
-
- pub fn span(&self) -> Span {
- match self {
- Ident::Compiler(t) => Span::Compiler(t.span()),
- Ident::Fallback(t) => Span::Fallback(t.span()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
- (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
- (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
- (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Ident {
- match self {
- Ident::Compiler(s) => s,
- Ident::Fallback(_) => mismatch(line!()),
- }
- }
-}
-
-impl PartialEq for Ident {
- fn eq(&self, other: &Ident) -> bool {
- match (self, other) {
- (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
- (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
- (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()),
- (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()),
- }
- }
-}
-
-impl<T> PartialEq<T> for Ident
-where
- T: ?Sized + AsRef<str>,
-{
- fn eq(&self, other: &T) -> bool {
- let other = other.as_ref();
- match self {
- Ident::Compiler(t) => t.to_string() == other,
- Ident::Fallback(t) => t == other,
- }
- }
-}
-
-impl Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Ident::Compiler(t) => Display::fmt(t, f),
- Ident::Fallback(t) => Display::fmt(t, f),
- }
- }
-}
-
-impl Debug for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Ident::Compiler(t) => Debug::fmt(t, f),
- Ident::Fallback(t) => Debug::fmt(t, f),
- }
- }
-}
-
-#[derive(Clone)]
-pub(crate) enum Literal {
- Compiler(proc_macro::Literal),
- Fallback(fallback::Literal),
-}
-
-macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::$name(n))
- } else {
- Literal::Fallback(fallback::Literal::$name(n))
- }
- }
- )*)
-}
-
-macro_rules! unsuffixed_integers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::$name(n))
- } else {
- Literal::Fallback(fallback::Literal::$name(n))
- }
- }
- )*)
-}
-
-impl Literal {
- pub unsafe fn from_str_unchecked(repr: &str) -> Self {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal"))
- } else {
- Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) })
- }
- }
-
- suffixed_numbers! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- u128_suffixed => u128,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- i128_suffixed => i128,
- isize_suffixed => isize,
-
- f32_suffixed => f32,
- f64_suffixed => f64,
- }
-
- unsuffixed_integers! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- u128_unsuffixed => u128,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
- } else {
- Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
- }
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
- } else {
- Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
- }
- }
-
- pub fn string(t: &str) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::string(t))
- } else {
- Literal::Fallback(fallback::Literal::string(t))
- }
- }
-
- pub fn character(t: char) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::character(t))
- } else {
- Literal::Fallback(fallback::Literal::character(t))
- }
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- if inside_proc_macro() {
- Literal::Compiler(proc_macro::Literal::byte_string(bytes))
- } else {
- Literal::Fallback(fallback::Literal::byte_string(bytes))
- }
- }
-
- pub fn span(&self) -> Span {
- match self {
- Literal::Compiler(lit) => Span::Compiler(lit.span()),
- Literal::Fallback(lit) => Span::Fallback(lit.span()),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match (self, span) {
- (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
- (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
- (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
- (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
- }
- }
-
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- match self {
- #[cfg(proc_macro_span)]
- Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
- #[cfg(not(proc_macro_span))]
- Literal::Compiler(_lit) => None,
- Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
- }
- }
-
- fn unwrap_nightly(self) -> proc_macro::Literal {
- match self {
- Literal::Compiler(s) => s,
- Literal::Fallback(_) => mismatch(line!()),
- }
- }
-}
-
-impl From<fallback::Literal> for Literal {
- fn from(s: fallback::Literal) -> Self {
- Literal::Fallback(s)
- }
-}
-
-impl FromStr for Literal {
- type Err = LexError;
-
- fn from_str(repr: &str) -> Result<Self, Self::Err> {
- if inside_proc_macro() {
- let literal = proc_macro::Literal::from_str(repr)?;
- Ok(Literal::Compiler(literal))
- } else {
- let literal = fallback::Literal::from_str(repr)?;
- Ok(Literal::Fallback(literal))
- }
- }
-}
-
-impl Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Literal::Compiler(t) => Display::fmt(t, f),
- Literal::Fallback(t) => Display::fmt(t, f),
- }
- }
-}
-
-impl Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
- Literal::Compiler(t) => Debug::fmt(t, f),
- Literal::Fallback(t) => Debug::fmt(t, f),
- }
- }
-}
diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs
deleted file mode 100644
index 4f7236d..0000000
--- a/vendor/proc-macro2/tests/comments.rs
+++ /dev/null
@@ -1,105 +0,0 @@
-#![allow(clippy::assertions_on_result_states)]
-
-use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
-
-// #[doc = "..."] -> "..."
-fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal {
- lit_of_doc_comment(tokens, false)
-}
-
-// #![doc = "..."] -> "..."
-fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal {
- lit_of_doc_comment(tokens, true)
-}
-
-fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal {
- let mut iter = tokens.clone().into_iter();
- match iter.next().unwrap() {
- TokenTree::Punct(punct) => {
- assert_eq!(punct.as_char(), '#');
- assert_eq!(punct.spacing(), Spacing::Alone);
- }
- _ => panic!("wrong token {:?}", tokens),
- }
- if inner {
- match iter.next().unwrap() {
- TokenTree::Punct(punct) => {
- assert_eq!(punct.as_char(), '!');
- assert_eq!(punct.spacing(), Spacing::Alone);
- }
- _ => panic!("wrong token {:?}", tokens),
- }
- }
- iter = match iter.next().unwrap() {
- TokenTree::Group(group) => {
- assert_eq!(group.delimiter(), Delimiter::Bracket);
- assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
- group.stream().into_iter()
- }
- _ => panic!("wrong token {:?}", tokens),
- };
- match iter.next().unwrap() {
- TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
- _ => panic!("wrong token {:?}", tokens),
- }
- match iter.next().unwrap() {
- TokenTree::Punct(punct) => {
- assert_eq!(punct.as_char(), '=');
- assert_eq!(punct.spacing(), Spacing::Alone);
- }
- _ => panic!("wrong token {:?}", tokens),
- }
- match iter.next().unwrap() {
- TokenTree::Literal(literal) => {
- assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
- literal
- }
- _ => panic!("wrong token {:?}", tokens),
- }
-}
-
-#[test]
-fn closed_immediately() {
- let stream = "/**/".parse::<TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
-}
-
-#[test]
-fn incomplete() {
- assert!("/*/".parse::<TokenStream>().is_err());
-}
-
-#[test]
-fn lit() {
- let stream = "/// doc".parse::<TokenStream>().unwrap();
- let lit = lit_of_outer_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\" doc\"");
-
- let stream = "//! doc".parse::<TokenStream>().unwrap();
- let lit = lit_of_inner_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\" doc\"");
-
- let stream = "/** doc */".parse::<TokenStream>().unwrap();
- let lit = lit_of_outer_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\" doc \"");
-
- let stream = "/*! doc */".parse::<TokenStream>().unwrap();
- let lit = lit_of_inner_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\" doc \"");
-}
-
-#[test]
-fn carriage_return() {
- let stream = "///\r\n".parse::<TokenStream>().unwrap();
- let lit = lit_of_outer_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\"\"");
-
- let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
- let lit = lit_of_outer_doc_comment(&stream);
- assert_eq!(lit.to_string(), "\"\\r\\n\"");
-
- "///\r".parse::<TokenStream>().unwrap_err();
- "///\r \n".parse::<TokenStream>().unwrap_err();
- "/**\r \n*/".parse::<TokenStream>().unwrap_err();
-}
diff --git a/vendor/proc-macro2/tests/features.rs b/vendor/proc-macro2/tests/features.rs
deleted file mode 100644
index 073f6e6..0000000
--- a/vendor/proc-macro2/tests/features.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-#[test]
-#[ignore]
-fn make_sure_no_proc_macro() {
- assert!(
- !cfg!(feature = "proc-macro"),
- "still compiled with proc_macro?"
- );
-}
diff --git a/vendor/proc-macro2/tests/marker.rs b/vendor/proc-macro2/tests/marker.rs
deleted file mode 100644
index d08fbfc..0000000
--- a/vendor/proc-macro2/tests/marker.rs
+++ /dev/null
@@ -1,99 +0,0 @@
-#![allow(clippy::extra_unused_type_parameters)]
-
-use proc_macro2::{
- Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
-};
-
-macro_rules! assert_impl {
- ($ty:ident is $($marker:ident) and +) => {
- #[test]
- #[allow(non_snake_case)]
- fn $ty() {
- fn assert_implemented<T: $($marker +)+>() {}
- assert_implemented::<$ty>();
- }
- };
-
- ($ty:ident is not $($marker:ident) or +) => {
- #[test]
- #[allow(non_snake_case)]
- fn $ty() {
- $(
- {
- // Implemented for types that implement $marker.
- trait IsNotImplemented {
- fn assert_not_implemented() {}
- }
- impl<T: $marker> IsNotImplemented for T {}
-
- // Implemented for the type being tested.
- trait IsImplemented {
- fn assert_not_implemented() {}
- }
- impl IsImplemented for $ty {}
-
- // If $ty does not implement $marker, there is no ambiguity
- // in the following trait method call.
- <$ty>::assert_not_implemented();
- }
- )+
- }
- };
-}
-
-assert_impl!(Delimiter is Send and Sync);
-assert_impl!(Spacing is Send and Sync);
-
-assert_impl!(Group is not Send or Sync);
-assert_impl!(Ident is not Send or Sync);
-assert_impl!(LexError is not Send or Sync);
-assert_impl!(Literal is not Send or Sync);
-assert_impl!(Punct is not Send or Sync);
-assert_impl!(Span is not Send or Sync);
-assert_impl!(TokenStream is not Send or Sync);
-assert_impl!(TokenTree is not Send or Sync);
-
-#[cfg(procmacro2_semver_exempt)]
-mod semver_exempt {
- use proc_macro2::{LineColumn, SourceFile};
-
- assert_impl!(LineColumn is Send and Sync);
-
- assert_impl!(SourceFile is not Send or Sync);
-}
-
-mod unwind_safe {
- use proc_macro2::{
- Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
- };
- #[cfg(procmacro2_semver_exempt)]
- use proc_macro2::{LineColumn, SourceFile};
- use std::panic::{RefUnwindSafe, UnwindSafe};
-
- macro_rules! assert_unwind_safe {
- ($($types:ident)*) => {
- $(
- assert_impl!($types is UnwindSafe and RefUnwindSafe);
- )*
- };
- }
-
- assert_unwind_safe! {
- Delimiter
- Group
- Ident
- LexError
- Literal
- Punct
- Spacing
- Span
- TokenStream
- TokenTree
- }
-
- #[cfg(procmacro2_semver_exempt)]
- assert_unwind_safe! {
- LineColumn
- SourceFile
- }
-}
diff --git a/vendor/proc-macro2/tests/test.rs b/vendor/proc-macro2/tests/test.rs
deleted file mode 100644
index b75cd55..0000000
--- a/vendor/proc-macro2/tests/test.rs
+++ /dev/null
@@ -1,759 +0,0 @@
-#![allow(
- clippy::assertions_on_result_states,
- clippy::items_after_statements,
- clippy::non_ascii_literal,
- clippy::octal_escapes
-)]
-
-use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
-use std::iter;
-use std::str::{self, FromStr};
-
-#[test]
-fn idents() {
- assert_eq!(
- Ident::new("String", Span::call_site()).to_string(),
- "String"
- );
- assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
- assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
-}
-
-#[test]
-fn raw_idents() {
- assert_eq!(
- Ident::new_raw("String", Span::call_site()).to_string(),
- "r#String"
- );
- assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
-}
-
-#[test]
-#[should_panic(expected = "`r#_` cannot be a raw identifier")]
-fn ident_raw_underscore() {
- Ident::new_raw("_", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "`r#super` cannot be a raw identifier")]
-fn ident_raw_reserved() {
- Ident::new_raw("super", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
-fn ident_empty() {
- Ident::new("", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
-fn ident_number() {
- Ident::new("255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"a#\" is not a valid Ident")]
-fn ident_invalid() {
- Ident::new("a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn raw_ident_empty() {
- Ident::new("r#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn raw_ident_number() {
- Ident::new("r#255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
-fn raw_ident_invalid() {
- Ident::new("r#a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn lifetime_empty() {
- Ident::new("'", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "not a valid Ident")]
-fn lifetime_number() {
- Ident::new("'255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = r#""'a#" is not a valid Ident"#)]
-fn lifetime_invalid() {
- Ident::new("'a#", Span::call_site());
-}
-
-#[test]
-fn literal_string() {
- assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
- assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
- assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
- assert_eq!(
- Literal::string("a\00b\07c\08d\0e\0").to_string(),
- "\"a\\x000b\\x007c\\08d\\0e\\0\"",
- );
-
- "\"\\\r\n x\"".parse::<TokenStream>().unwrap();
- "\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
-}
-
-#[test]
-fn literal_raw_string() {
- "r\"\r\n\"".parse::<TokenStream>().unwrap();
-
- fn raw_string_literal_with_hashes(n: usize) -> String {
- let mut literal = String::new();
- literal.push('r');
- literal.extend(iter::repeat('#').take(n));
- literal.push('"');
- literal.push('"');
- literal.extend(iter::repeat('#').take(n));
- literal
- }
-
- raw_string_literal_with_hashes(255)
- .parse::<TokenStream>()
- .unwrap();
-
- // https://github.com/rust-lang/rust/pull/95251
- raw_string_literal_with_hashes(256)
- .parse::<TokenStream>()
- .unwrap_err();
-}
-
-#[test]
-fn literal_byte_string() {
- assert_eq!(Literal::byte_string(b"").to_string(), "b\"\"");
- assert_eq!(
- Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(),
- "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"",
- );
- assert_eq!(
- Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(),
- "b\"a\\x000b\\x007c\\08d\\0e\\0\"",
- );
-
- "b\"\\\r\n x\"".parse::<TokenStream>().unwrap();
- "b\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err();
- "b\"\\\r\n \u{a0}x\"".parse::<TokenStream>().unwrap_err();
- "br\"\u{a0}\"".parse::<TokenStream>().unwrap_err();
-}
-
-#[test]
-fn literal_c_string() {
- let strings = r###"
- c"hello\x80我叫\u{1F980}" // from the RFC
- cr"\"
- cr##"Hello "world"!"##
- c"\t\n\r\"\\"
- "###;
-
- let mut tokens = strings.parse::<TokenStream>().unwrap().into_iter();
-
- for expected in &[
- r#"c"hello\x80我叫\u{1F980}""#,
- r#"cr"\""#,
- r###"cr##"Hello "world"!"##"###,
- r#"c"\t\n\r\"\\""#,
- ] {
- match tokens.next().unwrap() {
- TokenTree::Literal(literal) => {
- assert_eq!(literal.to_string(), *expected);
- }
- unexpected => panic!("unexpected token: {:?}", unexpected),
- }
- }
-
- if let Some(unexpected) = tokens.next() {
- panic!("unexpected token: {:?}", unexpected);
- }
-
- for invalid in &[r#"c"\0""#, r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] {
- if let Ok(unexpected) = invalid.parse::<TokenStream>() {
- panic!("unexpected token: {:?}", unexpected);
- }
- }
-}
-
-#[test]
-fn literal_character() {
- assert_eq!(Literal::character('x').to_string(), "'x'");
- assert_eq!(Literal::character('\'').to_string(), "'\\''");
- assert_eq!(Literal::character('"').to_string(), "'\"'");
-}
-
-#[test]
-fn literal_integer() {
- assert_eq!(Literal::u8_suffixed(10).to_string(), "10u8");
- assert_eq!(Literal::u16_suffixed(10).to_string(), "10u16");
- assert_eq!(Literal::u32_suffixed(10).to_string(), "10u32");
- assert_eq!(Literal::u64_suffixed(10).to_string(), "10u64");
- assert_eq!(Literal::u128_suffixed(10).to_string(), "10u128");
- assert_eq!(Literal::usize_suffixed(10).to_string(), "10usize");
-
- assert_eq!(Literal::i8_suffixed(10).to_string(), "10i8");
- assert_eq!(Literal::i16_suffixed(10).to_string(), "10i16");
- assert_eq!(Literal::i32_suffixed(10).to_string(), "10i32");
- assert_eq!(Literal::i64_suffixed(10).to_string(), "10i64");
- assert_eq!(Literal::i128_suffixed(10).to_string(), "10i128");
- assert_eq!(Literal::isize_suffixed(10).to_string(), "10isize");
-
- assert_eq!(Literal::u8_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::u16_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::u32_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::u64_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::u128_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::usize_unsuffixed(10).to_string(), "10");
-
- assert_eq!(Literal::i8_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::i16_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::i32_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::i64_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::i128_unsuffixed(10).to_string(), "10");
- assert_eq!(Literal::isize_unsuffixed(10).to_string(), "10");
-}
-
-#[test]
-fn literal_float() {
- assert_eq!(Literal::f32_suffixed(10.0).to_string(), "10f32");
- assert_eq!(Literal::f64_suffixed(10.0).to_string(), "10f64");
-
- assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
- assert_eq!(Literal::f64_unsuffixed(10.0).to_string(), "10.0");
-}
-
-#[test]
-fn literal_suffix() {
- fn token_count(p: &str) -> usize {
- p.parse::<TokenStream>().unwrap().into_iter().count()
- }
-
- assert_eq!(token_count("999u256"), 1);
- assert_eq!(token_count("999r#u256"), 3);
- assert_eq!(token_count("1."), 1);
- assert_eq!(token_count("1.f32"), 3);
- assert_eq!(token_count("1.0_0"), 1);
- assert_eq!(token_count("1._0"), 3);
- assert_eq!(token_count("1._m"), 3);
- assert_eq!(token_count("\"\"s"), 1);
- assert_eq!(token_count("r\"\"r"), 1);
- assert_eq!(token_count("b\"\"b"), 1);
- assert_eq!(token_count("br\"\"br"), 1);
- assert_eq!(token_count("r#\"\"#r"), 1);
- assert_eq!(token_count("'c'c"), 1);
- assert_eq!(token_count("b'b'b"), 1);
- assert_eq!(token_count("0E"), 1);
- assert_eq!(token_count("0o0A"), 1);
- assert_eq!(token_count("0E--0"), 4);
- assert_eq!(token_count("0.0ECMA"), 1);
-}
-
-#[test]
-fn literal_iter_negative() {
- let negative_literal = Literal::i32_suffixed(-3);
- let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
- let mut iter = tokens.into_iter();
- match iter.next().unwrap() {
- TokenTree::Punct(punct) => {
- assert_eq!(punct.as_char(), '-');
- assert_eq!(punct.spacing(), Spacing::Alone);
- }
- unexpected => panic!("unexpected token {:?}", unexpected),
- }
- match iter.next().unwrap() {
- TokenTree::Literal(literal) => {
- assert_eq!(literal.to_string(), "3i32");
- }
- unexpected => panic!("unexpected token {:?}", unexpected),
- }
- assert!(iter.next().is_none());
-}
-
-#[test]
-fn literal_parse() {
- assert!("1".parse::<Literal>().is_ok());
- assert!("-1".parse::<Literal>().is_ok());
- assert!("-1u12".parse::<Literal>().is_ok());
- assert!("1.0".parse::<Literal>().is_ok());
- assert!("-1.0".parse::<Literal>().is_ok());
- assert!("-1.0f12".parse::<Literal>().is_ok());
- assert!("'a'".parse::<Literal>().is_ok());
- assert!("\"\n\"".parse::<Literal>().is_ok());
- assert!("0 1".parse::<Literal>().is_err());
- assert!(" 0".parse::<Literal>().is_err());
- assert!("0 ".parse::<Literal>().is_err());
- assert!("/* comment */0".parse::<Literal>().is_err());
- assert!("0/* comment */".parse::<Literal>().is_err());
- assert!("0// comment".parse::<Literal>().is_err());
- assert!("- 1".parse::<Literal>().is_err());
- assert!("- 1.0".parse::<Literal>().is_err());
- assert!("-\"\"".parse::<Literal>().is_err());
-}
-
-#[test]
-fn literal_span() {
- let positive = "0.1".parse::<Literal>().unwrap();
- let negative = "-0.1".parse::<Literal>().unwrap();
- let subspan = positive.subspan(1..2);
-
- #[cfg(not(span_locations))]
- {
- let _ = negative;
- assert!(subspan.is_none());
- }
-
- #[cfg(span_locations)]
- {
- assert_eq!(positive.span().start().column, 0);
- assert_eq!(positive.span().end().column, 3);
- assert_eq!(negative.span().start().column, 0);
- assert_eq!(negative.span().end().column, 4);
- assert_eq!(subspan.unwrap().source_text().unwrap(), ".");
- }
-
- assert!(positive.subspan(1..4).is_none());
-}
-
-#[cfg(span_locations)]
-#[test]
-fn source_text() {
- let input = " 𓀕 a z ";
- let mut tokens = input
- .parse::<proc_macro2::TokenStream>()
- .unwrap()
- .into_iter();
-
- let first = tokens.next().unwrap();
- assert_eq!("𓀕", first.span().source_text().unwrap());
-
- let second = tokens.next().unwrap();
- let third = tokens.next().unwrap();
- assert_eq!("z", third.span().source_text().unwrap());
- assert_eq!("a", second.span().source_text().unwrap());
-}
-
-#[test]
-fn roundtrip() {
- fn roundtrip(p: &str) {
- println!("parse: {}", p);
- let s = p.parse::<TokenStream>().unwrap().to_string();
- println!("first: {}", s);
- let s2 = s.parse::<TokenStream>().unwrap().to_string();
- assert_eq!(s, s2);
- }
- roundtrip("a");
- roundtrip("<<");
- roundtrip("<<=");
- roundtrip(
- "
- 1
- 1.0
- 1f32
- 2f64
- 1usize
- 4isize
- 4e10
- 1_000
- 1_0i32
- 8u8
- 9
- 0
- 0xffffffffffffffffffffffffffffffff
- 1x
- 1u80
- 1f320
- ",
- );
- roundtrip("'a");
- roundtrip("'_");
- roundtrip("'static");
- roundtrip("'\\u{10__FFFF}'");
- roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
-}
-
-#[test]
-fn fail() {
- fn fail(p: &str) {
- if let Ok(s) = p.parse::<TokenStream>() {
- panic!("should have failed to parse: {}\n{:#?}", p, s);
- }
- }
- fail("' static");
- fail("r#1");
- fail("r#_");
- fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
- fail("\"\\u{999999}\""); // outside of valid range of char
- fail("\"\\u{_0}\""); // leading underscore
- fail("\"\\u{}\""); // empty
- fail("b\"\r\""); // bare carriage return in byte string
- fail("r\"\r\""); // bare carriage return in raw string
- fail("\"\\\r \""); // backslash carriage return
- fail("'aa'aa");
- fail("br##\"\"#");
- fail("\"\\\n\u{85}\r\"");
-}
-
-#[cfg(span_locations)]
-#[test]
-fn span_test() {
- check_spans(
- "\
-/// This is a document comment
-testing 123
-{
- testing 234
-}",
- &[
- (1, 0, 1, 30), // #
- (1, 0, 1, 30), // [ ... ]
- (1, 0, 1, 30), // doc
- (1, 0, 1, 30), // =
- (1, 0, 1, 30), // "This is..."
- (2, 0, 2, 7), // testing
- (2, 8, 2, 11), // 123
- (3, 0, 5, 1), // { ... }
- (4, 2, 4, 9), // testing
- (4, 10, 4, 13), // 234
- ],
- );
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[cfg(not(nightly))]
-#[test]
-fn default_span() {
- let start = Span::call_site().start();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- let end = Span::call_site().end();
- assert_eq!(end.line, 1);
- assert_eq!(end.column, 0);
- let source_file = Span::call_site().source_file();
- assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
- assert!(!source_file.is_real());
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[test]
-fn span_join() {
- let source1 = "aaa\nbbb"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
- let source2 = "ccc\nddd"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
-
- assert!(source1[0].span().source_file() != source2[0].span().source_file());
- assert_eq!(
- source1[0].span().source_file(),
- source1[1].span().source_file()
- );
-
- let joined1 = source1[0].span().join(source1[1].span());
- let joined2 = source1[0].span().join(source2[0].span());
- assert!(joined1.is_some());
- assert!(joined2.is_none());
-
- let start = joined1.unwrap().start();
- let end = joined1.unwrap().end();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- assert_eq!(end.line, 2);
- assert_eq!(end.column, 3);
-
- assert_eq!(
- joined1.unwrap().source_file(),
- source1[0].span().source_file()
- );
-}
-
-#[test]
-fn no_panic() {
-    let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
-    assert!(s.parse::<TokenStream>().is_err());
-}
-
-#[test]
-fn punct_before_comment() {
- let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
- match tts.next().unwrap() {
- TokenTree::Punct(tt) => {
- assert_eq!(tt.as_char(), '~');
- assert_eq!(tt.spacing(), Spacing::Alone);
- }
- wrong => panic!("wrong token {:?}", wrong),
- }
-}
-
-#[test]
-fn joint_last_token() {
-    // This test verifies that we match the behavior of libproc_macro *not* in
-    // the range nightly-2020-09-06 through nightly-2020-09-10, in which this
-    // behavior was temporarily broken.
-    // See https://github.com/rust-lang/rust/issues/76399
-
-    let joint_punct = Punct::new(':', Spacing::Joint);
-    let stream = TokenStream::from(TokenTree::Punct(joint_punct));
-    let punct = match stream.into_iter().next().unwrap() {
-        TokenTree::Punct(punct) => punct,
-        _ => unreachable!(),
-    };
-    assert_eq!(punct.spacing(), Spacing::Joint);
-}
-
-#[test]
-fn raw_identifier() {
- let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
- match tts.next().unwrap() {
- TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
- wrong => panic!("wrong token {:?}", wrong),
- }
- assert!(tts.next().is_none());
-}
-
-#[test]
-fn test_debug_ident() {
- let ident = Ident::new("proc_macro", Span::call_site());
-
- #[cfg(not(span_locations))]
- let expected = "Ident(proc_macro)";
-
- #[cfg(span_locations)]
- let expected = "Ident { sym: proc_macro }";
-
- assert_eq!(expected, format!("{:?}", ident));
-}
-
-#[test]
-fn test_debug_tokenstream() {
- let tts = TokenStream::from_str("[a + 1]").unwrap();
-
- #[cfg(not(span_locations))]
- let expected = "\
-TokenStream [
- Group {
- delimiter: Bracket,
- stream: TokenStream [
- Ident {
- sym: a,
- },
- Punct {
- char: '+',
- spacing: Alone,
- },
- Literal {
- lit: 1,
- },
- ],
- },
-]\
- ";
-
-    #[cfg(not(span_locations))]
-    let expected_before_trailing_commas = "\
-TokenStream [
-    Group {
-        delimiter: Bracket,
-        stream: TokenStream [
-            Ident {
-                sym: a
-            },
-            Punct {
-                char: '+',
-                spacing: Alone
-            },
-            Literal {
-                lit: 1
-            }
-        ]
-    }
-]\
-    ";
-
-    #[cfg(span_locations)]
-    let expected = "\
-TokenStream [
-    Group {
-        delimiter: Bracket,
-        stream: TokenStream [
-            Ident {
-                sym: a,
-                span: bytes(2..3),
-            },
-            Punct {
-                char: '+',
-                spacing: Alone,
-                span: bytes(4..5),
-            },
-            Literal {
-                lit: 1,
-                span: bytes(6..7),
-            },
-        ],
-        span: bytes(1..8),
-    },
-]\
-    ";
-
-    #[cfg(span_locations)]
-    let expected_before_trailing_commas = "\
-TokenStream [
-    Group {
-        delimiter: Bracket,
-        stream: TokenStream [
-            Ident {
-                sym: a,
-                span: bytes(2..3)
-            },
-            Punct {
-                char: '+',
-                spacing: Alone,
-                span: bytes(4..5)
-            },
-            Literal {
-                lit: 1,
-                span: bytes(6..7)
-            }
-        ],
-        span: bytes(1..8)
-    }
-]\
-    ";
-
-    let actual = format!("{:#?}", tts);
-    if actual.ends_with(",\n]") {
-        assert_eq!(expected, actual);
-    } else {
-        assert_eq!(expected_before_trailing_commas, actual);
-    }
-}
-
-#[test]
-fn default_tokenstream_is_empty() {
-    let default_token_stream = <TokenStream as Default>::default();
-
-    assert!(default_token_stream.is_empty());
-}
-
-#[test]
-fn tokenstream_size_hint() {
- let tokens = "a b (c d) e".parse::<TokenStream>().unwrap();
-
- assert_eq!(tokens.into_iter().size_hint(), (4, Some(4)));
-}
-
-#[test]
-fn tuple_indexing() {
-    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
-    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
-    assert_eq!("tuple", tokens.next().unwrap().to_string());
-    assert_eq!(".", tokens.next().unwrap().to_string());
-    assert_eq!("0.0", tokens.next().unwrap().to_string());
-    assert!(tokens.next().is_none());
-}
-
-#[cfg(span_locations)]
-#[test]
-fn non_ascii_tokens() {
- check_spans("// abc", &[]);
- check_spans("// ábc", &[]);
- check_spans("// abc x", &[]);
- check_spans("// ábc x", &[]);
- check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
- check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
- check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
- check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
- check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
- check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
- check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
- check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
- check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]);
- check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]);
- check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
- check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
- check_spans("'a'", &[(1, 0, 1, 3)]);
- check_spans("'á'", &[(1, 0, 1, 3)]);
- check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
- check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
- check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
- check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
- check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
- check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
- check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
- check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
- check_spans("abc", &[(1, 0, 1, 3)]);
- check_spans("ábc", &[(1, 0, 1, 3)]);
- check_spans("ábć", &[(1, 0, 1, 3)]);
- check_spans("abc// foo", &[(1, 0, 1, 3)]);
- check_spans("ábc// foo", &[(1, 0, 1, 3)]);
- check_spans("ábć// foo", &[(1, 0, 1, 3)]);
- check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
-}
-
-#[cfg(span_locations)]
-fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
-    let ts = p.parse::<TokenStream>().unwrap();
-    check_spans_internal(ts, &mut lines);
-    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
-}
-
-#[cfg(span_locations)]
-fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
-    for i in ts {
-        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
-            *lines = rest;
-
-            let start = i.span().start();
-            assert_eq!(start.line, sline, "sline did not match for {}", i);
-            assert_eq!(start.column, scol, "scol did not match for {}", i);
-
-            let end = i.span().end();
-            assert_eq!(end.line, eline, "eline did not match for {}", i);
-            assert_eq!(end.column, ecol, "ecol did not match for {}", i);
-
-            if let TokenTree::Group(g) = i {
-                check_spans_internal(g.stream().clone(), lines);
-            }
-        }
-    }
-}
-
-#[test]
-fn whitespace() {
-    // space, horizontal tab, vertical tab, form feed, carriage return, line
-    // feed, non-breaking space, left-to-right mark, right-to-left mark
-    let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}";
-    let tokens = various_spaces.parse::<TokenStream>().unwrap();
-    assert_eq!(tokens.into_iter().count(), 0);
-
-    let lone_carriage_returns = " \r \r\r\n ";
-    lone_carriage_returns.parse::<TokenStream>().unwrap();
-}
-
-#[test]
-fn byte_order_mark() {
- let string = "\u{feff}foo";
- let tokens = string.parse::<TokenStream>().unwrap();
- match tokens.into_iter().next().unwrap() {
- TokenTree::Ident(ident) => assert_eq!(ident, "foo"),
- _ => unreachable!(),
- }
-
- let string = "foo\u{feff}";
- string.parse::<TokenStream>().unwrap_err();
-}
diff --git a/vendor/proc-macro2/tests/test_fmt.rs b/vendor/proc-macro2/tests/test_fmt.rs
deleted file mode 100644
index 86a4c38..0000000
--- a/vendor/proc-macro2/tests/test_fmt.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-#![allow(clippy::from_iter_instead_of_collect)]
-
-use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
-use std::iter;
-
-#[test]
-fn test_fmt_group() {
- let ident = Ident::new("x", Span::call_site());
- let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
- let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
- let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
- let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
- let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
- let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
- let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
- let none_empty = Group::new(Delimiter::None, TokenStream::new());
- let none_nonempty = Group::new(Delimiter::None, inner);
-
- // Matches libproc_macro.
- assert_eq!("()", parens_empty.to_string());
- assert_eq!("(x)", parens_nonempty.to_string());
- assert_eq!("[]", brackets_empty.to_string());
- assert_eq!("[x]", brackets_nonempty.to_string());
- assert_eq!("{ }", braces_empty.to_string());
- assert_eq!("{ x }", braces_nonempty.to_string());
- assert_eq!("", none_empty.to_string());
- assert_eq!("x", none_nonempty.to_string());
-}
diff --git a/vendor/proc-macro2/tests/test_size.rs b/vendor/proc-macro2/tests/test_size.rs
deleted file mode 100644
index 46e58db..0000000
--- a/vendor/proc-macro2/tests/test_size.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-extern crate proc_macro;
-
-use std::mem;
-
-#[rustversion::attr(before(1.32), ignore)]
-#[test]
-fn test_proc_macro_span_size() {
-    assert_eq!(mem::size_of::<proc_macro::Span>(), 4);
-    assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4);
-}
-
-#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)]
-#[test]
-fn test_proc_macro2_fallback_span_size_without_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 0);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1);
-}
-
-#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)]
-#[test]
-fn test_proc_macro2_fallback_span_size_with_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 8);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
-}
-
-#[rustversion::attr(before(1.32), ignore)]
-#[rustversion::attr(
-    since(1.32),
-    cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore)
-)]
-#[test]
-fn test_proc_macro2_wrapper_span_size_without_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
-}
-
-#[cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)]
-#[test]
-fn test_proc_macro2_wrapper_span_size_with_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 12);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
-}