diff options
author | Valentin Popov <valentin@popov.link> | 2024-07-19 15:37:58 +0300 |
---|---|---|
committer | Valentin Popov <valentin@popov.link> | 2024-07-19 15:37:58 +0300 |
commit | a990de90fe41456a23e58bd087d2f107d321f3a1 (patch) | |
tree | 15afc392522a9e85dc3332235e311b7d39352ea9 /vendor/proc-macro2/tests/comments.rs | |
parent | 3d48cd3f81164bbfc1a755dc1d4a9a02f98c8ddd (diff) | |
download | fparkan-a990de90fe41456a23e58bd087d2f107d321f3a1.tar.xz fparkan-a990de90fe41456a23e58bd087d2f107d321f3a1.zip |
Deleted vendor folder
Diffstat (limited to 'vendor/proc-macro2/tests/comments.rs')
-rw-r--r-- | vendor/proc-macro2/tests/comments.rs | 105 |
1 file changed, 0 insertions, 105 deletions
diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs deleted file mode 100644 index 4f7236d..0000000 --- a/vendor/proc-macro2/tests/comments.rs +++ /dev/null @@ -1,105 +0,0 @@ -#![allow(clippy::assertions_on_result_states)] - -use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; - -// #[doc = "..."] -> "..." -fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal { - lit_of_doc_comment(tokens, false) -} - -// #![doc = "..."] -> "..." -fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal { - lit_of_doc_comment(tokens, true) -} - -fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal { - let mut iter = tokens.clone().into_iter(); - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '#'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - if inner { - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '!'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - } - iter = match iter.next().unwrap() { - TokenTree::Group(group) => { - assert_eq!(group.delimiter(), Delimiter::Bracket); - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - group.stream().into_iter() - } - _ => panic!("wrong token {:?}", tokens), - }; - match iter.next().unwrap() { - TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"), - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '='); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Literal(literal) => { - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - literal - } - _ => panic!("wrong token {:?}", tokens), - } -} - -#[test] -fn closed_immediately() { - let stream = 
"/**/".parse::<TokenStream>().unwrap(); - let tokens = stream.into_iter().collect::<Vec<_>>(); - assert!(tokens.is_empty(), "not empty -- {:?}", tokens); -} - -#[test] -fn incomplete() { - assert!("/*/".parse::<TokenStream>().is_err()); -} - -#[test] -fn lit() { - let stream = "/// doc".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "//! doc".parse::<TokenStream>().unwrap(); - let lit = lit_of_inner_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "/** doc */".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc \""); - - let stream = "/*! doc */".parse::<TokenStream>().unwrap(); - let lit = lit_of_inner_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc \""); -} - -#[test] -fn carriage_return() { - let stream = "///\r\n".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\"\""); - - let stream = "/**\r\n*/".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\"\\r\\n\""); - - "///\r".parse::<TokenStream>().unwrap_err(); - "///\r \n".parse::<TokenStream>().unwrap_err(); - "/**\r \n*/".parse::<TokenStream>().unwrap_err(); -} |