summaryrefslogtreecommitdiffstats
path: root/src/test/ui/proc-macro/auxiliary/expand-expr.rs
diff options
context:
space:
mode:
Diffstat (limited to 'src/test/ui/proc-macro/auxiliary/expand-expr.rs')
-rw-r--r--src/test/ui/proc-macro/auxiliary/expand-expr.rs149
1 file changed, 149 insertions, 0 deletions
diff --git a/src/test/ui/proc-macro/auxiliary/expand-expr.rs b/src/test/ui/proc-macro/auxiliary/expand-expr.rs
new file mode 100644
index 000000000..5463e79d7
--- /dev/null
+++ b/src/test/ui/proc-macro/auxiliary/expand-expr.rs
@@ -0,0 +1,149 @@
+// force-host
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![deny(warnings)]
+#![feature(proc_macro_expand, proc_macro_span)]
+
+extern crate proc_macro;
+
+use proc_macro::*;
+use std::str::FromStr;
+
// Recursively flatten a `TokenStream` into a flat list of token trees,
// splicing the contents of any top-level `Delimiter::None` groups in
// place so invisible delimiters don't affect comparison.
fn flatten(ts: TokenStream) -> Vec<TokenTree> {
    let mut out = Vec::new();
    for tree in ts {
        match tree {
            TokenTree::Group(ref group) if group.delimiter() == Delimiter::None => {
                // Invisible delimiter: splice the group's contents in-line.
                out.extend(flatten(group.stream()));
            }
            other => out.push(other),
        }
    }
    out
}
+
+// Assert that two TokenStream values are roughly equal to one-another.
+fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
+ let ltts = flatten(lhs.clone());
+ let rtts = flatten(rhs.clone());
+
+ if ltts.len() != rtts.len() {
+ panic!(
+ "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
+ ltts.len(),
+ rtts.len(),
+ lhs,
+ rhs
+ )
+ }
+
+ for (ltt, rtt) in ltts.iter().zip(&rtts) {
+ match (ltt, rtt) {
+ (TokenTree::Group(l), TokenTree::Group(r)) => {
+ assert_eq!(
+ l.delimiter(),
+ r.delimiter(),
+ "expected delimiters to match for {:?} and {:?}",
+ l,
+ r
+ );
+ assert_ts_eq(&l.stream(), &r.stream());
+ }
+ (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
+ (l.as_char(), l.spacing()),
+ (r.as_char(), r.spacing()),
+ "expected punct to match for {:?} and {:?}",
+ l,
+ r
+ ),
+ (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
+ l.to_string(),
+ r.to_string(),
+ "expected ident to match for {:?} and {:?}",
+ l,
+ r
+ ),
+ (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
+ l.to_string(),
+ r.to_string(),
+ "expected literal to match for {:?} and {:?}",
+ l,
+ r
+ ),
+ (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
+ }
+ }
+}
+
+#[proc_macro]
+pub fn expand_expr_is(input: TokenStream) -> TokenStream {
+ let mut iter = input.into_iter();
+ let mut expected_tts = Vec::new();
+ loop {
+ match iter.next() {
+ Some(TokenTree::Punct(ref p)) if p.as_char() == ',' => break,
+ Some(tt) => expected_tts.push(tt),
+ None => panic!("expected comma"),
+ }
+ }
+
+ let expected = expected_tts.into_iter().collect::<TokenStream>();
+ let expanded = iter.collect::<TokenStream>().expand_expr().expect("expand_expr failed");
+ assert!(
+ expected.to_string() == expanded.to_string(),
+ "assert failed\nexpected: `{}`\nexpanded: `{}`",
+ expected.to_string(),
+ expanded.to_string()
+ );
+
+ // Also compare the raw tts to make sure they line up.
+ assert_ts_eq(&expected, &expanded);
+
+ TokenStream::new()
+}
+
+#[proc_macro]
+pub fn expand_expr_fail(input: TokenStream) -> TokenStream {
+ match input.expand_expr() {
+ Ok(ts) => panic!("expand_expr unexpectedly succeeded: `{}`", ts),
+ Err(_) => TokenStream::new(),
+ }
+}
+
+#[proc_macro]
+pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
+ // Check that the passed in `file!()` invocation and a parsed `file!`
+ // invocation expand to the same literal.
+ let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
+ let parse_t = TokenStream::from_str("file!{}")
+ .unwrap()
+ .expand_expr()
+ .expect("expand_expr failed on internal macro")
+ .to_string();
+ assert_eq!(input_t, parse_t);
+
+ // Check that the literal matches `Span::call_site().source_file().path()`
+ let expect_t =
+ Literal::string(&Span::call_site().source_file().path().to_string_lossy()).to_string();
+ assert_eq!(input_t, expect_t);
+
+ TokenStream::new()
+}
+
+#[proc_macro]
+pub fn recursive_expand(_: TokenStream) -> TokenStream {
+ // Recursively call until we hit the recursion limit and get an error.
+ //
+ // NOTE: This doesn't panic if expansion fails because that'll cause a very
+ // large number of errors to fill the output.
+ TokenStream::from_str("recursive_expand!{}")
+ .unwrap()
+ .expand_expr()
+ .unwrap_or(std::iter::once(TokenTree::Literal(Literal::u32_suffixed(0))).collect())
+}
+
// `echo_pm!(TOKENS)`: expands to its input unchanged. Useful for testing
// how expansion behaves across an opaque proc-macro invocation.
#[proc_macro]
pub fn echo_pm(input: TokenStream) -> TokenStream {
    input
}