/src/test/ui/proc-macro/auxiliary/attributes-included.rs


// force-host
// no-prefer-dynamic

#![crate_type = "proc-macro"]

extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree, Delimiter, Literal, Spacing, Group};
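
// `foo` and `bar` below are attribute macros intended to be applied to an item
// of roughly this shape (illustrative; any bracketed attribute satisfies
// `assert_inline`, and `/// doc` is checked in its desugared `#[doc = "..."]` form):
//
//     #[bar]
//     #[inline]
//     /// doc
//     #[foo]
//     #[inline]
//     /// doc
//     fn foo() { ... }
//
// Each macro asserts the exact token structure of the item it receives. `foo`
// additionally rewrites every string literal `"foo"` in the item to the integer
// literal `3` via `fold_stream`, while `bar` passes the item through unchanged.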
#[proc_macro_attribute]
pub fn foo(attr: TokenStream, input: TokenStream) -> TokenStream {
    assert!(attr.is_empty());
    let input = input.into_iter().collect::<Vec<_>>();
    {
        let mut cursor = &input[..];
        assert_inline(&mut cursor);
        assert_doc(&mut cursor);
        assert_inline(&mut cursor);
        assert_doc(&mut cursor);
        assert_foo(&mut cursor);
        assert!(cursor.is_empty());
    }
    fold_stream(input.into_iter().collect())
}
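
// `bar` performs the same shape check as `foo`, except that it also expects one
// further attribute invocation between the two attribute/doc pairs (presumably
// the still-unexpanded `#[foo]`), and it returns its input unchanged instead of
// folding it.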
#[proc_macro_attribute]
pub fn bar(attr: TokenStream, input: TokenStream) -> TokenStream {
    assert!(attr.is_empty());
    let input = input.into_iter().collect::<Vec<_>>();
    {
        let mut cursor = &input[..];
        assert_inline(&mut cursor);
        assert_doc(&mut cursor);
        assert_invoc(&mut cursor);
        assert_inline(&mut cursor);
        assert_doc(&mut cursor);
        assert_foo(&mut cursor);
        assert!(cursor.is_empty());
    }
    input.into_iter().collect()
}
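
// Consumes a `#[...]` attribute from the front of `slice`: a `#` punct followed
// by a bracket-delimited group. The contents of the brackets are not inspected.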
fn assert_inline(slice: &mut &[TokenTree]) {
    match &slice[0] {
        TokenTree::Punct(tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("expected '#' char"),
    }
    match &slice[1] {
        TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Bracket),
        _ => panic!("expected brackets"),
    }
    *slice = &slice[2..];
}
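
// Consumes a doc comment in its desugared attribute form: `#` followed by a
// bracket group containing exactly the three tokens `doc`, `=`, and a literal.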
fn assert_doc(slice: &mut &[TokenTree]) {
    match &slice[0] {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '#');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        _ => panic!("expected #"),
    }
    let inner = match &slice[1] {
        TokenTree::Group(tt) => {
            assert_eq!(tt.delimiter(), Delimiter::Bracket);
            tt.stream()
        }
        _ => panic!("expected brackets"),
    };
    let tokens = inner.into_iter().collect::<Vec<_>>();
    let tokens = &tokens[..];

    if tokens.len() != 3 {
        panic!("expected three tokens in doc")
    }

    match &tokens[0] {
        TokenTree::Ident(tt) => assert_eq!("doc", &*tt.to_string()),
        _ => panic!("expected `doc`"),
    }
    match &tokens[1] {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '=');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        _ => panic!("expected equals"),
    }
    match tokens[2] {
        TokenTree::Literal(_) => {}
        _ => panic!("expected literal"),
    }

    *slice = &slice[2..];
}
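
// Consumes one more bracketed attribute; in the companion test this is
// presumably the still-unexpanded `#[foo]` invocation that `#[bar]` sees.
// Structurally it is the same check as `assert_inline`.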
fn assert_invoc(slice: &mut &[TokenTree]) {
    match &slice[0] {
        TokenTree::Punct(tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("expected '#' char"),
    }
    match &slice[1] {
        TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Bracket),
        _ => panic!("expected brackets"),
    }
    *slice = &slice[2..];
}
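
// Consumes the item itself: the identifiers `fn` and `foo`, an empty
// parenthesized parameter list, and a brace-delimited body (whose contents are
// not checked here).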
fn assert_foo(slice: &mut &[TokenTree]) {
    match &slice[0] {
        TokenTree::Ident(tt) => assert_eq!(&*tt.to_string(), "fn"),
        _ => panic!("expected fn"),
    }
    match &slice[1] {
        TokenTree::Ident(tt) => assert_eq!(&*tt.to_string(), "foo"),
        _ => panic!("expected foo"),
    }
    match &slice[2] {
        TokenTree::Group(tt) => {
            assert_eq!(tt.delimiter(), Delimiter::Parenthesis);
            assert!(tt.stream().is_empty());
        }
        _ => panic!("expected parens"),
    }
    match &slice[3] {
        TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Brace),
        _ => panic!("expected braces"),
    }
    *slice = &slice[4..];
}
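
// Recursively rewrites a token stream by folding every tree in it, descending
// into delimited groups.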
fn fold_stream(input: TokenStream) -> TokenStream {
    input.into_iter().map(fold_tree).collect()
}
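
// Rewrites a single token: groups are folded recursively, punctuation and
// identifiers pass through untouched, and a literal is replaced with the
// unsuffixed integer `3` only when its textual form is exactly `"foo"`
// (a string literal).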
fn fold_tree(input: TokenTree) -> TokenTree {
    match input {
        TokenTree::Group(b) => {
            TokenTree::Group(Group::new(b.delimiter(), fold_stream(b.stream())))
        }
        TokenTree::Punct(b) => TokenTree::Punct(b),
        TokenTree::Ident(a) => TokenTree::Ident(a),
        TokenTree::Literal(a) => {
            if a.to_string() != "\"foo\"" {
                TokenTree::Literal(a)
            } else {
                TokenTree::Literal(Literal::i32_unsuffixed(3))
            }
        }
    }
}