macro_rules_attribute_proc_macro/
mod.rs1use {
5 ::core::{
6 ops::Not as _,
7 iter::FromIterator as _,
8 },
9 ::proc_macro::{*,
10 TokenTree as TT,
11 },
12};
13
14#[proc_macro_attribute] pub
16fn macro_rules_attribute (
17 attrs: TokenStream,
18 input: TokenStream,
19) -> TokenStream
20{
21 let ret = macro_rules_attribute_impl(&attrs.vec(), input);
22 #[cfg(feature = "verbose-expansions")]
23 eprintln!("{}", ret);
24 ret
25}
26
27fn macro_rules_attribute_impl (
28 attrs: &'_ [TokenTree],
29 input: TokenStream
30) -> TokenStream
31{
32 let mut ret: TokenStream;
33 match is_path_bang_terminated(&attrs) {
35 | Ok(PathIsBangTerminated(trailing_bang)) => {
36 ret = attrs.iter().cloned().collect();
37 if trailing_bang {
38 } else {
40 ret.extend([TT::Punct(Punct::new('!', Spacing::Alone))]);
42 }
43 },
44 | Err(()) => return parse_path_error(attrs),
45 }
46 ret.extend([TT::Group(Group::new(
47 Delimiter::Brace,
48 input.into_iter().collect(),
51 ))]);
52 ret
53}
54
55#[proc_macro_attribute] pub
57fn macro_rules_derive (
58 attrs: TokenStream,
59 input: TokenStream,
60) -> TokenStream
61{
62 let mut ret = TokenStream::new();
63 ret.extend(
64 attrs
65 .vec()
66 .split_inclusive(is_punct(','))
69 .map(|attr| match attr {
70 | [hd @ .., p] if is_punct(',')(p) => hd,
71 | _ => attr,
72 })
73 .flat_map(|attr| macro_rules_attribute_impl(attr, input.clone()))
74 );
75 ret.extend(real_derive(ts!(::macro_rules_attribute::Custom)));
76 ret.extend(input);
77 #[cfg(feature = "verbose-expansions")]
78 eprintln!("{}", ret);
79 ret
80}
81
/// `#[derive(...)]` replacement: entries suffixed with `!` are expanded as
/// `macro_rules!` derives (as `#[macro_rules_derive]` would), the remaining
/// entries are forwarded to a real built-in `#[derive(...)]`.
#[proc_macro_attribute] pub
fn derive (
    attrs: TokenStream,
    input: TokenStream,
) -> TokenStream
{
    let attrs = attrs.vec();

    // Fast path: no `!` anywhere, so every entry is a classic derive —
    // just re-emit a real `#[derive(...)]` followed by the unchanged item.
    if attrs.iter().any(is_punct('!')).not() {
        let mut ret = real_derive(attrs.into_iter().collect());
        ret.extend(input);
        #[cfg(feature = "verbose-expansions")]
        eprintln!("{}", ret);
        return ret;
    }

    // Iterator over the comma-separated entries; `split_inclusive` keeps
    // each separating `,` attached to its chunk, so the `map` strips it.
    let each_attr = || {
        attrs
            .split_inclusive(is_punct(','))
            .map(|attr| match attr {
                | [hd @ .., p] if is_punct(',')(p) => hd,
                | _ => attr,
            })
    };
    // Parse every entry exactly once, up front:
    // `Ok(true)` = `path!`, `Ok(false)` = plain `path`, `Err(())` = invalid.
    let ref each_is_path_bang_terminated =
        each_attr()
            .map(is_path_bang_terminated)
            .vec()
    ;
    // Report the first malformed entry, spanned at its own tokens.
    for (attr, parse_bang) in each_attr().zip(each_is_path_bang_terminated) {
        if let Err(()) = parse_bang {
            return parse_path_error(attr);
        }
    }
    // Entries filtered by whether they were `!`-terminated. The `unwrap()`
    // cannot panic: the loop above has already returned on any `Err`.
    let attrs_banged = |banged| {
        each_attr()
            .zip(each_is_path_bang_terminated)
            .filter(move |(_, parse_bang)| parse_bang.unwrap().0 == banged)
            .map(|(attr, _)| attr)
    };
    let mut ret = TokenStream::new();
    // `path!` entries: expand each one as `path! { <input> }`.
    attrs_banged(true).for_each(|attr| {
        ret.extend(macro_rules_attribute_impl(attr, input.clone()))
    });
    // Plain entries: forward to a real `#[derive(...)]`, re-inserting the
    // commas dropped above, plus the inert `Custom` derive so that
    // `#[custom(...)]` / `#[derive_args(...)]` helper attributes are accepted.
    ret.extend(real_derive(
        attrs_banged(false)
            .flat_map(|attr| attr.iter().cloned().chain(ts!(,)))
            .chain(ts!(::macro_rules_attribute::Custom,))
            .collect()
        ,
    ));
    ret.extend(input);

    #[cfg(feature = "verbose-expansions")]
    eprintln!("{}", ret);
    ret
}
158
159#[proc_macro_derive(Custom, attributes(custom, derive_args))] pub
160fn custom(_:TokenStream) -> TokenStream {
161 TokenStream::new()
162}
163
164fn real_derive (
165 derives: TokenStream,
166) -> TokenStream
167{
168 TokenStream::from_iter([
170 TT::Punct(Punct::new('#', Spacing::Alone)),
171 TT::Group(Group::new(
172 Delimiter::Bracket,
173 {
174 let mut ts: TokenStream = ts!(
175 ::core::prelude::v1::derive
176 );
177 ts.extend([TT::Group(Group::new(
178 Delimiter::Parenthesis,
179 derives,
180 ))]);
181 ts
182 },
183 ))
184 ])
185}
186
187
/// Result of a successful `is_path_bang_terminated` parse: the wrapped
/// `bool` is `true` iff the path was followed by a trailing `!`.
#[::core::prelude::v1::derive(Clone, Copy)]
struct PathIsBangTerminated(bool);
190
/// Builds a `::core::compile_error! { "…" }` invocation reporting that the
/// attribute argument was not a valid (optionally `!`-terminated) macro path.
///
/// The emitted tokens are re-spanned so the diagnostic underlines the whole
/// of `incorrect_input`: every token gets the first input span, then the
/// last token gets the last input span.
fn parse_path_error (
    incorrect_input: &[TokenTree],
) -> TokenStream
{
    let mut spans = incorrect_input.iter().map(|tt| tt.span());
    let mut ts = ts!(
        ::core::compile_error! {
            "\
                expected a parameter of the form `path::to::macro_name !` \
                or `path::to::macro_name`.\
            "
        }
    ).vec();
    // Empty input: fall back to the macro call site.
    let fst_span = spans.next().unwrap_or_else(Span::call_site);
    // `fold` keeps the most recent item, i.e. the last span of the input
    // (equal to `fst_span` when the input has a single token).
    let lst_span = spans.fold(fst_span, |_, cur| cur);
    ts.iter_mut().for_each(|tt| tt.set_span(fst_span));
    // `unwrap()` cannot panic: `ts!` always yields at least one token here.
    ts.last_mut().unwrap().set_span(lst_span);
    ts.into_iter().collect()
}
210
/// Checks that `tts` has the shape `$(::)? ident $(:: ident)* $(!)? $(,)?` —
/// a (possibly leading-`::`) path of identifiers, optionally terminated by a
/// `!`, optionally followed by one trailing comma.
///
/// Returns whether the trailing `!` was present, or `Err(())` when the
/// token sequence does not match that shape.
fn is_path_bang_terminated (
    tts: &'_ [TokenTree],
) -> Result<PathIsBangTerminated, ()>
{
    let mut tts = tts.iter().peekable();

    // Tries to consume one `::` (a `:` with `Joint` spacing immediately
    // followed by a `:` with `Alone` spacing). Evaluates to `Some(())` on
    // success, `None` when the next token is not a punct at all, and
    // early-returns `Err(())` from the enclosing fn on a lone or
    // malformed `:`.
    macro_rules! parse_optional_semicolons {() => (
        match tts.peek() {
            | Some(TT::Punct(p)) => {
                let _ = tts.next();
                if p.as_char() == ':' && p.spacing() == Spacing::Joint {
                    match tts.next() {
                        | Some(TT::Punct(p))
                            if p.as_char() == ':'
                            && p.spacing() == Spacing::Alone
                        => {
                            Some(())
                        },
                        | _ => return Err(()),
                    }
                } else {
                    return Err(());
                }
            },
            | _ => None,
        }
    )}

    // Consumes an optional trailing `,`; any token after it is an error.
    macro_rules! parse_trailing_comma {() => (
        if tts.peek().copied().map_or(false, is_punct(',')) {
            let _ = tts.next();
            if tts.next().is_some() {
                return Err(());
            }
        }
    )}

    // Optional leading `::`.
    parse_optional_semicolons!();
    loop {
        // Mandatory path segment.
        match tts.next() {
            | Some(TT::Ident(_)) => {},
            | _ => return Err(()),
        }
        parse_trailing_comma!();
        // Input ends right after a segment: plain path, no `!`.
        if tts.peek().is_none() {
            return Ok(PathIsBangTerminated(false));
        }
        // A `!` (then an optional `,`) must end the input.
        if tts.peek().copied().map_or(false, is_punct('!')) {
            let _ = tts.next();
            parse_trailing_comma!();
            return if tts.next().is_none() {
                Ok(PathIsBangTerminated(true))
            } else {
                Err(())
            };
        }
        // Otherwise the only valid continuation is `:: <next segment>`.
        if parse_optional_semicolons!().is_none() {
            return Err(());
        }
    }
}
281
/// Returns a predicate that is `true` exactly for a punctuation token
/// whose character is `c`.
fn is_punct (c: char)
  -> impl 'static + Fn(&'_ TokenTree) -> bool
{
    move |tt| match tt {
        | TT::Punct(p) => p.as_char() == c,
        | _ => false,
    }
}
287
/// `ts!( <tokens> )`: builds a `TokenStream` from literal tokens by
/// stringifying them and re-parsing the string. Panics (`unwrap`) only if
/// the tokens do not re-parse, which cannot happen for valid input tokens.
macro_rules! ts {( $($tt:tt)* ) => (
    ::core::stringify! {
        $($tt)*
    }
    .parse::<::proc_macro::TokenStream>()
    .unwrap()
)} use ts;
295
/// Extension trait: `.vec()` collects any iterable into a `Vec`, as a
/// terser postfix alternative to `.collect::<Vec<_>>()`.
trait CollectVec : IntoIterator + Sized {
    fn vec (self: Self)
      -> Vec<Self::Item>
    {
        self.into_iter().collect()
    }
}

/// Blanket implementation: every `IntoIterator` type gets `.vec()`.
impl<T : IntoIterator> CollectVec for T {}