// auto_impl/proxy.rs
use proc_macro_error::emit_error;
use std::iter::Peekable;
use crate::proc_macro::{token_stream, TokenStream, TokenTree};
/// Types for which a trait can automatically be implemented.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ProxyType {
    /// `&T`
    Ref,
    /// `&mut T`
    RefMut,
    /// `Arc<T>`
    Arc,
    /// `Rc<T>`
    Rc,
    /// `Box<T>`
    Box,
    /// `Fn(...)` closure trait
    Fn,
    /// `FnMut(...)` closure trait
    FnMut,
    /// `FnOnce(...)` closure trait
    FnOnce,
}

impl ProxyType {
    /// Returns `true` if this proxy type is one of the three closure traits
    /// (`Fn`, `FnMut` or `FnOnce`).
    pub(crate) fn is_fn(&self) -> bool {
        match self {
            ProxyType::Fn | ProxyType::FnMut | ProxyType::FnOnce => true,
            ProxyType::Ref
            | ProxyType::RefMut
            | ProxyType::Arc
            | ProxyType::Rc
            | ProxyType::Box => false,
        }
    }
}
/// Parses the attribute token stream into a list of proxy types.
///
/// The attribute token stream is the one in `#[auto_impl(...)]`: a
/// comma-separated list of proxy types. Legal values are `&`, `&mut`, `Box`,
/// `Rc`, `Arc`, `Fn`, `FnMut` and `FnOnce`.
///
/// If the given TokenStream is not valid, errors are emitted as appropriate.
/// Erroneous types are skipped rather than pushed into the Vec; the emitted
/// errors abort the compilation anyway.
pub(crate) fn parse_types(args: TokenStream) -> Vec<ProxyType> {
    let mut proxy_types = Vec::new();
    let mut tokens = args.into_iter().peekable();

    // Keep going until the token stream is exhausted.
    while tokens.peek().is_some() {
        // A proxy type is expected first. On error we simply skip it: the
        // error already emitted aborts the compilation later.
        if let Ok(proxy) = eat_type(&mut tokens) {
            proxy_types.push(proxy);
        }

        // Consume a separating comma if one follows (this also allows a
        // trailing comma). Anything else is left in place: it is either the
        // end of the stream or will cause an error on the next iteration.
        let comma_follows = match tokens.peek() {
            Some(TokenTree::Punct(p)) => p.as_char() == ',',
            _ => false,
        };
        if comma_follows {
            tokens.next();
        }
    }

    proxy_types
}
/// Parses one `ProxyType` from the given token iterator. The iterator must not
/// be empty!
fn eat_type(iter: &mut Peekable<token_stream::IntoIter>) -> Result<ProxyType, ()> {
    #[rustfmt::skip]
    const NOTE_TEXT: &str = "\
        attribute format should be `#[auto_impl(<types>)]` where `<types>` is \
        a comma-separated list of types. Allowed values for types: `&`, \
        `&mut`, `Box`, `Rc`, `Arc`, `Fn`, `FnMut` and `FnOnce`.\
    ";
    const EXPECTED_TEXT: &str = "expected '&' or ident.";

    // The caller guarantees the iterator is non-empty, so `next()` cannot
    // return `None` here.
    match iter.next().unwrap() {
        // Groups and literals can never start a proxy type: report and bail.
        TokenTree::Group(group) => {
            emit_error!(
                group.span(),
                "unexpected group, {}", EXPECTED_TEXT;
                note = NOTE_TEXT;
            );
            Err(())
        }
        TokenTree::Literal(lit) => {
            emit_error!(
                lit.span(),
                "unexpected literal, {}", EXPECTED_TEXT;
                note = NOTE_TEXT;
            );
            Err(())
        }
        TokenTree::Punct(punct) => {
            // `&` is the only punctuation that can begin a proxy type.
            if punct.as_char() != '&' {
                emit_error!(
                    punct.span(),
                    "unexpected punctuation '{}', {}", punct, EXPECTED_TEXT;
                    note = NOTE_TEXT;
                );
                return Err(());
            }

            // A `mut` ident directly after the `&` turns it into `&mut`;
            // anything else is left untouched and the `&` stands alone.
            match iter.peek() {
                Some(TokenTree::Ident(id)) if id.to_string() == "mut" => {
                    // Eat the `mut` token.
                    iter.next();
                    Ok(ProxyType::RefMut)
                }
                _ => Ok(ProxyType::Ref),
            }
        }
        TokenTree::Ident(ident) => {
            // Map the known type names; any other ident is an error.
            let ty = match &*ident.to_string() {
                "Box" => Some(ProxyType::Box),
                "Rc" => Some(ProxyType::Rc),
                "Arc" => Some(ProxyType::Arc),
                "Fn" => Some(ProxyType::Fn),
                "FnMut" => Some(ProxyType::FnMut),
                "FnOnce" => Some(ProxyType::FnOnce),
                _ => None,
            };
            ty.ok_or_else(|| {
                emit_error!(
                    ident.span(),
                    "unexpected '{}', {}", ident, EXPECTED_TEXT;
                    note = NOTE_TEXT;
                );
            })
        }
    }
}
// Right now, we can't really write useful tests. Many functions from
// `proc_macro` use a compiler internal session. This session is only valid
// when we were actually called as a proc macro. We need to add tests once
// this limitation of `proc_macro` is fixed.