vte_generate_state_changes/
lib.rs

extern crate proc_macro;

use std::iter::Peekable;

use proc_macro2::TokenTree::{Group, Literal, Punct};
use proc_macro2::{token_stream, TokenStream, TokenTree};
use quote::quote;

/// Create a `const fn` which will return an array with all state changes.
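///
/// # Example
///
/// A minimal sketch of an invocation, assuming `State` and `Action` enums with
/// the referenced variants and a `pack` function are in scope at the call site
/// (the variant names below are illustrative, not part of this crate):
///
/// ```ignore
/// generate_state_changes!(state_changes, {
///     Anywhere {
///         0x18 => (Ground, Execute),
///         0x1b => (Escape, Clear),
///     },
///     Escape {
///         0x20..=0x2f => (EscapeIntermediate, Collect),
///     },
/// });
///
/// // The macro expands to `const fn state_changes() -> [[u8; 256]; 16]`.
/// const STATE_CHANGES: [[u8; 256]; 16] = state_changes();
/// ```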
#[proc_macro]
pub fn generate_state_changes(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    // Convert from proc_macro -> proc_macro2
    let item: TokenStream = item.into();
    let mut iter = item.into_iter().peekable();

    // Determine output function name
    let fn_name = iter.next().unwrap();

    // Separator between name and body with state changes
    expect_punct(&mut iter, ',');

    // Create token stream to assign each state change to the array
    let assignments_stream = states_stream(&mut iter);

    quote!(
        const fn #fn_name() -> [[u8; 256]; 16] {
            let mut state_changes = [[0; 256]; 16];

            #assignments_stream

            state_changes
        }
    )
    .into()
}

/// Generate the array assignment statements for all origin states.
fn states_stream(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
    let mut states_stream = next_group(iter).into_iter().peekable();

    // Loop over all origin state entries
    let mut tokens = quote!();
    while states_stream.peek().is_some() {
        // Add all mappings for this state
        tokens.extend(state_entry_stream(&mut states_stream));

        // Allow trailing comma
        optional_punct(&mut states_stream, ',');
    }
    tokens
}

/// Generate the array assignment statements for one origin state.
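///
/// For example (with illustrative variant names), a single entry like
/// `Escape { 0x18 => (Ground, Execute), 0x1b..=0x1f => (Ground, Ignore) }`
/// produces the assignments for the `Escape` origin state.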
fn state_entry_stream(iter: &mut Peekable<token_stream::IntoIter>) -> TokenStream {
    // Origin state name
    let state = iter.next().unwrap().into();

    // Token stream with all the byte->target mappings
    let mut changes_stream = next_group(iter).into_iter().peekable();

    let mut tokens = quote!();
    while changes_stream.peek().is_some() {
        // Add next mapping for this state
        tokens.extend(change_stream(&mut changes_stream, &state));

        // Allow trailing comma
        optional_punct(&mut changes_stream, ',');
    }
    tokens
}

/// Generate the array assignment statement for a single byte->target mapping for one state.
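///
/// The input byte is either a single literal (e.g. `0x18`) or an inclusive
/// range (e.g. `0x20..=0x7f`), followed by `=>` and a `(target_state, action)`
/// group. As a sketch with illustrative variant names, `0x18 => (Ground, Execute)`
/// inside an `Anywhere { ... }` entry expands to:
///
/// ```ignore
/// state_changes[State::Anywhere as usize][24usize] = pack(State::Ground, Action::Execute);
/// ```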
fn change_stream(iter: &mut Peekable<token_stream::IntoIter>, state: &TokenTree) -> TokenStream {
    // Start of input byte range
    let start = next_usize(iter);

    // End of input byte range
    let end = if optional_punct(iter, '.') {
        // Read inclusive end of range
        expect_punct(iter, '.');
        expect_punct(iter, '=');
        next_usize(iter)
    } else {
        // Without range, end is equal to start
        start
    };

    // Separator between byte input range and output state
    expect_punct(iter, '=');
    expect_punct(iter, '>');

    // Token stream with target state and action
    let mut target_change_stream = next_group(iter).into_iter().peekable();

    let mut tokens = quote!();
    while target_change_stream.peek().is_some() {
        // Target state/action for all bytes in the range
        let (target_state, target_action) = target_change(&mut target_change_stream);

        // Create a new entry for every byte in the range
        for byte in start..=end {
            // TODO: Force adding `State::` and `Action::`?
            // TODO: Should we really use `pack` here without import?
            tokens.extend(quote!(
                state_changes[State::#state as usize][#byte] =
                    pack(State::#target_state, Action::#target_action);
            ));
        }
    }
    tokens
}

/// Get next target state and action.
fn target_change(iter: &mut Peekable<token_stream::IntoIter>) -> (TokenTree, TokenTree) {
    let target_state = iter.next().unwrap();

    // Separator between state and action
    expect_punct(iter, ',');

    let target_action = iter.next().unwrap();

    (target_state, target_action)
}

/// Check if the next token matches specific punctuation, consuming it on a match.
fn optional_punct(iter: &mut Peekable<token_stream::IntoIter>, c: char) -> bool {
    match iter.peek() {
        Some(Punct(punct)) if punct.as_char() == c => iter.next().is_some(),
        _ => false,
    }
}

/// Ensure next token matches specific punctuation.
///
/// # Panics
///
/// Panics if the punctuation does not match.
fn expect_punct(iter: &mut impl Iterator<Item = TokenTree>, c: char) {
    match iter.next() {
        Some(Punct(ref punct)) if punct.as_char() == c => (),
        token => panic!("Expected punctuation '{}', but got {:?}", c, token),
    }
}

/// Get next token as [`usize`].
///
/// # Panics
///
/// Panics if the next token is not a [`usize`] in hex or decimal literal format.
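///
/// For example, the literals `0x1b` and `27` both parse to the value `27`.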
fn next_usize(iter: &mut impl Iterator<Item = TokenTree>) -> usize {
    match iter.next() {
        Some(Literal(literal)) => {
            let literal = literal.to_string();
            if literal.starts_with("0x") {
                usize::from_str_radix(&literal[2..], 16).unwrap()
            } else {
                usize::from_str_radix(&literal, 10).unwrap()
            }
        },
        token => panic!("Expected literal, but got {:?}", token),
    }
}

/// Get next token as [`Group`].
///
/// # Panics
///
/// Panics if the next token is not a [`Group`].
fn next_group(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
    match iter.next() {
        Some(Group(group)) => group.stream(),
        token => panic!("Expected group, but got {:?}", token),
    }
}