summaryrefslogtreecommitdiffstats
path: root/vendor/vte_generate_state_changes
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:41 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-04 12:41:41 +0000
commit10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87 (patch)
treebdffd5d80c26cf4a7a518281a204be1ace85b4c1 /vendor/vte_generate_state_changes
parentReleasing progress-linux version 1.70.0+dfsg1-9~progress7.99u1. (diff)
downloadrustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.tar.xz
rustc-10ee2acdd26a7f1298c6f6d6b7af9b469fe29b87.zip
Merging upstream version 1.70.0+dfsg2.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/vte_generate_state_changes')
-rw-r--r--vendor/vte_generate_state_changes/.cargo-checksum.json1
-rw-r--r--vendor/vte_generate_state_changes/Cargo.toml28
-rw-r--r--vendor/vte_generate_state_changes/src/lib.rs174
3 files changed, 203 insertions, 0 deletions
diff --git a/vendor/vte_generate_state_changes/.cargo-checksum.json b/vendor/vte_generate_state_changes/.cargo-checksum.json
new file mode 100644
index 000000000..5832594b3
--- /dev/null
+++ b/vendor/vte_generate_state_changes/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"c8bdd580ea675f66ff5cf757a964ed8c6e8ff431ed9aad9bc6069f1b6561b85f","src/lib.rs":"417a43cd9fa1d7efd94efd76336701b46d79dbfa4c92db92268cf4951541dcf0"},"package":"d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"} \ No newline at end of file
diff --git a/vendor/vte_generate_state_changes/Cargo.toml b/vendor/vte_generate_state_changes/Cargo.toml
new file mode 100644
index 000000000..42242f5b1
--- /dev/null
+++ b/vendor/vte_generate_state_changes/Cargo.toml
@@ -0,0 +1,28 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+edition = "2018"
+name = "vte_generate_state_changes"
+version = "0.1.1"
+authors = ["Christian Duerr <contact@christianduerr.com>"]
+description = "Proc macro for generating VTE state changes"
+license = "Apache-2.0 OR MIT"
+repository = "https://github.com/jwilm/vte"
+
+[lib]
+proc-macro = true
+[dependencies.proc-macro2]
+version = "1.0.6"
+
+[dependencies.quote]
+version = "1.0.2"
diff --git a/vendor/vte_generate_state_changes/src/lib.rs b/vendor/vte_generate_state_changes/src/lib.rs
new file mode 100644
index 000000000..5cfb3acf2
--- /dev/null
+++ b/vendor/vte_generate_state_changes/src/lib.rs
@@ -0,0 +1,174 @@
+extern crate proc_macro;
+
+use std::iter::Peekable;
+
+use proc_macro2::TokenTree::{Group, Literal, Punct};
+use proc_macro2::{token_stream, TokenStream, TokenTree};
+use quote::quote;
+
+/// Create a `const fn` which will return an array with all state changes.
+///
+/// Expected macro input: a function name, a comma, then a brace-delimited
+/// group of `OriginState { byte_or_range => (TargetState, TargetAction), .. }`
+/// entries. The generated `const fn` builds a `[[u8; 256]; 16]` lookup table
+/// indexed by `[origin_state][input_byte]`, with each cell filled via
+/// `pack(State::.., Action::..)` (see `change_stream`).
+///
+/// # Panics
+///
+/// Panics at macro-expansion time on malformed input (missing name,
+/// missing `,` separator, or a malformed state-change body).
+#[proc_macro]
+pub fn generate_state_changes(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
+ // Convert from proc_macro -> proc_macro2
+ let item: TokenStream = item.into();
+ let mut iter = item.into_iter().peekable();
+
+ // Determine output function name
+ let fn_name = iter.next().unwrap();
+
+ // Separator between name and body with state changes
+ expect_punct(&mut iter, ',');
+
+ // Create token stream to assign each state change to the array
+ let assignments_stream = states_stream(&mut iter);
+
+ quote!(
+ const fn #fn_name() -> [[u8; 256]; 16] {
+ let mut state_changes = [[0; 256]; 16];
+
+ #assignments_stream
+
+ state_changes
+ }
+ )
+ .into()
+}
+
+/// Generate the array assignment statements for all origin states.
+///
+/// The next token in `iter` must be a group (the `{ .. }` body of the macro
+/// input); its contents are consumed entry by entry until exhausted.
+fn states_stream(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
+ let mut states_stream = next_group(iter).into_iter().peekable();
+
+ // Loop over all origin state entries
+ let mut tokens = quote!();
+ while states_stream.peek().is_some() {
+ // Add all mappings for this state
+ tokens.extend(state_entry_stream(&mut states_stream));
+
+ // Allow trailing comma
+ optional_punct(&mut states_stream, ',');
+ }
+ tokens
+}
+
+/// Generate the array assignment statements for one origin state.
+///
+/// Consumes one `OriginState { byte_or_range => .., .. }` entry: the state
+/// name token, then its brace-delimited group of byte->target mappings.
+///
+/// # Panics
+///
+/// Panics if the state name token or its mapping group is missing.
+fn state_entry_stream(iter: &mut Peekable<token_stream::IntoIter>) -> TokenStream {
+ // Origin state name
+ let state = iter.next().unwrap().into();
+
+ // Token stream with all the byte->target mappings
+ let mut changes_stream = next_group(iter).into_iter().peekable();
+
+ let mut tokens = quote!();
+ while changes_stream.peek().is_some() {
+ // Add next mapping for this state
+ tokens.extend(change_stream(&mut changes_stream, &state));
+
+ // Allow trailing comma
+ optional_punct(&mut changes_stream, ',');
+ }
+ tokens
+}
+
+/// Generate the array assignment statement for a single byte->target mapping for one state.
+///
+/// Accepts either a single byte literal or an inclusive range
+/// (`start..=end`, parsed as the three punctuation tokens `.` `.` `=`),
+/// followed by `=>` and a group holding `(TargetState, TargetAction)`.
+/// One table-assignment statement is emitted per byte in the range.
+fn change_stream(iter: &mut Peekable<token_stream::IntoIter>, state: &TokenTree) -> TokenStream {
+ // Start of input byte range
+ let start = next_usize(iter);
+
+ // End of input byte range
+ let end = if optional_punct(iter, '.') {
+ // Read inclusive end of range
+ expect_punct(iter, '.');
+ expect_punct(iter, '=');
+ next_usize(iter)
+ } else {
+ // Without range, end is equal to start
+ start
+ };
+
+ // Separator between byte input range and output state
+ expect_punct(iter, '=');
+ expect_punct(iter, '>');
+
+ // Token stream with target state and action
+ let mut target_change_stream = next_group(iter).into_iter().peekable();
+
+ let mut tokens = quote!();
+ // NOTE(review): the loop drains the group; if a group ever held more than
+ // one (state, action) pair, later pairs would overwrite earlier ones.
+ while target_change_stream.peek().is_some() {
+ // Target state/action for all bytes in the range
+ let (target_state, target_action) = target_change(&mut target_change_stream);
+
+ // Create a new entry for every byte in the range
+ for byte in start..=end {
+ // TODO: Force adding `State::` and `Action::`?
+ // TODO: Should we really use `pack` here without import?
+ tokens.extend(quote!(
+ state_changes[State::#state as usize][#byte] =
+ pack(State::#target_state, Action::#target_action);
+ ));
+ }
+ }
+ tokens
+}
+
+/// Get next target state and action.
+///
+/// Consumes three tokens: `state`, `,`, `action`.
+///
+/// # Panics
+///
+/// Panics if either token is missing or the `,` separator does not match.
+fn target_change(iter: &mut Peekable<token_stream::IntoIter>) -> (TokenTree, TokenTree) {
+ let target_state = iter.next().unwrap();
+
+ // Separator between state and action
+ expect_punct(iter, ',');
+
+ let target_action = iter.next().unwrap();
+
+ (target_state, target_action)
+}
+
+/// Check if next token matches specific punctuation.
+///
+/// On a match the token is consumed and `true` is returned; otherwise the
+/// iterator is left untouched (only peeked) and `false` is returned.
+fn optional_punct(iter: &mut Peekable<token_stream::IntoIter>, c: char) -> bool {
+ match iter.peek() {
+ Some(Punct(punct)) if punct.as_char() == c => iter.next().is_some(),
+ _ => false,
+ }
+}
+
+/// Ensure next token matches specific punctuation.
+///
+/// Always consumes one token from `iter`.
+///
+/// # Panics
+///
+/// Panics if the punctuation does not match.
+fn expect_punct(iter: &mut impl Iterator<Item = TokenTree>, c: char) {
+ match iter.next() {
+ Some(Punct(ref punct)) if punct.as_char() == c => (),
+ token => panic!("Expected punctuation '{}', but got {:?}", c, token),
+ }
+}
+
+/// Get next token as [`usize`].
+///
+/// Literals with a lowercase `0x` prefix are parsed as hexadecimal;
+/// everything else is parsed as decimal.
+///
+/// # Panics
+///
+/// Panics if the next token is not a [`usize`] in hex or decimal literal format.
+fn next_usize(iter: &mut impl Iterator<Item = TokenTree>) -> usize {
+ match iter.next() {
+ Some(Literal(literal)) => {
+ // Literals are compared via their source text, so `0x` detection
+ // relies on the exact spelling in the macro input.
+ let literal = literal.to_string();
+ if literal.starts_with("0x") {
+ usize::from_str_radix(&literal[2..], 16).unwrap()
+ } else {
+ usize::from_str_radix(&literal, 10).unwrap()
+ }
+ },
+ token => panic!("Expected literal, but got {:?}", token),
+ }
+}
+
+/// Get next token as [`Group`].
+///
+/// Returns the stream of tokens *inside* the group; the surrounding
+/// delimiter (`{}`, `()`, or `[]`) is discarded and not distinguished.
+///
+/// # Panics
+///
+/// Panics if the next token is not a [`Group`].
+fn next_group(iter: &mut impl Iterator<Item = TokenTree>) -> TokenStream {
+ match iter.next() {
+ Some(Group(group)) => group.stream(),
+ token => panic!("Expected group, but got {:?}", token),
+ }
+}