summaryrefslogtreecommitdiffstats
path: root/rust/vendor/nom-derive-impl
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 17:39:49 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-19 17:39:49 +0000
commita0aa2307322cd47bbf416810ac0292925e03be87 (patch)
tree37076262a026c4b48c8a0e84f44ff9187556ca35 /rust/vendor/nom-derive-impl
parentInitial commit. (diff)
downloadsuricata-a0aa2307322cd47bbf416810ac0292925e03be87.tar.xz
suricata-a0aa2307322cd47bbf416810ac0292925e03be87.zip
Adding upstream version 1:7.0.3.upstream/1%7.0.3
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'rust/vendor/nom-derive-impl')
-rw-r--r--rust/vendor/nom-derive-impl/.cargo-checksum.json1
-rw-r--r--rust/vendor/nom-derive-impl/Cargo.toml54
-rw-r--r--rust/vendor/nom-derive-impl/README.md118
-rw-r--r--rust/vendor/nom-derive-impl/src/config.rs130
-rw-r--r--rust/vendor/nom-derive-impl/src/endian.rs149
-rw-r--r--rust/vendor/nom-derive-impl/src/enums.rs105
-rw-r--r--rust/vendor/nom-derive-impl/src/gen.rs48
-rw-r--r--rust/vendor/nom-derive-impl/src/gen/enums.rs217
-rw-r--r--rust/vendor/nom-derive-impl/src/gen/fieldless_enums.rs178
-rw-r--r--rust/vendor/nom-derive-impl/src/gen/generator.rs369
-rw-r--r--rust/vendor/nom-derive-impl/src/gen/structs.rs190
-rw-r--r--rust/vendor/nom-derive-impl/src/lib.rs81
-rw-r--r--rust/vendor/nom-derive-impl/src/meta/attr.rs319
-rw-r--r--rust/vendor/nom-derive-impl/src/meta/attr_list.rs19
-rw-r--r--rust/vendor/nom-derive-impl/src/meta/error.rs10
-rw-r--r--rust/vendor/nom-derive-impl/src/meta/mod.rs60
-rw-r--r--rust/vendor/nom-derive-impl/src/parsertree.rs173
-rw-r--r--rust/vendor/nom-derive-impl/src/structs.rs489
18 files changed, 2710 insertions, 0 deletions
diff --git a/rust/vendor/nom-derive-impl/.cargo-checksum.json b/rust/vendor/nom-derive-impl/.cargo-checksum.json
new file mode 100644
index 0000000..69de4a8
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"799e8654dffdef43ae2bcb4ec95e35ae66c4827e14983e0b4c4f474eb4dd4c18","README.md":"5b7691490b9651f892750a839a4e154e704d73938161fbdbf69b434fe07fafc2","src/config.rs":"47f7c8c03c8b928a3110fbd32b5b84afd98e7f79e5e8fceeb10136f46811c10c","src/endian.rs":"c6db47df4329c829e7d8bef9c6c12b2882683437193419c5970fffa04c5cbde1","src/enums.rs":"226e3aaf20cb177ad74e9c0efaaa232ed9f074f12b180fb783031eabf1532340","src/gen.rs":"5ae45e250e9608af1ab5e2ab80fcf58fdc95d98deac4b75a229708cb627711b5","src/gen/enums.rs":"5a0d04afa5f13895dd23d485155c790a375d7159873e6aff7cdaddf19cd85f3c","src/gen/fieldless_enums.rs":"bd211fdd146f68c3f206307882fca58b54343ebb175107e25b61e49f587cdfc1","src/gen/generator.rs":"48d867ffc48c73b834e571fdf35eab5852f3e48e2fb714e25d15afd52939d9b2","src/gen/structs.rs":"4ce278e5f652de71024a4a408c3a64450f7b5445c9211ba31a00b1ff75548053","src/lib.rs":"31bc155f290bb50c7c1125b5b0db3efcec4a754e2453ef58d8497840d7081612","src/meta/attr.rs":"109c9e020bfc8ff815b1f3717d8e3c228e7c7dda8a03235c8ae875aa2eaa45e8","src/meta/attr_list.rs":"eee9cc6277cdc9688e2ae2350064cf9bfad5f8c044edde3551bf017edd35ff6f","src/meta/error.rs":"c00afe9f449098aa26d17af949b7fcadc245b379231adae99a4af009fee5344a","src/meta/mod.rs":"bd403c9842ab03d305332f83bb725286c0e275670312d1118393762308ae4088","src/parsertree.rs":"66e6fd636c2d187d4f7a74e38f3fbebd52d942caf96a934562a4414395739fb9","src/structs.rs":"f5143d25a75ec28137c7494e09db8305ad7dae002627bd06457ffe1b63a1d33a"},"package":"cd0b9a93a84b0d3ec3e70e02d332dc33ac6dfac9cde63e17fcb77172dededa62"} \ No newline at end of file
diff --git a/rust/vendor/nom-derive-impl/Cargo.toml b/rust/vendor/nom-derive-impl/Cargo.toml
new file mode 100644
index 0000000..2c7889d
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/Cargo.toml
@@ -0,0 +1,54 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+name = "nom-derive-impl"
+version = "0.10.1"
+authors = ["Pierre Chifflier <chifflier@wzdftpd.net>"]
+include = [
+ "LICENSE-*",
+ "README.md",
+ ".gitignore",
+ ".travis.yml",
+ "Cargo.toml",
+ "src/*.rs",
+ "src/gen/*.rs",
+ "src/meta/*.rs",
+ "tests/*.rs",
+]
+description = "Custom derive nom parsers from struct"
+homepage = "https://github.com/rust-bakery/nom-derive"
+readme = "README.md"
+keywords = [
+ "parser",
+ "nom",
+]
+categories = ["parsing"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/rust-bakery/nom-derive.git"
+
+[lib]
+proc-macro = true
+
+[dependencies.proc-macro2]
+version = "1.0"
+
+[dependencies.quote]
+version = "1.0"
+
+[dependencies.syn]
+version = "1.0.58"
+features = [
+ "parsing",
+ "extra-traits",
+ "full",
+]
diff --git a/rust/vendor/nom-derive-impl/README.md b/rust/vendor/nom-derive-impl/README.md
new file mode 100644
index 0000000..6bbb37f
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/README.md
@@ -0,0 +1,118 @@
+<!-- cargo-sync-readme start -->
+
+# nom-derive
+
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](./LICENSE-MIT)
+[![Apache License 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](./LICENSE-APACHE)
+[![docs.rs](https://docs.rs/nom-derive/badge.svg)](https://docs.rs/nom-derive)
+[![Build Status](https://travis-ci.org/chifflier/nom-derive.svg?branch=master)](https://travis-ci.org/chifflier/nom-derive)
+[![Crates.io Version](https://img.shields.io/crates/v/nom-derive.svg)](https://crates.io/crates/nom-derive)
+
+## Overview
+
+nom-derive is a custom derive attribute, to derive [nom] parsers automatically from the structure definition.
+
+It is not meant to replace [nom], but to provide a quick and easy way to generate parsers for
+structures, especially for simple structures. This crate aims at simplifying common cases.
+In some cases, writing the parser manually will remain more efficient.
+
+- [API documentation](https://docs.rs/nom-derive)
+- The [docs::Nom] pseudo-module. This is the main
+ documentation for the `Nom` attribute, with all possible options and many examples.
+
+*Feedback welcome !*
+
+## `#[derive(Nom)]`
+
+This crate exposes a single custom-derive macro `Nom` which
+implements `parse` for the struct it is applied to.
+
+The goal of this project is that:
+
+* `derive(Nom)` should be enough for you to derive [nom] parsers for simple
+ structures easily, without having to write it manually
+* it allows overriding any parsing method by your own
+* it allows using generated parsing functions along with handwritten parsers and
+ combining them without efforts
+* it remains as fast as nom
+
+`nom-derive` adds declarative parsing to `nom`. It also allows mixing with
+procedural parsing easily, making writing parsers for byte-encoded formats
+very easy.
+
+For example:
+
+```rust
+use nom_derive::*;
+
+#[derive(Nom)]
+struct S {
+ a: u32,
+ b: u16,
+ c: u16
+}
+```
+
+This adds static method `parse` to `S`. The generated code looks
+like:
+```rust,ignore
+impl S {
+ pub fn parse(i: &[u8]) -> nom::IResult(&[u8], S) {
+ let (i, a) = be_u32(i)?;
+ let (i, b) = be_u16(i)?;
+ let (i, c) = be_u16(i)?;
+ Ok((i, S{ a, b, c }))
+ }
+}
+```
+
+To parse input, just call `let res = S::parse(input);`.
+
+For extensive documentation of all attributes and examples, see the documentation of [docs::Nom]
+custom derive attribute.
+
+Many examples are provided, and more can be found in the [project
+tests](https://github.com/rust-bakery/nom-derive/tree/master/tests).
+
+## Combinators visibility
+
+All inferred parsers will generate code with absolute type path, so there is no need
+to add `use` statements for them. However, if you use any combinator directly (or in a `Parse`
+statement, for ex.), it has to be imported as usual.
+
+That is probably not going to change, since
+* a proc_macro cannot export items other than functions tagged with `#[proc_macro_derive]`
+* there are variants of combinators with the same names (complete/streaming, bits/bytes), so
+ re-exporting them would create side-effects.
+
+## Debug tips
+
+* If the generated parser does not compile, add `#[nom(DebugDerive)]` to the structure.
+ It will dump the generated parser to `stderr`.
+* If the generated parser fails at runtime, try adding `#[nom(Debug)]` to the structure or
+ to fields. It wraps subparsers in `dbg_dmp` and will print the field name and input to
+ `stderr` if the parser fails.
+
+[nom]: https://github.com/geal/nom
+<!-- cargo-sync-readme end -->
+
+## Changes
+
+See `CHANGELOG.md`, and `UPGRADING.md` for instructions for upgrading major versions.
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0
+ ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license
+ ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
+
+at your option.
+
+## Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
+dual licensed as above, without any additional terms or conditions.
diff --git a/rust/vendor/nom-derive-impl/src/config.rs b/rust/vendor/nom-derive-impl/src/config.rs
new file mode 100644
index 0000000..1c98859
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/config.rs
@@ -0,0 +1,130 @@
+use crate::endian::ParserEndianness;
+use crate::meta::attr::{MetaAttr, MetaAttrType};
+use proc_macro2::{Span, TokenStream};
+use syn::{spanned::Spanned, Error};
+
+#[derive(Debug)]
+pub struct Config {
+ pub struct_name: String,
+ /// Endianness for all parsers, if specified
+ pub global_endianness: ParserEndianness,
+ /// Endianness for this struct or enum, if specified
+ pub object_endianness: ParserEndianness,
+ /// Complete option for this struct (default: streaming)
+ pub complete: bool,
+ pub debug: bool,
+ pub debug_derive: bool,
+ pub generic_errors: bool,
+ selector_type: Option<TokenStream>,
+ selector_name: Option<String>,
+ input_name: String,
+ orig_input_name: String,
+ lifetime_name: String,
+ error_name: String,
+}
+
+impl Config {
+ pub fn from_meta_list(name: String, l: &[MetaAttr]) -> Result<Self, Error> {
+ let mut req_big_endian = false;
+ let mut req_little_endian = false;
+ let mut complete = false;
+ let mut debug = false;
+ let mut debug_derive = false;
+ let mut generic_errors = false;
+ let mut span_endian = None;
+ for meta in l {
+ match meta.attr_type {
+ MetaAttrType::BigEndian => {
+ req_big_endian = true;
+ span_endian = Some(meta.span());
+ }
+ MetaAttrType::LittleEndian => req_little_endian = true,
+ MetaAttrType::Complete => complete = true,
+ MetaAttrType::Debug => debug = true,
+ MetaAttrType::DebugDerive => debug_derive = true,
+ MetaAttrType::GenericErrors => generic_errors = true,
+ _ => (),
+ }
+ }
+ if req_big_endian & req_little_endian {
+ return Err(Error::new(
+ span_endian.unwrap_or_else(Span::call_site),
+ "Struct cannot be both big and little endian",
+ ));
+ }
+ let object_endianness = if req_big_endian {
+ ParserEndianness::BigEndian
+ } else if req_little_endian {
+ ParserEndianness::LittleEndian
+ } else {
+ ParserEndianness::Unspecified
+ };
+ let input_name = l
+ .iter()
+ .find_map(|m| {
+ if m.is_type(MetaAttrType::InputName) {
+ Some(m.arg().unwrap().to_string())
+ } else {
+ None
+ }
+ })
+ .unwrap_or_else(|| "i".to_string());
+ let selector_type = l.iter().find_map(|m| {
+ if m.is_type(MetaAttrType::Selector) {
+ Some(m.arg().unwrap().clone())
+ } else {
+ None
+ }
+ });
+ let selector_name = if selector_type.is_some() {
+ Some(String::from("selector"))
+ } else {
+ None
+ };
+ Ok(Config {
+ struct_name: name,
+ global_endianness: ParserEndianness::Unspecified,
+ object_endianness,
+ complete,
+ debug,
+ debug_derive,
+ generic_errors,
+ selector_type,
+ selector_name,
+ orig_input_name: "orig_".to_string() + &input_name,
+ lifetime_name: String::from("'nom"),
+ error_name: String::from("NomErr"),
+ input_name,
+ })
+ }
+
+ #[inline]
+ pub fn selector_type(&self) -> Option<&TokenStream> {
+ self.selector_type.as_ref()
+ }
+
+ #[inline]
+ pub fn selector_name(&self) -> Option<&str> {
+ self.selector_name.as_ref().map(|s| s.as_ref())
+ }
+
+ #[inline]
+ pub fn input_name(&self) -> &str {
+ &self.input_name
+ }
+
+ #[inline]
+ pub fn orig_input_name(&self) -> &str {
+ &self.orig_input_name
+ }
+
+ #[inline]
+ pub fn lifetime_name(&self) -> &str {
+ &self.lifetime_name
+ }
+
+ #[inline]
+ pub fn error_name(&self) -> &str {
+ &self.error_name
+ }
+}
diff --git a/rust/vendor/nom-derive-impl/src/endian.rs b/rust/vendor/nom-derive-impl/src/endian.rs
new file mode 100644
index 0000000..696901a
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/endian.rs
@@ -0,0 +1,149 @@
+use crate::config::*;
+use crate::meta::attr::{MetaAttr, MetaAttrType};
+use proc_macro2::Span;
+use syn::{spanned::Spanned, Error, Result};
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum ParserEndianness {
+ Unspecified,
+ LittleEndian,
+ BigEndian,
+ SetEndian,
+}
+
+pub fn get_object_endianness(config: &Config) -> ParserEndianness {
+ // first, check struct endianness
+ if config.object_endianness != ParserEndianness::Unspecified {
+ return config.object_endianness;
+ }
+ // finally, return global endianness
+ config.global_endianness
+}
+
+pub fn set_object_endianness(
+ span: Span,
+ endianness: ParserEndianness,
+ meta_list: &[MetaAttr],
+ config: &mut Config,
+) -> Result<()> {
+ config.object_endianness = endianness;
+ // first, check local attribute
+ let mut req_big_endian = false;
+ let mut req_little_endian = false;
+ let mut req_set_endian = false;
+ let mut span_endian = None;
+ for meta in meta_list {
+ match meta.attr_type {
+ MetaAttrType::BigEndian => req_big_endian = true,
+ MetaAttrType::LittleEndian => req_little_endian = true,
+ MetaAttrType::SetEndian => req_set_endian = true,
+ _ => continue,
+ }
+ span_endian = Some(meta.span());
+ }
+ // test if 2 or more flags are set
+ if two_or_more(req_big_endian, req_little_endian, req_set_endian) {
+ return Err(Error::new(
+ span_endian.unwrap_or(span),
+ "cannot be both big, little and/or set endian",
+ ));
+ }
+ if req_big_endian {
+ config.object_endianness = ParserEndianness::BigEndian;
+ } else if req_little_endian {
+ config.object_endianness = ParserEndianness::LittleEndian;
+ } else if req_set_endian {
+ config.object_endianness = ParserEndianness::SetEndian;
+ };
+ Ok(())
+}
+
+fn two_or_more(a: bool, b: bool, c: bool) -> bool {
+ if a {
+ b | c
+ } else {
+ b & c
+ }
+}
+
+pub fn get_local_endianness(
+ span: Span,
+ meta_list: &[MetaAttr],
+ config: &Config,
+) -> Result<ParserEndianness> {
+ // first, check local attribute
+ let mut req_big_endian = false;
+ let mut req_little_endian = false;
+ let mut req_set_endian = false;
+ for meta in meta_list {
+ match meta.attr_type {
+ MetaAttrType::BigEndian => req_big_endian = true,
+ MetaAttrType::LittleEndian => req_little_endian = true,
+ MetaAttrType::SetEndian => req_set_endian = true,
+ _ => (),
+ }
+ }
+ // test if 2 or more flags are set
+ if two_or_more(req_big_endian, req_little_endian, req_set_endian) {
+ return Err(Error::new(
+ span,
+ "cannot be both big, little and/or set endian",
+ ));
+ }
+ if req_big_endian {
+ return Ok(ParserEndianness::BigEndian);
+ } else if req_little_endian {
+ return Ok(ParserEndianness::LittleEndian);
+ } else if req_set_endian {
+ return Ok(ParserEndianness::SetEndian);
+ };
+ // otherwise, get object-level endianness
+ Ok(get_object_endianness(config))
+}
+
+pub fn validate_endianness(
+ attr_endianness: ParserEndianness,
+ object_endianness: ParserEndianness,
+ global_endianness: ParserEndianness,
+) -> Result<()> {
+ let mut req_big_endian = false;
+ let mut req_little_endian = false;
+ let mut req_set_endian = false;
+
+ match attr_endianness {
+ ParserEndianness::Unspecified => (),
+ ParserEndianness::BigEndian => req_big_endian = true,
+ ParserEndianness::LittleEndian => req_little_endian = true,
+ _ => unreachable!(),
+ }
+
+ match object_endianness {
+ ParserEndianness::Unspecified => (),
+ ParserEndianness::BigEndian => req_big_endian = true,
+ ParserEndianness::LittleEndian => req_little_endian = true,
+ ParserEndianness::SetEndian => req_set_endian = true,
+ }
+
+ match global_endianness {
+ ParserEndianness::Unspecified => (),
+ ParserEndianness::BigEndian => req_big_endian = true,
+ ParserEndianness::LittleEndian => req_little_endian = true,
+ _ => unreachable!(),
+ }
+
+ if req_big_endian & req_little_endian {
+ return Err(Error::new(
+ Span::call_site(),
+ "Object cannot be both big and little endian",
+ ));
+ }
+
+ if req_set_endian & (req_big_endian | req_little_endian) {
+ return Err(Error::new(
+ Span::call_site(),
+ "Object cannot be both SetEndian, and specify big or little endian",
+ ));
+ }
+
+ Ok(())
+}
diff --git a/rust/vendor/nom-derive-impl/src/enums.rs b/rust/vendor/nom-derive-impl/src/enums.rs
new file mode 100644
index 0000000..3bb99cd
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/enums.rs
@@ -0,0 +1,105 @@
+use crate::config::*;
+use crate::meta;
+use crate::meta::attr::{MetaAttr, MetaAttrType};
+use crate::parsertree::{ParserExpr, ParserTreeItem};
+use crate::structs::{get_pre_post_exec, parse_fields, StructParser, StructParserTree};
+use syn::{spanned::Spanned, *};
+
+#[derive(Debug)]
+pub(crate) struct VariantParserTree {
+ pub ident: syn::Ident,
+ pub selector_type: String,
+ pub struct_def: StructParserTree,
+}
+
+pub(crate) fn parse_variant(
+ variant: &syn::Variant,
+ config: &mut Config,
+) -> Result<VariantParserTree> {
+ // eprintln!("variant: {:?}", variant);
+ let meta_list =
+ meta::parse_nom_attribute(&variant.attrs).expect("Parsing the 'nom' meta attribute failed");
+ let selector = get_selector(&meta_list).ok_or_else(|| {
+ Error::new(
+ variant.span(),
+ "Nom-derive: the 'Selector' attribute must be used to give the value of selector item",
+ )
+ })?;
+ let mut struct_def = parse_fields(&variant.fields, config)?;
+ if variant.fields == syn::Fields::Unit {
+ let mut p = None;
+ for meta in &meta_list {
+ if meta.attr_type == MetaAttrType::Parse {
+ let s = meta.arg().unwrap();
+ p = Some(ParserExpr::Raw(s.clone()));
+ }
+ }
+ let (pre, post) = get_pre_post_exec(&meta_list, config);
+ let p = p.unwrap_or(ParserExpr::Nop);
+ let item = ParserTreeItem::new(Some(variant.ident.clone()), p);
+ let sp = StructParser::new("_".to_string(), item, pre, post);
+ struct_def.parsers.push(sp);
+ }
+ // discriminant ?
+ Ok(VariantParserTree {
+ ident: variant.ident.clone(),
+ selector_type: selector,
+ struct_def,
+ })
+}
+
+fn get_selector(meta_list: &[MetaAttr]) -> Option<String> {
+ for meta in meta_list {
+ if MetaAttrType::Selector == meta.attr_type {
+ return Some(meta.arg().unwrap().to_string());
+ }
+ }
+ None
+}
+
+pub(crate) fn get_repr(attrs: &[syn::Attribute]) -> Option<Ident> {
+ for attr in attrs {
+ if let Ok(ref meta) = attr.parse_meta() {
+ match meta {
+ syn::Meta::NameValue(_) | syn::Meta::Path(_) => (),
+ syn::Meta::List(ref metalist) => {
+ if let Some(ident) = metalist.path.get_ident() {
+ if ident == "repr" {
+ for n in metalist.nested.iter() {
+ match n {
+ syn::NestedMeta::Meta(meta) => match meta {
+ syn::Meta::Path(path) => {
+ if let Some(word) = path.get_ident() {
+ return Some(word.clone());
+ } else {
+ panic!("unsupported nested type for 'repr'")
+ }
+ }
+ _ => panic!("unsupported nested type for 'repr'"),
+ },
+ _ => panic!("unsupported meta type for 'repr'"),
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ None
+}
+
+pub(crate) fn is_input_fieldless_enum(ast: &syn::DeriveInput) -> bool {
+ match ast.data {
+ syn::Data::Enum(ref data_enum) => {
+ // eprintln!("{:?}", data_enum);
+ for v in data_enum.variants.iter() {
+ if syn::Fields::Unit != v.fields {
+ return false;
+ }
+ }
+ true
+ }
+ _ => false,
+ }
+}
diff --git a/rust/vendor/nom-derive-impl/src/gen.rs b/rust/vendor/nom-derive-impl/src/gen.rs
new file mode 100644
index 0000000..d3ed1aa
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/gen.rs
@@ -0,0 +1,48 @@
+use crate::{endian::ParserEndianness, enums::is_input_fieldless_enum, meta};
+use proc_macro2::{Span, TokenStream};
+use syn::*;
+
+mod enums;
+mod fieldless_enums;
+mod generator;
+mod structs;
+
+use enums::GenEnum;
+use fieldless_enums::GenFieldlessEnum;
+pub(crate) use generator::*;
+use structs::GenStruct;
+
+pub(crate) fn gen_impl(
+ ast: &syn::DeriveInput,
+ endianness: ParserEndianness,
+) -> Result<TokenStream> {
+ // eprintln!("ast: {:#?}", ast);
+ let generator: Box<dyn Generator> = match &ast.data {
+ syn::Data::Enum(_) => {
+ // look for a selector
+ let meta = meta::parse_nom_top_level_attribute(&ast.attrs)?;
+ if meta
+ .iter()
+ .any(|m| m.is_type(meta::attr::MetaAttrType::Selector))
+ {
+ Box::new(GenEnum::from_ast(ast, endianness)?)
+ } else {
+ // no selector, try fieldless enum
+ if is_input_fieldless_enum(ast) {
+ Box::new(GenFieldlessEnum::from_ast(ast, endianness)?)
+ } else {
+ return Err(Error::new(
+ ast.ident.span(),
+ "Nom-derive: enums must have a 'selector' attribute",
+ ));
+ }
+ }
+ }
+ syn::Data::Struct(_) => Box::new(GenStruct::from_ast(ast, endianness)?),
+ syn::Data::Union(_) => panic!("Unions not supported"),
+ };
+
+ let impl_tokens = generator.gen_impl()?;
+ // eprintln!("\n***\nglobal_impl: {}\n---\n", impl_tokens);
+ Ok(impl_tokens)
+}
diff --git a/rust/vendor/nom-derive-impl/src/gen/enums.rs b/rust/vendor/nom-derive-impl/src/gen/enums.rs
new file mode 100644
index 0000000..a66e38b
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/gen/enums.rs
@@ -0,0 +1,217 @@
+use proc_macro2::Ident;
+use proc_macro2::Span;
+use proc_macro2::TokenStream;
+use syn::*;
+
+use crate::config::Config;
+use crate::endian::*;
+use crate::enums::*;
+use crate::gen::get_extra_args;
+use crate::meta;
+use crate::structs::get_pre_post_exec;
+use crate::Result;
+
+use super::Generator;
+
+pub struct GenEnum {
+ pub name: Ident,
+ pub config: Config,
+
+ extra_args: Option<TokenStream>,
+
+ orig_generics: Generics,
+ tl_pre: Option<TokenStream>,
+ tl_post: Option<TokenStream>,
+ variants_defs: Vec<VariantParserTree>,
+}
+
+impl Generator for GenEnum {
+ fn from_ast(ast: &DeriveInput, endianness: ParserEndianness) -> Result<Self> {
+ match &ast.data {
+ syn::Data::Enum(data_enum) => GenEnum::from_data_enum(
+ &ast.ident,
+ data_enum,
+ &ast.attrs,
+ &ast.generics,
+ endianness,
+ ),
+ _ => panic!("Wrong type for GenEnum::from_ast"),
+ }
+ }
+
+ #[inline]
+ fn name(&self) -> &Ident {
+ &self.name
+ }
+
+ fn set_debug(&mut self, debug_derive: bool) {
+ self.config.debug_derive |= debug_derive;
+ }
+
+ #[inline]
+ fn extra_args(&self) -> Option<&TokenStream> {
+ self.extra_args.as_ref()
+ }
+
+ #[inline]
+ fn orig_generics(&self) -> &Generics {
+ &self.orig_generics
+ }
+
+ #[inline]
+ fn config(&self) -> &Config {
+ &self.config
+ }
+
+ fn gen_fn_body(&self, endianness: ParserEndianness) -> Result<TokenStream> {
+ let orig_input = Ident::new(self.config.orig_input_name(), Span::call_site());
+ let input = Ident::new(self.config.input_name(), Span::call_site());
+ let (tl_pre, tl_post) = (&self.tl_pre, &self.tl_post);
+ // generate body
+ let (default_case_handled, variants_code) = self.gen_variants(endianness)?;
+ let default_case = if default_case_handled {
+ quote! {}
+ } else {
+ quote! { _ => Err(nom::Err::Error(nom::error_position!(#input, nom::error::ErrorKind::Switch))) }
+ };
+ let tokens = quote! {
+ let #input = #orig_input;
+ #tl_pre
+ let (#input, enum_def) = match selector {
+ #(#variants_code)*
+ #default_case
+ }?;
+ #tl_post
+ Ok((#input, enum_def))
+ };
+ Ok(tokens)
+ }
+}
+
+impl GenEnum {
+ pub fn from_data_enum(
+ name: &Ident,
+ data_enum: &DataEnum,
+ attrs: &[Attribute],
+ generics: &Generics,
+ endianness: ParserEndianness,
+ ) -> Result<Self> {
+ let name = name.clone();
+
+ // parse top-level attributes and prepare tokens for each field parser
+ let meta = meta::parse_nom_top_level_attribute(attrs)?;
+ // eprintln!("top-level meta: {:?}", meta);
+ let mut config = Config::from_meta_list(name.to_string(), &meta)?;
+
+ // endianness must be set before parsing struct
+ set_object_endianness(name.span(), endianness, &meta, &mut config)?;
+
+ let extra_args = get_extra_args(&meta).map(Clone::clone);
+
+ // test endianness validity (not 2 or more)
+ validate_endianness(
+ endianness,
+ config.object_endianness,
+ config.global_endianness,
+ )?;
+
+ // save global pre/post exec
+ let (tl_pre, tl_post) = get_pre_post_exec(&meta, &config);
+
+ // fieldless enums should not be handled by this generator
+ assert!(config.selector_type().is_some());
+
+ // iter fields / variants and store info
+ let variants_defs = data_enum
+ .variants
+ .iter()
+ .map(|v| parse_variant(v, &mut config))
+ .collect::<Result<Vec<_>>>()?;
+
+ Ok(Self {
+ name,
+ config,
+ extra_args,
+ orig_generics: generics.clone(),
+ tl_pre,
+ tl_post,
+ variants_defs,
+ })
+ }
+
+ /// Generate parser code for every variant of the enum
+ ///
+ /// Returns a boolean indicating if default case was handled, and the list of tokens for each variant
+ fn gen_variants(&self, endianness: ParserEndianness) -> Result<(bool, Vec<TokenStream>)> {
+ let name = &self.name;
+ let input = syn::Ident::new(self.config.input_name(), Span::call_site());
+ let mut default_case_handled = false;
+ let mut variants_code: Vec<_> = {
+ self.variants_defs
+ .iter()
+ .map(|def| {
+ if def.selector_type == "_" {
+ default_case_handled = true;
+ }
+ let m: proc_macro2::TokenStream =
+ def.selector_type.parse().expect("invalid selector value");
+ let variantname = &def.ident;
+ let (idents, parser_tokens): (Vec<_>, Vec<_>) = def
+ .struct_def
+ .parsers
+ .iter()
+ .map(|sp| {
+ let id = syn::Ident::new(&sp.name, Span::call_site());
+ // set current endianness for functions that do not specify it
+ let item = sp.item.with_endianness(endianness);
+ (id, item)
+ })
+ .unzip();
+ let (pre, post): (Vec<_>, Vec<_>) = def
+ .struct_def
+ .parsers
+ .iter()
+ .map(|sp| (sp.pre_exec.as_ref(), sp.post_exec.as_ref()))
+ .unzip();
+ let idents2 = idents.clone();
+ let struct_def = match (def.struct_def.empty, def.struct_def.unnamed) {
+ (true, _) => quote! { ( #name::#variantname ) },
+ (_, true) => quote! { ( #name::#variantname ( #(#idents2),* ) ) },
+ (_, false) => quote! { ( #name::#variantname { #(#idents2),* } ) },
+ };
+ //
+ // XXX this looks wrong: endianness does not appear in this function!
+ // XXX parser_tokens should specify endianness
+ // XXX
+ // XXX this is caused by quote!{} calling to_string()
+ quote! {
+ #m => {
+ #(
+ #pre
+ let (#input, #idents) = #parser_tokens (#input) ?;
+ #post
+ )*
+ let struct_def = #struct_def;
+ Ok((#input, struct_def))
+ // Err(nom::Err::Error(error_position!(#input_name, nom::ErrorKind::Switch)))
+ },
+ }
+ })
+ .collect()
+ };
+ // if we have a default case, make sure it is the last entry
+ if default_case_handled {
+ let pos = self
+ .variants_defs
+ .iter()
+ .position(|def| def.selector_type == "_")
+ .expect("default case is handled but couldn't find index");
+ let last_index = self.variants_defs.len() - 1;
+ if pos != last_index {
+ // self.variants_defs.swap(pos, last_index);
+ variants_code.swap(pos, last_index);
+ }
+ }
+ Ok((default_case_handled, variants_code))
+ }
+}
diff --git a/rust/vendor/nom-derive-impl/src/gen/fieldless_enums.rs b/rust/vendor/nom-derive-impl/src/gen/fieldless_enums.rs
new file mode 100644
index 0000000..aa4a259
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/gen/fieldless_enums.rs
@@ -0,0 +1,178 @@
+use proc_macro2::Ident;
+use proc_macro2::Span;
+use proc_macro2::TokenStream;
+use syn::*;
+
+use crate::config::Config;
+use crate::endian::*;
+use crate::enums::*;
+use crate::gen::get_extra_args;
+use crate::meta;
+use crate::parsertree::{ParserExpr, TypeItem};
+use crate::structs::get_pre_post_exec;
+use crate::Result;
+
+use super::Generator;
+
+pub struct GenFieldlessEnum {
+ pub name: Ident,
+ pub config: Config,
+
+ extra_args: Option<TokenStream>,
+
+ orig_generics: Generics,
+ tl_pre: Option<TokenStream>,
+ tl_post: Option<TokenStream>,
+ repr_parser: ParserExpr,
+ variants_code: Vec<TokenStream>,
+}
+
+impl Generator for GenFieldlessEnum {
+ fn from_ast(ast: &DeriveInput, endianness: ParserEndianness) -> Result<Self> {
+ match &ast.data {
+ syn::Data::Enum(data_enum) => GenFieldlessEnum::from_data_enum(
+ &ast.ident,
+ data_enum,
+ &ast.attrs,
+ &ast.generics,
+ endianness,
+ ),
+ _ => panic!("Wrong type for GenFieldlessEnum::from_ast"),
+ }
+ }
+
+ #[inline]
+ fn name(&self) -> &Ident {
+ &self.name
+ }
+
+ fn set_debug(&mut self, debug_derive: bool) {
+ self.config.debug_derive |= debug_derive;
+ }
+
+ #[inline]
+ fn extra_args(&self) -> Option<&TokenStream> {
+ self.extra_args.as_ref()
+ }
+
+ #[inline]
+ fn orig_generics(&self) -> &Generics {
+ &self.orig_generics
+ }
+
+ #[inline]
+ fn config(&self) -> &Config {
+ &self.config
+ }
+
+ fn gen_fn_body(&self, endianness: ParserEndianness) -> Result<TokenStream> {
+ let orig_input = Ident::new(self.config.orig_input_name(), Span::call_site());
+ let input = Ident::new(self.config.input_name(), Span::call_site());
+ let (tl_pre, tl_post) = (&self.tl_pre, &self.tl_post);
+ let variants_code = &self.variants_code;
+ let parser = &self.repr_parser.with_endianness(endianness);
+ // generate body
+ let tokens = quote! {
+ let #input = #orig_input;
+ #tl_pre
+ let (#input, selector) = #parser(#input)?;
+ let enum_def =
+ #(#variants_code else)*
+ { return Err(nom::Err::Error(nom::error::make_error(#orig_input, nom::error::ErrorKind::Switch))); };
+ #tl_post
+ Ok((#input, enum_def))
+ };
+
+ Ok(tokens)
+ }
+}
+
+impl GenFieldlessEnum {
+ pub fn from_data_enum(
+ name: &Ident,
+ data_enum: &DataEnum,
+ attrs: &[Attribute],
+ generics: &Generics,
+ endianness: ParserEndianness,
+ ) -> Result<Self> {
+ let name = name.clone();
+
+ // parse top-level attributes and prepare tokens for each field parser
+ let meta = meta::parse_nom_top_level_attribute(attrs)?;
+ // eprintln!("top-level meta: {:?}", meta);
+ let mut config = Config::from_meta_list(name.to_string(), &meta)?;
+
+ // endianness must be set before parsing struct
+ set_object_endianness(name.span(), endianness, &meta, &mut config)?;
+
+ let extra_args = get_extra_args(&meta).map(Clone::clone);
+
+ // test endianness validity (not 2 or more)
+ validate_endianness(
+ endianness,
+ config.object_endianness,
+ config.global_endianness,
+ )?;
+
+ // save global pre/post exec
+ let (tl_pre, tl_post) = get_pre_post_exec(&meta, &config);
+
+ if extra_args.is_some() {
+ panic!("fieldless enums cannot have extra_args");
+ }
+
+ let repr = get_repr(attrs).ok_or_else(|| {
+ Error::new(
+ name.span(),
+ "Nom-derive: fieldless enums must have a 'repr' or 'selector' attribute",
+ )
+ })?;
+ let repr_string = repr.to_string();
+ let repr_type = syn::parse_str::<Type>(&repr_string).expect("could not parse repr type");
+
+ let repr_parser = match repr_string.as_ref() {
+ "u8" | "u16" | "u24" | "u32" | "u64" | "u128" | "i8" | "i16" | "i24" | "i32"
+ | "i64" | "i128" => {
+ let endian = get_object_endianness(&config);
+ match endian {
+ ParserEndianness::BigEndian => {
+ ParserExpr::CallParseBE(TypeItem(repr_type.clone()))
+ }
+ ParserEndianness::LittleEndian => {
+ ParserExpr::CallParseLE(TypeItem(repr_type.clone()))
+ }
+ ParserEndianness::Unspecified => {
+ ParserExpr::CallParse(TypeItem(repr_type.clone()))
+ }
+ ParserEndianness::SetEndian => unimplemented!("SetEndian for fieldless enums"),
+ }
+ }
+ _ => {
+ return Err(Error::new(
+ repr.span(),
+ "Nom-derive: cannot parse 'repr' content (must be a primitive type)",
+ ))
+ }
+ };
+
+ let variants_code: Vec<_> = data_enum
+ .variants
+ .iter()
+ .map(|variant| {
+ let id = &variant.ident;
+ quote! { if selector == #name::#id as #repr_type { #name::#id } }
+ })
+ .collect();
+
+ Ok(Self {
+ name,
+ config,
+ extra_args,
+ orig_generics: generics.clone(),
+ tl_pre,
+ tl_post,
+ repr_parser,
+ variants_code,
+ })
+ }
+}
diff --git a/rust/vendor/nom-derive-impl/src/gen/generator.rs b/rust/vendor/nom-derive-impl/src/gen/generator.rs
new file mode 100644
index 0000000..fdc8c96
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/gen/generator.rs
@@ -0,0 +1,369 @@
+use crate::config::Config;
+use crate::endian::*;
+use crate::meta::attr::{MetaAttr, MetaAttrType};
+use proc_macro2::{Span, TokenStream};
+use quote::ToTokens;
+use syn::parse::Parser;
+use syn::punctuated::Punctuated;
+use syn::token::Comma;
+use syn::*;
+
+/// Common interface implemented by the derive code generators (structs,
+/// enums, fieldless enums).
+///
+/// Implementors provide the data extracted from the AST (`from_ast`) plus the
+/// body of a single parse function (`gen_fn_body`); the provided methods
+/// assemble the complete `parse`, `parse_be` and `parse_le` functions and the
+/// surrounding `impl` block.
+pub(crate) trait Generator {
+    /// Build the generator from the derive input, with the endianness
+    /// requested by the derive entry point (`Nom`/`NomBE`/`NomLE`).
+    fn from_ast(ast: &DeriveInput, endianness: ParserEndianness) -> Result<Self>
+    where
+        Self: Sized;
+
+    /// Name of the type the impl is generated for.
+    fn name(&self) -> &Ident;
+
+    /// Enable debug output of the generated tokens (OR-ed in: once set it
+    /// stays set).
+    fn set_debug(&mut self, debug_derive: bool);
+
+    /// Extra function arguments declared with the `ExtraArgs` attribute.
+    fn extra_args(&self) -> Option<&TokenStream>;
+
+    /// Generics of the original type definition.
+    fn orig_generics(&self) -> &Generics;
+
+    /// Additional `where` predicates required by the generated impl
+    /// (default: none).
+    fn impl_where_predicates(&self) -> Option<&Vec<WherePredicate>> {
+        None
+    }
+
+    /// Parser configuration derived from the top-level attributes.
+    fn config(&self) -> &Config;
+
+    /// Generate the body of the parse function for the given endianness.
+    fn gen_fn_body(&self, endianness: ParserEndianness) -> Result<TokenStream>;
+
+    /// Generate `parse_be`: a real implementation when big-endian parsing is
+    /// allowed for this object, otherwise a stub forwarding to `parse_le`.
+    fn gen_parse_be(&self) -> Result<TokenStream> {
+        let fn_decl = gen_fn_decl(
+            ParserEndianness::BigEndian,
+            self.extra_args(),
+            self.config(),
+        );
+        if self.has_impl_for_endianness(ParserEndianness::BigEndian) {
+            let fn_body = self.gen_fn_body(ParserEndianness::BigEndian)?;
+
+            let fn_tokens = quote! {
+                #fn_decl
+                {
+                    #fn_body
+                }
+            };
+            Ok(fn_tokens)
+        } else {
+            // object is little-endian only: delegate to parse_le
+            let call_args = self.get_call_args();
+            let ts = quote! {
+                #fn_decl {
+                    Self::parse_le(#call_args)
+                }
+            };
+            Ok(ts)
+        }
+    }
+
+    /// Generate `parse_le`: a real implementation when little-endian parsing
+    /// is allowed for this object, otherwise a stub forwarding to `parse_be`.
+    fn gen_parse_le(&self) -> Result<TokenStream> {
+        let fn_decl = gen_fn_decl(
+            ParserEndianness::LittleEndian,
+            self.extra_args(),
+            self.config(),
+        );
+        if self.has_impl_for_endianness(ParserEndianness::LittleEndian) {
+            let fn_body = self.gen_fn_body(ParserEndianness::LittleEndian)?;
+
+            let fn_tokens = quote! {
+                #fn_decl
+                {
+                    #fn_body
+                }
+            };
+            Ok(fn_tokens)
+        } else {
+            // object is big-endian only: delegate to parse_be
+            let call_args = self.get_call_args();
+            let ts = quote! {
+                #fn_decl {
+                    Self::parse_be(#call_args)
+                }
+            };
+            Ok(ts)
+        }
+    }
+
+    /// Generate the endianness-agnostic `parse` function; it forwards to
+    /// `parse_be` (big-endian is the default when nothing is specified).
+    fn gen_parse(&self) -> Result<TokenStream> {
+        let ident_e = Ident::new(self.config().error_name(), Span::call_site());
+        let lft = Lifetime::new(self.config().lifetime_name(), Span::call_site());
+        // 'parse' function
+        let maybe_err = if self.config().generic_errors {
+            quote!( , #ident_e )
+        } else {
+            quote!()
+        };
+        // "special case": extra args or a selector mean the Parse trait cannot
+        // be implemented, so public inherent functions are generated instead
+        let special_case = self.extra_args().is_some() || self.config().selector_type().is_some();
+        let scope = if special_case {
+            quote! { pub }
+        } else {
+            quote! {}
+        };
+        let tokens_parse = {
+            let (fn_generics, where_clause) = if self.config().generic_errors && special_case {
+                // generic error type must be declared on the function itself
+                (
+                    quote!(<#ident_e>),
+                    quote! {where
+                        #ident_e: nom_derive::nom::error::ParseError<&#lft [u8]>,
+                        #ident_e: std::fmt::Debug,
+                    },
+                )
+            } else {
+                (quote!(), quote!())
+            };
+            let call_args = self.get_call_args();
+            let fn_args = get_fn_args(self.extra_args(), self.config());
+            // NOTE(review): the input lifetime is hard-coded as 'nom in this
+            // signature, while the where clause uses config.lifetime_name() —
+            // presumably they always agree (default name); confirm upstream
+            quote! {
+                #scope fn parse#fn_generics(#fn_args) -> nom::IResult<&'nom [u8], Self #maybe_err> #where_clause {
+                    Self::parse_be(#call_args)
+                }
+            }
+        };
+        Ok(tokens_parse)
+    }
+
+    /// Generate the complete `impl` block: an inherent impl when extra args or
+    /// a selector prevent implementing `Parse`, otherwise an impl of the
+    /// `nom_derive::Parse` trait.
+    fn gen_impl(&self) -> Result<TokenStream> {
+        let name = self.name();
+        let lft = Lifetime::new(self.config().lifetime_name(), Span::call_site());
+        let ident_e = Ident::new(self.config().error_name(), Span::call_site());
+        let maybe_err = if self.config().generic_errors {
+            quote!( , #ident_e )
+        } else {
+            quote!()
+        };
+
+        let tokens_parse = self.gen_parse()?;
+        let tokens_parse_be = self.gen_parse_be()?;
+        let tokens_parse_le = self.gen_parse_le()?;
+
+        // extract impl parameters from struct
+        let orig_generics = &self.orig_generics();
+        let (impl_generics, ty_generics, where_clause) = orig_generics.split_for_impl();
+
+        // add the input slice lifetime to the impl generics
+        let mut gen_impl: Generics = parse_quote!(#impl_generics);
+        gen_impl
+            .params
+            .push(GenericParam::Lifetime(LifetimeDef::new(lft.clone())));
+        let param_e = TypeParam::from(ident_e.clone());
+
+        let mut gen_wh: WhereClause = if where_clause.is_none() {
+            parse_quote!(where)
+        } else {
+            parse_quote!(#where_clause)
+        };
+        let lfts: Vec<_> = orig_generics.lifetimes().collect();
+        if !lfts.is_empty() {
+            // input slice must outlive all lifetimes from Self
+            let wh: WherePredicate = parse_quote! { #lft: #(#lfts)+* };
+            gen_wh.predicates.push(wh);
+        };
+
+        // make sure generic parameters implement Parse
+        for param in orig_generics.type_params() {
+            let param_ident = &param.ident;
+            let dep: WherePredicate = parse_quote! { #param_ident: Parse< &#lft [u8] #maybe_err > };
+            gen_wh.predicates.push(dep);
+        }
+        if let Some(impl_where_predicates) = self.impl_where_predicates() {
+            for wh in impl_where_predicates {
+                gen_wh.predicates.push(wh.clone());
+            }
+        }
+
+        // Global impl
+        let impl_tokens = if self.extra_args().is_some() || self.config().selector_type().is_some()
+        {
+            // There are extra arguments, so we can't generate the Parse impl
+            // Generate an equivalent implementation
+            if self.config().generic_errors {
+                // XXX will fail: generic type should be added to function (not struct), or
+                // XXX compiler will complain that
+                // XXX "the type parameter `NomErr` is not constrained by the impl trait, self type, or predicates"
+                // this happens only when not implementing trait (the trait constrains NomErr)
+                // let wh: WherePredicate = parse_quote!(#ident_e: nom::error::ParseError<& #lft [u8]>);
+                // gen_wh.predicates.push(wh);
+                // gen_impl.params.push(GenericParam::Type(param_e));
+            }
+            quote! {
+                impl #gen_impl #name #ty_generics #gen_wh {
+                    #tokens_parse_be
+                    #tokens_parse_le
+                    #tokens_parse
+                }
+            }
+        } else {
+            // Generate an impl block for the Parse trait
+            let error = if self.config().generic_errors {
+                let wh: WherePredicate =
+                    parse_quote!(#ident_e: nom::error::ParseError<& #lft [u8]>);
+                gen_wh.predicates.push(wh);
+                gen_impl.params.push(GenericParam::Type(param_e));
+                quote! { #ident_e }
+            } else {
+                quote! { nom::error::Error<&#lft [u8]> }
+            };
+            quote! {
+                impl #gen_impl nom_derive::Parse<& #lft [u8], #error> for #name #ty_generics #gen_wh {
+                    #tokens_parse_be
+                    #tokens_parse_le
+                    #tokens_parse
+                }
+
+            }
+        };
+
+        if self.config().debug_derive {
+            // DebugDerive attribute: dump the generated code to stderr
+            eprintln!("tokens:\n{}", impl_tokens);
+        }
+
+        Ok(impl_tokens)
+    }
+
+    /// True when a real (non-stub) implementation must be generated for the
+    /// given endianness, i.e. it matches the object or global endianness, or
+    /// the endianness is unconstrained.
+    fn has_impl_for_endianness(&self, endianness: ParserEndianness) -> bool {
+        assert!(
+            endianness == ParserEndianness::BigEndian
+                || endianness == ParserEndianness::LittleEndian
+        );
+
+        // explicitly requested endianness, either per-object or globally
+        if self.config().object_endianness == endianness
+            || self.config().global_endianness == endianness
+        {
+            return true;
+        }
+
+        if self.config().object_endianness == ParserEndianness::Unspecified
+            || self.config().global_endianness == ParserEndianness::Unspecified
+        {
+            return true;
+        }
+
+        false
+    }
+
+    /// Build the argument list used when one generated function forwards to
+    /// another: input, then selector (if any), then the extra args by name.
+    fn get_call_args(&self) -> TokenStream {
+        let mut call_args: Punctuated<_, Token![,]> = Punctuated::new();
+        let orig_input = Ident::new(self.config().orig_input_name(), Span::call_site());
+        call_args.push(orig_input);
+        // selector, if present
+        if let Some(s) = self.config().selector_name() {
+            let selector = Ident::new(s, Span::call_site());
+            call_args.push(selector);
+        }
+        // extra args, if any
+        if let Some(ts) = self.extra_args() {
+            let ts = ts.clone();
+            let parser = Punctuated::<syn::FnArg, Comma>::parse_separated_nonempty;
+            let extra_args = parser.parse2(ts).expect("parse extra_args");
+            for extra_arg in &extra_args {
+                match extra_arg {
+                    syn::FnArg::Receiver(_) => panic!("self should not be used in extra_args"),
+                    // only the argument name is forwarded (declaration is `name: type`)
+                    syn::FnArg::Typed(t) => {
+                        if let syn::Pat::Ident(pat_ident) = t.pat.as_ref() {
+                            call_args.push(pat_ident.ident.clone());
+                        } else {
+                            panic!("unexpected pattern in extra_args");
+                        }
+                    }
+                }
+            }
+        };
+        call_args.to_token_stream()
+    }
+}
+
+/// Build the declaration line (signature + where clause) of a generated parse
+/// function, without its body.
+pub(crate) fn gen_fn_decl(
+    endianness: ParserEndianness,
+    extra_args: Option<&TokenStream>,
+    config: &Config,
+) -> TokenStream {
+    // function name depends on the requested endianness
+    let parse = match endianness {
+        ParserEndianness::BigEndian => "parse_be",
+        ParserEndianness::LittleEndian => "parse_le",
+        ParserEndianness::SetEndian => panic!("gen_fn_decl should never receive SetEndian"),
+        ParserEndianness::Unspecified => "parse",
+    };
+    let parse = Ident::new(parse, Span::call_site());
+    let fn_args = get_fn_args(extra_args, config);
+    // get lifetimes
+    let lft = Lifetime::new(config.lifetime_name(), Span::call_site());
+    let mut fn_where_clause = WhereClause {
+        where_token: Token![where](Span::call_site()),
+        predicates: punctuated::Punctuated::new(),
+    };
+
+    // if we are generating a stub, we need to mark the function as `pub`
+    let special_case = extra_args.is_some() || config.selector_type().is_some();
+    let scope = if special_case {
+        quote! { pub }
+    } else {
+        quote! {}
+    };
+
+    // function declaration line
+    if config.generic_errors {
+        let ident_e = Ident::new(config.error_name(), Span::call_site());
+        let mut fn_generics = None;
+        if special_case {
+            // special case: not implementing the Parse trait,
+            // generic errors must be added to function, not struct
+            //
+            // extend where clause for generic parameters
+            let dep: WherePredicate = parse_quote! {
+                #ident_e: nom_derive::nom::error::ParseError<&#lft [u8]>
+            };
+            fn_where_clause.predicates.push(dep);
+            let dep: WherePredicate = parse_quote! { #ident_e: std::fmt::Debug };
+            fn_where_clause.predicates.push(dep);
+            // add error type to function generics
+            fn_generics = Some(quote!(<#ident_e>));
+        }
+        quote! {
+            #scope fn #parse#fn_generics(#fn_args) -> nom::IResult<&#lft [u8], Self, #ident_e>
+            #fn_where_clause
+        }
+    } else {
+        quote! {
+            #scope fn #parse(#fn_args) -> nom::IResult<&#lft [u8], Self>
+            #fn_where_clause
+        }
+    }
+}
+
+/// Return the argument of the first `ExtraArgs` top-level attribute, if any.
+pub(crate) fn get_extra_args(meta_list: &[MetaAttr]) -> Option<&TokenStream> {
+    meta_list
+        .iter()
+        .find(|m| m.attr_type == MetaAttrType::ExtraArgs)
+        .and_then(MetaAttr::arg)
+}
+
+/// Build the parameter list of a generated parse function: the input slice
+/// first, then the selector (if configured), then the extra args.
+pub(crate) fn get_fn_args(
+    extra_args: Option<&TokenStream>,
+    config: &Config,
+) -> Punctuated<FnArg, Comma> {
+    let orig_input = Ident::new(config.orig_input_name(), Span::call_site());
+    // get lifetimes
+    let lft = Lifetime::new(config.lifetime_name(), Span::call_site());
+
+    // function arguments: input first
+    let mut fn_args: Punctuated<_, Token![,]> = Punctuated::new();
+    let arg_input: FnArg = parse_quote!(#orig_input: &#lft [u8]);
+    fn_args.push(arg_input);
+    // selector, if present
+    if let Some(sel_type) = config.selector_type() {
+        let s = config.selector_name().unwrap_or("selector");
+        let sel_name = Ident::new(s, Span::call_site());
+        let selector: FnArg = parse_quote!(#sel_name: #sel_type);
+        fn_args.push(selector);
+    }
+    // extra args, if any
+    if let Some(ts) = extra_args {
+        let ts = ts.clone();
+        // local alias shadows the imported `Comma` (same type)
+        type Comma = syn::Token![,];
+        let parser = Punctuated::<syn::FnArg, Comma>::parse_separated_nonempty;
+        let extra_args = parser.parse2(ts).expect("parse extra_args");
+        for extra_arg in &extra_args {
+            fn_args.push(extra_arg.clone());
+        }
+    };
+    fn_args
+}
diff --git a/rust/vendor/nom-derive-impl/src/gen/structs.rs b/rust/vendor/nom-derive-impl/src/gen/structs.rs
new file mode 100644
index 0000000..96ae765
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/gen/structs.rs
@@ -0,0 +1,190 @@
+use proc_macro2::TokenStream;
+use syn::*;
+
+use crate::config::Config;
+use crate::endian::*;
+use crate::meta;
+use crate::structs::*;
+
+use super::*;
+
+/// Code generator for `struct` types.
+pub struct GenStruct {
+    pub name: Ident,
+    pub config: Config,
+
+    // extra arguments from the `ExtraArgs` attribute, if any
+    extra_args: Option<TokenStream>,
+
+    // generics of the original struct definition
+    orig_generics: Generics,
+    // top-level PreExec / PostExec statements
+    tl_pre: Option<TokenStream>,
+    tl_post: Option<TokenStream>,
+    // one sub-parser per field
+    parser_tree: StructParserTree,
+    // extra where predicates required by some field types (e.g. String)
+    impl_where_predicates: Option<Vec<WherePredicate>>,
+}
+
+impl Generator for GenStruct {
+    fn from_ast(ast: &DeriveInput, endianness: ParserEndianness) -> Result<Self> {
+        match &ast.data {
+            syn::Data::Struct(datastruct) => GenStruct::from_datastruct(
+                &ast.ident,
+                datastruct,
+                &ast.attrs,
+                &ast.generics,
+                endianness,
+            ),
+            // NOTE(review): message says GenEnum but this is GenStruct —
+            // likely a copy/paste slip in the upstream panic message
+            _ => panic!("Wrong type for GenEnum::from_ast"),
+        }
+    }
+
+    #[inline]
+    fn name(&self) -> &Ident {
+        &self.name
+    }
+
+    fn set_debug(&mut self, debug_derive: bool) {
+        // OR-ed so DebugDerive set by an attribute is never cleared
+        self.config.debug_derive |= debug_derive;
+    }
+
+    #[inline]
+    fn extra_args(&self) -> Option<&TokenStream> {
+        self.extra_args.as_ref()
+    }
+
+    #[inline]
+    fn orig_generics(&self) -> &Generics {
+        &self.orig_generics
+    }
+
+    fn impl_where_predicates(&self) -> Option<&Vec<WherePredicate>> {
+        self.impl_where_predicates.as_ref()
+    }
+
+    #[inline]
+    fn config(&self) -> &Config {
+        &self.config
+    }
+
+    /// Generate the parse function body: run every field sub-parser in
+    /// sequence (threading the remaining input), then build the struct value.
+    fn gen_fn_body(&self, endianness: ParserEndianness) -> Result<TokenStream> {
+        let name = &self.name;
+        let (tl_pre, tl_post) = (&self.tl_pre, &self.tl_post);
+        let input = syn::Ident::new(self.config.input_name(), Span::call_site());
+        let orig_input = syn::Ident::new(self.config.orig_input_name(), Span::call_site());
+
+        // prepare tokens
+        let (idents, parser_tokens): (Vec<_>, Vec<_>) = self
+            .parser_tree
+            .parsers
+            .iter()
+            .map(|sp| {
+                let id = syn::Ident::new(&sp.name, Span::call_site());
+                // set current endianness for functions that do not specify it
+                let item = sp.item.with_endianness(endianness);
+                (id, item)
+            })
+            .unzip();
+        let (pre, post): (Vec<_>, Vec<_>) = self
+            .parser_tree
+            .parsers
+            .iter()
+            .map(|sp| (sp.pre_exec.as_ref(), sp.post_exec.as_ref()))
+            .unzip();
+        let idents2 = idents.clone();
+
+        // Code generation: unit, tuple, or named-field constructor
+        let struct_def = match (self.parser_tree.empty, self.parser_tree.unnamed) {
+            (true, _) => quote! { #name },
+            (_, true) => quote! { #name ( #(#idents2),* ) },
+            (_, false) => quote! { #name { #(#idents2),* } },
+        };
+
+        let fn_body = quote! {
+            let #input = #orig_input;
+            #tl_pre
+            #(#pre let (#input, #idents) = #parser_tokens (#input) ?; #post)*
+            let struct_def = #struct_def;
+            #tl_post
+            Ok((#input, struct_def))
+        };
+        Ok(fn_body)
+    }
+}
+
+impl GenStruct {
+    /// Build the generator from a struct definition, its attributes, generics
+    /// and the endianness requested by the derive entry point.
+    pub fn from_datastruct(
+        name: &Ident,
+        datastruct: &DataStruct,
+        attrs: &[Attribute],
+        generics: &Generics,
+        endianness: ParserEndianness,
+    ) -> Result<Self> {
+        let name = name.clone();
+
+        // parse top-level attributes and prepare tokens for each field parser
+        let meta = meta::parse_nom_top_level_attribute(attrs)?;
+        // eprintln!("top-level meta: {:?}", meta);
+        let mut config = Config::from_meta_list(name.to_string(), &meta)?;
+
+        // endianness must be set before parsing struct
+        set_object_endianness(name.span(), endianness, &meta, &mut config)?;
+
+        let extra_args = get_extra_args(&meta).map(Clone::clone);
+
+        // test endianness validity (not 2 or more)
+        validate_endianness(
+            endianness,
+            config.object_endianness,
+            config.global_endianness,
+        )?;
+
+        // save global pre/post exec
+        let (tl_pre, tl_post) = get_pre_post_exec(&meta, &config);
+
+        let s = parse_struct(datastruct, &mut config)?;
+
+        // extra bounds needed by field types when errors are generic
+        let impl_where_predicates = add_extra_where_predicates(&s, &config);
+
+        Ok(GenStruct {
+            name,
+            config,
+            extra_args,
+            orig_generics: generics.clone(),
+            tl_pre,
+            tl_post,
+            parser_tree: s,
+            impl_where_predicates,
+        })
+    }
+}
+
+/// Find additional where clauses to add (for ex. `String` requires `FromExternalError<&[u8], Utf8Error>`)
+///
+/// Only relevant when generic errors are enabled; returns `None` when no
+/// extra predicate is needed.
+#[allow(clippy::single_match)]
+fn add_extra_where_predicates(
+    parser_tree: &StructParserTree,
+    config: &Config,
+) -> Option<Vec<WherePredicate>> {
+    if config.generic_errors {
+        let mut v = Vec::new();
+        let lft = Lifetime::new(config.lifetime_name(), Span::call_site());
+        let err = Ident::new(config.error_name(), Span::call_site());
+        // visit parser tree and look for types with requirement on Error type
+        for p in &parser_tree.parsers {
+            if let Some(ty) = p.item.expr.last_type() {
+                if let Ok(s) = get_type_first_ident(&ty.0) {
+                    match s.as_ref() {
+                        // parsing a String can fail with a Utf8Error
+                        "String" => {
+                            let wh: WherePredicate = parse_quote! {#err: nom::error::FromExternalError<&#lft [u8], core::str::Utf8Error>};
+                            v.push(wh)
+                        }
+                        _ => (),
+                    }
+                }
+            }
+        }
+        if !v.is_empty() {
+            Some(v)
+        } else {
+            None
+        }
+    } else {
+        None
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/lib.rs b/rust/vendor/nom-derive-impl/src/lib.rs
new file mode 100644
index 0000000..fe1e422
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/lib.rs
@@ -0,0 +1,81 @@
+//! # nom-derive-impl
+//!
+//! ## Overview
+//!
+//! nom-derive is a custom derive attribute, to derive `nom` parsers automatically from the structure definition.
+//!
+//! This crate is not meant to be used directly.
+//! See [`nom-derive`](https://docs.rs/nom-derive) crate for documentation.
+
+extern crate proc_macro;
+extern crate proc_macro2;
+extern crate syn;
+#[macro_use]
+extern crate quote;
+
+use proc_macro::TokenStream;
+use syn::*;
+
+mod config;
+mod endian;
+mod enums;
+mod gen;
+mod meta;
+mod parsertree;
+mod structs;
+
+use crate::endian::*;
+use crate::gen::*;
+
+/// The `Nom` derive automatically generates a `parse` function for the structure
+/// using [nom] parsers. It will try to infer parsers for primitive of known
+/// types, but also allows you to specify parsers using custom attributes.
+///
+/// Deriving parsers supports `struct` and `enum` types.
+///
+/// The documentation of the `Nom` custom derive attribute and all possible options
+/// can be found in the [nom-derive documentation](https://docs.rs/nom-derive).
+///
+/// Many examples are provided, and more can be found in the [project
+/// tests](https://github.com/rust-bakery/nom-derive/tree/master/tests).
+///
+/// [nom]: https://github.com/Geal/nom
+#[proc_macro_derive(Nom, attributes(nom))]
+pub fn nom(input: TokenStream) -> TokenStream {
+    // Parse the input tokens into a syntax tree
+    let ast = parse_macro_input!(input as DeriveInput);
+
+    // Build and return the generated impl; failures become compile errors
+    match gen_impl(&ast, ParserEndianness::Unspecified) {
+        Ok(ts) => ts.into(),
+        Err(e) => e.to_compile_error().into(),
+    }
+}
+
+/// The `NomBE` derive acts like the [`Nom`] attribute, but sets the endianness to big-endian for the
+/// current object. This can be overridden locally at the field-level.
+#[proc_macro_derive(NomBE, attributes(nom))]
+pub fn nom_be(input: TokenStream) -> TokenStream {
+    // Parse the input tokens into a syntax tree
+    let ast = parse_macro_input!(input as DeriveInput);
+
+    // Build and return the generated impl; failures become compile errors
+    match gen_impl(&ast, ParserEndianness::BigEndian) {
+        Ok(ts) => ts.into(),
+        Err(e) => e.to_compile_error().into(),
+    }
+}
+
+/// The `NomLE` derive acts like the [`Nom`] attribute, but sets the endianness to little-endian for the
+/// current object. This can be overridden locally at the field-level.
+#[proc_macro_derive(NomLE, attributes(nom))]
+pub fn nom_le(input: TokenStream) -> TokenStream {
+    // Parse the input tokens into a syntax tree
+    let ast = parse_macro_input!(input as DeriveInput);
+
+    // Build and return the generated impl; failures become compile errors
+    match gen_impl(&ast, ParserEndianness::LittleEndian) {
+        Ok(ts) => ts.into(),
+        Err(e) => e.to_compile_error().into(),
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/meta/attr.rs b/rust/vendor/nom-derive-impl/src/meta/attr.rs
new file mode 100644
index 0000000..1ab5e53
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/meta/attr.rs
@@ -0,0 +1,319 @@
+use proc_macro2::{Span, TokenStream};
+use quote::ToTokens;
+use std::fmt;
+use syn::parse::{Parse, ParseStream};
+use syn::punctuated::Punctuated;
+use syn::spanned::Spanned;
+use syn::{parenthesized, token, Ident, Token};
+
+/// Kind of a `#[nom(...)]` attribute item (top-level or field-level).
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum MetaAttrType {
+    AlignAfter,
+    AlignBefore,
+    BigEndian,
+    Complete,
+    Cond,
+    Count,
+    Debug,
+    DebugDerive,
+    ErrorIf,
+    Exact,
+    ExtraArgs,
+    GenericErrors,
+    Ignore,
+    InputName,
+    Into,
+    LengthCount,
+    LittleEndian,
+    Map,
+    Move,
+    MoveAbs,
+    Parse,
+    PostExec,
+    PreExec,
+    Selector,
+    SetEndian,
+    SkipAfter,
+    SkipBefore,
+    Tag,
+    Take,
+    Value,
+    Verify,
+}
+
+impl MetaAttrType {
+    /// Map an attribute identifier to its kind; `If`/`Cond` and
+    /// `Ignore`/`Default` are accepted aliases. Returns `None` for an unknown
+    /// name.
+    pub fn from_ident(ident: &syn::Ident) -> Option<Self> {
+        match ident.to_string().as_ref() {
+            "AlignAfter" => Some(MetaAttrType::AlignAfter),
+            "AlignBefore" => Some(MetaAttrType::AlignBefore),
+            "BigEndian" => Some(MetaAttrType::BigEndian),
+            "Complete" => Some(MetaAttrType::Complete),
+            "Count" => Some(MetaAttrType::Count),
+            "Debug" => Some(MetaAttrType::Debug),
+            "DebugDerive" => Some(MetaAttrType::DebugDerive),
+            "ErrorIf" => Some(MetaAttrType::ErrorIf),
+            "Exact" => Some(MetaAttrType::Exact),
+            "ExtraArgs" => Some(MetaAttrType::ExtraArgs),
+            "GenericErrors" => Some(MetaAttrType::GenericErrors),
+            "If" | "Cond" => Some(MetaAttrType::Cond),
+            "Ignore" | "Default" => Some(MetaAttrType::Ignore),
+            "InputName" => Some(MetaAttrType::InputName),
+            "Into" => Some(MetaAttrType::Into),
+            "LengthCount" => Some(MetaAttrType::LengthCount),
+            "LittleEndian" => Some(MetaAttrType::LittleEndian),
+            "Map" => Some(MetaAttrType::Map),
+            "Move" => Some(MetaAttrType::Move),
+            "MoveAbs" => Some(MetaAttrType::MoveAbs),
+            "Parse" => Some(MetaAttrType::Parse),
+            "PostExec" => Some(MetaAttrType::PostExec),
+            "PreExec" => Some(MetaAttrType::PreExec),
+            "Selector" => Some(MetaAttrType::Selector),
+            "SetEndian" => Some(MetaAttrType::SetEndian),
+            "SkipAfter" => Some(MetaAttrType::SkipAfter),
+            "SkipBefore" => Some(MetaAttrType::SkipBefore),
+            "Tag" => Some(MetaAttrType::Tag),
+            "Take" => Some(MetaAttrType::Take),
+            "Value" => Some(MetaAttrType::Value),
+            "Verify" => Some(MetaAttrType::Verify),
+            _ => None,
+        }
+    }
+
+    /// True if the attribute requires an argument (`Attr(...)` / `Attr = "..."`).
+    pub fn takes_argument(self) -> bool {
+        matches!(
+            self,
+            MetaAttrType::AlignAfter
+                | MetaAttrType::AlignBefore
+                | MetaAttrType::Cond
+                | MetaAttrType::Count
+                | MetaAttrType::ErrorIf
+                | MetaAttrType::ExtraArgs
+                | MetaAttrType::InputName
+                | MetaAttrType::LengthCount
+                | MetaAttrType::Map
+                | MetaAttrType::Move
+                | MetaAttrType::MoveAbs
+                | MetaAttrType::Parse
+                | MetaAttrType::PostExec
+                | MetaAttrType::PreExec
+                | MetaAttrType::Selector
+                | MetaAttrType::SetEndian
+                | MetaAttrType::SkipAfter
+                | MetaAttrType::SkipBefore
+                | MetaAttrType::Tag
+                | MetaAttrType::Take
+                | MetaAttrType::Value
+                | MetaAttrType::Verify
+        )
+    }
+}
+
+impl fmt::Display for MetaAttrType {
+    // canonical (non-aliased) attribute name
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let s = match self {
+            MetaAttrType::AlignAfter => "AlignAfter",
+            MetaAttrType::AlignBefore => "AlignBefore",
+            MetaAttrType::BigEndian => "BigEndian",
+            MetaAttrType::Complete => "Complete",
+            MetaAttrType::Cond => "Cond",
+            MetaAttrType::Count => "Count",
+            MetaAttrType::Debug => "Debug",
+            MetaAttrType::DebugDerive => "DebugDerive",
+            MetaAttrType::ErrorIf => "ErrorIf",
+            MetaAttrType::Exact => "Exact",
+            MetaAttrType::ExtraArgs => "ExtraArgs",
+            MetaAttrType::GenericErrors => "GenericErrors",
+            MetaAttrType::Ignore => "Ignore",
+            MetaAttrType::InputName => "InputName",
+            MetaAttrType::Into => "Into",
+            MetaAttrType::LengthCount => "LengthCount",
+            MetaAttrType::LittleEndian => "LittleEndian",
+            MetaAttrType::Map => "Map",
+            MetaAttrType::Move => "Move",
+            MetaAttrType::MoveAbs => "MoveAbs",
+            MetaAttrType::Parse => "Parse",
+            MetaAttrType::PostExec => "PostExec",
+            MetaAttrType::PreExec => "PreExec",
+            MetaAttrType::Selector => "Selector",
+            MetaAttrType::SetEndian => "SetEndian",
+            MetaAttrType::SkipAfter => "SkipAfter",
+            MetaAttrType::SkipBefore => "SkipBefore",
+            MetaAttrType::Tag => "Tag",
+            MetaAttrType::Take => "Take",
+            MetaAttrType::Value => "Value",
+            MetaAttrType::Verify => "Verify",
+        };
+        f.write_str(s)
+    }
+}
+
+/// A single parsed `#[nom(...)]` attribute: its kind, its optional argument
+/// tokens, and the span of its identifier (for error reporting).
+#[derive(Debug)]
+pub struct MetaAttr {
+    pub attr_type: MetaAttrType,
+    // raw tokens of the attribute argument, if any
+    arg0: Option<TokenStream>,
+    span: Span,
+}
+
+impl MetaAttr {
+    pub fn new(attr_type: MetaAttrType, arg0: Option<TokenStream>, span: Span) -> Self {
+        MetaAttr {
+            attr_type,
+            arg0,
+            span,
+        }
+    }
+
+    /// Is attribute acceptable for top-level (allow-list)
+    pub fn acceptable_tla(&self) -> bool {
+        matches!(
+            self.attr_type,
+            MetaAttrType::DebugDerive
+                | MetaAttrType::Complete
+                | MetaAttrType::Debug
+                | MetaAttrType::ExtraArgs
+                | MetaAttrType::GenericErrors
+                | MetaAttrType::InputName
+                | MetaAttrType::LittleEndian
+                | MetaAttrType::BigEndian
+                | MetaAttrType::SetEndian
+                | MetaAttrType::PreExec
+                | MetaAttrType::PostExec
+                | MetaAttrType::Exact
+                | MetaAttrType::Selector
+        )
+    }
+
+    /// Is attribute acceptable for field-level (deny-list: everything except
+    /// the purely top-level attributes)
+    pub fn acceptable_fla(&self) -> bool {
+        !matches!(
+            self.attr_type,
+            MetaAttrType::DebugDerive
+                | MetaAttrType::Exact
+                | MetaAttrType::ExtraArgs
+                | MetaAttrType::GenericErrors
+                | MetaAttrType::InputName
+        )
+    }
+
+    #[inline]
+    pub fn is_type(&self, attr_type: MetaAttrType) -> bool {
+        self.attr_type == attr_type
+    }
+
+    /// Raw tokens of the attribute argument, if present.
+    #[inline]
+    pub fn arg(&self) -> Option<&TokenStream> {
+        self.arg0.as_ref()
+    }
+}
+
+impl fmt::Display for MetaAttr {
+    // printed as `Name` or `Name(args)` — used in error messages
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.attr_type)?;
+        if let Some(arg) = &self.arg0 {
+            write!(f, "({})", arg)?;
+        }
+        Ok(())
+    }
+}
+
+impl Spanned for MetaAttr {
+    // span of the attribute identifier, for precise error locations
+    fn span(&self) -> Span {
+        self.span
+    }
+}
+
+impl Parse for MetaAttr {
+    /// Parse one attribute item, either `Name`, `Name(tokens)` or
+    /// `Name = "tokens"`, depending on whether the attribute kind takes an
+    /// argument.
+    fn parse(input: ParseStream) -> syn::Result<Self> {
+        let ident: Ident = input.parse()?;
+        // an unknown attribute name aborts the macro with a panic
+        let attr_type =
+            MetaAttrType::from_ident(&ident).unwrap_or_else(|| panic!("Wrong meta name {}", ident));
+        let arg0 = if attr_type.takes_argument() {
+            // read (value), or ="value"
+            let token_stream = match attr_type {
+                // ExtraArgs contains a named-field list, e.g. `a: u8, b: u16`
+                MetaAttrType::ExtraArgs => {
+                    let content;
+                    let _paren_token = parenthesized!(content in input);
+                    type ExpectedType = Punctuated<syn::Field, Token![,]>;
+                    let fields: ExpectedType = content.parse_terminated(syn::Field::parse_named)?;
+                    quote! { #fields }
+                }
+                // PreExec/PostExec contain a statement, Selector a match pattern,
+                // everything else an expression
+                MetaAttrType::PreExec | MetaAttrType::PostExec => {
+                    parse_content::<syn::Stmt>(input)?
+                }
+                MetaAttrType::Selector => parse_content::<PatternAndGuard>(input)?,
+                _ => parse_content::<syn::Expr>(input)?,
+            };
+            Some(token_stream)
+        } else {
+            None
+        };
+        Ok(MetaAttr::new(attr_type, arg0, ident.span()))
+    }
+}
+
+/// Parse the argument of an attribute, accepting two spellings:
+/// `= "source"` (the string literal is re-parsed as `P`) or a parenthesized
+/// group `( ... )` parsed directly as `P`. Returns the argument as tokens.
+fn parse_content<P>(input: ParseStream) -> syn::Result<TokenStream>
+where
+    P: Parse + ToTokens + fmt::Debug,
+{
+    if input.peek(Token![=]) {
+        // eprintln!("Exec Peek: =");
+        let _: Token![=] = input.parse()?;
+        // next item is a string containing the real value
+        let x = syn::Lit::parse(input)?;
+        // eprintln!("content: {:?}", x);
+        match x {
+            syn::Lit::Str(s) => {
+                let xx: P = s.parse()?;
+                // eprintln!("xx: {:?}", xx);
+                Ok(quote! { #xx })
+            }
+            _ => Err(syn::Error::new(
+                x.span(),
+                "Unexpected type for nom attribute content (!LitStr)",
+            )),
+        }
+    } else if input.peek(token::Paren) {
+        // eprintln!("Exec Peek: (");
+        let content;
+        let _paren_token = parenthesized!(content in input);
+        let x = P::parse(&content)?;
+        // let x: Punctuated<Type, Token![,]> = content.parse_terminated(Type::parse)?;
+        // eprintln!("content: {:?}", x);
+        Ok(quote! { #x })
+    } else {
+        Err(syn::Error::new(
+            input.span(),
+            "Unexpected type for nom attribute content (!LitStr)",
+        ))
+    }
+}
+
+/// A match pattern with an optional `if` guard, as used by the `Selector`
+/// attribute (e.g. `Selector(0x01)` or `Selector(x if x > 2)`).
+#[derive(Debug)]
+struct PatternAndGuard {
+    pat: syn::Pat,
+    guard: Option<(token::If, Box<syn::Expr>)>,
+}
+
+impl Parse for PatternAndGuard {
+    fn parse(input: ParseStream) -> syn::Result<Self> {
+        let pat = input.parse()?;
+        // optional guard: `if <expr>` following the pattern
+        let guard = if input.peek(Token![if]) {
+            let tk_if: Token![if] = input.parse()?;
+            let expr: syn::Expr = input.parse()?;
+            Some((tk_if, Box::new(expr)))
+        } else {
+            None
+        };
+        Ok(PatternAndGuard { pat, guard })
+    }
+}
+
+impl quote::ToTokens for PatternAndGuard {
+    // re-emit as `pat` or `pat if guard`, suitable for a match arm
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        self.pat.to_tokens(tokens);
+        if let Some((tk_if, expr)) = &self.guard {
+            tk_if.to_tokens(tokens);
+            expr.to_tokens(tokens);
+        }
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/meta/attr_list.rs b/rust/vendor/nom-derive-impl/src/meta/attr_list.rs
new file mode 100644
index 0000000..a5ecd21
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/meta/attr_list.rs
@@ -0,0 +1,19 @@
+use syn::parse::{Parse, ParseStream};
+use syn::punctuated::Punctuated;
+use syn::{parenthesized, Token};
+
+/// A parenthesized, comma-separated list of items, e.g. the `(...)` part of
+/// `#[nom(...)]`.
+#[derive(Debug)]
+pub struct AttrList<T: Parse>(pub Vec<T>);
+
+impl<T: Parse> Parse for AttrList<T> {
+    fn parse(input: ParseStream) -> syn::Result<Self> {
+        // eprintln!("AttrList::parse: {:?}", input);
+        let content;
+        parenthesized!(content in input);
+        // trailing comma is accepted (parse_terminated)
+        Ok(AttrList(
+            Punctuated::<T, Token![,]>::parse_terminated(&content)?
+                .into_iter()
+                .collect(),
+        ))
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/meta/error.rs b/rust/vendor/nom-derive-impl/src/meta/error.rs
new file mode 100644
index 0000000..e8d08b6
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/meta/error.rs
@@ -0,0 +1,10 @@
+use std::convert::From;
+
+/// Unit error type for attribute parsing; the underlying `syn::Error` is
+/// intentionally discarded by the conversion.
+#[derive(Debug)]
+pub struct MetaError;
+
+impl From<syn::Error> for MetaError {
+    fn from(_e: syn::Error) -> Self {
+        MetaError
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/meta/mod.rs b/rust/vendor/nom-derive-impl/src/meta/mod.rs
new file mode 100644
index 0000000..2d0f053
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/meta/mod.rs
@@ -0,0 +1,60 @@
+pub mod attr;
+pub mod attr_list;
+pub mod error;
+
+use syn::{spanned::Spanned, Error, Result};
+
+/// Collect all `#[nom(...)]` attributes of a type and reject any attribute
+/// that is not valid at the top level (see `MetaAttr::acceptable_tla`).
+pub fn parse_nom_top_level_attribute(attrs: &[syn::Attribute]) -> Result<Vec<attr::MetaAttr>> {
+    // eprintln!("attrs: {:?}", attrs);
+    let x: Vec<_> = attrs
+        .iter()
+        .filter_map(|x| {
+            // only `nom` attributes are considered
+            if x.path.is_ident("nom") {
+                Some(meta_from_attribute(x))
+            } else {
+                None
+            }
+        })
+        .collect::<std::result::Result<Vec<_>, _>>()?
+        .into_iter()
+        .flat_map(|x| x.0.into_iter())
+        .collect();
+    // eprintln!("XXX: {:?}", x);
+    if let Some(attr) = x.iter().find(|m| !m.acceptable_tla()) {
+        return Err(Error::new(
+            attr.span(),
+            format!("Attribute {} is not valid for top-level", attr),
+        ));
+    }
+    Ok(x)
+}
+
+/// Parse the `(...)` tokens of one `#[nom(...)]` attribute into a list of
+/// `MetaAttr` items.
+fn meta_from_attribute(attr: &syn::Attribute) -> Result<attr_list::AttrList<attr::MetaAttr>> {
+    // eprintln!("tlas_from_attribute: {:?}", attr);
+    syn::parse2(attr.tokens.clone())
+}
+
+/// Collect all `#[nom(...)]` attributes of a field and reject any attribute
+/// that is not valid at the field level (see `MetaAttr::acceptable_fla`).
+/// Same extraction as `parse_nom_top_level_attribute`; only the validity
+/// check differs.
+pub fn parse_nom_attribute(attrs: &[syn::Attribute]) -> Result<Vec<attr::MetaAttr>> {
+    // eprintln!("attrs: {:?}", attrs);
+    let x: Vec<_> = attrs
+        .iter()
+        .filter_map(|x| {
+            if x.path.is_ident("nom") {
+                Some(meta_from_attribute(x))
+            } else {
+                None
+            }
+        })
+        .collect::<std::result::Result<Vec<_>, _>>()?
+        .into_iter()
+        .flat_map(|x| x.0.into_iter())
+        .collect();
+    // eprintln!("****\nXXX: {:?}\n", x);
+    if let Some(attr) = x.iter().find(|m| !m.acceptable_fla()) {
+        return Err(Error::new(
+            attr.span(),
+            format!("Attribute {} is not valid for field-level", attr),
+        ));
+    }
+    Ok(x)
+}
diff --git a/rust/vendor/nom-derive-impl/src/parsertree.rs b/rust/vendor/nom-derive-impl/src/parsertree.rs
new file mode 100644
index 0000000..3bb5af4
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/parsertree.rs
@@ -0,0 +1,173 @@
+use proc_macro2::TokenStream;
+use quote::ToTokens;
+use syn::Ident;
+
+use crate::endian::ParserEndianness;
+
+/// Root of a generated parser expression tree; rendering it via `ToTokens`
+/// produces the nom combinator chain as a token stream.
+#[derive(Debug)]
+pub struct ParserTree {
+    root: ParserExpr,
+}
+
+impl ToTokens for ParserTree {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        // Rendering the tree is simply rendering its root expression.
+        self.root.to_tokens(tokens)
+    }
+}
+
+/// A named node of the parser tree: the optional field ident the parsed
+/// value is bound to, and the parser expression itself.
+#[derive(Debug)]
+pub struct ParserTreeItem {
+    pub ident: Option<Ident>,
+    pub expr: ParserExpr,
+}
+
+impl ParserTreeItem {
+    pub fn new(ident: Option<Ident>, expr: ParserExpr) -> Self {
+        ParserTreeItem { ident, expr }
+    }
+
+    /// Return a copy of this item with its expression specialized to the
+    /// given endianness (see `ParserExpr::with_endianness`).
+    pub fn with_endianness(&self, endianness: ParserEndianness) -> Self {
+        ParserTreeItem {
+            ident: self.ident.clone(),
+            expr: self.expr.with_endianness(endianness),
+        }
+    }
+}
+
+impl ToTokens for ParserTreeItem {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        // Only the expression is rendered; the ident is not part of the
+        // emitted tokens.
+        self.expr.to_tokens(tokens)
+    }
+}
+
+/// One node of the generated parser expression tree.
+///
+/// Leaf variants (`CallParse*`, `Tag`, `Take`, `Value`, `Raw`, `Nop`,
+/// `PhantomData`) render directly to a nom parser; wrapper variants
+/// (`Complete`, `Cond`, `Count`, `DbgDmp`, `Into`, `LengthCount`, `Map`,
+/// `Verify`) render to a nom combinator applied to the boxed inner
+/// expression.
+#[allow(clippy::upper_case_acronyms)]
+#[derive(Clone, Debug)]
+pub enum ParserExpr {
+    CallParse(TypeItem),
+    CallParseBE(TypeItem),
+    CallParseLE(TypeItem),
+    Complete(Box<ParserExpr>),
+    Cond(Box<ParserExpr>, TokenStream),
+    Count(Box<ParserExpr>, TokenStream),
+    DbgDmp(Box<ParserExpr>, Ident),
+    Into(Box<ParserExpr>),
+    LengthCount(Box<ParserExpr>, TokenStream),
+    Map(Box<ParserExpr>, TokenStream),
+    Nop,
+    PhantomData,
+    Raw(TokenStream),
+    Tag(TokenStream),
+    Take(TokenStream),
+    Value(TokenStream),
+    Verify(Box<ParserExpr>, Ident, TokenStream),
+}
+
+impl ParserExpr {
+    /// Specialize a `CallParse` node to `CallParseBE`/`CallParseLE`.
+    ///
+    /// Panics (`unreachable!`) if called with `SetEndian` or `Unspecified`,
+    /// so callers must resolve the endianness first. All other expression
+    /// variants are returned unchanged.
+    pub fn with_endianness(&self, endianness: ParserEndianness) -> Self {
+        match self {
+            ParserExpr::CallParse(item) => match endianness {
+                ParserEndianness::BigEndian => ParserExpr::CallParseBE(item.clone()),
+                ParserEndianness::LittleEndian => ParserExpr::CallParseLE(item.clone()),
+                _ => unreachable!(),
+            },
+            expr => expr.clone(),
+        }
+    }
+
+    /// Wrap this expression in `nom::combinator::complete`.
+    #[inline]
+    pub fn complete(self) -> Self {
+        ParserExpr::Complete(Box::new(self))
+    }
+
+    /// Descend through wrapper variants to the innermost `CallParse*` type,
+    /// if any; leaf variants without a type yield `None`.
+    pub fn last_type(&self) -> Option<&TypeItem> {
+        match self {
+            ParserExpr::CallParse(e) | ParserExpr::CallParseBE(e) | ParserExpr::CallParseLE(e) => {
+                Some(e)
+            }
+            ParserExpr::Complete(expr)
+            | ParserExpr::Cond(expr, _)
+            | ParserExpr::Count(expr, _)
+            | ParserExpr::DbgDmp(expr, _)
+            | ParserExpr::Into(expr)
+            | ParserExpr::LengthCount(expr, _)
+            | ParserExpr::Map(expr, _)
+            | ParserExpr::Verify(expr, _, _) => expr.last_type(),
+            _ => None,
+        }
+    }
+}
+
+/// Render each expression node to the matching nom parser or combinator
+/// call. Note that `Tag` and `Take` emit the *streaming* byte parsers.
+impl ToTokens for ParserExpr {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        let ts = match self {
+            ParserExpr::CallParse(s) => {
+                quote! { <#s>::parse }
+            }
+            ParserExpr::CallParseBE(s) => {
+                quote! { <#s>::parse_be }
+            }
+            ParserExpr::CallParseLE(s) => {
+                quote! { <#s>::parse_le }
+            }
+            ParserExpr::Complete(expr) => {
+                quote! { nom::combinator::complete(#expr) }
+            }
+            ParserExpr::Cond(expr, c) => {
+                quote! { nom::combinator::cond(#c, #expr) }
+            }
+            ParserExpr::Count(expr, n) => {
+                quote! { nom::multi::count(#expr, #n as usize) }
+            }
+            ParserExpr::DbgDmp(expr, i) => {
+                // dbg_dmp wants a &str context label, so stringify the ident.
+                let ident = format!("{}", i);
+                quote! { nom::error::dbg_dmp(#expr, #ident) }
+            }
+            ParserExpr::Into(expr) => {
+                quote! { nom::combinator::into(#expr) }
+            }
+            ParserExpr::LengthCount(expr, n) => {
+                quote! { nom::multi::length_count(#n, #expr) }
+            }
+            ParserExpr::Map(expr, m) => {
+                quote! { nom::combinator::map(#expr, #m) }
+            }
+            ParserExpr::Nop => {
+                // A parser that consumes nothing and returns unit.
+                quote! {
+                    { |__i__| Ok((__i__, ())) }
+                }
+            }
+            ParserExpr::PhantomData => {
+                // A parser that consumes nothing and returns PhantomData.
+                quote! {
+                    { |__i__| Ok((__i__, PhantomData)) }
+                }
+            }
+            ParserExpr::Raw(s) => s.to_token_stream(),
+            ParserExpr::Tag(s) => {
+                quote! { nom::bytes::streaming::tag(#s) }
+            }
+            ParserExpr::Take(s) => {
+                quote! { nom::bytes::streaming::take(#s as usize) }
+            }
+            ParserExpr::Value(ts) => {
+                // A parser that consumes nothing and returns the given value.
+                quote! {
+                    { |__i__| Ok((__i__, #ts)) }
+                }
+            }
+            ParserExpr::Verify(expr, i, v) => {
+                quote! {
+                    nom::combinator::verify(#expr, |#i| { #v })
+                }
+            }
+        };
+        tokens.extend(ts);
+    }
+}
+
+/// Newtype around `syn::Type` so a field's type can be rendered inside
+/// generated parser expressions.
+#[derive(Clone, Debug)]
+pub struct TypeItem(pub syn::Type);
+
+impl ToTokens for TypeItem {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        self.0.to_tokens(tokens)
+    }
+}
diff --git a/rust/vendor/nom-derive-impl/src/structs.rs b/rust/vendor/nom-derive-impl/src/structs.rs
new file mode 100644
index 0000000..52c2de6
--- /dev/null
+++ b/rust/vendor/nom-derive-impl/src/structs.rs
@@ -0,0 +1,489 @@
+use crate::config::*;
+use crate::endian::*;
+use crate::meta;
+use crate::meta::attr::{MetaAttr, MetaAttrType};
+use crate::parsertree::*;
+use proc_macro2::{Span, TokenStream, TokenTree};
+use quote::ToTokens;
+use syn::spanned::Spanned;
+use syn::*;
+
+/// Generated parser for a single struct field: the field name, its parser
+/// expression, and optional code to run before/after the parser.
+#[derive(Debug)]
+pub(crate) struct StructParser {
+    pub name: String,
+    pub item: ParserTreeItem,
+    pub pre_exec: Option<TokenStream>,
+    pub post_exec: Option<TokenStream>,
+}
+
+impl StructParser {
+    pub fn new(
+        name: String,
+        item: ParserTreeItem,
+        pre_exec: Option<TokenStream>,
+        post_exec: Option<TokenStream>,
+    ) -> Self {
+        StructParser {
+            name,
+            item,
+            pre_exec,
+            post_exec,
+        }
+    }
+}
+
+/// Parsers for all fields of a struct (or enum variant).
+#[derive(Debug)]
+pub(crate) struct StructParserTree {
+    // true for unit structs (no fields at all)
+    pub empty: bool,
+    // true for tuple structs (unnamed fields)
+    pub unnamed: bool,
+    pub parsers: Vec<StructParser>,
+}
+
+/// Build the type-driven parser expression for `ty`, selecting `parse`,
+/// `parse_be` or `parse_le` according to the effective endianness.
+fn get_type_parser(ty: &Type, meta_list: &[MetaAttr], config: &Config) -> Result<ParserExpr> {
+    // special case: PhantomData
+    let ident = get_type_first_ident(ty)?;
+    if ident == "PhantomData" {
+        return Ok(ParserExpr::PhantomData);
+    }
+    let endian = get_local_endianness(ty.span(), meta_list, config)?;
+    match endian {
+        ParserEndianness::BigEndian => Ok(ParserExpr::CallParseBE(TypeItem(ty.clone()))),
+        ParserEndianness::LittleEndian => Ok(ParserExpr::CallParseLE(TypeItem(ty.clone()))),
+        ParserEndianness::SetEndian => {
+            // Endianness chosen at run time: emit both calls and select via
+            // the `__endianness` variable generated by the SetEndian
+            // attribute (see get_pre_post_exec).
+            let be = ParserExpr::CallParseBE(TypeItem(ty.clone()));
+            let le = ParserExpr::CallParseLE(TypeItem(ty.clone()));
+            let qq = quote! {
+                if __endianness == nom::number::Endianness::Big {
+                    #be
+                } else {
+                    #le
+                }
+            };
+            Ok(ParserExpr::Raw(qq))
+        }
+        ParserEndianness::Unspecified => Ok(ParserExpr::CallParse(TypeItem(ty.clone()))),
+    }
+}
+
+/// Extract the generic arguments of a single-segment generic type such as
+/// `Option<T>` or `Vec<T>`: checks that the type's ident equals `expected`
+/// and returns the tokens inside the angle brackets. `attr` is only used in
+/// the error message.
+fn get_item_subtype_parser(ty: &Type, expected: &str, attr: &str) -> Result<TokenStream> {
+    if let Type::Path(ref typepath) = ty {
+        let path = &typepath.path;
+        if path.segments.len() != 1 {
+            return Err(Error::new(
+                ty.span(),
+                "Nom-derive: multiple segments in type path are not supported",
+            ));
+        }
+        let segment = path.segments.last().expect("empty segments list");
+        let ident_s = segment.ident.to_string();
+        if ident_s == expected {
+            // segment.arguments should contain the values, wrapped in AngleBracketed
+            if let PathArguments::AngleBracketed(args) = &segment.arguments {
+                return Ok(args.args.to_token_stream());
+            }
+        }
+    }
+    Err(Error::new(
+        ty.span(),
+        format!(
+            "Nom-derive: unexpected type for {} attribute. Expected type: {}",
+            attr, expected
+        ),
+    ))
+}
+
+/// Return the ident of the (single) path segment of a type, descending into
+/// array element types. Errors on multi-segment paths and on non-path,
+/// non-array types.
+pub(crate) fn get_type_first_ident(ty: &Type) -> Result<String> {
+    match ty {
+        Type::Path(ref typepath) => {
+            let path = &typepath.path;
+            if path.segments.len() != 1 {
+                return Err(Error::new(
+                    ty.span(),
+                    "Nom-derive: multiple segments in type path are not supported",
+                ));
+            }
+            let segment = path.segments.last().expect("empty segments list");
+            let ident_s = segment.ident.to_string();
+            Ok(ident_s)
+        }
+        Type::Array(ref typearray) => get_type_first_ident(&typearray.elem),
+        _ => Err(Error::new(
+            ty.span(),
+            "Nom-derive: could not get first path ident",
+        )),
+    }
+}
+
+/// Build a no-op parser that consumes nothing and returns the default value
+/// for `ty`; used by the `Ignore` attribute. A few common types get an
+/// explicit default, anything else falls back to `<T>::default()`.
+fn get_type_default(ty: &Type) -> Result<ParserExpr> {
+    let ident_s = get_type_first_ident(ty)?;
+    let default = match ident_s.as_ref() {
+        // "u8" | "u16" | "u32" | "u64" | "u128" | "i8" | "i16" | "i32" | "i64" | "i128" => {
+        //     "0".to_string()
+        // }
+        // "f32" | "f64" => "0.0".to_string(),
+        "Option" => quote! { None },
+        "PhantomData" => quote! { PhantomData },
+        "Vec" => quote! { Vec::new() },
+        _ => quote! { <#ty>::default() },
+    };
+    // ParserTree::Raw(format!("{{ |i| Ok((i, {})) }}", default))
+    let ts = quote! {
+        { |i| Ok((i, #default )) }
+    };
+    Ok(ParserExpr::Raw(ts))
+}
+
+/// Build the parser expression for one field, first applying the field's
+/// `nom` meta attributes and, if none applies, falling back to the
+/// type-driven parser (`get_type_parser`).
+///
+/// Terminal attributes (Tag, Take, Value, Parse, Ignore) return directly;
+/// wrapper attributes (Complete, Cond, Count, Debug, Into, LengthCount,
+/// Map, Verify) recurse with the remaining attributes so wrappers compose
+/// in declaration order. `sub_meta_list` is the remainder still to process;
+/// `meta_list` is the full list, used for endianness resolution.
+fn get_parser(
+    ident: Option<&Ident>,
+    ty: &Type,
+    // the list of remaining items to parse
+    sub_meta_list: &[MetaAttr],
+    // the list of all meta attributes for this type
+    meta_list: &[MetaAttr],
+    config: &Config,
+) -> Result<ParserExpr> {
+    // first check if we have attributes set
+    let mut sub_meta_list = sub_meta_list;
+    while let Some((meta, rem)) = sub_meta_list.split_first() {
+        sub_meta_list = rem;
+        match meta.attr_type {
+            MetaAttrType::Tag => {
+                let s = meta.arg().unwrap();
+                return Ok(ParserExpr::Tag(s.clone()));
+            }
+            MetaAttrType::Take => {
+                // if meta.arg is a literal (e.g. a string), re-parse its content
+                let ts = meta.arg().unwrap();
+                if let Some(TokenTree::Literal(_)) = ts.clone().into_iter().next() {
+                    let ts = syn::parse2::<Expr>(ts.clone())?;
+                    return Ok(ParserExpr::Take(ts.to_token_stream()));
+                }
+                let s = meta.arg().unwrap();
+                return Ok(ParserExpr::Take(s.clone()));
+            }
+            MetaAttrType::Value => {
+                let s = meta.arg().unwrap();
+                return Ok(ParserExpr::Value(s.clone()));
+            }
+            MetaAttrType::Parse => {
+                let s = meta.arg().unwrap();
+                return Ok(ParserExpr::Raw(s.clone()));
+            }
+            MetaAttrType::Ignore => {
+                return get_type_default(ty);
+            }
+            MetaAttrType::Complete => {
+                let expr = get_parser(ident, ty, sub_meta_list, meta_list, config)?;
+                return Ok(expr.complete());
+            }
+            MetaAttrType::Debug => {
+                let expr = get_parser(ident, ty, sub_meta_list, meta_list, config)?;
+                let ident = match ident {
+                    Some(ident) => ident,
+                    None => {
+                        // FIX: message previously said "Verify" (copy/paste)
+                        return Err(Error::new(
+                            meta.span(),
+                            "Nom-derive: can't use Debug with unnamed fields",
+                        ))
+                    }
+                };
+                return Ok(ParserExpr::DbgDmp(Box::new(expr), ident.clone()));
+            }
+            MetaAttrType::Cond => {
+                // try to infer subparser
+                // check type is Option<T>, and extract T
+                let sub = get_item_subtype_parser(ty, "Option", "Cond")?;
+                let sub_ty = syn::parse2::<Type>(sub)?;
+                let expr = get_parser(ident, &sub_ty, sub_meta_list, meta_list, config)?;
+                let ts = meta.arg().unwrap();
+                return Ok(ParserExpr::Cond(Box::new(expr), ts.clone()));
+            }
+            MetaAttrType::Count => {
+                // try to infer subparser
+                // check type is Vec<T>, and extract T
+                let sub = get_item_subtype_parser(ty, "Vec", "Count")?;
+                let sub_ty = syn::parse2::<Type>(sub)?;
+                let expr = get_parser(ident, &sub_ty, sub_meta_list, meta_list, config)?;
+                let ts = meta.arg().unwrap();
+                return Ok(ParserExpr::Count(Box::new(expr), ts.clone()));
+            }
+            MetaAttrType::Into => {
+                let expr = get_parser(ident, ty, sub_meta_list, meta_list, config)?;
+                return Ok(ParserExpr::Into(Box::new(expr)));
+            }
+            MetaAttrType::LengthCount => {
+                // try to infer subparser
+                // check type is Vec<T>, and extract T
+                let sub = get_item_subtype_parser(ty, "Vec", "LengthCount")?;
+                let sub_ty = syn::parse2::<Type>(sub)?;
+                let expr = get_parser(ident, &sub_ty, sub_meta_list, meta_list, config)?;
+                let ts = meta.arg().unwrap();
+                return Ok(ParserExpr::LengthCount(Box::new(expr), ts.clone()));
+            }
+            MetaAttrType::Map => {
+                let expr = get_parser(ident, ty, sub_meta_list, meta_list, config)?;
+                // if meta.arg is a literal (e.g. a string), re-parse its content
+                let ts_arg = meta.arg().unwrap();
+                if let Some(TokenTree::Literal(_)) = ts_arg.clone().into_iter().next() {
+                    let ts_arg = syn::parse2::<Expr>(ts_arg.clone())?;
+                    return Ok(ParserExpr::Map(Box::new(expr), ts_arg.to_token_stream()));
+                }
+                let ts_arg = meta.arg().unwrap();
+                return Ok(ParserExpr::Map(Box::new(expr), ts_arg.clone()));
+            }
+            MetaAttrType::Verify => {
+                let expr = get_parser(ident, ty, sub_meta_list, meta_list, config)?;
+                let ident = match ident {
+                    Some(ident) => ident,
+                    None => {
+                        return Err(Error::new(
+                            meta.span(),
+                            "Nom-derive: can't use Verify with unnamed fields",
+                        ))
+                    }
+                };
+                // if meta.arg is a literal (e.g. a string), re-parse its content
+                let ts_arg = meta.arg().unwrap();
+                if let Some(TokenTree::Literal(_)) = ts_arg.clone().into_iter().next() {
+                    let ts_arg = syn::parse2::<Expr>(ts_arg.clone())?;
+                    // FIX: this branch previously built ParserExpr::Map here
+                    // (copy/paste from the Map branch), which replaced the
+                    // verify predicate with a map for string-literal args.
+                    return Ok(ParserExpr::Verify(
+                        Box::new(expr),
+                        ident.clone(),
+                        ts_arg.to_token_stream(),
+                    ));
+                }
+                let ts_arg = meta.arg().unwrap();
+                return Ok(ParserExpr::Verify(
+                    Box::new(expr),
+                    ident.clone(),
+                    ts_arg.clone(),
+                ));
+            }
+            _ => (),
+        }
+    }
+    // else try primitive types knowledge
+    get_type_parser(ty, meta_list, config)
+}
+
+/// Build the parser expression for a struct field from its type and its
+/// `nom` meta attributes.
+fn get_field_parser(field: &Field, meta_list: &[MetaAttr], config: &Config) -> Result<ParserExpr> {
+    // eprintln!("field: {:?}", field);
+    get_parser(
+        field.ident.as_ref(),
+        &field.ty,
+        meta_list,
+        meta_list,
+        config,
+    )
+}
+
+/// Generate code that consumes padding bytes so that the current offset,
+/// relative to the original input, becomes a multiple of `align`.
+fn quote_align(align: &TokenStream, config: &Config) -> TokenStream {
+    let input = syn::Ident::new(config.input_name(), align.span());
+    let orig_input = syn::Ident::new(config.orig_input_name(), align.span());
+    quote! {
+        let (#input, _) = {
+            let offset = #input.as_ptr() as usize - #orig_input.as_ptr() as usize;
+            let align = #align as usize;
+            // number of bytes needed to reach the next aligned offset
+            let align = ((align - (offset % align)) % align);
+            nom::bytes::streaming::take(align)(#input)
+        }?;
+    }
+}
+
+// like quote_skip, but offset is an isize
+/// Generate code that moves the parsing position by a signed `offset`
+/// relative to the current position, bounds-checked against the original
+/// input (error when moving before the start, `Incomplete` past the end).
+/// NOTE(review): the generated `offset.abs()` would overflow for
+/// `isize::MIN`; presumably never produced by a real attribute — confirm.
+fn quote_move(offset: &TokenStream, config: &Config) -> TokenStream {
+    let input = syn::Ident::new(config.input_name(), offset.span());
+    let orig_input = syn::Ident::new(config.orig_input_name(), offset.span());
+    quote! {
+        let #input = {
+            let start = #orig_input.as_ptr() as usize;
+            let pos = #input.as_ptr() as usize - start;
+            let offset = #offset as isize;
+            let offset_u = offset.abs() as usize;
+            let new_offset = if offset < 0 {
+                if offset_u > pos {
+                    return Err(nom::Err::Error(nom::error::make_error(#input, nom::error::ErrorKind::TooLarge)));
+                }
+                pos - offset_u
+            } else {
+                if pos + offset_u > #orig_input.len() {
+                    return Err(nom::Err::Incomplete(nom::Needed::new(offset_u)));
+                }
+                pos + offset_u
+            };
+            &#orig_input[new_offset..]
+        };
+    }
+}
+
+// like quote_move, with absolute value as offset
+/// Generate code that moves the parsing position to the absolute `offset`
+/// within the original input, failing with `Incomplete` when out of bounds.
+fn quote_move_abs(offset: &TokenStream, config: &Config) -> TokenStream {
+    let input = syn::Ident::new(config.input_name(), offset.span());
+    let orig_input = syn::Ident::new(config.orig_input_name(), offset.span());
+    quote! {
+        let #input = {
+            let offset = #offset as usize;
+            if offset > #orig_input.len() {
+                return Err(nom::Err::Incomplete(nom::Needed::new(offset)));
+            }
+            &#orig_input[offset..]
+        };
+    }
+}
+
+/// Generate code that skips `skip` bytes forward from the current position.
+fn quote_skip(skip: &TokenStream, config: &Config) -> TokenStream {
+    let input = syn::Ident::new(config.input_name(), skip.span());
+    quote! {
+        let (#input, _) = {
+            let skip = #skip as usize;
+            nom::bytes::streaming::take(skip)(#input)
+        }?;
+    }
+}
+
+/// Generate code that returns a `Verify` parse error when `cond` is true.
+fn quote_error_if(cond: &TokenStream, config: &Config) -> TokenStream {
+    let input = syn::Ident::new(config.input_name(), cond.span());
+    quote! {
+        if #cond {
+            return Err(nom::Err::Error(nom::error::make_error(#input, nom::error::ErrorKind::Verify)));
+        }
+    }
+}
+
+/// Build the token streams executed before (`pre`) and after (`post`) a
+/// field's parser, from attributes such as PreExec/PostExec, Align*, Skip*,
+/// Move/MoveAbs, ErrorIf, Exact and SetEndian. Returns `None` for a side
+/// (pre or post) with no generated code.
+pub(crate) fn get_pre_post_exec(
+    meta_list: &[MetaAttr],
+    config: &Config,
+) -> (Option<TokenStream>, Option<TokenStream>) {
+    let mut tk_pre = proc_macro2::TokenStream::new();
+    let mut tk_post = proc_macro2::TokenStream::new();
+    for m in meta_list {
+        match m.attr_type {
+            MetaAttrType::PreExec => {
+                tk_pre.extend(m.arg().unwrap().clone());
+            }
+            MetaAttrType::PostExec => {
+                tk_post.extend(m.arg().unwrap().clone());
+            }
+            MetaAttrType::AlignAfter => {
+                let align = m.arg().unwrap();
+                let qq = quote_align(align, config);
+                tk_post.extend(qq);
+            }
+            MetaAttrType::AlignBefore => {
+                let align = m.arg().unwrap();
+                let qq = quote_align(align, config);
+                tk_pre.extend(qq);
+            }
+            MetaAttrType::SkipAfter => {
+                let skip = m.arg().unwrap();
+                let qq = quote_skip(skip, config);
+                tk_post.extend(qq);
+            }
+            MetaAttrType::SkipBefore => {
+                let skip = m.arg().unwrap();
+                let qq = quote_skip(skip, config);
+                tk_pre.extend(qq);
+            }
+            MetaAttrType::Move => {
+                let offset = m.arg().unwrap();
+                let qq = quote_move(offset, config);
+                tk_pre.extend(qq);
+            }
+            MetaAttrType::MoveAbs => {
+                let offset = m.arg().unwrap();
+                let qq = quote_move_abs(offset, config);
+                tk_pre.extend(qq);
+            }
+            MetaAttrType::ErrorIf => {
+                let cond = m.arg().unwrap();
+                let qq = quote_error_if(cond, config);
+                tk_pre.extend(qq);
+            }
+            MetaAttrType::Exact => {
+                // Exact: fail (after parsing) unless all input was consumed.
+                let input = syn::Ident::new(config.input_name(), m.span());
+                let cond = quote! { !#input.is_empty() };
+                let qq = quote_error_if(&cond, config);
+                tk_post.extend(qq);
+            }
+            MetaAttrType::SetEndian => {
+                // Bind the runtime endianness variable read by get_type_parser.
+                let val = m.arg().unwrap();
+                let qq = quote! { let __endianness = #val; };
+                // config is updated in `get_parser`
+                tk_pre.extend(qq);
+            }
+            _ => (),
+        }
+    }
+    let pre = if tk_pre.is_empty() {
+        None
+    } else {
+        Some(tk_pre)
+    };
+    let post = if tk_post.is_empty() {
+        None
+    } else {
+        Some(tk_post)
+    };
+    (pre, post)
+}
+
+/// Build a `StructParser` for every field of `f`, honoring per-field `nom`
+/// attributes plus the struct-level `complete` and `debug` configuration.
+pub(crate) fn parse_fields(f: &Fields, config: &mut Config) -> Result<StructParserTree> {
+    let mut parsers = vec![];
+    let mut empty = false;
+    let mut unnamed = false;
+    match f {
+        Fields::Named(_) => (),
+        Fields::Unnamed(_) => {
+            unnamed = true;
+        }
+        Fields::Unit => {
+            unnamed = false;
+            empty = true;
+            // the Parse attribute cannot be checked here (we only have 'Fields'),
+            // so the caller must check and add attributes
+        }
+    }
+    for (idx, field) in f.iter().enumerate() {
+        // Unnamed fields get synthetic names "_0", "_1", ...
+        let ident_str = if let Some(s) = field.ident.as_ref() {
+            s.to_string()
+        } else {
+            format!("_{}", idx)
+        };
+        let meta_list = meta::parse_nom_attribute(&field.attrs)?;
+        // eprintln!("meta_list: {:?}", meta_list);
+        let mut p = get_field_parser(field, &meta_list, config)?;
+
+        if config.complete {
+            // complete is set for the entire struct
+            p = p.complete();
+        }
+
+        if config.debug {
+            // debug is set for entire struct
+            let ident = match &field.ident {
+                Some(ident) => ident,
+                None => {
+                    return Err(Error::new(
+                        Span::call_site(),
+                        "Nom-derive: can't use Debug with unnamed fields",
+                    ))
+                }
+            };
+            p = ParserExpr::DbgDmp(Box::new(p), ident.clone());
+        }
+
+        // add pre and post code (also takes care of alignment)
+        let (pre, post) = get_pre_post_exec(&meta_list, config);
+        let item = ParserTreeItem::new(field.ident.clone(), p);
+        let sp = StructParser::new(ident_str, item, pre, post);
+        parsers.push(sp);
+    }
+    Ok(StructParserTree {
+        empty,
+        unnamed,
+        parsers,
+    })
+}
+
+/// Entry point for struct parsing: build parsers for all struct fields.
+pub(crate) fn parse_struct(s: &DataStruct, config: &mut Config) -> Result<StructParserTree> {
+    parse_fields(&s.fields, config)
+}