author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 02:49:50 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 02:49:50 +0000
commit     9835e2ae736235810b4ea1c162ca5e65c547e770 (patch)
tree       3fcebf40ed70e581d776a8a4c65923e8ec20e026 /vendor/pest_generator
parent     Releasing progress-linux version 1.70.0+dfsg2-1~progress7.99u1. (diff)
download   rustc-9835e2ae736235810b4ea1c162ca5e65c547e770.tar.xz
           rustc-9835e2ae736235810b4ea1c162ca5e65c547e770.zip
Merging upstream version 1.71.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/pest_generator')
-rw-r--r--  vendor/pest_generator/.cargo-checksum.json     2
-rw-r--r--  vendor/pest_generator/Cargo.toml                8
-rw-r--r--  vendor/pest_generator/_README.md               25
-rw-r--r--  vendor/pest_generator/src/docs.rs             122
-rw-r--r--  vendor/pest_generator/src/generator.rs        104
-rw-r--r--  vendor/pest_generator/src/lib.rs              189
-rw-r--r--  vendor/pest_generator/tests/base.pest           1
-rw-r--r--  vendor/pest_generator/tests/test.pest          23
8 files changed, 390 insertions, 84 deletions
diff --git a/vendor/pest_generator/.cargo-checksum.json b/vendor/pest_generator/.cargo-checksum.json
index 692d92a53..4cb6d459b 100644
--- a/vendor/pest_generator/.cargo-checksum.json
+++ b/vendor/pest_generator/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"b81b59ea98994db49cc8246bb2e9fd7bd5efed318e7ddb00114fd48c5335167c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"190b4279777e3cd3c5adf3258114b89be536fc647d819620ada15be39d2259cc","src/generator.rs":"0fe48f9f4c4e930a93e910300505b51f9841c9fd2e3edd10f871838ab1053d67","src/lib.rs":"4b81a7aa1c28d58f26bfe61c4a1869ad012a561dbfe482597f622e1be87b6832","src/macros.rs":"897d9004449b1c219f17c079630a790f3de1a27f61bc6a03cd777a163a6a1fba"},"package":"798e0220d1111ae63d66cb66a5dcb3fc2d986d520b98e49e1852bfdb11d7c5e7"} \ No newline at end of file
+{"files":{"Cargo.toml":"180bd07f84b041c5e797b212ae81d10f99a5cf538f76dce109eaf55987fcba9c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"647dbcab0b7ed9837cc066cb3731aa2db7fbecb0e9aa187995a0a7a03af3938d","src/docs.rs":"a7d61f1affb5286f6f5131a718dcd9e7efd60c5faf7d47c4571b53140f9dce79","src/generator.rs":"840777ac8aa48cf6245c30b2ae8d55ac08f121ccf05b5c66515b54c0e6bf2ab9","src/lib.rs":"ccebc100d632a091385a8fc7ade79a1694cee87c4b01e187faa0d6cf34747562","src/macros.rs":"897d9004449b1c219f17c079630a790f3de1a27f61bc6a03cd777a163a6a1fba","tests/base.pest":"30f6965031bc52937114f60233a327e41ccc43429ae41a8e40c7b7c8006c466f","tests/test.pest":"f3fea8154a9a26c773ab8392685039d0d84bd845587bb2d42b970946f7967ee8"},"package":"e56094789873daa36164de2e822b3888c6ae4b4f9da555a1103587658c805b1e"} \ No newline at end of file
diff --git a/vendor/pest_generator/Cargo.toml b/vendor/pest_generator/Cargo.toml
index 122028c50..74baf453a 100644
--- a/vendor/pest_generator/Cargo.toml
+++ b/vendor/pest_generator/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.56"
name = "pest_generator"
-version = "2.5.2"
+version = "2.5.7"
authors = ["DragoČ™ Tiselice <dragostiselice@gmail.com>"]
description = "pest code generator"
homepage = "https://pest.rs/"
@@ -28,11 +28,11 @@ license = "MIT/Apache-2.0"
repository = "https://github.com/pest-parser/pest"
[dependencies.pest]
-version = "2.5.2"
+version = "2.5.7"
default-features = false
[dependencies.pest_meta]
-version = "2.5.2"
+version = "2.5.7"
[dependencies.proc-macro2]
version = "1.0"
@@ -41,7 +41,7 @@ version = "1.0"
version = "1.0"
[dependencies.syn]
-version = "1.0"
+version = "2.0"
[features]
default = ["std"]
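Editor's note: the `syn` 1.0 → 2.0 bump above is what forces the attribute-handling rewrite in `src/lib.rs` further down. In syn 2, `Attribute::parse_meta()` is gone and the parsed `Meta` lives directly on the attribute. A minimal sketch of the new shape (the `grammar_path` helper is hypothetical, not part of the diff):

```rust
use syn::{Attribute, Expr, ExprLit, Lit, Meta};

// Hypothetical helper: extract the string from #[grammar = "..."] using the
// syn 2 API. With syn 1 this needed `attr.parse_meta()` and `Lit` directly.
fn grammar_path(attr: &Attribute) -> Option<String> {
    // syn 2: `attr.meta` is already parsed, no fallible parse step.
    if !attr.meta.path().is_ident("grammar") {
        return None;
    }
    if let Meta::NameValue(name_value) = &attr.meta {
        // syn 2: the right-hand side is an `Expr`, not a bare `Lit`.
        if let Expr::Lit(ExprLit { lit: Lit::Str(s), .. }) = &name_value.value {
            return Some(s.value());
        }
    }
    None
}
```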
diff --git a/vendor/pest_generator/_README.md b/vendor/pest_generator/_README.md
index da30ab74e..cb8055eb0 100644
--- a/vendor/pest_generator/_README.md
+++ b/vendor/pest_generator/_README.md
@@ -167,6 +167,8 @@ mod b {
## Projects using pest
+You can find more projects and ecosystem tools in the [awesome-pest](https://github.com/pest-parser/awesome-pest) repo.
+
* [pest_meta](https://github.com/pest-parser/pest/blob/master/meta/src/grammar.pest) (bootstrapped)
* [AshPaper](https://github.com/shnewto/ashpaper)
* [brain](https://github.com/brain-lang/brain)
@@ -203,6 +205,29 @@ mod b {
This library should always compile with default features on **Rust 1.56.1**
or **Rust 1.61** with `const_prec_climber`.
+## no_std support
+
+The `pest` and `pest_derive` crates can be built without the Rust standard
+library and target embedded environments. To do so, you need to disable
+their default features. In your `Cargo.toml`, you can specify it as follows:
+
+```toml
+[dependencies]
+# ...
+pest = { version = "2", default-features = false }
+pest_derive = { version = "2", default-features = false }
+```
+
+If you want to build these crates in the pest repository's workspace, you can
+pass the `--no-default-features` flag to `cargo` and specify these crates using
+the `--package` (`-p`) flag. For example:
+
+```bash
+$ cargo build --target thumbv7em-none-eabihf --no-default-features -p pest
+$ cargo bootstrap
+$ cargo build --target thumbv7em-none-eabihf --no-default-features -p pest_derive
+```
+
## Special thanks
A special round of applause goes to prof. Marius Minea for his guidance and all
diff --git a/vendor/pest_generator/src/docs.rs b/vendor/pest_generator/src/docs.rs
new file mode 100644
index 000000000..f1ce1881a
--- /dev/null
+++ b/vendor/pest_generator/src/docs.rs
@@ -0,0 +1,122 @@
+use pest::iterators::Pairs;
+use pest_meta::parser::Rule;
+use std::collections::HashMap;
+
+#[derive(Debug)]
+pub(crate) struct DocComment {
+ pub grammar_doc: String,
+
+ /// HashMap storing the doc comments for all rules.
+ /// The key is the rule name; the value is its doc comment.
+ pub line_docs: HashMap<String, String>,
+}
+
+/// Consumes pairs, collecting `Rule::grammar_doc` and `Rule::line_doc` into a `DocComment`.
+///
+/// e.g.
+///
+/// a pest file:
+///
+/// ```ignore
+/// //! This is a grammar doc
+/// /// line doc 1
+/// /// line doc 2
+/// foo = {}
+///
+/// /// line doc 3
+/// bar = {}
+/// ```
+///
+/// Then we get:
+///
+/// ```ignore
+/// grammar_doc = "This is a grammar doc"
+/// line_docs = { "foo": "line doc 1\nline doc 2", "bar": "line doc 3" }
+/// ```
+pub(crate) fn consume(pairs: Pairs<'_, Rule>) -> DocComment {
+ let mut grammar_doc = String::new();
+
+ let mut line_docs: HashMap<String, String> = HashMap::new();
+ let mut line_doc = String::new();
+
+ for pair in pairs {
+ match pair.as_rule() {
+ Rule::grammar_doc => {
+ // grammar_doc > inner_doc
+ let inner_doc = pair.into_inner().next().unwrap();
+ grammar_doc.push_str(inner_doc.as_str());
+ grammar_doc.push('\n');
+ }
+ Rule::grammar_rule => {
+ if let Some(inner) = pair.into_inner().next() {
+ // grammar_rule > line_doc | identifier
+ match inner.as_rule() {
+ Rule::line_doc => {
+ if let Some(inner_doc) = inner.into_inner().next() {
+ line_doc.push_str(inner_doc.as_str());
+ line_doc.push('\n');
+ }
+ }
+ Rule::identifier => {
+ if !line_doc.is_empty() {
+ let rule_name = inner.as_str().to_owned();
+
+ // Remove last \n
+ line_doc.pop();
+ line_docs.insert(rule_name, line_doc.clone());
+ line_doc.clear();
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+
+ if !grammar_doc.is_empty() {
+ // Remove last \n
+ grammar_doc.pop();
+ }
+
+ DocComment {
+ grammar_doc,
+ line_docs,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::collections::HashMap;
+
+ use pest_meta::parser;
+ use pest_meta::parser::Rule;
+
+ #[test]
+ fn test_doc_comment() {
+ let pairs = match parser::parse(Rule::grammar_rules, include_str!("../tests/test.pest")) {
+ Ok(pairs) => pairs,
+ Err(_) => panic!("error parsing tests/test.pest"),
+ };
+
+ let doc_comment = super::consume(pairs);
+
+ let mut expected = HashMap::new();
+ expected.insert("foo".to_owned(), "Matches foo str, e.g.: `foo`".to_owned());
+ expected.insert(
+ "bar".to_owned(),
+ "Matches bar str\n\n Indent 2, e.g: `bar` or `foobar`".to_owned(),
+ );
+ expected.insert(
+ "dar".to_owned(),
+ "Matches dar\n\nMatch dar description\n".to_owned(),
+ );
+ assert_eq!(expected, doc_comment.line_docs);
+
+ assert_eq!(
+ "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n indent-4-space\n",
+ doc_comment.grammar_doc
+ );
+ }
+}
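Editor's note: the new `docs.rs` above is the whole doc-comment collector. A minimal crate-internal sketch of how it is driven (the grammar string is hypothetical; `consume` is `pub(crate)`, so this only compiles inside `pest_generator`, in the spirit of the test module above):

```rust
use pest_meta::parser::{self, Rule};

fn doc_collection_sketch() {
    // Hypothetical grammar: one grammar-level doc and one rule-level doc.
    let grammar = "//! Top-level grammar doc\n/// Matches the literal `a`\na = { \"a\" }\n";

    // Same entry point the derive macro uses on the real grammar text.
    let pairs = parser::parse(Rule::grammar_rules, grammar).expect("grammar should parse");
    let docs = crate::docs::consume(pairs);

    // `consume` strips the trailing newline it accumulated per doc line.
    assert_eq!(docs.grammar_doc, "Top-level grammar doc");
    assert_eq!(
        docs.line_docs.get("a").map(String::as_str),
        Some("Matches the literal `a`")
    );
}
```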
diff --git a/vendor/pest_generator/src/generator.rs b/vendor/pest_generator/src/generator.rs
index fc1263d86..0dbcaa310 100644
--- a/vendor/pest_generator/src/generator.rs
+++ b/vendor/pest_generator/src/generator.rs
@@ -17,26 +17,26 @@ use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;
-pub fn generate(
+use crate::docs::DocComment;
+
+pub(crate) fn generate(
name: Ident,
generics: &Generics,
- path: Option<PathBuf>,
+ paths: Vec<PathBuf>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
+ doc_comment: &DocComment,
include_grammar: bool,
) -> TokenStream {
let uses_eoi = defaults.iter().any(|name| *name == "EOI");
let builtins = generate_builtin_rules();
let include_fix = if include_grammar {
- match path {
- Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
- None => quote!(),
- }
+ generate_include(&name, paths)
} else {
quote!()
};
- let rule_enum = generate_enum(&rules, uses_eoi);
+ let rule_enum = generate_enum(&rules, doc_comment, uses_eoi);
let patterns = generate_patterns(&rules, uses_eoi);
let skip = generate_skip(&rules);
@@ -167,24 +167,55 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
builtins
}
-// Needed because Cargo doesn't watch for changes in grammars.
-fn generate_include(name: &Ident, path: &str) -> TokenStream {
+/// Generates Rust `include_str!` calls for the grammar files so that Cargo watches them for changes.
+fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
// Need to make this relative to the current directory since the path to the file
// is derived from the CARGO_MANIFEST_DIR environment variable
- let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
- current_dir.push(path);
- let relative_path = current_dir.to_str().expect("path contains invalid unicode");
+ let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+ let include_tokens = paths.iter().map(|path| {
+ let path = path.to_str().expect("non-Unicode path");
+
+ let relative_path = current_dir
+ .join(path)
+ .to_str()
+ .expect("path contains invalid unicode")
+ .to_string();
+
+ quote! {
+ include_str!(#relative_path)
+ }
+ });
+
+ let len = include_tokens.len();
quote! {
#[allow(non_upper_case_globals)]
- const #const_name: &'static str = include_str!(#relative_path);
+ const #const_name: [&'static str; #len] = [
+ #(#include_tokens),*
+ ];
}
}
-fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
- let rules = rules.iter().map(|rule| format_ident!("r#{}", rule.name));
+fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bool) -> TokenStream {
+ let rules = rules.iter().map(|rule| {
+ let rule_name = format_ident!("r#{}", rule.name);
+
+ match doc_comment.line_docs.get(&rule.name) {
+ Some(doc) => quote! {
+ #[doc = #doc]
+ #rule_name
+ },
+ None => quote! {
+ #rule_name
+ },
+ }
+ });
+
+ let grammar_doc = &doc_comment.grammar_doc;
if uses_eoi {
quote! {
+ #[doc = #grammar_doc]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
@@ -194,6 +225,7 @@ fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
}
} else {
quote! {
+ #[doc = #grammar_doc]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
@@ -208,6 +240,7 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
.iter()
.map(|rule| {
let rule = format_ident!("r#{}", rule.name);
+
quote! {
Rule::#rule => rules::#rule(state)
}
@@ -657,10 +690,11 @@ fn option_type() -> TokenStream {
#[cfg(test)]
mod tests {
- use proc_macro2::Span;
-
use super::*;
+ use proc_macro2::Span;
+ use std::collections::HashMap;
+
#[test]
fn rule_enum_simple() {
let rules = vec![OptimizedRule {
@@ -669,12 +703,22 @@ mod tests {
expr: OptimizedExpr::Ident("g".to_owned()),
}];
+ let mut line_docs = HashMap::new();
+ line_docs.insert("f".to_owned(), "This is rule comment".to_owned());
+
+ let doc_comment = &DocComment {
+ grammar_doc: "Rule doc\nhello".to_owned(),
+ line_docs,
+ };
+
assert_eq!(
- generate_enum(&rules, false).to_string(),
+ generate_enum(&rules, doc_comment, false).to_string(),
quote! {
+ #[doc = "Rule doc\nhello"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
+ #[doc = "This is rule comment"]
r#f
}
}
@@ -957,7 +1001,7 @@ mod tests {
}
#[test]
- fn generate_complete() {
+ fn test_generate_complete() {
let name = Ident::new("MyParser", Span::call_site());
let generics = Generics::default();
@@ -974,22 +1018,34 @@ mod tests {
},
];
+ let mut line_docs = HashMap::new();
+ line_docs.insert("if".to_owned(), "If statement".to_owned());
+
+ let doc_comment = &DocComment {
+ line_docs,
+ grammar_doc: "This is Rule doc\nThis is second line".to_owned(),
+ };
+
let defaults = vec!["ANY"];
let result = result_type();
let box_ty = box_type();
- let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
- current_dir.push("test.pest");
- let test_path = current_dir.to_str().expect("path contains invalid unicode");
+ let current_dir = std::env::current_dir().expect("Unable to get current directory");
+
+ let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
+ let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();
+
assert_eq!(
- generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, true).to_string(),
+ generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
quote! {
#[allow(non_upper_case_globals)]
- const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
+ const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];
+ #[doc = "This is Rule doc\nThis is second line"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
r#a,
+ #[doc = "If statement"]
r#if
}
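Editor's note: together with the `lib.rs` changes below, the array-valued `_PEST_GRAMMAR_*` constant above is what lets one derive stack several grammar files. A sketch of the user-facing form via `pest_derive` (hypothetical file names; paths are resolved as described in the comments in `lib.rs`):

```rust
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "base.pest"] // resolved against CARGO_MANIFEST_DIR,
#[grammar = "test.pest"] // falling back to CARGO_MANIFEST_DIR/src
pub struct MultiParser;
```

The grammar contents are concatenated in attribute order before parsing, and each file is tracked by `include_str!` so edits trigger a rebuild.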
diff --git a/vendor/pest_generator/src/lib.rs b/vendor/pest_generator/src/lib.rs
index 2a1203e4a..7aed1936f 100644
--- a/vendor/pest_generator/src/lib.rs
+++ b/vendor/pest_generator/src/lib.rs
@@ -27,10 +27,11 @@ use std::io::{self, Read};
use std::path::Path;
use proc_macro2::TokenStream;
-use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
+use syn::{Attribute, DeriveInput, Expr, ExprLit, Generics, Ident, Lit, Meta};
#[macro_use]
mod macros;
+mod docs;
mod generator;
use pest_meta::parser::{self, rename_meta_rule, Rule};
@@ -41,39 +42,50 @@ use pest_meta::{optimizer, unwrap_or_report, validator};
/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let ast: DeriveInput = syn::parse2(input).unwrap();
- let (name, generics, content) = parse_derive(ast);
-
- let (data, path) = match content {
- GrammarSource::File(ref path) => {
- let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
-
- // Check whether we can find a file at the path relative to the CARGO_MANIFEST_DIR
- // first.
- //
- // If we cannot find the expected file over there, fallback to the
- // `CARGO_MANIFEST_DIR/src`, which is the old default and kept for convenience
- // reasons.
- // TODO: This could be refactored once `std::path::absolute()` get's stabilized.
- // https://doc.rust-lang.org/std/path/fn.absolute.html
- let path = if Path::new(&root).join(path).exists() {
- Path::new(&root).join(path)
- } else {
- Path::new(&root).join("src/").join(path)
- };
-
- let file_name = match path.file_name() {
- Some(file_name) => file_name,
- None => panic!("grammar attribute should point to a file"),
- };
-
- let data = match read_file(&path) {
- Ok(data) => data,
- Err(error) => panic!("error opening {:?}: {}", file_name, error),
- };
- (data, Some(path.clone()))
+ let (name, generics, contents) = parse_derive(ast);
+
+ let mut data = String::new();
+ let mut paths = vec![];
+
+ for content in contents {
+ let (_data, _path) = match content {
+ GrammarSource::File(ref path) => {
+ let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
+
+ // Check whether we can find a file at the path relative to the CARGO_MANIFEST_DIR
+ // first.
+ //
+ // If we cannot find the expected file over there, fall back to the
+ // `CARGO_MANIFEST_DIR/src`, which is the old default and kept for convenience
+ // reasons.
+ // TODO: This could be refactored once `std::path::absolute()` gets stabilized.
+ // https://doc.rust-lang.org/std/path/fn.absolute.html
+ let path = if Path::new(&root).join(path).exists() {
+ Path::new(&root).join(path)
+ } else {
+ Path::new(&root).join("src/").join(path)
+ };
+
+ let file_name = match path.file_name() {
+ Some(file_name) => file_name,
+ None => panic!("grammar attribute should point to a file"),
+ };
+
+ let data = match read_file(&path) {
+ Ok(data) => data,
+ Err(error) => panic!("error opening {:?}: {}", file_name, error),
+ };
+ (data, Some(path.clone()))
+ }
+ GrammarSource::Inline(content) => (content, None),
+ };
+
+ data.push_str(&_data);
+ match _path {
+ Some(path) => paths.push(path),
+ None => (),
}
- GrammarSource::Inline(content) => (content, None),
- };
+ }
let pairs = match parser::parse(Rule::grammar_rules, &data) {
Ok(pairs) => pairs,
@@ -81,10 +93,19 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
};
let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
+ let doc_comment = docs::consume(pairs.clone());
let ast = unwrap_or_report(parser::consume_rules(pairs));
let optimized = optimizer::optimize(ast);
- generator::generate(name, &generics, path, optimized, defaults, include_grammar)
+ generator::generate(
+ name,
+ &generics,
+ paths,
+ optimized,
+ defaults,
+ &doc_comment,
+ include_grammar,
+ )
}
fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
@@ -100,34 +121,38 @@ enum GrammarSource {
Inline(String),
}
-fn parse_derive(ast: DeriveInput) -> (Ident, Generics, GrammarSource) {
+fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
let name = ast.ident;
let generics = ast.generics;
let grammar: Vec<&Attribute> = ast
.attrs
.iter()
- .filter(|attr| match attr.parse_meta() {
- Ok(Meta::NameValue(name_value)) => {
- name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
- }
- _ => false,
+ .filter(|attr| {
+ let path = attr.meta.path();
+ path.is_ident("grammar") || path.is_ident("grammar_inline")
})
.collect();
- let argument = match grammar.len() {
- 0 => panic!("a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute"),
- 1 => get_attribute(grammar[0]),
- _ => panic!("only 1 grammar file can be provided"),
- };
+ if grammar.is_empty() {
+ panic!("a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute");
+ }
- (name, generics, argument)
+ let mut grammar_sources = Vec::with_capacity(grammar.len());
+ for attr in grammar {
+ grammar_sources.push(get_attribute(attr))
+ }
+
+ (name, generics, grammar_sources)
}
fn get_attribute(attr: &Attribute) -> GrammarSource {
- match attr.parse_meta() {
- Ok(Meta::NameValue(name_value)) => match name_value.lit {
- Lit::Str(string) => {
+ match &attr.meta {
+ Meta::NameValue(name_value) => match &name_value.value {
+ Expr::Lit(ExprLit {
+ lit: Lit::Str(string),
+ ..
+ }) => {
if name_value.path.is_ident("grammar") {
GrammarSource::File(string.value())
} else {
@@ -153,8 +178,8 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filename) = parse_derive(ast);
- assert_eq!(filename, GrammarSource::Inline("GRAMMAR".to_string()));
+ let (_, _, filenames) = parse_derive(ast);
+ assert_eq!(filenames, [GrammarSource::Inline("GRAMMAR".to_string())]);
}
#[test]
@@ -165,12 +190,11 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- let (_, _, filename) = parse_derive(ast);
- assert_eq!(filename, GrammarSource::File("myfile.pest".to_string()));
+ let (_, _, filenames) = parse_derive(ast);
+ assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
}
#[test]
- #[should_panic(expected = "only 1 grammar file can be provided")]
fn derive_multiple_grammars() {
let definition = "
#[other_attr]
@@ -179,7 +203,14 @@ mod tests {
pub struct MyParser<'a, T>;
";
let ast = syn::parse_str(definition).unwrap();
- parse_derive(ast);
+ let (_, _, filenames) = parse_derive(ast);
+ assert_eq!(
+ filenames,
+ [
+ GrammarSource::File("myfile1.pest".to_string()),
+ GrammarSource::File("myfile2.pest".to_string())
+ ]
+ );
}
#[test]
@@ -193,4 +224,52 @@ mod tests {
let ast = syn::parse_str(definition).unwrap();
parse_derive(ast);
}
+
+ #[test]
+ #[should_panic(
+ expected = "a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute"
+ )]
+ fn derive_no_grammar() {
+ let definition = "
+ #[other_attr]
+ pub struct MyParser<'a, T>;
+ ";
+ let ast = syn::parse_str(definition).unwrap();
+ parse_derive(ast);
+ }
+
+ #[doc = "Matches dar\n\nMatch dar description\n"]
+ #[test]
+ fn test_generate_doc() {
+ let input = quote! {
+ #[derive(Parser)]
+ #[grammar = "../tests/test.pest"]
+ pub struct TestParser;
+ };
+
+ let token = super::derive_parser(input, true);
+
+ let expected = quote! {
+ #[doc = "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n indent-4-space\n"]
+ #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
+ #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+
+ pub enum Rule {
+ #[doc = "Matches foo str, e.g.: `foo`"]
+ r#foo,
+ #[doc = "Matches bar str\n\n Indent 2, e.g: `bar` or `foobar`"]
+ r#bar,
+ r#bar1,
+ #[doc = "Matches dar\n\nMatch dar description\n"]
+ r#dar
+ }
+ };
+
+ assert!(
+ token.to_string().contains(expected.to_string().as_str()),
+ "{}\n\nExpected to contains:\n{}",
+ token,
+ expected
+ );
+ }
}
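Editor's note: for reference, the manifest-relative lookup that `derive_parser` now runs once per `#[grammar]` attribute, extracted here as a standalone helper. This is a sketch under the diff's own logic (the crate keeps it inline, per the `std::path::absolute()` TODO above):

```rust
use std::path::{Path, PathBuf};

// Try CARGO_MANIFEST_DIR/<path> first, then fall back to the legacy
// CARGO_MANIFEST_DIR/src/<path> location kept for convenience.
fn resolve_grammar_path(manifest_dir: &str, path: &str) -> PathBuf {
    let direct = Path::new(manifest_dir).join(path);
    if direct.exists() {
        direct
    } else {
        Path::new(manifest_dir).join("src").join(path)
    }
}
```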
diff --git a/vendor/pest_generator/tests/base.pest b/vendor/pest_generator/tests/base.pest
new file mode 100644
index 000000000..ae880b10f
--- /dev/null
+++ b/vendor/pest_generator/tests/base.pest
@@ -0,0 +1 @@
+base = { "base" }
\ No newline at end of file
diff --git a/vendor/pest_generator/tests/test.pest b/vendor/pest_generator/tests/test.pest
new file mode 100644
index 000000000..95d484e99
--- /dev/null
+++ b/vendor/pest_generator/tests/test.pest
@@ -0,0 +1,23 @@
+//! A parser for JSON files.
+//! And this is an example of a JSON parser.
+//!
+//! indent-4-space
+//!
+
+/// Matches foo str, e.g.: `foo`
+foo = { "foo" }
+
+/// Matches bar str
+///
+/// Indent 2, e.g.: `bar` or `foobar`
+
+bar = { "bar" | "foobar" }
+
+bar1 = { "bar1" }
+
+/// Matches dar
+///
+/// Match dar description
+///
+
+dar = { "da" }
\ No newline at end of file