path: root/vendor/pest_generator
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:19:13 +0000
commit    218caa410aa38c29984be31a5229b9fa717560ee (patch)
tree      c54bd55eeb6e4c508940a30e94c0032fbd45d677 /vendor/pest_generator
parent    Releasing progress-linux version 1.67.1+dfsg1-1~progress7.99u1. (diff)
download  rustc-218caa410aa38c29984be31a5229b9fa717560ee.tar.xz
          rustc-218caa410aa38c29984be31a5229b9fa717560ee.zip
Merging upstream version 1.68.2+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'vendor/pest_generator')
-rw-r--r--  vendor/pest_generator/.cargo-checksum.json |  2
-rw-r--r--  vendor/pest_generator/Cargo.toml            | 10
-rw-r--r--  vendor/pest_generator/_README.md            | 43
-rw-r--r--  vendor/pest_generator/src/generator.rs      | 79
-rw-r--r--  vendor/pest_generator/src/lib.rs            | 70
-rw-r--r--  vendor/pest_generator/src/macros.rs         |  4
6 files changed, 119 insertions, 89 deletions
diff --git a/vendor/pest_generator/.cargo-checksum.json b/vendor/pest_generator/.cargo-checksum.json
index 573d6c49f..692d92a53 100644
--- a/vendor/pest_generator/.cargo-checksum.json
+++ b/vendor/pest_generator/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"f047138333fb2c65ecd773393f1f61e4778ad467a376b5417d9047c300274be5","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"01ba71f02581f9f5018962e083ca77bc8e8af2b6f8a73111bfa04e847a6440e2","src/generator.rs":"51081975a6237bc77cdc4535841c7fbf777e3c466b4c89c821ab68850eeb656a","src/lib.rs":"1cc63047aad721587a23eeea54d4c398bf5c085d934e41100a8766954a1b6071","src/macros.rs":"6522f9b7a2fb172d9ff6ad75c3dcfd547596b5c6ea23f3f5a386220d4f85afa4"},"package":"5803d8284a629cc999094ecd630f55e91b561a1d1ba75e233b00ae13b91a69ad"} \ No newline at end of file
+{"files":{"Cargo.toml":"b81b59ea98994db49cc8246bb2e9fd7bd5efed318e7ddb00114fd48c5335167c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"190b4279777e3cd3c5adf3258114b89be536fc647d819620ada15be39d2259cc","src/generator.rs":"0fe48f9f4c4e930a93e910300505b51f9841c9fd2e3edd10f871838ab1053d67","src/lib.rs":"4b81a7aa1c28d58f26bfe61c4a1869ad012a561dbfe482597f622e1be87b6832","src/macros.rs":"897d9004449b1c219f17c079630a790f3de1a27f61bc6a03cd777a163a6a1fba"},"package":"798e0220d1111ae63d66cb66a5dcb3fc2d986d520b98e49e1852bfdb11d7c5e7"} \ No newline at end of file
diff --git a/vendor/pest_generator/Cargo.toml b/vendor/pest_generator/Cargo.toml
index 5db0c4b01..122028c50 100644
--- a/vendor/pest_generator/Cargo.toml
+++ b/vendor/pest_generator/Cargo.toml
@@ -10,13 +10,13 @@
# See Cargo.toml.orig for the original contents.
[package]
-edition = "2018"
+edition = "2021"
rust-version = "1.56"
name = "pest_generator"
-version = "2.3.0"
+version = "2.5.2"
authors = ["DragoČ™ Tiselice <dragostiselice@gmail.com>"]
description = "pest code generator"
-homepage = "https://pest-parser.github.io/"
+homepage = "https://pest.rs/"
documentation = "https://docs.rs/pest"
readme = "_README.md"
keywords = [
@@ -28,11 +28,11 @@ license = "MIT/Apache-2.0"
repository = "https://github.com/pest-parser/pest"
[dependencies.pest]
-version = "2.3.0"
+version = "2.5.2"
default-features = false
[dependencies.pest_meta]
-version = "2.3.0"
+version = "2.5.2"
[dependencies.proc-macro2]
version = "1.0"
diff --git a/vendor/pest_generator/_README.md b/vendor/pest_generator/_README.md
index f91188ccb..da30ab74e 100644
--- a/vendor/pest_generator/_README.md
+++ b/vendor/pest_generator/_README.md
@@ -5,8 +5,8 @@
# pest. The Elegant Parser
-[![Join the chat at https://gitter.im/dragostis/pest](https://badges.gitter.im/dragostis/pest.svg)](https://gitter.im/dragostis/pest?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
-[![Book](https://img.shields.io/badge/book-WIP-4d76ae.svg)](https://pest-parser.github.io/book)
+[![Join the chat at https://gitter.im/pest-parser/pest](https://badges.gitter.im/dragostis/pest.svg)](https://gitter.im/pest-parser/pest?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[![Book](https://img.shields.io/badge/book-WIP-4d76ae.svg)](https://pest.rs/book)
[![Docs](https://docs.rs/pest/badge.svg)](https://docs.rs/pest)
[![pest Continuous Integration](https://github.com/pest-parser/pest/actions/workflows/ci.yml/badge.svg)](https://github.com/pest-parser/pest/actions/workflows/ci.yml)
@@ -31,25 +31,28 @@ Other helpful resources:
* API reference on [docs.rs]
* play with grammars and share them on our [fiddle]
-* leave feedback, ask questions, or greet us on [Gitter]
+* find previous common questions answered or ask questions on [GitHub Discussions]
+* leave feedback, ask questions, or greet us on [Gitter] or [Discord]
-[book]: https://pest-parser.github.io/book
+[book]: https://pest.rs/book
[docs.rs]: https://docs.rs/pest
-[fiddle]: https://pest-parser.github.io/#editor
-[Gitter]: https://gitter.im/dragostis/pest
+[fiddle]: https://pest.rs/#editor
+[Gitter]: https://gitter.im/pest-parser/pest
+[Discord]: https://discord.gg/XEGACtWpT2
+[GitHub Discussions]: https://github.com/pest-parser/pest/discussions
## Example
The following is an example of a grammar for a list of alphanumeric identifiers
-where the first identifier does not start with a digit:
+where all identifiers don't start with a digit:
```rust
alpha = { 'a'..'z' | 'A'..'Z' }
digit = { '0'..'9' }
-ident = { (alpha | digit)+ }
+ident = { !digit ~ (alpha | digit)+ }
-ident_list = _{ !digit ~ ident ~ (" " ~ ident)+ }
+ident_list = _{ ident ~ (" " ~ ident)* }
// ^
// ident_list rule is silent which means it produces no tokens
```
@@ -81,6 +84,9 @@ thread 'main' panicked at ' --> 1:1
= expected ident', src/main.rs:12
```
+These error messages can be obtained from their default `Display` implementation,
+e.g. `panic!("{}", parser_result.unwrap_err())` or `println!("{}", e)`.
+
## Pairs API
The grammar can be used to derive a `Parser` implementation automatically.
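A minimal sketch of how the error message above is obtained from the default `Display` implementation, assuming a derived `IdentParser` and an `ident_list` rule matching the earlier grammar example (the grammar file name and rule names are illustrative, not taken from this diff):

```rust
use pest::Parser;
use pest_derive::Parser;

// Illustrative grammar file; its rules mirror the README example above.
#[derive(Parser)]
#[grammar = "ident.pest"]
struct IdentParser;

fn main() {
    // "1abc" violates `ident = { !digit ~ (alpha | digit)+ }`.
    match IdentParser::parse(Rule::ident_list, "1abc") {
        Ok(pairs) => println!("parsed: {:?}", pairs),
        // `pest::error::Error` implements `Display`, which renders the
        // caret-annotated message shown in the README excerpt above.
        Err(e) => println!("{}", e),
    }
}
```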
@@ -133,6 +139,25 @@ Letter: b
Digit: 2
```
+### Defining multiple parsers in a single file
+The current automatic `Parser` derivation will produce the `Rule` enum
+which would have name conflicts if one tried to define multiple such structs
+that automatically derive `Parser`. One possible way around it is to put each
+parser struct in a separate namespace:
+
+```rust
+mod a {
+ #[derive(Parser)]
+ #[grammar = "a.pest"]
+ pub struct ParserA;
+}
+mod b {
+ #[derive(Parser)]
+ #[grammar = "b.pest"]
+ pub struct ParserB;
+}
+```
+
## Other features
* Precedence climbing
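A short, self-contained sketch of the module-per-parser pattern from the new README section, assuming hypothetical grammar files `a.pest` and `b.pest` that each define a `root` rule; the point is that each module gets its own generated `Rule` enum, so the names never collide:

```rust
use pest::Parser;

mod a {
    use pest_derive::Parser;

    #[derive(Parser)]
    #[grammar = "a.pest"] // hypothetical grammar file
    pub struct ParserA;
}

mod b {
    use pest_derive::Parser;

    #[derive(Parser)]
    #[grammar = "b.pest"] // hypothetical grammar file
    pub struct ParserB;
}

fn main() {
    // `a::Rule` and `b::Rule` are distinct enums generated per module.
    let _ = a::ParserA::parse(a::Rule::root, "input for grammar a");
    let _ = b::ParserB::parse(b::Rule::root, "input for grammar b");
}
```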
diff --git a/vendor/pest_generator/src/generator.rs b/vendor/pest_generator/src/generator.rs
index 0d3051e27..fc1263d86 100644
--- a/vendor/pest_generator/src/generator.rs
+++ b/vendor/pest_generator/src/generator.rs
@@ -9,13 +9,13 @@
use std::path::PathBuf;
-use proc_macro2::{Span, TokenStream};
+use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};
use syn::{self, Generics, Ident};
+use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;
-use pest_meta::UNICODE_PROPERTY_NAMES;
pub fn generate(
name: Ident,
@@ -153,13 +153,13 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
let box_ty = box_type();
- for property in UNICODE_PROPERTY_NAMES {
+ for property in unicode_property_names() {
let property_ident: Ident = syn::parse_str(property).unwrap();
// insert manually for #property substitution
builtins.push((property, quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- fn #property_ident(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ fn #property_ident(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.match_char_by(::pest::unicode::#property_ident)
}
}));
@@ -169,7 +169,7 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
// Needed because Cargo doesn't watch for changes in grammars.
fn generate_include(name: &Ident, path: &str) -> TokenStream {
- let const_name = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
+ let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
// Need to make this relative to the current directory since the path to the file
// is derived from the CARGO_MANIFEST_DIR environment variable
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
@@ -182,9 +182,7 @@ fn generate_include(name: &Ident, path: &str) -> TokenStream {
}
fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
- let rules = rules
- .iter()
- .map(|rule| Ident::new(rule.name.as_str(), Span::call_site()));
+ let rules = rules.iter().map(|rule| format_ident!("r#{}", rule.name));
if uses_eoi {
quote! {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
@@ -209,7 +207,7 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
let mut rules: Vec<TokenStream> = rules
.iter()
.map(|rule| {
- let rule = Ident::new(rule.name.as_str(), Span::call_site());
+ let rule = format_ident!("r#{}", rule.name);
quote! {
Rule::#rule => rules::#rule(state)
}
@@ -228,10 +226,10 @@ fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
}
fn generate_rule(rule: OptimizedRule) -> TokenStream {
- let name = Ident::new(&rule.name, Span::call_site());
+ let name = format_ident!("r#{}", rule.name);
let expr = if rule.ty == RuleType::Atomic || rule.ty == RuleType::CompoundAtomic {
generate_expr_atomic(rule.expr)
- } else if name == "WHITESPACE" || name == "COMMENT" {
+ } else if rule.name == "WHITESPACE" || rule.name == "COMMENT" {
let atomic = generate_expr_atomic(rule.expr);
quote! {
@@ -249,7 +247,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::Normal => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.rule(Rule::#name, |state| {
#expr
})
@@ -258,14 +256,14 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::Silent => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
#expr
}
},
RuleType::Atomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.rule(Rule::#name, |state| {
state.atomic(::pest::Atomicity::Atomic, |state| {
#expr
@@ -276,7 +274,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::CompoundAtomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.atomic(::pest::Atomicity::CompoundAtomic, |state| {
state.rule(Rule::#name, |state| {
#expr
@@ -287,7 +285,7 @@ fn generate_rule(rule: OptimizedRule) -> TokenStream {
RuleType::NonAtomic => quote! {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn #name(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn #name(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.atomic(::pest::Atomicity::NonAtomic, |state| {
state.rule(Rule::#name, |state| {
#expr
@@ -364,7 +362,7 @@ fn generate_expr(expr: OptimizedExpr) -> TokenStream {
}
}
OptimizedExpr::Ident(ident) => {
- let ident = Ident::new(&ident, Span::call_site());
+ let ident = format_ident!("r#{}", ident);
quote! { self::#ident(state) }
}
OptimizedExpr::PeekSlice(start, end_) => {
@@ -510,7 +508,7 @@ fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
}
}
OptimizedExpr::Ident(ident) => {
- let ident = Ident::new(&ident, Span::call_site());
+ let ident = format_ident!("r#{}", ident);
quote! { self::#ident(state) }
}
OptimizedExpr::PeekSlice(start, end_) => {
@@ -659,6 +657,8 @@ fn option_type() -> TokenStream {
#[cfg(test)]
mod tests {
+ use proc_macro2::Span;
+
use super::*;
#[test]
@@ -675,7 +675,7 @@ mod tests {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
- f
+ r#f
}
}
.to_string()
@@ -863,7 +863,7 @@ mod tests {
assert_eq!(
generate_expr(expr).to_string(),
quote! {
- self::a(state).or_else(|state| {
+ self::r#a(state).or_else(|state| {
state.sequence(|state| {
state.match_range('a'..'b').and_then(|state| {
super::hidden::skip(state)
@@ -929,7 +929,7 @@ mod tests {
assert_eq!(
generate_expr_atomic(expr).to_string(),
quote! {
- self::a(state).or_else(|state| {
+ self::r#a(state).or_else(|state| {
state.sequence(|state| {
state.match_range('a'..'b').and_then(|state| {
state.lookahead(false, |state| {
@@ -960,11 +960,20 @@ mod tests {
fn generate_complete() {
let name = Ident::new("MyParser", Span::call_site());
let generics = Generics::default();
- let rules = vec![OptimizedRule {
- name: "a".to_owned(),
- ty: RuleType::Silent,
- expr: OptimizedExpr::Str("b".to_owned()),
- }];
+
+ let rules = vec![
+ OptimizedRule {
+ name: "a".to_owned(),
+ ty: RuleType::Silent,
+ expr: OptimizedExpr::Str("b".to_owned()),
+ },
+ OptimizedRule {
+ name: "if".to_owned(),
+ ty: RuleType::Silent,
+ expr: OptimizedExpr::Ident("a".to_owned()),
+ },
+ ];
+
let defaults = vec!["ANY"];
let result = result_type();
let box_ty = box_type();
@@ -980,7 +989,8 @@ mod tests {
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
- a
+ r#a,
+ r#if
}
#[allow(clippy::all)]
@@ -999,7 +1009,7 @@ mod tests {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn skip(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn skip(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
Ok(state)
}
}
@@ -1009,13 +1019,19 @@ mod tests {
#[inline]
#[allow(non_snake_case, unused_variables)]
- pub fn a(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn r#a(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.match_string("b")
}
#[inline]
+ #[allow(non_snake_case, unused_variables)]
+ pub fn r#if(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
+ self::r#a(state)
+ }
+
+ #[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn ANY(state: #box_ty<::pest::ParserState<Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<Rule>>> {
+ pub fn ANY(state: #box_ty<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<#box_ty<::pest::ParserState<'_, Rule>>> {
state.skip(1)
}
}
@@ -1025,7 +1041,8 @@ mod tests {
::pest::state(input, |state| {
match rule {
- Rule::a => rules::a(state)
+ Rule::r#a => rules::r#a(state),
+ Rule::r#if => rules::r#if(state)
}
})
}
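The `Ident::new(...)` → `format_ident!("r#{}", ...)` changes throughout generator.rs exist so that rule names that are Rust keywords (such as `if`, exercised by the new test case) become raw identifiers in the generated code. A standalone sketch using `quote`/`proc-macro2`, not taken from this diff:

```rust
use quote::{format_ident, quote};

fn main() {
    // `Ident::new("if", span)` yields a plain `if` token, which is not a
    // legal enum variant or function name; the `r#` prefix makes
    // `format_ident!` build a raw identifier (`r#if`) that is.
    let rule = format_ident!("r#{}", "if");

    let tokens = quote! {
        pub enum Rule {
            #rule
        }
    };

    // Prints something like: pub enum Rule { r#if }
    println!("{}", tokens);
}
```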
diff --git a/vendor/pest_generator/src/lib.rs b/vendor/pest_generator/src/lib.rs
index 27b4d8168..2a1203e4a 100644
--- a/vendor/pest_generator/src/lib.rs
+++ b/vendor/pest_generator/src/lib.rs
@@ -7,17 +7,19 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
-#![doc(html_root_url = "https://docs.rs/pest_derive")]
+#![doc(
+ html_root_url = "https://docs.rs/pest_derive",
+ html_logo_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg",
+ html_favicon_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg"
+)]
+#![warn(missing_docs, rust_2018_idioms, unused_qualifications)]
#![recursion_limit = "256"]
+//! # pest generator
+//!
+//! This crate generates code from ASTs (which is used in the `pest_derive` crate).
-extern crate pest;
-extern crate pest_meta;
-
-extern crate proc_macro;
-extern crate proc_macro2;
#[macro_use]
extern crate quote;
-extern crate syn;
use std::env;
use std::fs::File;
@@ -31,9 +33,12 @@ use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
mod macros;
mod generator;
-use pest_meta::parser::{self, Rule};
+use pest_meta::parser::{self, rename_meta_rule, Rule};
use pest_meta::{optimizer, unwrap_or_report, validator};
+/// Processes the derive/proc macro input and generates the corresponding parser based
+/// on the parsed grammar. If `include_grammar` is set to true, it'll generate an explicit
+/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let ast: DeriveInput = syn::parse2(input).unwrap();
let (name, generics, content) = parse_derive(ast);
@@ -41,7 +46,21 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let (data, path) = match content {
GrammarSource::File(ref path) => {
let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
- let path = Path::new(&root).join("src/").join(&path);
+
+ // Check whether we can find a file at the path relative to the CARGO_MANIFEST_DIR
+ // first.
+ //
+ // If we cannot find the expected file over there, fallback to the
+ // `CARGO_MANIFEST_DIR/src`, which is the old default and kept for convenience
+ // reasons.
+ // TODO: This could be refactored once `std::path::absolute()` get's stabilized.
+ // https://doc.rust-lang.org/std/path/fn.absolute.html
+ let path = if Path::new(&root).join(path).exists() {
+ Path::new(&root).join(path)
+ } else {
+ Path::new(&root).join("src/").join(path)
+ };
+
let file_name = match path.file_name() {
Some(file_name) => file_name,
None => panic!("grammar attribute should point to a file"),
@@ -58,37 +77,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let pairs = match parser::parse(Rule::grammar_rules, &data) {
Ok(pairs) => pairs,
- Err(error) => panic!(
- "error parsing \n{}",
- error.renamed_rules(|rule| match *rule {
- Rule::grammar_rule => "rule".to_owned(),
- Rule::_push => "PUSH".to_owned(),
- Rule::assignment_operator => "`=`".to_owned(),
- Rule::silent_modifier => "`_`".to_owned(),
- Rule::atomic_modifier => "`@`".to_owned(),
- Rule::compound_atomic_modifier => "`$`".to_owned(),
- Rule::non_atomic_modifier => "`!`".to_owned(),
- Rule::opening_brace => "`{`".to_owned(),
- Rule::closing_brace => "`}`".to_owned(),
- Rule::opening_brack => "`[`".to_owned(),
- Rule::closing_brack => "`]`".to_owned(),
- Rule::opening_paren => "`(`".to_owned(),
- Rule::positive_predicate_operator => "`&`".to_owned(),
- Rule::negative_predicate_operator => "`!`".to_owned(),
- Rule::sequence_operator => "`&`".to_owned(),
- Rule::choice_operator => "`|`".to_owned(),
- Rule::optional_operator => "`?`".to_owned(),
- Rule::repeat_operator => "`*`".to_owned(),
- Rule::repeat_once_operator => "`+`".to_owned(),
- Rule::comma => "`,`".to_owned(),
- Rule::closing_paren => "`)`".to_owned(),
- Rule::quote => "`\"`".to_owned(),
- Rule::insensitive_string => "`^`".to_owned(),
- Rule::range_operator => "`..`".to_owned(),
- Rule::single_quote => "`'`".to_owned(),
- other_rule => format!("{:?}", other_rule),
- })
- ),
+ Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
};
let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
@@ -155,7 +144,6 @@ fn get_attribute(attr: &Attribute) -> GrammarSource {
mod tests {
use super::parse_derive;
use super::GrammarSource;
- use syn;
#[test]
fn derive_inline_file() {
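A small standalone sketch of the new grammar-path resolution in `derive_parser` above: the path relative to `CARGO_MANIFEST_DIR` is tried first, and `src/` is only used as a fallback for the old default layout (the helper function name is illustrative):

```rust
use std::path::{Path, PathBuf};

// Mirror of the lookup order in the hunk above: prefer
// CARGO_MANIFEST_DIR/<path>, fall back to CARGO_MANIFEST_DIR/src/<path>.
fn resolve_grammar_path(root: &str, path: &str) -> PathBuf {
    let direct = Path::new(root).join(path);
    if direct.exists() {
        direct
    } else {
        Path::new(root).join("src/").join(path)
    }
}

fn main() {
    let root = std::env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
    println!("{}", resolve_grammar_path(&root, "grammar.pest").display());
}
```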
diff --git a/vendor/pest_generator/src/macros.rs b/vendor/pest_generator/src/macros.rs
index 37ef531f3..377f66e60 100644
--- a/vendor/pest_generator/src/macros.rs
+++ b/vendor/pest_generator/src/macros.rs
@@ -19,7 +19,7 @@ macro_rules! generate_rule {
quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn $name(state: ::std::boxed::Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<::std::boxed::Box<::pest::ParserState<Rule>>> {
+ pub fn $name(state: ::std::boxed::Box<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<::std::boxed::Box<::pest::ParserState<'_, Rule>>> {
$pattern
}
}
@@ -32,7 +32,7 @@ macro_rules! generate_rule {
quote! {
#[inline]
#[allow(dead_code, non_snake_case, unused_variables)]
- pub fn $name(state: ::alloc::boxed::Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<::alloc::boxed::Box<::pest::ParserState<Rule>>> {
+ pub fn $name(state: ::alloc::boxed::Box<::pest::ParserState<'_, Rule>>) -> ::pest::ParseResult<::alloc::boxed::Box<::pest::ParserState<'_, Rule>>> {
$pattern
}
}
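The repeated `ParserState<Rule>` → `ParserState<'_, Rule>` edits in generator.rs and macros.rs satisfy the `elided_lifetimes_in_paths` lint, part of the `rust_2018_idioms` group newly enabled in lib.rs. A self-contained sketch of the lint using a stand-in type rather than pest's own:

```rust
#![warn(rust_2018_idioms)]

// Stand-in for a type like `pest::ParserState<'i, R>`.
struct State<'i, R> {
    input: &'i str,
    rule: R,
}

// Writing `Box<State<u32>>` here would compile but trigger the
// `elided_lifetimes_in_paths` warning; the anonymous lifetime `'_`
// makes the elision explicit, which is all the hunks above change.
fn step(state: Box<State<'_, u32>>) -> Box<State<'_, u32>> {
    state
}

fn main() {
    let s = step(Box::new(State { input: "abc", rule: 0u32 }));
    println!("{} {}", s.input, s.rule);
}
```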